Skip to content

Commit

Permalink
Update tests and schemas
Browse files Browse the repository at this point in the history
  • Loading branch information
cephalization committed Jan 27, 2025
1 parent 94929af commit d80dbe5
Show file tree
Hide file tree
Showing 11 changed files with 821 additions and 193 deletions.
3 changes: 2 additions & 1 deletion js/packages/phoenix-client/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
"scripts": {
"clean": "rimraf dist",
"prebuild": "pnpm run clean && pnpm run generate",
"generate": "openapi-typescript ../../../schemas/openapi.json -o ./src/__generated__/api/v1.ts",
"generate": "openapi-typescript --empty-objects-unknown=true ../../../schemas/openapi.json -o ./src/__generated__/api/v1.ts",
"build": "tsc --build tsconfig.json tsconfig.esm.json && tsc-alias -p tsconfig.esm.json",
"postbuild": "echo '{\"type\": \"module\"}' > ./dist/esm/package.json && rimraf dist/test dist/examples",
"type:check": "tsc --noEmit",
Expand All @@ -53,6 +53,7 @@
},
"dependencies": {
"openapi-fetch": "^0.12.2",
"remeda": "^2.20.0",
"tiny-invariant": "^1.3.3",
"zod": "^3.24.1",
"zod-to-json-schema": "^3.24.1"
Expand Down
32 changes: 16 additions & 16 deletions js/packages/phoenix-client/src/__generated__/api/v1.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions js/packages/phoenix-client/src/prompts/sdks/toOpenAI.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ export const toOpenAI = ({
// parts of the prompt that can be directly converted to OpenAI params
const baseCompletionParams = {
model: prompt.model_name,
// TODO: Do we need to map over the invocation_parameters? Probably.
// Invocation parameters are validated on the phoenix-side
...prompt.invocation_parameters,
} satisfies Partial<ChatCompletionCreateParams>;

Expand All @@ -48,7 +48,7 @@ export const toOpenAI = ({

const messages = formattedMessages.map((message) =>
promptMessageToOpenAI.parse(message)
) as ChatCompletionMessageParam[];
);

const tools = prompt.tools?.tool_definitions.map((tool) =>
openAIToolDefinitionSchema.parse(tool.definition)
Expand Down
210 changes: 210 additions & 0 deletions js/packages/phoenix-client/src/schemas/llm/messagePartSchemas.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,210 @@
import { z } from "zod";
import { anthropicToolCallSchema } from "./toolCallSchemas";
import { assertUnreachable } from "../../utils/assertUnreachable";
import { PhoenixModelProvider } from "../../constants";
import { promptPartSchema } from "./promptSchemas";

/**
*
* OpenAI Message Part Schemas
*
*/

/** A plain-text content part of an OpenAI chat message. */
export const openaiChatPartTextSchema = z.object({
  type: z.literal("text"),
  text: z.string(),
});

export type OpenAIChatPartText = z.infer<typeof openaiChatPartTextSchema>;

/**
 * An image content part of an OpenAI chat message.
 * NOTE(review): `url` is validated only as a string — presumably either an
 * https URL or a base64 `data:` URL; confirm against the OpenAI API contract.
 */
export const openaiChatPartImageSchema = z.object({
  type: z.literal("image_url"),
  image_url: z.object({
    url: z.string(),
  }),
});

export type OpenAIChatPartImage = z.infer<typeof openaiChatPartImageSchema>;

/** All supported OpenAI content parts, discriminated by the `type` tag. */
export const openaiChatPartSchema = z.discriminatedUnion("type", [
  openaiChatPartTextSchema,
  openaiChatPartImageSchema,
]);

export type OpenAIChatPart = z.infer<typeof openaiChatPartSchema>;

/**
*
* Anthropic Message Part Schemas
*
*/

/** A plain-text content block of an Anthropic message. */
export const anthropicTextBlockSchema = z.object({
  type: z.literal("text"),
  text: z.string(),
});

export type AnthropicTextBlock = z.infer<typeof anthropicTextBlockSchema>;

/**
 * An image content block of an Anthropic message. Unlike OpenAI's URL-based
 * parts, Anthropic images carry the raw base64 payload plus an explicit
 * media type.
 */
export const anthropicImageBlockSchema = z.object({
  type: z.literal("image"),
  source: z.object({
    data: z.string(),
    media_type: z.enum(["image/jpeg", "image/png", "image/gif", "image/webp"]),
    type: z.literal("base64"),
  }),
});

export type AnthropicImageBlock = z.infer<typeof anthropicImageBlockSchema>;

// Anthropic "tool_use" blocks are structurally identical to tool calls, so the
// shared tool-call schema is re-exported under the block name.
export const anthropicToolUseBlockSchema = anthropicToolCallSchema;

export type AnthropicToolUseBlock = z.infer<typeof anthropicToolUseBlockSchema>;

/**
 * The result of a prior tool invocation. `content` is either a plain string
 * or a list of text/image blocks; `is_error` marks failed invocations.
 */
export const anthropicToolResultBlockSchema = z.object({
  type: z.literal("tool_result"),
  tool_use_id: z.string(),
  content: z.union([
    z.string(),
    z.union([anthropicTextBlockSchema, anthropicImageBlockSchema]).array(),
  ]),
  is_error: z.boolean().optional(),
});

export type AnthropicToolResultBlock = z.infer<
  typeof anthropicToolResultBlockSchema
>;

/** All supported Anthropic content blocks, discriminated by the `type` tag. */
export const anthropicMessagePartSchema = z.discriminatedUnion("type", [
  anthropicTextBlockSchema,
  anthropicImageBlockSchema,
  anthropicToolUseBlockSchema,
  anthropicToolResultBlockSchema,
]);

export type AnthropicMessagePart = z.infer<typeof anthropicMessagePartSchema>;

/** Any message part this module knows how to detect and convert. */
export type LLMMessagePart = OpenAIChatPart | AnthropicMessagePart;

/*
*
* Hub and Spoke Message Part Transformers
*
*/

/**
 * Converts a Phoenix prompt message part into an OpenAI chat content part.
 * Tool calls and tool results have no OpenAI content-part representation
 * (they live elsewhere in the OpenAI message shape) and map to `null`.
 */
export const promptMessagePartToOpenAIChatPart = promptPartSchema.transform(
  (part) => {
    const partType = part.type;
    if (partType === "text") {
      return {
        type: "text",
        text: part.text.text,
      } satisfies OpenAIChatPartText;
    }
    if (partType === "image") {
      return {
        type: "image_url",
        image_url: { url: part.image.url },
      } satisfies OpenAIChatPartImage;
    }
    if (partType === "tool_call" || partType === "tool_result") {
      return null;
    }
    // Compile-time exhaustiveness guard: fails to type-check if a new part
    // type is added to promptPartSchema without being handled above.
    return assertUnreachable(partType);
  }
);

/**
 * Converts an Anthropic content block into an OpenAI chat content part.
 *
 * Anthropic images carry raw base64 plus an explicit media type, while the
 * OpenAI `image_url.url` field expects a URL — for inline images, a
 * `data:<media_type>;base64,<payload>` data URL. The previous implementation
 * passed the bare base64 payload as the URL, which OpenAI cannot interpret;
 * we now assemble a proper data URL.
 *
 * Tool-use and tool-result blocks have no OpenAI content-part equivalent and
 * map to `null`.
 */
export const anthropicMessagePartToOpenAIChatPart =
  anthropicMessagePartSchema.transform((anthropic) => {
    const type = anthropic.type;
    switch (type) {
      case "text":
        return {
          type: "text",
          text: anthropic.text,
        } satisfies OpenAIChatPartText;
      case "image":
        return {
          type: "image_url",
          image_url: {
            // Wrap the raw base64 payload into a data URL so OpenAI can
            // decode it with the correct media type.
            url: `data:${anthropic.source.media_type};base64,${anthropic.source.data}`,
          },
        } satisfies OpenAIChatPartImage;
      case "tool_use":
      case "tool_result":
        return null;
      default:
        return assertUnreachable(type);
    }
  });

/**
 * Converts an OpenAI chat content part into an Anthropic content block.
 *
 * OpenAI inline images arrive as `data:<media_type>;base64,<payload>` data
 * URLs. When the URL matches that shape (restricted to the media types the
 * Anthropic schema accepts), the real media type and raw base64 payload are
 * extracted. For any other URL the previous behavior is kept as a fallback:
 * the string is passed through as the payload with an assumed `image/jpeg`
 * media type — a best-effort guess, since remote URLs cannot be converted
 * without fetching them.
 */
export const openAIChatPartToAnthropicMessagePart =
  openaiChatPartSchema.transform((openai) => {
    const type = openai.type;
    switch (type) {
      case "text":
        return { type: "text", text: openai.text } satisfies AnthropicTextBlock;
      case "image_url": {
        const url = openai.image_url.url;
        const match = url.match(
          /^data:(image\/(?:jpeg|png|gif|webp));base64,(.*)$/
        );
        const mediaType = match?.[1];
        const base64Data = match?.[2];
        if (mediaType !== undefined && base64Data !== undefined) {
          return {
            type: "image",
            source: {
              data: base64Data,
              // Safe: the regex only matches the four media types declared
              // in anthropicImageBlockSchema's enum.
              media_type: mediaType as AnthropicImageBlock["source"]["media_type"],
              type: "base64",
            },
          } satisfies AnthropicImageBlock;
        }
        // Fallback for non-data URLs: pass through with an assumed media type.
        return {
          type: "image",
          source: {
            data: url,
            media_type: "image/jpeg",
            type: "base64",
          },
        } satisfies AnthropicImageBlock;
      }
      default:
        return assertUnreachable(type);
    }
  });

/** A provider tag for a detected message part; "UNKNOWN" when no schema matched. */
export type MessagePartProvider = PhoenixModelProvider | "UNKNOWN";

/**
 * A message part paired with the provider whose schema validated it,
 * discriminated on `provider`. Azure OpenAI shares the OpenAI part shape.
 */
export type MessagePartWithProvider =
  | {
      provider: Extract<PhoenixModelProvider, "OPENAI" | "AZURE_OPENAI">;
      validatedMessage: OpenAIChatPart;
    }
  | {
      provider: Extract<PhoenixModelProvider, "ANTHROPIC">;
      validatedMessage: AnthropicMessagePart;
    }
  | { provider: "UNKNOWN"; validatedMessage: null };

/**
 * Determines which provider's schema a message part conforms to.
 * Schemas are tried in order — OpenAI first, then Anthropic — so a part
 * valid under both is reported as OPENAI. Parts matching neither are UNKNOWN.
 */
export const detectMessagePartProvider = (
  part: LLMMessagePart
): MessagePartWithProvider => {
  const openaiResult = openaiChatPartSchema.safeParse(part);
  if (openaiResult.success) {
    return { provider: "OPENAI", validatedMessage: openaiResult.data };
  }
  const anthropicResult = anthropicMessagePartSchema.safeParse(part);
  if (anthropicResult.success) {
    return { provider: "ANTHROPIC", validatedMessage: anthropicResult.data };
  }
  return { provider: "UNKNOWN", validatedMessage: null };
};

/**
 * Converts any supported LLM message part to an OpenAI chat content part.
 * Returns `null` when the part's provider cannot be detected, or when the
 * part kind (e.g. an Anthropic tool block) has no OpenAI content-part form.
 */
export const toOpenAIChatPart = (
  part: LLMMessagePart
): OpenAIChatPart | null => {
  const detected = detectMessagePartProvider(part);
  if (detected.provider === "OPENAI") {
    // Already in OpenAI shape; no conversion needed.
    return detected.validatedMessage;
  }
  if (detected.provider === "ANTHROPIC") {
    return anthropicMessagePartToOpenAIChatPart.parse(detected.validatedMessage);
  }
  return null;
};
Loading

0 comments on commit d80dbe5

Please sign in to comment.