From 629830d4a8e587cfc82794afc176049381c51d23 Mon Sep 17 00:00:00 2001
From: Roo Code
Date: Sat, 25 Oct 2025 22:43:17 +0000
Subject: [PATCH] fix: send base64 images without data URL prefix to LM Studio

LM Studio expects raw base64 encoded image data without the data: URL
prefix. This fix adds an optional lmStudioFormat parameter to
convertToOpenAiMessages that strips the prefix when sending images to
LM Studio.

Fixes #8827
---
 src/api/providers/lm-studio.ts                |  2 +-
 .../transform/__tests__/openai-format.spec.ts | 46 +++++++++++++++++++
 src/api/transform/openai-format.ts            |  7 ++-
 3 files changed, 53 insertions(+), 2 deletions(-)

diff --git a/src/api/providers/lm-studio.ts b/src/api/providers/lm-studio.ts
index 6c58a96ae1fa..d1bbf7854b1e 100644
--- a/src/api/providers/lm-studio.ts
+++ b/src/api/providers/lm-studio.ts
@@ -43,7 +43,7 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
 	): ApiStream {
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
-			...convertToOpenAiMessages(messages),
+			...convertToOpenAiMessages(messages, { lmStudioFormat: true }),
 		]
 
 		// -------------------------
diff --git a/src/api/transform/__tests__/openai-format.spec.ts b/src/api/transform/__tests__/openai-format.spec.ts
index bab655dcb591..5f259d41333b 100644
--- a/src/api/transform/__tests__/openai-format.spec.ts
+++ b/src/api/transform/__tests__/openai-format.spec.ts
@@ -70,6 +70,52 @@ describe("convertToOpenAiMessages", () => {
 		})
 	})
 
+	it("should handle images with LM Studio format", () => {
+		const anthropicMessages: Anthropic.Messages.MessageParam[] = [
+			{
+				role: "user",
+				content: [
+					{
+						type: "text",
+						text: "Analyze this image",
+					},
+					{
+						type: "image",
+						source: {
+							type: "base64",
+							media_type: "image/png",
+							data: "base64imagedata",
+						},
+					},
+				],
+			},
+		]
+
+		// Test default format (with data URL prefix)
+		const openAiMessages = convertToOpenAiMessages(anthropicMessages)
+		const content = openAiMessages[0].content as Array<{
+			type: string
+			text?: string
+			image_url?: { url: string }
+		}>
+		expect(content[1]).toEqual({
+			type: "image_url",
+			image_url: { url: "data:image/png;base64,base64imagedata" },
+		})
+
+		// Test LM Studio format (without data URL prefix)
+		const lmStudioMessages = convertToOpenAiMessages(anthropicMessages, { lmStudioFormat: true })
+		const lmStudioContent = lmStudioMessages[0].content as Array<{
+			type: string
+			text?: string
+			image_url?: { url: string }
+		}>
+		expect(lmStudioContent[1]).toEqual({
+			type: "image_url",
+			image_url: { url: "base64imagedata" },
+		})
+	})
+
 	it("should handle assistant messages with tool use", () => {
 		const anthropicMessages: Anthropic.Messages.MessageParam[] = [
 			{
diff --git a/src/api/transform/openai-format.ts b/src/api/transform/openai-format.ts
index 134f9f2ed6e0..d2253eb2a09a 100644
--- a/src/api/transform/openai-format.ts
+++ b/src/api/transform/openai-format.ts
@@ -3,6 +3,7 @@ import OpenAI from "openai"
 
 export function convertToOpenAiMessages(
 	anthropicMessages: Anthropic.Messages.MessageParam[],
+	options?: { lmStudioFormat?: boolean },
 ): OpenAI.Chat.ChatCompletionMessageParam[] {
 	const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = []
 
@@ -83,9 +84,13 @@ export function convertToOpenAiMessages(
 				role: "user",
 				content: nonToolMessages.map((part) => {
 					if (part.type === "image") {
+						// LM Studio expects just the base64 data without the data URL prefix
+						const imageUrl = options?.lmStudioFormat
+							? part.source.data
+							: `data:${part.source.media_type};base64,${part.source.data}`
 						return {
 							type: "image_url",
-							image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
+							image_url: { url: imageUrl },
 						}
 					}
 					return { type: "text", text: part.text }
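
For context, a minimal usage sketch (not part of the patch) of the converter before and after the new option; the relative import path and the truncated base64 payload are illustrative assumptions, while the message shape follows the Anthropic SDK types used in the tests above:

// Sketch only: default output keeps the data URL prefix, lmStudioFormat strips it.
import { Anthropic } from "@anthropic-ai/sdk"
// Assumed relative path into the repo for illustration:
import { convertToOpenAiMessages } from "./src/api/transform/openai-format"

const messages: Anthropic.Messages.MessageParam[] = [
	{
		role: "user",
		content: [
			{ type: "text", text: "Analyze this image" },
			{
				type: "image",
				// "iVBORw0KGgo..." stands in for a real base64-encoded PNG
				source: { type: "base64", media_type: "image/png", data: "iVBORw0KGgo..." },
			},
		],
	},
]

// Default (OpenAI-style): image_url.url === "data:image/png;base64,iVBORw0KGgo..."
const openAiStyle = convertToOpenAiMessages(messages)

// LM Studio: image_url.url === "iVBORw0KGgo..." (raw base64, prefix stripped)
const lmStudioStyle = convertToOpenAiMessages(messages, { lmStudioFormat: true })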