Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 21 additions & 4 deletions packages/opencode/src/provider/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -586,6 +586,13 @@ export namespace Provider {
})
export type Info = z.infer<typeof Info>

/**
 * Detects Anthropic models served through Azure Cognitive Services.
 * Matches only the azure-cognitive-services provider whose API model id
 * mentions "claude" or "anthropic".
 */
export function isAzureAnthropic(model: Model): boolean {
  if (model.providerID !== "azure-cognitive-services") return false
  const apiId = model.api.id
  return apiId.includes("claude") || apiId.includes("anthropic")
}

function fromModelsDevModel(provider: ModelsDev.Provider, model: ModelsDev.Model): Model {
const m: Model = {
id: model.id,
Expand Down Expand Up @@ -1006,9 +1013,16 @@ export namespace Provider {
})
}

// Special case: google-vertex-anthropic uses a subpath import
const bundledKey =
model.providerID === "google-vertex-anthropic" ? "@ai-sdk/google-vertex/anthropic" : model.api.npm
// Special cases for providers that use different npm packages
if (isAzureAnthropic(model)) {
const resourceName = Env.get("AZURE_COGNITIVE_SERVICES_RESOURCE_NAME")
if (resourceName) options["baseURL"] = `https://${resourceName}.services.ai.azure.com/anthropic/v1/`
}
const bundledKey = iife(() => {
if (model.providerID === "google-vertex-anthropic") return "@ai-sdk/google-vertex/anthropic"
if (isAzureAnthropic(model)) return "@ai-sdk/anthropic"
return model.api.npm
})
const bundledFn = BUNDLED_PROVIDERS[bundledKey]
if (bundledFn) {
log.info("using bundled provider", { providerID: model.providerID, pkg: bundledKey })
Expand Down Expand Up @@ -1074,8 +1088,11 @@ export namespace Provider {
const provider = s.providers[model.providerID]
const sdk = await getSDK(model)

// Skip custom model loader for Azure Anthropic models since they use @ai-sdk/anthropic
const useCustomLoader = s.modelLoaders[model.providerID] && !isAzureAnthropic(model)

try {
const language = s.modelLoaders[model.providerID]
const language = useCustomLoader
? await s.modelLoaders[model.providerID](sdk, model.api.id, provider.options)
: sdk.languageModel(model.api.id)
s.models.set(key, language)
Expand Down
48 changes: 45 additions & 3 deletions packages/opencode/src/provider/transform.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,17 @@ function mimeToModality(mime: string): Modality | undefined {
}

export namespace ProviderTransform {
// True when the model is an Anthropic model hosted on Azure Cognitive
// Services (provider id matches and the API model id names claude/anthropic).
function isAzureAnthropic(model: Provider.Model): boolean {
  if (model.providerID !== "azure-cognitive-services") return false
  return ["claude", "anthropic"].some((needle) => model.api.id.includes(needle))
}

// A model goes through the Anthropic SDK either because its npm package is
// @ai-sdk/anthropic directly, or because it is an Azure-hosted Anthropic
// model routed through that same SDK.
function usesAnthropicSDK(model: Provider.Model): boolean {
  if (model.api.npm === "@ai-sdk/anthropic") return true
  return isAzureAnthropic(model)
}

function normalizeMessages(
msgs: ModelMessage[],
model: Provider.Model,
Expand All @@ -42,7 +53,7 @@ export namespace ProviderTransform {

// Anthropic rejects messages with empty content - filter out empty string messages
// and remove empty text/reasoning parts from array content
if (model.api.npm === "@ai-sdk/anthropic") {
if (usesAnthropicSDK(model)) {
msgs = msgs
.map((msg) => {
if (typeof msg.content === "string") {
Expand Down Expand Up @@ -248,7 +259,7 @@ export namespace ProviderTransform {
model.providerID === "anthropic" ||
model.api.id.includes("anthropic") ||
model.api.id.includes("claude") ||
model.api.npm === "@ai-sdk/anthropic"
usesAnthropicSDK(model)
) {
msgs = applyCaching(msgs, model.providerID)
}
Expand Down Expand Up @@ -300,6 +311,23 @@ export namespace ProviderTransform {
const id = model.id.toLowerCase()
if (id.includes("deepseek") || id.includes("minimax") || id.includes("glm") || id.includes("mistral")) return {}

if (isAzureAnthropic(model)) {
return {
high: {
thinking: {
type: "enabled",
budgetTokens: 16000,
},
},
max: {
thinking: {
type: "enabled",
budgetTokens: 31999,
},
},
}
}

switch (model.api.npm) {
case "@openrouter/ai-sdk-provider":
if (!model.id.includes("gpt") && !model.id.includes("gemini-3") && !model.id.includes("grok-4")) return {}
Expand Down Expand Up @@ -570,6 +598,9 @@ export namespace ProviderTransform {
}

export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
if (isAzureAnthropic(model)) {
return { ["anthropic" as string]: options }
}
switch (model.api.npm) {
case "@ai-sdk/github-copilot":
case "@ai-sdk/openai":
Expand Down Expand Up @@ -605,16 +636,27 @@ export namespace ProviderTransform {
}
}

export function maxOutputTokens(model: Provider.Model, options: Record<string, any>, globalLimit: number): number
export function maxOutputTokens(
npm: string,
options: Record<string, any>,
modelLimit: number,
globalLimit: number,
): number
export function maxOutputTokens(
arg1: Provider.Model | string,
options: Record<string, any>,
arg3: number,
arg4?: number,
): number {
const model = typeof arg1 === "object" ? arg1 : null
const npm = model ? model.api.npm : (arg1 as string)
const modelLimit = model ? model.limit.output : arg3
const globalLimit = model ? arg3 : arg4!
const modelCap = modelLimit || globalLimit
const standardLimit = Math.min(modelCap, globalLimit)

Comment on lines +646 to 658
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Accidentally merged and had to revert, but what's going on in here? This look super messy why is it a union for model type?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

that's my bad tbh — I just tried to make things work without considering cleanliness

another pr attempt here: #9000

thanks!

if (npm === "@ai-sdk/anthropic") {
if (model ? usesAnthropicSDK(model) : npm === "@ai-sdk/anthropic") {
const thinking = options?.["thinking"]
const budgetTokens = typeof thinking?.["budgetTokens"] === "number" ? thinking["budgetTokens"] : 0
const enabled = thinking?.["type"] === "enabled"
Expand Down
7 changes: 1 addition & 6 deletions packages/opencode/src/session/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -133,12 +133,7 @@ export namespace LLM {

const maxOutputTokens = isCodex
? undefined
: ProviderTransform.maxOutputTokens(
input.model.api.npm,
params.options,
input.model.limit.output,
OUTPUT_TOKEN_MAX,
)
: ProviderTransform.maxOutputTokens(input.model, params.options, OUTPUT_TOKEN_MAX)

const tools = await resolveTools(input)

Expand Down