Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions packages/types/src/provider-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { z } from "zod"
import { modelInfoSchema, reasoningEffortSettingSchema, verbosityLevelsSchema, serviceTierSchema } from "./model.js"
import { codebaseIndexProviderSchema } from "./codebase-index.js"
import {
anthropicAuthHeaderModes,
anthropicEndpointModes,
anthropicModels,
basetenModels,
bedrockModels,
Expand Down Expand Up @@ -192,6 +194,10 @@ const anthropicSchema = apiModelIdProviderModelSchema.extend({
apiKey: z.string().optional(),
anthropicBaseUrl: z.string().optional(),
anthropicUseAuthToken: z.boolean().optional(),
anthropicEndpointMode: z.enum(anthropicEndpointModes).optional(),
anthropicModelOverride: z.string().optional(),
anthropicMessagesUrlOverride: z.string().optional(),
anthropicAuthHeaderMode: z.enum(anthropicAuthHeaderModes).optional(),
anthropicBeta1MContext: z.boolean().optional(), // Enable 'context-1m-2025-08-07' beta for 1M context window.
})

Expand Down
6 changes: 6 additions & 0 deletions packages/types/src/providers/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,12 @@ import type { ModelInfo } from "../model.js"
export type AnthropicModelId = keyof typeof anthropicModels
export const anthropicDefaultModelId: AnthropicModelId = "claude-sonnet-4-5"

// Where requests are routed: the native Anthropic API, or an Azure AI Foundry
// deployment fronting the Anthropic Messages API.
export const anthropicEndpointModes = ["anthropic", "azure-ai-foundry"] as const
export type AnthropicEndpointMode = (typeof anthropicEndpointModes)[number]

// Which HTTP header carries the credential: Anthropic's `x-api-key`, Azure's
// `api-key`, or a standard `Authorization: Bearer` token.
export const anthropicAuthHeaderModes = ["x-api-key", "api-key", "bearer"] as const
export type AnthropicAuthHeaderMode = (typeof anthropicAuthHeaderModes)[number]

export const anthropicModels = {
"claude-sonnet-4-5": {
maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false.
Expand Down
87 changes: 87 additions & 0 deletions src/api/providers/__tests__/anthropic.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ const mockAnthropicConstructor = vitest.mocked(Anthropic)
describe("AnthropicHandler", () => {
let handler: AnthropicHandler
let mockOptions: ApiHandlerOptions
const originalFetch = globalThis.fetch

beforeEach(() => {
mockOptions = {
Expand All @@ -87,6 +88,10 @@ describe("AnthropicHandler", () => {
vitest.clearAllMocks()
})

afterEach(() => {
;(globalThis as any).fetch = originalFetch
})

describe("constructor", () => {
it("should initialize with provided options", () => {
expect(handler).toBeInstanceOf(AnthropicHandler)
Expand Down Expand Up @@ -144,6 +149,51 @@ describe("AnthropicHandler", () => {
expect(mockAnthropicConstructor.mock.calls[0]![0]!.authToken).toEqual("test-api-key")
expect(mockAnthropicConstructor.mock.calls[0]![0]!.apiKey).toBeUndefined()
})

		// Foundry mode has no usable default endpoint, so constructing the
		// handler without an explicit Messages URL must fail fast with a
		// descriptive error.
		it("requires messages URL override in Azure AI Foundry mode", () => {
			expect(
				() =>
					new AnthropicHandler({
						...mockOptions,
						anthropicEndpointMode: "azure-ai-foundry",
					}),
			).toThrow("anthropicMessagesUrlOverride is required when anthropicEndpointMode is azure-ai-foundry")
		})

		it("should route Foundry mode requests through messages URL override and selected auth header", async () => {
			// Stub global fetch so we can inspect the URL/headers the SDK's
			// custom fetch override actually sends (restored in afterEach).
			const mockFetch = vitest.fn().mockResolvedValue({ ok: true } as Response)
			;(globalThis as any).fetch = mockFetch

			new AnthropicHandler({
				...mockOptions,
				anthropicEndpointMode: "azure-ai-foundry",
				anthropicMessagesUrlOverride:
					"https://test.services.ai.azure.com/models/claude/messages?api-version=2024-05-01-preview",
				anthropicAuthHeaderMode: "api-key",
			})

			// The handler passes its rerouting fetch to the Anthropic SDK
			// constructor; grab it from the mocked constructor call.
			const fetchOverride = mockAnthropicConstructor.mock.calls[0]![0]!.fetch as (
				input: Parameters<typeof fetch>[0],
				init?: Parameters<typeof fetch>[1],
			) => Promise<Response>

			expect(fetchOverride).toBeTypeOf("function")

			// Simulate the SDK calling the default Anthropic endpoint with its
			// default auth header; the override must replace both.
			await fetchOverride("https://api.anthropic.com/v1/messages", {
				method: "POST",
				headers: { "x-api-key": "old-key" },
			})

			expect(mockFetch).toHaveBeenCalledWith(
				"https://test.services.ai.azure.com/models/claude/messages?api-version=2024-05-01-preview",
				expect.any(Object),
			)

			// Selected header mode (`api-key`) is present, the SDK default
			// (`x-api-key`) is stripped, and the API version header is kept.
			const headers = new Headers(mockFetch.mock.calls[0]![1]!.headers as HeadersInit)
			expect(headers.get("api-key")).toBe("test-api-key")
			expect(headers.get("x-api-key")).toBeNull()
			expect(headers.get("anthropic-version")).toBe("2023-06-01")
		})
})

describe("createMessage", () => {
Expand Down Expand Up @@ -187,6 +237,28 @@ describe("AnthropicHandler", () => {
// Verify API
expect(mockCreate).toHaveBeenCalled()
})

		// The override (e.g. an Azure deployment name) must replace the catalog
		// model id on the outgoing streaming request.
		it("should use anthropicModelOverride when provided", async () => {
			const overrideModel = "claude-sonnet-4-5-deployment"
			const handlerWithOverride = new AnthropicHandler({
				...mockOptions,
				anthropicModelOverride: overrideModel,
			})

			const stream = handlerWithOverride.createMessage(systemPrompt, [
				{
					role: "user",
					content: [{ type: "text" as const, text: "hello" }],
				},
			])

			for await (const _chunk of stream) {
				// consume stream
			}

			expect(mockCreate).toHaveBeenCalled()
			expect(mockCreate.mock.calls[0]![0]!.model).toBe(overrideModel)
		})
})

describe("completePrompt", () => {
Expand All @@ -203,6 +275,21 @@ describe("AnthropicHandler", () => {
})
})

		// The non-streaming completion path must honor the same model override
		// as createMessage.
		it("should apply model override in completePrompt", async () => {
			const handlerWithOverride = new AnthropicHandler({
				...mockOptions,
				anthropicModelOverride: "claude-opus-4-6-deployment",
			})

			await handlerWithOverride.completePrompt("Test prompt")

			expect(mockCreate).toHaveBeenCalledWith(
				expect.objectContaining({
					model: "claude-opus-4-6-deployment",
				}),
			)
		})

it("should handle API errors", async () => {
mockCreate.mockRejectedValueOnce(new Error("Anthropic completion error: API Error"))
await expect(handler.completePrompt("Test prompt")).rejects.toThrow("Anthropic completion error: API Error")
Expand Down
29 changes: 29 additions & 0 deletions src/api/providers/__tests__/openai.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ vitest.mock("openai", () => {
},
},
})),
AzureOpenAI: mockConstructor,
}
})

Expand Down Expand Up @@ -601,6 +602,34 @@ describe("OpenAiHandler", () => {
expect(azureHandler.getModel().id).toBe(azureOptions.openAiModelId)
})

	// Anthropic-on-Foundry `/messages` URLs must not trigger the Azure AI
	// Inference request shape; the request should go out with the plain model
	// id and no extra per-request options (second arg `{}`).
	it("should not treat /messages endpoints as Azure AI Inference", async () => {
		const foundryMessagesHandler = new OpenAiHandler({
			...azureOptions,
			openAiBaseUrl:
				"https://test.services.ai.azure.com/models/claude/messages?api-version=2024-05-01-preview",
			openAiModelId: "claude-sonnet-4-5",
		})
		const systemPrompt = "You are a helpful assistant."
		const messages: Anthropic.Messages.MessageParam[] = [
			{
				role: "user",
				content: "Hello!",
			},
		]

		const stream = foundryMessagesHandler.createMessage(systemPrompt, messages)
		for await (const _chunk of stream) {
			// consume stream
		}

		expect(mockCreate).toHaveBeenCalledWith(
			expect.objectContaining({
				model: "claude-sonnet-4-5",
			}),
			{},
		)
	})

it("should handle streaming responses with Azure AI Inference Service", async () => {
const azureHandler = new OpenAiHandler(azureOptions)
const systemPrompt = "You are a helpful assistant."
Expand Down
111 changes: 101 additions & 10 deletions src/api/providers/anthropic.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { Anthropic } from "@anthropic-ai/sdk"
import { Stream as AnthropicStream } from "@anthropic-ai/sdk/streaming"
import { CacheControlEphemeral } from "@anthropic-ai/sdk/resources"
import OpenAI from "openai"

import {
type ModelInfo,
type AnthropicAuthHeaderMode,
type AnthropicModelId,
anthropicDefaultModelId,
anthropicModels,
Expand All @@ -18,7 +18,6 @@ import type { ApiHandlerOptions } from "../../shared/api"
import { ApiStream } from "../transform/stream"
import { getModelParams } from "../transform/model-params"
import { filterNonAnthropicBlocks } from "../transform/anthropic-filter"
import { handleProviderError } from "./utils/error-handler"

import { BaseProvider } from "./base-provider"
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
Expand All @@ -28,6 +27,9 @@ import {
convertOpenAIToolChoiceToAnthropic,
} from "../../core/prompts/tools/native-tools/converters"

// `anthropic-version` header value attached to all outgoing requests (both the
// default-header path and the Azure AI Foundry custom-fetch path use it).
const ANTHROPIC_API_VERSION = "2023-06-01"
// Constructor-options shape derived from the Anthropic SDK itself so it stays
// in sync with upstream SDK changes.
type AnthropicClientOptions = NonNullable<ConstructorParameters<typeof Anthropic>[0]>

export class AnthropicHandler extends BaseProvider implements SingleCompletionHandler {
private options: ApiHandlerOptions
private client: Anthropic
Expand All @@ -37,13 +39,100 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
super()
this.options = options

const apiKeyFieldName =
this.options.anthropicBaseUrl && this.options.anthropicUseAuthToken ? "authToken" : "apiKey"
const messagesUrlOverride = this.getAnthropicMessagesUrlOverride()
const useAzureAiFoundry = this.options.anthropicEndpointMode === "azure-ai-foundry" || !!messagesUrlOverride
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug: if a user explicitly sets anthropicEndpointMode to "anthropic" but has a leftover anthropicMessagesUrlOverride value (e.g. from previously using Azure AI Foundry mode), the || !!messagesUrlOverride condition still forces Foundry mode. The explicit mode setting is ignored. The UI also doesn't clear anthropicMessagesUrlOverride when switching back to Anthropic mode, so this scenario is easy to trigger. The auto-detection fallback should only apply when anthropicEndpointMode is unset, not when it's explicitly "anthropic".

Suggested change
const useAzureAiFoundry = this.options.anthropicEndpointMode === "azure-ai-foundry" || !!messagesUrlOverride
const useAzureAiFoundry = this.options.anthropicEndpointMode === "azure-ai-foundry" || (this.options.anthropicEndpointMode == null && !!messagesUrlOverride)

Fix it with Roo Code or mention @roomote and request a fix.


this.client = new Anthropic({
if (useAzureAiFoundry) {
if (!messagesUrlOverride) {
throw new Error(
"anthropicMessagesUrlOverride is required when anthropicEndpointMode is azure-ai-foundry",
)
}

const clientOptions: AnthropicClientOptions = {
// Required by the SDK, but all requests are rerouted by the custom fetch to messagesUrlOverride.
apiKey: this.options.apiKey ?? "not-provided",
defaultHeaders: { "anthropic-version": ANTHROPIC_API_VERSION },
fetch: this.createAzureAiFoundryFetch(messagesUrlOverride, this.resolveAnthropicAuthHeaderMode()),
}
this.client = new Anthropic(clientOptions)
return
}

const clientOptions: AnthropicClientOptions = {
baseURL: this.options.anthropicBaseUrl || undefined,
[apiKeyFieldName]: this.options.apiKey,
})
defaultHeaders: { "anthropic-version": ANTHROPIC_API_VERSION },
apiKey: this.options.apiKey,
}

if (this.options.anthropicBaseUrl && this.options.anthropicUseAuthToken) {
delete clientOptions.apiKey
clientOptions.authToken = this.options.apiKey
}

this.client = new Anthropic(clientOptions)
}

private getAnthropicMessagesUrlOverride(): string | undefined {
const override = this.options.anthropicMessagesUrlOverride?.trim()
if (!override) {
return undefined
}

if (!URL.canParse(override)) {
throw new Error("anthropicMessagesUrlOverride must be a valid URL")
}

return override
}

private resolveAnthropicAuthHeaderMode(): AnthropicAuthHeaderMode {
if (this.options.anthropicAuthHeaderMode) {
return this.options.anthropicAuthHeaderMode
}

// Keep backward compatibility with the old toggle when endpoint mode is Foundry.
if (this.options.anthropicUseAuthToken) {
return "bearer"
}

return "x-api-key"
}

private createAzureAiFoundryFetch(messagesUrlOverride: string, authHeaderMode: AnthropicAuthHeaderMode) {
const apiKey = this.options.apiKey

return async (_input: Parameters<typeof fetch>[0], init?: Parameters<typeof fetch>[1]): Promise<Response> => {
const headers = new Headers(init?.headers ?? {})

// Remove SDK defaults so only the selected authentication mode is sent.
headers.delete("x-api-key")
headers.delete("api-key")
headers.delete("authorization")

if (apiKey) {
switch (authHeaderMode) {
case "api-key":
headers.set("api-key", apiKey)
break
case "bearer":
headers.set("authorization", `Bearer ${apiKey}`)
break
case "x-api-key":
default:
headers.set("x-api-key", apiKey)
}
}

headers.set("anthropic-version", ANTHROPIC_API_VERSION)

return fetch(messagesUrlOverride, { ...init, headers })
}
}

private resolveRequestModelId(defaultModelId: string): string {
const override = this.options.anthropicModelOverride?.trim()
return override && override.length > 0 ? override : defaultModelId
}

async *createMessage(
Expand All @@ -60,6 +149,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
temperature,
reasoning: thinking,
} = this.getModel()
const requestModelId = this.resolveRequestModelId(modelId)

// Filter out non-Anthropic blocks (reasoning, thoughtSignature, etc.) before sending to the API
const sanitizedMessages = filterNonAnthropicBlocks(messages)
Expand Down Expand Up @@ -113,7 +203,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
try {
stream = await this.client.messages.create(
{
model: modelId,
model: requestModelId,
max_tokens: maxTokens ?? ANTHROPIC_DEFAULT_MAX_TOKENS,
temperature,
thinking,
Expand Down Expand Up @@ -180,7 +270,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
default: {
try {
stream = (await this.client.messages.create({
model: modelId,
model: requestModelId,
max_tokens: maxTokens ?? ANTHROPIC_DEFAULT_MAX_TOKENS,
temperature,
system: [{ text: systemPrompt, type: "text" }],
Expand Down Expand Up @@ -375,11 +465,12 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa

async completePrompt(prompt: string) {
let { id: model, temperature } = this.getModel()
const requestModel = this.resolveRequestModelId(model)

let message
try {
message = await this.client.messages.create({
model,
model: requestModel,
max_tokens: ANTHROPIC_DEFAULT_MAX_TOKENS,
thinking: undefined,
temperature,
Expand Down
Loading
Loading