diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts index 555513500b..4124c0eecf 100644 --- a/packages/types/src/provider-settings.ts +++ b/packages/types/src/provider-settings.ts @@ -3,6 +3,8 @@ import { z } from "zod" import { modelInfoSchema, reasoningEffortSettingSchema, verbosityLevelsSchema, serviceTierSchema } from "./model.js" import { codebaseIndexProviderSchema } from "./codebase-index.js" import { + anthropicAuthHeaderModes, + anthropicEndpointModes, anthropicModels, basetenModels, bedrockModels, @@ -192,6 +194,10 @@ const anthropicSchema = apiModelIdProviderModelSchema.extend({ apiKey: z.string().optional(), anthropicBaseUrl: z.string().optional(), anthropicUseAuthToken: z.boolean().optional(), + anthropicEndpointMode: z.enum(anthropicEndpointModes).optional(), + anthropicModelOverride: z.string().optional(), + anthropicMessagesUrlOverride: z.string().optional(), + anthropicAuthHeaderMode: z.enum(anthropicAuthHeaderModes).optional(), anthropicBeta1MContext: z.boolean().optional(), // Enable 'context-1m-2025-08-07' beta for 1M context window. 
}) diff --git a/packages/types/src/providers/anthropic.ts b/packages/types/src/providers/anthropic.ts index 62e377c7e5..2def009b4d 100644 --- a/packages/types/src/providers/anthropic.ts +++ b/packages/types/src/providers/anthropic.ts @@ -6,6 +6,12 @@ import type { ModelInfo } from "../model.js" export type AnthropicModelId = keyof typeof anthropicModels export const anthropicDefaultModelId: AnthropicModelId = "claude-sonnet-4-5" +export const anthropicEndpointModes = ["anthropic", "azure-ai-foundry"] as const +export type AnthropicEndpointMode = (typeof anthropicEndpointModes)[number] + +export const anthropicAuthHeaderModes = ["x-api-key", "api-key", "bearer"] as const +export type AnthropicAuthHeaderMode = (typeof anthropicAuthHeaderModes)[number] + export const anthropicModels = { "claude-sonnet-4-5": { maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false. diff --git a/src/api/providers/__tests__/anthropic.spec.ts b/src/api/providers/__tests__/anthropic.spec.ts index 7a107edbc8..df93a2f2de 100644 --- a/src/api/providers/__tests__/anthropic.spec.ts +++ b/src/api/providers/__tests__/anthropic.spec.ts @@ -77,6 +77,7 @@ const mockAnthropicConstructor = vitest.mocked(Anthropic) describe("AnthropicHandler", () => { let handler: AnthropicHandler let mockOptions: ApiHandlerOptions + const originalFetch = globalThis.fetch beforeEach(() => { mockOptions = { @@ -87,6 +88,10 @@ describe("AnthropicHandler", () => { vitest.clearAllMocks() }) + afterEach(() => { + ;(globalThis as any).fetch = originalFetch + }) + describe("constructor", () => { it("should initialize with provided options", () => { expect(handler).toBeInstanceOf(AnthropicHandler) @@ -144,6 +149,51 @@ describe("AnthropicHandler", () => { expect(mockAnthropicConstructor.mock.calls[0]![0]!.authToken).toEqual("test-api-key") expect(mockAnthropicConstructor.mock.calls[0]![0]!.apiKey).toBeUndefined() }) + + it("requires messages URL override in Azure AI Foundry mode", () => { + expect( + () => + 
new AnthropicHandler({ + ...mockOptions, + anthropicEndpointMode: "azure-ai-foundry", + }), + ).toThrow("anthropicMessagesUrlOverride is required when anthropicEndpointMode is azure-ai-foundry") + }) + + it("should route Foundry mode requests through messages URL override and selected auth header", async () => { + const mockFetch = vitest.fn().mockResolvedValue({ ok: true } as Response) + ;(globalThis as any).fetch = mockFetch + + new AnthropicHandler({ + ...mockOptions, + anthropicEndpointMode: "azure-ai-foundry", + anthropicMessagesUrlOverride: + "https://test.services.ai.azure.com/models/claude/messages?api-version=2024-05-01-preview", + anthropicAuthHeaderMode: "api-key", + }) + + const fetchOverride = mockAnthropicConstructor.mock.calls[0]![0]!.fetch as ( + input: Parameters<typeof fetch>[0], + init?: Parameters<typeof fetch>[1], + ) => Promise<Response> + + expect(fetchOverride).toBeTypeOf("function") + + await fetchOverride("https://api.anthropic.com/v1/messages", { + method: "POST", + headers: { "x-api-key": "old-key" }, + }) + + expect(mockFetch).toHaveBeenCalledWith( + "https://test.services.ai.azure.com/models/claude/messages?api-version=2024-05-01-preview", + expect.any(Object), + ) + + const headers = new Headers(mockFetch.mock.calls[0]![1]!.headers as HeadersInit) + expect(headers.get("api-key")).toBe("test-api-key") + expect(headers.get("x-api-key")).toBeNull() + expect(headers.get("anthropic-version")).toBe("2023-06-01") + }) }) describe("createMessage", () => { @@ -187,6 +237,28 @@ describe("AnthropicHandler", () => { // Verify API expect(mockCreate).toHaveBeenCalled() }) + + it("should use anthropicModelOverride when provided", async () => { + const overrideModel = "claude-sonnet-4-5-deployment" + const handlerWithOverride = new AnthropicHandler({ + ...mockOptions, + anthropicModelOverride: overrideModel, + }) + + const stream = handlerWithOverride.createMessage(systemPrompt, [ + { + role: "user", + content: [{ type: "text" as const, text: "hello" }], + }, + ]) + + for await (const 
_chunk of stream) { + // consume stream + } + + expect(mockCreate).toHaveBeenCalled() + expect(mockCreate.mock.calls[0]![0]!.model).toBe(overrideModel) + }) }) describe("completePrompt", () => { @@ -203,6 +275,21 @@ describe("AnthropicHandler", () => { }) }) + it("should apply model override in completePrompt", async () => { + const handlerWithOverride = new AnthropicHandler({ + ...mockOptions, + anthropicModelOverride: "claude-opus-4-6-deployment", + }) + + await handlerWithOverride.completePrompt("Test prompt") + + expect(mockCreate).toHaveBeenCalledWith( + expect.objectContaining({ + model: "claude-opus-4-6-deployment", + }), + ) + }) + it("should handle API errors", async () => { mockCreate.mockRejectedValueOnce(new Error("Anthropic completion error: API Error")) await expect(handler.completePrompt("Test prompt")).rejects.toThrow("Anthropic completion error: API Error") diff --git a/src/api/providers/__tests__/openai.spec.ts b/src/api/providers/__tests__/openai.spec.ts index 73b542dbc7..a9e6ee4bad 100644 --- a/src/api/providers/__tests__/openai.spec.ts +++ b/src/api/providers/__tests__/openai.spec.ts @@ -66,6 +66,7 @@ vitest.mock("openai", () => { }, }, })), + AzureOpenAI: mockConstructor, } }) @@ -601,6 +602,34 @@ describe("OpenAiHandler", () => { expect(azureHandler.getModel().id).toBe(azureOptions.openAiModelId) }) + it("should not treat /messages endpoints as Azure AI Inference", async () => { + const foundryMessagesHandler = new OpenAiHandler({ + ...azureOptions, + openAiBaseUrl: + "https://test.services.ai.azure.com/models/claude/messages?api-version=2024-05-01-preview", + openAiModelId: "claude-sonnet-4-5", + }) + const systemPrompt = "You are a helpful assistant." 
+ const messages: Anthropic.Messages.MessageParam[] = [ + { + role: "user", + content: "Hello!", + }, + ] + + const stream = foundryMessagesHandler.createMessage(systemPrompt, messages) + for await (const _chunk of stream) { + // consume stream + } + + expect(mockCreate).toHaveBeenCalledWith( + expect.objectContaining({ + model: "claude-sonnet-4-5", + }), + {}, + ) + }) + it("should handle streaming responses with Azure AI Inference Service", async () => { const azureHandler = new OpenAiHandler(azureOptions) const systemPrompt = "You are a helpful assistant." diff --git a/src/api/providers/anthropic.ts b/src/api/providers/anthropic.ts index b2b158f095..61e2ee275c 100644 --- a/src/api/providers/anthropic.ts +++ b/src/api/providers/anthropic.ts @@ -1,10 +1,10 @@ import { Anthropic } from "@anthropic-ai/sdk" import { Stream as AnthropicStream } from "@anthropic-ai/sdk/streaming" import { CacheControlEphemeral } from "@anthropic-ai/sdk/resources" -import OpenAI from "openai" import { type ModelInfo, + type AnthropicAuthHeaderMode, type AnthropicModelId, anthropicDefaultModelId, anthropicModels, @@ -18,7 +18,6 @@ import type { ApiHandlerOptions } from "../../shared/api" import { ApiStream } from "../transform/stream" import { getModelParams } from "../transform/model-params" import { filterNonAnthropicBlocks } from "../transform/anthropic-filter" -import { handleProviderError } from "./utils/error-handler" import { BaseProvider } from "./base-provider" import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index" @@ -28,6 +27,9 @@ import { convertOpenAIToolChoiceToAnthropic, } from "../../core/prompts/tools/native-tools/converters" +const ANTHROPIC_API_VERSION = "2023-06-01" +type AnthropicClientOptions = NonNullable<ConstructorParameters<typeof Anthropic>[0]> + export class AnthropicHandler extends BaseProvider implements SingleCompletionHandler { private options: ApiHandlerOptions private client: Anthropic @@ -37,13 +39,100 @@ export class AnthropicHandler extends BaseProvider 
implements SingleCompletionHa super() this.options = options - const apiKeyFieldName = - this.options.anthropicBaseUrl && this.options.anthropicUseAuthToken ? "authToken" : "apiKey" + const messagesUrlOverride = this.getAnthropicMessagesUrlOverride() + const useAzureAiFoundry = this.options.anthropicEndpointMode === "azure-ai-foundry" || !!messagesUrlOverride - this.client = new Anthropic({ + if (useAzureAiFoundry) { + if (!messagesUrlOverride) { + throw new Error( + "anthropicMessagesUrlOverride is required when anthropicEndpointMode is azure-ai-foundry", + ) + } + + const clientOptions: AnthropicClientOptions = { + // Required by the SDK, but all requests are rerouted by the custom fetch to messagesUrlOverride. + apiKey: this.options.apiKey ?? "not-provided", + defaultHeaders: { "anthropic-version": ANTHROPIC_API_VERSION }, + fetch: this.createAzureAiFoundryFetch(messagesUrlOverride, this.resolveAnthropicAuthHeaderMode()), + } + this.client = new Anthropic(clientOptions) + return + } + + const clientOptions: AnthropicClientOptions = { baseURL: this.options.anthropicBaseUrl || undefined, - [apiKeyFieldName]: this.options.apiKey, - }) + defaultHeaders: { "anthropic-version": ANTHROPIC_API_VERSION }, + apiKey: this.options.apiKey, + } + + if (this.options.anthropicBaseUrl && this.options.anthropicUseAuthToken) { + delete clientOptions.apiKey + clientOptions.authToken = this.options.apiKey + } + + this.client = new Anthropic(clientOptions) + } + + private getAnthropicMessagesUrlOverride(): string | undefined { + const override = this.options.anthropicMessagesUrlOverride?.trim() + if (!override) { + return undefined + } + + if (!URL.canParse(override)) { + throw new Error("anthropicMessagesUrlOverride must be a valid URL") + } + + return override + } + + private resolveAnthropicAuthHeaderMode(): AnthropicAuthHeaderMode { + if (this.options.anthropicAuthHeaderMode) { + return this.options.anthropicAuthHeaderMode + } + + // Keep backward compatibility with the old 
toggle when endpoint mode is Foundry. + if (this.options.anthropicUseAuthToken) { + return "bearer" + } + + return "x-api-key" + } + + private createAzureAiFoundryFetch(messagesUrlOverride: string, authHeaderMode: AnthropicAuthHeaderMode) { + const apiKey = this.options.apiKey + + return async (_input: Parameters<typeof fetch>[0], init?: Parameters<typeof fetch>[1]): Promise<Response> => { + const headers = new Headers(init?.headers ?? {}) + + // Remove SDK defaults so only the selected authentication mode is sent. + headers.delete("x-api-key") + headers.delete("api-key") + headers.delete("authorization") + + if (apiKey) { + switch (authHeaderMode) { + case "api-key": + headers.set("api-key", apiKey) + break + case "bearer": + headers.set("authorization", `Bearer ${apiKey}`) + break + case "x-api-key": + default: + headers.set("x-api-key", apiKey) + } + } + + headers.set("anthropic-version", ANTHROPIC_API_VERSION) + + return fetch(messagesUrlOverride, { ...init, headers }) + } + } + + private resolveRequestModelId(defaultModelId: string): string { + const override = this.options.anthropicModelOverride?.trim() + return override && override.length > 0 ? override : defaultModelId } async *createMessage( @@ -60,6 +149,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa temperature, reasoning: thinking, } = this.getModel() + const requestModelId = this.resolveRequestModelId(modelId) // Filter out non-Anthropic blocks (reasoning, thoughtSignature, etc.) before sending to the API const sanitizedMessages = filterNonAnthropicBlocks(messages) @@ -113,7 +203,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa try { stream = await this.client.messages.create( { - model: modelId, + model: requestModelId, max_tokens: maxTokens ?? 
ANTHROPIC_DEFAULT_MAX_TOKENS, temperature, thinking, @@ -180,7 +270,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa default: { try { stream = (await this.client.messages.create({ - model: modelId, + model: requestModelId, max_tokens: maxTokens ?? ANTHROPIC_DEFAULT_MAX_TOKENS, temperature, system: [{ text: systemPrompt, type: "text" }], @@ -375,11 +465,12 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa async completePrompt(prompt: string) { let { id: model, temperature } = this.getModel() + const requestModel = this.resolveRequestModelId(model) let message try { message = await this.client.messages.create({ - model, + model: requestModel, max_tokens: ANTHROPIC_DEFAULT_MAX_TOKENS, thinking: undefined, temperature, diff --git a/src/api/providers/openai.ts b/src/api/providers/openai.ts index 33b29abcaf..fe09f60bc2 100644 --- a/src/api/providers/openai.ts +++ b/src/api/providers/openai.ts @@ -41,7 +41,11 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl const apiKey = this.options.openAiApiKey ?? "not-provided" const isAzureAiInference = this._isAzureAiInference(this.options.openAiBaseUrl) const urlHost = this._getUrlHost(this.options.openAiBaseUrl) - const isAzureOpenAi = urlHost === "azure.com" || urlHost.endsWith(".azure.com") || options.openAiUseAzure + const isAzureAiInferenceHost = urlHost.endsWith(".services.ai.azure.com") + const isAzureOpenAi = + urlHost === "azure.com" || + (urlHost.endsWith(".azure.com") && !isAzureAiInferenceHost) || + options.openAiUseAzure const headers = { ...DEFAULT_HEADERS, @@ -504,6 +508,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl } } + protected _getUrlPath(baseUrl?: string): string { + try { + return new URL(baseUrl ?? 
"").pathname.toLowerCase() + } catch (error) { + return "" + } + } + private _isGrokXAI(baseUrl?: string): boolean { const urlHost = this._getUrlHost(baseUrl) return urlHost.includes("x.ai") @@ -511,7 +523,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl protected _isAzureAiInference(baseUrl?: string): boolean { const urlHost = this._getUrlHost(baseUrl) - return urlHost.endsWith(".services.ai.azure.com") + if (!urlHost.endsWith(".services.ai.azure.com")) { + return false + } + + // Anthropic-compatible Azure AI Foundry endpoints terminate at /messages and + // should not use OpenAI Azure AI Inference path/header handling. + const urlPath = this._getUrlPath(baseUrl) + return !urlPath.includes("/messages") } /** diff --git a/src/package.json b/src/package.json index 70cc99ba73..edb90add95 100644 --- a/src/package.json +++ b/src/package.json @@ -3,7 +3,7 @@ "displayName": "%extension.displayName%", "description": "%extension.description%", "publisher": "RooVeterinaryInc", - "version": "3.47.3", + "version": "3.47.3-maxruby.1", "icon": "assets/icons/icon.png", "galleryBanner": { "color": "#617A91", diff --git a/webview-ui/src/components/settings/providers/Anthropic.tsx b/webview-ui/src/components/settings/providers/Anthropic.tsx index 58fa81d6bc..ba95da4642 100644 --- a/webview-ui/src/components/settings/providers/Anthropic.tsx +++ b/webview-ui/src/components/settings/providers/Anthropic.tsx @@ -7,6 +7,7 @@ import type { ProviderSettings } from "@roo-code/types" import { useAppTranslation } from "@src/i18n/TranslationContext" import { VSCodeButtonLink } from "@src/components/common/VSCodeButtonLink" import { useSelectedModel } from "@src/components/ui/hooks/useSelectedModel" +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@src/components/ui" import { inputEventTransform, noTransform } from "../transforms" @@ -19,6 +20,12 @@ type AnthropicProps = { export const Anthropic = ({ apiConfiguration, 
setApiConfigurationField }: AnthropicProps) => { const { t } = useAppTranslation() const selectedModel = useSelectedModel(apiConfiguration) + const endpointMode = + apiConfiguration?.anthropicEndpointMode || + (apiConfiguration?.anthropicMessagesUrlOverride ? "azure-ai-foundry" : "anthropic") + const isAzureAiFoundryMode = endpointMode === "azure-ai-foundry" + const anthropicAuthHeaderMode = + apiConfiguration?.anthropicAuthHeaderMode || (apiConfiguration?.anthropicUseAuthToken ? "bearer" : "x-api-key") const [anthropicBaseUrlSelected, setAnthropicBaseUrlSelected] = useState(!!apiConfiguration?.anthropicBaseUrl) @@ -57,34 +64,96 @@ export const Anthropic = ({ apiConfiguration, setApiConfigurationField }: Anthro {t("settings:providers.getAnthropicApiKey")} )} +
+ + +
- { - setAnthropicBaseUrlSelected(checked) - - if (!checked) { - setApiConfigurationField("anthropicBaseUrl", "") - setApiConfigurationField("anthropicUseAuthToken", false) - } - }}> - {t("settings:providers.useCustomBaseUrl")} - - {anthropicBaseUrlSelected && ( - <> + {isAzureAiFoundryMode ? ( +
- - {t("settings:providers.anthropicUseAuthToken")} + + + + + +
+ + +
+
+ ) : ( + <> + { + setAnthropicBaseUrlSelected(checked) + + if (!checked) { + setApiConfigurationField("anthropicBaseUrl", "") + setApiConfigurationField("anthropicUseAuthToken", false) + } + }}> + {t("settings:providers.useCustomBaseUrl")} + {anthropicBaseUrlSelected && ( + <> + + + {t("settings:providers.anthropicUseAuthToken")} + + + )} )}