19 changes: 15 additions & 4 deletions src/api/providers/anthropic-vertex.ts
@@ -175,6 +175,7 @@ export class AnthropicVertexHandler extends BaseProvider implements SingleComple
try {
const result = streamText(requestOptions)

let lastStreamError: string | undefined
for await (const part of result.fullStream) {
// Capture thinking signature from stream events
// The AI SDK's @ai-sdk/anthropic emits the signature as a reasoning-delta
@@ -193,15 +194,25 @@
}

for (const chunk of processAiSdkStreamPart(part)) {
if (chunk.type === "error") {
lastStreamError = chunk.message
}
yield chunk
}
}

// Yield usage metrics at the end, including cache metrics from providerMetadata
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
try {
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
}
} catch (usageError) {
if (lastStreamError) {
throw new Error(lastStreamError)
}
throw usageError
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
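The handlers edited inline in this PR (Anthropic Vertex, Anthropic, Bedrock, Gemini, MiniMax) all apply the same error-capture pattern. A minimal, self-contained sketch of the idea follows; the chunk type and function name are illustrative, not the repository's actual ones. While draining the stream, remember the message of the last "error" chunk; if the trailing usage await then rejects, presumably with a generic error such as the AI SDK's NoOutputGeneratedError, rethrow the remembered message so callers see the provider's real failure reason.

	// Illustrative chunk shape; the real handlers use the repo's ApiStream types.
	type StreamChunk =
		| { type: "text"; text: string }
		| { type: "error"; message: string }
		| { type: "usage"; inputTokens: number; outputTokens: number }

	async function* streamWithErrorFallback(
		fullStream: AsyncIterable<StreamChunk>,
		usage: Promise<{ inputTokens: number; outputTokens: number }>,
	): AsyncGenerator<StreamChunk> {
		let lastStreamError: string | undefined

		for await (const chunk of fullStream) {
			if (chunk.type === "error") {
				// Remember the most recent error chunk; it carries the provider's
				// actual failure reason (overload, rate limit, and so on).
				lastStreamError = chunk.message
			}
			yield chunk
		}

		try {
			yield { type: "usage", ...(await usage) }
		} catch (usageError) {
			// Prefer the captured stream error over the generic rejection so the
			// surfaced message points at the root cause.
			if (lastStreamError) {
				throw new Error(lastStreamError)
			}
			throw usageError
		}
	}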
19 changes: 15 additions & 4 deletions src/api/providers/anthropic.ts
@@ -151,6 +151,7 @@ export class AnthropicHandler extends BaseProvider implements SingleCompletionHa
try {
const result = streamText(requestOptions)

let lastStreamError: string | undefined
for await (const part of result.fullStream) {
// Capture thinking signature from stream events
// The AI SDK's @ai-sdk/anthropic emits the signature as a reasoning-delta
@@ -169,15 +170,25 @@
}

for (const chunk of processAiSdkStreamPart(part)) {
if (chunk.type === "error") {
lastStreamError = chunk.message
}
yield chunk
}
}

// Yield usage metrics at the end, including cache metrics from providerMetadata
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
try {
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
}
} catch (usageError) {
if (lastStreamError) {
throw new Error(lastStreamError)
}
throw usageError
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
20 changes: 6 additions & 14 deletions src/api/providers/azure.ts
@@ -9,7 +9,7 @@ import type { ApiHandlerOptions } from "../../shared/api"
import {
convertToAiSdkMessages,
convertToolsForAiSdk,
processAiSdkStreamPart,
consumeAiSdkStream,
mapToolChoice,
handleAiSdkError,
} from "../transform/ai-sdk"
@@ -159,19 +159,11 @@ export class AzureHandler extends BaseProvider implements SingleCompletionHandle
const result = streamText(requestOptions)

try {
// Process the full stream to get all events including reasoning
for await (const part of result.fullStream) {
for (const chunk of processAiSdkStreamPart(part)) {
yield chunk
}
}

// Yield usage metrics at the end, including cache metrics from providerMetadata
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, providerMetadata as any)
}
const processUsage = this.processUsageMetrics.bind(this)
yield* consumeAiSdkStream(result, async function* () {
const [usage, providerMetadata] = await Promise.all([result.usage, result.providerMetadata])
yield processUsage(usage, providerMetadata as Parameters<typeof processUsage>[1])
})
} catch (error) {
// Handle AI SDK errors (AI_RetryError, AI_APICallError, etc.)
throw handleAiSdkError(error, "Azure AI Foundry")
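Azure, Baseten, DeepSeek, Fireworks, and LM Studio are instead collapsed onto a shared consumeAiSdkStream helper imported from ../transform/ai-sdk. The helper's implementation is not part of this diff, so the following is only a plausible reconstruction inferred from the call sites: drain result.fullStream through the module's processAiSdkStreamPart, then let a caller-supplied finalizer emit the trailing usage chunk, presumably with the same last-stream-error fallback shown above.

	// Hypothetical reconstruction; all details are assumptions from the call sites.
	type ApiStreamChunk = { type: string; message?: string }

	// Stub for the module's real part mapper, declared so this sketch stands alone.
	declare function processAiSdkStreamPart(part: unknown): Iterable<ApiStreamChunk>

	async function* consumeAiSdkStream(
		result: { fullStream: AsyncIterable<unknown> },
		finalize: () => AsyncGenerator<ApiStreamChunk>,
	): AsyncGenerator<ApiStreamChunk> {
		let lastStreamError: string | undefined

		for await (const part of result.fullStream) {
			for (const chunk of processAiSdkStreamPart(part)) {
				if (chunk.type === "error") {
					lastStreamError = chunk.message
				}
				yield chunk
			}
		}

		try {
			// The caller decides which trailing chunks (usage, provider metadata) to emit.
			yield* finalize()
		} catch (finalizeError) {
			if (lastStreamError) {
				throw new Error(lastStreamError)
			}
			throw finalizeError
		}
	}

Note that the call sites bind this.processUsageMetrics into a local before the call: the finalizer is a plain async function* expression, inside which this would otherwise be lost.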
17 changes: 6 additions & 11 deletions src/api/providers/baseten.ts
@@ -9,7 +9,7 @@ import type { ApiHandlerOptions } from "../../shared/api"
import {
convertToAiSdkMessages,
convertToolsForAiSdk,
processAiSdkStreamPart,
consumeAiSdkStream,
mapToolChoice,
handleAiSdkError,
} from "../transform/ai-sdk"
@@ -118,16 +118,11 @@ export class BasetenHandler extends BaseProvider implements SingleCompletionHand
const result = streamText(requestOptions)

try {
for await (const part of result.fullStream) {
for (const chunk of processAiSdkStreamPart(part)) {
yield chunk
}
}

const usage = await result.usage
if (usage) {
yield this.processUsageMetrics(usage)
}
const processUsage = this.processUsageMetrics.bind(this)
yield* consumeAiSdkStream(result, async function* () {
const usage = await result.usage
yield processUsage(usage)
})
} catch (error) {
throw handleAiSdkError(error, "Baseten")
}
20 changes: 16 additions & 4 deletions src/api/providers/bedrock.ts
@@ -343,6 +343,8 @@ export class AwsBedrockHandler extends BaseProvider implements SingleCompletionH
try {
const result = streamText(requestOptions)

let lastStreamError: string | undefined

// Process the full stream
for await (const part of result.fullStream) {
// Capture thinking signature from stream events.
@@ -371,15 +373,25 @@
}

for (const chunk of processAiSdkStreamPart(part)) {
if (chunk.type === "error") {
lastStreamError = chunk.message
}
yield chunk
}
}

// Yield usage metrics at the end
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
try {
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
}
} catch (usageError) {
if (lastStreamError) {
throw new Error(lastStreamError)
}
throw usageError
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
21 changes: 6 additions & 15 deletions src/api/providers/deepseek.ts
@@ -9,7 +9,7 @@ import type { ApiHandlerOptions } from "../../shared/api"
import {
convertToAiSdkMessages,
convertToolsForAiSdk,
processAiSdkStreamPart,
consumeAiSdkStream,
mapToolChoice,
handleAiSdkError,
} from "../transform/ai-sdk"
@@ -137,21 +137,12 @@ export class DeepSeekHandler extends BaseProvider implements SingleCompletionHan
const result = streamText(requestOptions)

try {
// Process the full stream to get all events including reasoning
for await (const part of result.fullStream) {
for (const chunk of processAiSdkStreamPart(part)) {
yield chunk
}
}

// Yield usage metrics at the end, including cache metrics from providerMetadata
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, providerMetadata as any)
}
const processUsage = this.processUsageMetrics.bind(this)
yield* consumeAiSdkStream(result, async function* () {
const [usage, providerMetadata] = await Promise.all([result.usage, result.providerMetadata])
yield processUsage(usage, providerMetadata as Parameters<typeof processUsage>[1])
})
} catch (error) {
// Handle AI SDK errors (AI_RetryError, AI_APICallError, etc.)
throw handleAiSdkError(error, "DeepSeek")
}
}
21 changes: 6 additions & 15 deletions src/api/providers/fireworks.ts
@@ -9,7 +9,7 @@ import type { ApiHandlerOptions } from "../../shared/api"
import {
convertToAiSdkMessages,
convertToolsForAiSdk,
processAiSdkStreamPart,
consumeAiSdkStream,
mapToolChoice,
handleAiSdkError,
} from "../transform/ai-sdk"
@@ -137,21 +137,12 @@ export class FireworksHandler extends BaseProvider implements SingleCompletionHa
const result = streamText(requestOptions)

try {
// Process the full stream to get all events including reasoning
for await (const part of result.fullStream) {
for (const chunk of processAiSdkStreamPart(part)) {
yield chunk
}
}

// Yield usage metrics at the end, including cache metrics from providerMetadata
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, providerMetadata as any)
}
const processUsage = this.processUsageMetrics.bind(this)
yield* consumeAiSdkStream(result, async function* () {
const [usage, providerMetadata] = await Promise.all([result.usage, result.providerMetadata])
yield processUsage(usage, providerMetadata as Parameters<typeof processUsage>[1])
})
} catch (error) {
// Handle AI SDK errors (AI_RetryError, AI_APICallError, etc.)
throw handleAiSdkError(error, "Fireworks")
}
}
17 changes: 16 additions & 1 deletion src/api/providers/gemini.ts
@@ -133,6 +133,7 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl

// Track whether any text content was yielded (not just reasoning/thinking)
let hasContent = false
let lastStreamError: string | undefined

// Process the full stream to get all events including reasoning
for await (const part of result.fullStream) {
@@ -146,6 +147,9 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
}

for (const chunk of processAiSdkStreamPart(part)) {
if (chunk.type === "error") {
lastStreamError = chunk.message
}
if (chunk.type === "text" || chunk.type === "tool_call_start") {
hasContent = true
}
@@ -163,7 +167,15 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
}

// Extract grounding sources from providerMetadata if available
const providerMetadata = await result.providerMetadata
let providerMetadata: Awaited<typeof result.providerMetadata>
try {
providerMetadata = await result.providerMetadata
} catch (metaError) {
if (lastStreamError) {
throw new Error(lastStreamError)
}
throw metaError
}
const groundingMetadata = providerMetadata?.google as
| {
groundingMetadata?: {
@@ -190,6 +202,9 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
yield this.processUsageMetrics(usage, info, providerMetadata)
}
} catch (usageError) {
if (lastStreamError) {
throw new Error(lastStreamError)
}
if (usageError instanceof NoOutputGeneratedError) {
// If we already yielded the empty-stream message, suppress this error
if (hasContent) {
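Gemini gets the guard at two await points because, unlike the other handlers, it reads result.providerMetadata mid-flow to extract grounding sources before the usage await; both rejections fall back to the captured stream error, while the existing NoOutputGeneratedError suppression for streams that did produce content is preserved.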
17 changes: 6 additions & 11 deletions src/api/providers/lm-studio.ts
@@ -8,7 +8,7 @@ import type { ApiHandlerOptions } from "../../shared/api"
import {
convertToAiSdkMessages,
convertToolsForAiSdk,
processAiSdkStreamPart,
consumeAiSdkStream,
mapToolChoice,
handleAiSdkError,
} from "../transform/ai-sdk"
@@ -79,16 +79,11 @@ export class LmStudioHandler extends OpenAICompatibleHandler implements SingleCo
const result = streamText(requestOptions)

try {
for await (const part of result.fullStream) {
for (const chunk of processAiSdkStreamPart(part)) {
yield chunk
}
}

const usage = await result.usage
if (usage) {
yield this.processUsageMetrics(usage)
}
const processUsage = this.processUsageMetrics.bind(this)
yield* consumeAiSdkStream(result, async function* () {
const usage = await result.usage
yield processUsage(usage)
})
} catch (error) {
throw handleAiSdkError(error, "LM Studio")
}
20 changes: 16 additions & 4 deletions src/api/providers/minimax.ts
@@ -128,6 +128,8 @@ export class MiniMaxHandler extends BaseProvider implements SingleCompletionHand

try {
const result = streamText(requestOptions as Parameters<typeof streamText>[0])

let lastStreamError: string | undefined

for await (const part of result.fullStream) {
const anthropicMetadata = (
@@ -153,14 +155,24 @@
}

for (const chunk of processAiSdkStreamPart(part)) {
if (chunk.type === "error") {
lastStreamError = chunk.message
}
yield chunk
}
}

const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
try {
const usage = await result.usage
const providerMetadata = await result.providerMetadata
if (usage) {
yield this.processUsageMetrics(usage, modelConfig.info, providerMetadata)
}
} catch (usageError) {
if (lastStreamError) {
throw new Error(lastStreamError)
}
throw usageError
}
} catch (error) {
throw handleAiSdkError(error, this.providerName)
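A vitest-style test sketch of what the fallback buys, reusing the streamWithErrorFallback sketch from above (the repository's actual test setup and helper names may differ): when the stream emitted an error chunk and the trailing usage promise rejects generically, the consumer should see the stream's message.

	import { expect, it } from "vitest"

	async function* fakeFullStream(): AsyncIterable<StreamChunk> {
		// Simulates a provider failure surfaced as a stream chunk.
		yield { type: "error", message: "overloaded_error: please retry" }
	}

	it("prefers the captured stream error over the usage rejection", async () => {
		const usage = Promise.reject<{ inputTokens: number; outputTokens: number }>(
			new Error("No output generated"),
		)
		usage.catch(() => {}) // pre-handle so Node does not flag an unhandled rejection

		const consume = async () => {
			for await (const _chunk of streamWithErrorFallback(fakeFullStream(), usage)) {
				// drain; the final usage await inside the generator throws
			}
		}
		await expect(consume()).rejects.toThrow("overloaded_error: please retry")
	})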