diff --git a/package-lock.json b/package-lock.json index 216003f..5dea2d3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "edgee", - "version": "0.1.0", + "version": "0.1.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "edgee", - "version": "0.1.0", + "version": "0.1.1", "devDependencies": { "@eslint/js": "^9.39.2", "@types/node": "^25.0.3", @@ -1069,7 +1069,6 @@ "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "undici-types": "~7.16.0" } @@ -1109,7 +1108,6 @@ "integrity": "sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.51.0", "@typescript-eslint/types": "8.51.0", @@ -1411,7 +1409,6 @@ "integrity": "sha512-rkoPH+RqWopVxDnCBE/ysIdfQ2A7j1eDmW8tCxxrR9nnFBa9jKf86VgsSAzxBd1x+ny0GC4JgiD3SNfRHv3pOg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/utils": "4.0.16", "fflate": "^0.8.2", @@ -1448,7 +1445,6 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -1705,7 +1701,6 @@ "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -2715,7 +2710,6 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -2771,7 +2765,6 @@ "integrity": 
"sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", @@ -2847,7 +2840,6 @@ "integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/expect": "4.0.16", "@vitest/mocker": "4.0.16", diff --git a/src/index.ts b/src/index.ts index 6b71533..0df5b68 100644 --- a/src/index.ts +++ b/src/index.ts @@ -59,13 +59,86 @@ export interface Choice { finish_reason: string | null; } -export interface SendResponse { +export class SendResponse { choices: Choice[]; usage?: { prompt_tokens: number; completion_tokens: number; total_tokens: number; }; + + constructor( + choices: Choice[], + usage?: { + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; + } + ) { + this.choices = choices; + this.usage = usage; + } + + get text(): string | null { + if (this.choices[0]?.message?.content) { + return this.choices[0].message.content; + } + return null; + } + + get message() { + return this.choices[0]?.message ?? null; + } + + get finishReason(): string | null { + return this.choices[0]?.finish_reason ?? null; + } + + get toolCalls(): ToolCall[] | null { + return this.choices[0]?.message?.tool_calls ?? 
null; + } +} + +// Streaming types +export interface StreamDelta { + role?: string; + content?: string; + tool_calls?: ToolCall[]; +} + +export interface StreamChoice { + index: number; + delta: StreamDelta; + finish_reason?: string | null; +} + +export class StreamChunk { + choices: StreamChoice[]; + + constructor(choices: StreamChoice[]) { + this.choices = choices; + } + + get text(): string | null { + if (this.choices[0]?.delta?.content) { + return this.choices[0].delta.content; + } + return null; + } + + get role(): string | null { + if (this.choices[0]?.delta?.role) { + return this.choices[0].delta.role; + } + return null; + } + + get finishReason(): string | null { + if (this.choices[0]?.finish_reason) { + return this.choices[0].finish_reason; + } + return null; + } } export interface EdgeeConfig { @@ -123,11 +196,100 @@ export default class Edgee { body: JSON.stringify(body), }); - const data = (await res.json()) as SendResponse; + if (!res.ok) { + const errorBody = await res.text(); + throw new Error(`API error ${res.status}: ${errorBody}`); + } + + const data = await res.json() as { + choices: Choice[]; + usage?: { + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; + } + }; + + return new SendResponse(data.choices, data.usage); + } + + private async *_handleStreamingResponse( + url: string, + body: Record<string, unknown> + ): AsyncGenerator<StreamChunk> { + const res = await fetch(url, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${this.apiKey}`, + }, + body: JSON.stringify(body), + }); + + if (!res.ok) { + const errorBody = await res.text(); + throw new Error(`API error ${res.status}: ${errorBody}`); + } + + if (!res.body) { + throw new Error("Response body is null"); + } + + const reader = res.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; - return { - choices: data.choices, - usage: data.usage,
+ buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split("\n"); + buffer = lines.pop() || ""; + + for (const line of lines) { + const trimmed = line.trim(); + if (trimmed === "" || !trimmed.startsWith("data: ")) { + continue; + } + + const data = trimmed.slice(6); + if (data === "[DONE]") { + return; + } + + try { + const parsed = JSON.parse(data); + yield new StreamChunk(parsed.choices); + } catch { + // Skip malformed JSON + continue; + } + } + } + } + + async *stream( + model: string, + input: string | InputObject + ): AsyncGenerator<StreamChunk> { + const body: Record<string, unknown> = { + model, + messages: + typeof input === "string" + ? [{ role: "user", content: input }] + : input.messages, + stream: true, }; + + if (typeof input !== "string") { + if (input.tools) body.tools = input.tools; + if (input.tool_choice) body.tool_choice = input.tool_choice; + } + + yield* this._handleStreamingResponse( + `${this.baseUrl}/v1/chat/completions`, + body + ); } } diff --git a/tests/index.test.ts b/tests/index.test.ts index af8fc34..3b1e646 100644 --- a/tests/index.test.ts +++ b/tests/index.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, vi } from 'vitest'; -import Edgee, { type SendOptions, type SendResponse } from '../src/index.js'; +import Edgee, { type SendOptions, type SendResponse, StreamChunk } from '../src/index.js'; describe('Edgee', () => { const mockApiKey = 'test-api-key-12345'; @@ -490,6 +490,414 @@ describe('Edgee', () => { expect.any(Object) ); }); + + it('should throw error when API returns non-OK status', async () => { + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 401, + text: async () => 'Unauthorized', + }); + + await expect( + client.send({ + model: 'gpt-4', + input: 'Test', + }) + ).rejects.toThrow('API error 401: Unauthorized'); + }); + + it('should throw error on 500 status', async () => { + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 500, + text: async () => 'Internal Server Error', + }); + + await expect(
+ client.send({ + model: 'gpt-4', + input: 'Test', + }) + ).rejects.toThrow('API error 500: Internal Server Error'); + }); + }); + + describe('convenience properties', () => { + let client: Edgee; + + beforeEach(() => { + client = new Edgee(mockApiKey); + }); + + it('should provide .text property for SendResponse', async () => { + const mockResponse = { + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Hello, world!', + }, + finish_reason: 'stop', + }, + ], + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => mockResponse, + }); + + const result = await client.send({ + model: 'gpt-4', + input: 'Hello', + }); + + expect(result.text).toBe('Hello, world!'); + }); + + it('should provide .message property for SendResponse', async () => { + const mockResponse = { + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Hello, world!', + }, + finish_reason: 'stop', + }, + ], + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => mockResponse, + }); + + const result = await client.send({ + model: 'gpt-4', + input: 'Hello', + }); + + expect(result.message).toEqual({ + role: 'assistant', + content: 'Hello, world!', + }); + }); + + it('should provide .finishReason property for SendResponse', async () => { + const mockResponse = { + choices: [ + { + index: 0, + message: { + role: 'assistant', + content: 'Hello', + }, + finish_reason: 'stop', + }, + ], + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => mockResponse, + }); + + const result = await client.send({ + model: 'gpt-4', + input: 'Hello', + }); + + expect(result.finishReason).toBe('stop'); + }); + + it('should provide .toolCalls property for SendResponse', async () => { + const toolCalls = [ + { + id: 'call_123', + type: 'function' as const, + function: { + name: 'get_weather', + arguments: '{"location": "San Francisco"}', + }, + }, + ]; + + const mockResponse = { + choices: [ + { + index: 0, + message: { + role: 
'assistant', + content: null, + tool_calls: toolCalls, + }, + finish_reason: 'tool_calls', + }, + ], + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => mockResponse, + }); + + const result = await client.send({ + model: 'gpt-4', + input: 'What is the weather?', + }); + + expect(result.toolCalls).toEqual(toolCalls); + }); + + it('should return null for convenience properties when choices are empty', async () => { + const mockResponse = { + choices: [], + }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => mockResponse, + }); + + const result = await client.send({ + model: 'gpt-4', + input: 'Hello', + }); + + expect(result.text).toBeNull(); + expect(result.message).toBeNull(); + expect(result.finishReason).toBeNull(); + expect(result.toolCalls).toBeNull(); + }); + }); + + describe('stream', () => { + let client: Edgee; + + beforeEach(() => { + client = new Edgee(mockApiKey); + }); + + it('should stream chunks with string input', async () => { + const mockChunks = [ + { + choices: [ + { + index: 0, + delta: { role: 'assistant', content: '' }, + finish_reason: null, + }, + ], + }, + { + choices: [ + { + index: 0, + delta: { content: 'Hello' }, + finish_reason: null, + }, + ], + }, + { + choices: [ + { + index: 0, + delta: { content: ' world' }, + finish_reason: null, + }, + ], + }, + { + choices: [ + { + index: 0, + delta: {}, + finish_reason: 'stop', + }, + ], + }, + ]; + + const encoder = new TextEncoder(); + const stream = new ReadableStream({ + start(controller) { + for (const chunk of mockChunks) { + controller.enqueue( + encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`) + ); + } + controller.enqueue(encoder.encode('data: [DONE]\n\n')); + controller.close(); + }, + }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + body: stream, + }); + + const chunks: StreamChunk[] = []; + for await (const chunk of client.stream('gpt-4', 'Hello')) { + chunks.push(chunk); + } + + expect(chunks).toHaveLength(4); + 
expect(chunks[0].role).toBe('assistant'); + expect(chunks[1].text).toBe('Hello'); + expect(chunks[2].text).toBe(' world'); + expect(chunks[3].finishReason).toBe('stop'); + }); + + it('should stream chunks with InputObject', async () => { + const mockChunks = [ + { + choices: [ + { + index: 0, + delta: { role: 'assistant', content: 'Response' }, + finish_reason: null, + }, + ], + }, + ]; + + const encoder = new TextEncoder(); + const stream = new ReadableStream({ + start(controller) { + for (const chunk of mockChunks) { + controller.enqueue( + encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`) + ); + } + controller.enqueue(encoder.encode('data: [DONE]\n\n')); + controller.close(); + }, + }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + body: stream, + }); + + const chunks: StreamChunk[] = []; + for await (const chunk of client.stream('gpt-4', { + messages: [{ role: 'user', content: 'Hello' }], + })) { + chunks.push(chunk); + } + + expect(chunks).toHaveLength(1); + expect(chunks[0].text).toBe('Response'); + }); + + it('should handle streaming errors', async () => { + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 429, + text: async () => 'Rate limit exceeded', + }); + + const stream = client.stream('gpt-4', 'Hello'); + + await expect(stream.next()).rejects.toThrow( + 'API error 429: Rate limit exceeded' + ); + }); + + it('should skip malformed JSON in stream', async () => { + const encoder = new TextEncoder(); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(encoder.encode('data: {invalid json}\n\n')); + controller.enqueue( + encoder.encode( + `data: ${JSON.stringify({ + choices: [{ index: 0, delta: { content: 'Valid' }, finish_reason: null }], + })}\n\n` + ) + ); + controller.enqueue(encoder.encode('data: [DONE]\n\n')); + controller.close(); + }, + }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + body: stream, + }); + + const chunks: StreamChunk[] = []; + for await (const chunk of client.stream('gpt-4', 
'Hello')) { + chunks.push(chunk); + } + + // Should skip the malformed JSON and only return the valid chunk + expect(chunks).toHaveLength(1); + expect(chunks[0].text).toBe('Valid'); + }); + }); + + describe('StreamChunk convenience properties', () => { + it('should provide .text property', () => { + const chunk = new StreamChunk([ + { + index: 0, + delta: { content: 'Hello' }, + finish_reason: null, + }, + ]); + + expect(chunk.text).toBe('Hello'); + }); + + it('should provide .role property', () => { + const chunk = new StreamChunk([ + { + index: 0, + delta: { role: 'assistant' }, + finish_reason: null, + }, + ]); + + expect(chunk.role).toBe('assistant'); + }); + + it('should provide .finishReason property', () => { + const chunk = new StreamChunk([ + { + index: 0, + delta: {}, + finish_reason: 'stop', + }, + ]); + + expect(chunk.finishReason).toBe('stop'); + }); + + it('should return null for properties when not present', () => { + const chunk = new StreamChunk([ + { + index: 0, + delta: {}, + finish_reason: null, + }, + ]); + + expect(chunk.text).toBeNull(); + expect(chunk.role).toBeNull(); + expect(chunk.finishReason).toBeNull(); + }); + + it('should return null for properties when choices are empty', () => { + const chunk = new StreamChunk([]); + + expect(chunk.text).toBeNull(); + expect(chunk.role).toBeNull(); + expect(chunk.finishReason).toBeNull(); + }); }); });