From ede37285b67dcae44d0a5bee8db4019948d43e81 Mon Sep 17 00:00:00 2001 From: Nicolas Girardot Date: Fri, 16 Jan 2026 10:58:22 +0100 Subject: [PATCH 1/3] feat: add auto-tools handling --- Cargo.lock | 23 ++ Cargo.toml | 9 + examples/simple.rs | 40 +-- examples/stream_tools.rs | 59 ++++ examples/streaming.rs | 69 +---- examples/tools.rs | 153 +--------- examples/tools_auto.rs | 38 +++ src/client.rs | 607 ++++++++++++++++++++++++++++++++++++++- src/error.rs | 8 + src/lib.rs | 2 +- src/models.rs | 311 ++++++++++++++++++++ 11 files changed, 1074 insertions(+), 245 deletions(-) create mode 100644 examples/stream_tools.rs create mode 100644 examples/tools_auto.rs diff --git a/Cargo.lock b/Cargo.lock index e3077e2e..e1c064f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,6 +21,28 @@ dependencies = [ "serde_json", ] +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -107,6 +129,7 @@ dependencies = [ name = "edgee" version = "2.0.1" dependencies = [ + "async-stream", "bytes", "futures", "mockito", diff --git a/Cargo.toml b/Cargo.toml index f4b3970e..52016802 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,7 @@ thiserror = "1.0" # SSE parsing for streaming futures = "0.3" bytes = "1.7" +async-stream = "0.3" [dev-dependencies] tokio-test = "0.4" @@ -45,3 +46,11 @@ path = "examples/streaming.rs" [[example]] name = "tools" path = "examples/tools.rs" + +[[example]] +name = "tools_auto" +path = "examples/tools_auto.rs" + 
+[[example]] +name = "stream_tools" +path = "examples/stream_tools.rs" diff --git a/examples/simple.rs b/examples/simple.rs index d7c13aa1..e2d98bef 100644 --- a/examples/simple.rs +++ b/examples/simple.rs @@ -1,45 +1,13 @@ //! Simple example demonstrating basic usage of the Edgee SDK -use edgee::{Edgee, InputObject, Message}; +use edgee::{Edgee, EdgeeConfig}; #[tokio::main] async fn main() -> Result<(), Box> { - // Create client from environment variables (EDGEE_API_KEY) - let client = Edgee::from_env()?; + let client = Edgee::new(EdgeeConfig::new("your-api-key")); - println!("=== Simple Text Input ==="); - let response = client.send("gpt-4o", "Say 'Hello, Rust!'").await?; - println!("Response: {}\n", response.text().unwrap_or("")); - - println!("=== Multi-turn Conversation ==="); - let messages = vec![ - Message::system("You are a helpful assistant that speaks like a pirate."), - Message::user("What's your name?"), - ]; - - let response = client.send("gpt-4o", messages).await?; - println!("Assistant: {}\n", response.text().unwrap_or("")); - - println!("=== Using InputObject ==="); - let input = InputObject::new(vec![ - Message::system("You are a helpful coding assistant."), - Message::user("Write a hello world in Rust"), - ]); - - let response = client.send("gpt-4o", input).await?; - println!("Assistant: {}\n", response.text().unwrap_or("")); - - println!("=== Response Metadata ==="); - let response = client.send("gpt-4o", "Count to 5").await?; - println!("Model: {}", response.model); - println!("Finish Reason: {:?}", response.finish_reason()); - if let Some(usage) = &response.usage { - println!( - "Token Usage: {} prompt + {} completion = {} total", - usage.prompt_tokens, usage.completion_tokens, usage.total_tokens - ); - } - println!("Response: {}\n", response.text().unwrap_or("")); + let response = client.send("devstral2", "Say 'Hello, Rust!'").await?; + println!("Response: {}", response.text().unwrap_or("")); Ok(()) } diff --git a/examples/stream_tools.rs 
b/examples/stream_tools.rs new file mode 100644 index 00000000..e245d807 --- /dev/null +++ b/examples/stream_tools.rs @@ -0,0 +1,59 @@ +//! Streaming with automatic tool execution example + +use edgee::{tool, Edgee, EdgeeConfig, StreamEvent}; +use serde_json::json; +use tokio_stream::StreamExt; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let client = Edgee::new(EdgeeConfig::new("your-api-key")); + + let get_weather = tool!( + "get_weather", + "Get the current weather for a location", + { + "location" => {"type": "string", "description": "The city name"} + }, + required: ["location"], + |args| async move { + let location = args["location"].as_str().unwrap_or("Unknown"); + json!({ + "location": location, + "temperature": 22, + "condition": "sunny" + }) + } + ); + + let mut stream = client + .stream_with_tools( + "devstral2", + "What's the weather in Paris?", + vec![get_weather], + ) + .execute() + .await?; + + while let Some(event) = stream.next().await { + match event? { + StreamEvent::Chunk(chunk) => { + if let Some(text) = chunk.text() { + print!("{}", text); + std::io::Write::flush(&mut std::io::stdout())?; + } + } + StreamEvent::ToolStart { tool_call } => { + println!("\n[Tool: {}]", tool_call.function.name); + } + StreamEvent::ToolResult { + tool_name, result, .. + } => { + println!("[Result: {} -> {}]", tool_name, result); + } + StreamEvent::IterationComplete { .. } => {} + } + } + + println!(); + Ok(()) +} diff --git a/examples/streaming.rs b/examples/streaming.rs index 2ca42c8b..edabea15 100644 --- a/examples/streaming.rs +++ b/examples/streaming.rs @@ -1,78 +1,25 @@ //! 
Streaming example demonstrating real-time response processing -use edgee::{Edgee, Message}; +use edgee::{Edgee, EdgeeConfig}; use tokio_stream::StreamExt; #[tokio::main] async fn main() -> Result<(), Box> { - let client = Edgee::from_env()?; + let client = Edgee::new( + EdgeeConfig::new("your-api-key") + ); - println!("=== Simple Streaming ==="); - println!("Streaming response: "); - - let mut stream = client.stream("gpt-4o", "Count from 1 to 10 slowly").await?; + let mut stream = client.stream("devstral2", "Count from 1 to 10").await?; - while let Some(result) = stream.next().await { - match result { - Ok(chunk) => { - if let Some(text) = chunk.text() { - print!("{}", text); - std::io::Write::flush(&mut std::io::stdout())?; - } - - if let Some(reason) = chunk.finish_reason() { - println!("\n[Finish reason: {}]", reason); - } - } - Err(e) => { - eprintln!("\nError: {}", e); - break; - } - } - } - - println!("\n"); - - println!("=== Streaming with System Message ==="); - println!("Streaming response: "); - - let messages = vec![ - Message::system("You are a poetic assistant. 
Respond in haiku format."), - Message::user("Describe Rust programming language"), - ]; - - let mut stream = client.stream("gpt-4o", messages).await?; - - while let Some(result) = stream.next().await { - match result { - Ok(chunk) => { - if let Some(text) = chunk.text() { - print!("{}", text); - std::io::Write::flush(&mut std::io::stdout())?; - } - } - Err(e) => { - eprintln!("\nError: {}", e); - break; - } - } - } - - println!("\n"); - - println!("=== Collecting Full Response from Stream ==="); - let mut stream = client.stream("gpt-4o", "Say hello in 5 languages").await?; - - let mut full_text = String::new(); while let Some(result) = stream.next().await { if let Ok(chunk) = result { if let Some(text) = chunk.text() { - full_text.push_str(text); + print!("{}", text); + std::io::Write::flush(&mut std::io::stdout())?; } } } - println!("Full response: {}", full_text); - + println!(); Ok(()) } diff --git a/examples/tools.rs b/examples/tools.rs index 1169392d..96aaa40f 100644 --- a/examples/tools.rs +++ b/examples/tools.rs @@ -1,14 +1,12 @@ -//! Tool calling example demonstrating function calling capabilities +//! Manual tool calling example (without auto-execution) -use edgee::{Edgee, FunctionDefinition, InputObject, JsonSchema, Message, Tool}; +use edgee::{Edgee, EdgeeConfig, FunctionDefinition, InputObject, JsonSchema, Message, Tool}; use serde_json::json; use std::collections::HashMap; #[tokio::main] async fn main() -> Result<(), Box> { - let client = Edgee::from_env()?; - - println!("=== Tool Calling Example ===\n"); + let client = Edgee::new(EdgeeConfig::new("your-api-key")); // Define a weather function let get_weather = FunctionDefinition { @@ -18,21 +16,7 @@ async fn main() -> Result<(), Box> { schema_type: "object".to_string(), properties: Some({ let mut props = HashMap::new(); - props.insert( - "location".to_string(), - json!({ - "type": "string", - "description": "The city and state, e.g. 
San Francisco, CA" - }), - ); - props.insert( - "unit".to_string(), - json!({ - "type": "string", - "enum": ["celsius", "fahrenheit"], - "description": "The temperature unit" - }), - ); + props.insert("location".to_string(), json!({"type": "string"})); props }), required: Some(vec!["location".to_string()]), @@ -40,133 +24,18 @@ async fn main() -> Result<(), Box> { }, }; - // Define a calculator function - let calculate = FunctionDefinition { - name: "calculate".to_string(), - description: Some("Perform a mathematical calculation".to_string()), - parameters: JsonSchema { - schema_type: "object".to_string(), - properties: Some({ - let mut props = HashMap::new(); - props.insert( - "operation".to_string(), - json!({ - "type": "string", - "enum": ["add", "subtract", "multiply", "divide"], - "description": "The operation to perform" - }), - ); - props.insert( - "a".to_string(), - json!({ - "type": "number", - "description": "First operand" - }), - ); - props.insert( - "b".to_string(), - json!({ - "type": "number", - "description": "Second operand" - }), - ); - props - }), - required: Some(vec![ - "operation".to_string(), - "a".to_string(), - "b".to_string(), - ]), - description: None, - }, - }; + // Send request with tool + let input = InputObject::new(vec![Message::user("What's the weather in Paris?")]) + .with_tools(vec![Tool::function(get_weather)]); - // Create input with tools - let input = InputObject::new(vec![Message::user( - "What's the weather in San Francisco? 
Also, what's 15 multiplied by 7?", - )]) - .with_tools(vec![Tool::function(get_weather), Tool::function(calculate)]); + let response = client.send("devstral2", input).await?; - println!("Sending request with tools...\n"); - let response = client.send("gpt-4o", input).await?; - - // Check if the model made tool calls + // Handle tool calls manually if let Some(tool_calls) = response.tool_calls() { - println!("Model made {} tool call(s):\n", tool_calls.len()); - - for (i, call) in tool_calls.iter().enumerate() { - println!("Tool Call {}:", i + 1); - println!(" ID: {}", call.id); - println!(" Function: {}", call.function.name); - println!(" Arguments: {}", call.function.arguments); - println!(); - - // Simulate executing the function - let result = match call.function.name.as_str() { - "get_weather" => { - let args: serde_json::Value = serde_json::from_str(&call.function.arguments)?; - let location = args["location"].as_str().unwrap_or("Unknown"); - format!("The weather in {} is sunny, 72°F", location) - } - "calculate" => { - let args: serde_json::Value = serde_json::from_str(&call.function.arguments)?; - let op = args["operation"].as_str().unwrap_or("add"); - let a = args["a"].as_f64().unwrap_or(0.0); - let b = args["b"].as_f64().unwrap_or(0.0); - - let result = match op { - "add" => a + b, - "subtract" => a - b, - "multiply" => a * b, - "divide" => { - if b != 0.0 { - a / b - } else { - return Err("Division by zero".into()); - } - } - _ => 0.0, - }; - - format!("The result is {}", result) - } - _ => "Unknown function".to_string(), - }; - - println!(" Result: {}\n", result); - } - - // Send tool results back to get final answer - println!("Sending tool results back to model...\n"); - - let mut messages = vec![Message::user( - "What's the weather in San Francisco? 
Also, what's 15 multiplied by 7?", - )]; - - // Add the assistant's response with tool calls - if let Some(first_choice) = response.choices.first() { - messages.push(first_choice.message.clone()); - } - - // Add tool responses for call in tool_calls { - let result = match call.function.name.as_str() { - "get_weather" => "The weather in San Francisco is sunny, 72°F".to_string(), - "calculate" => "The result is 105".to_string(), - _ => "Unknown function".to_string(), - }; - - messages.push(Message::tool(call.id.clone(), result)); + println!("Tool called: {}", call.function.name); + println!("Arguments: {}", call.function.arguments); } - - let final_input = InputObject::new(messages); - let final_response = client.send("gpt-4o", final_input).await?; - - println!("Final response:"); - println!("{}\n", final_response.text().unwrap_or("")); - } else { - println!("No tool calls made. Response:"); - println!("{}\n", response.text().unwrap_or("")); } Ok(()) diff --git a/examples/tools_auto.rs b/examples/tools_auto.rs new file mode 100644 index 00000000..b9cad6af --- /dev/null +++ b/examples/tools_auto.rs @@ -0,0 +1,38 @@ +//! 
Auto tool execution example with automatic tool calling + +use edgee::{tool, Edgee, EdgeeConfig, SimpleInput}; +use serde_json::json; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let client = Edgee::new( + EdgeeConfig::new("your-api-key") + ); + + let get_weather = tool!( + "get_weather", + "Get the current weather for a location", + { + "location" => {"type": "string", "description": "The city name"} + }, + required: ["location"], + |args| async move { + let location = args["location"].as_str().unwrap_or("Unknown"); + json!({ + "location": location, + "temperature": 22, + "condition": "sunny" + }) + } + ); + + let input = SimpleInput::new( + "What's the weather in Paris?", + vec![get_weather], + ); + + let response = client.send("devstral2", input).await?; + println!("Response: {}", response.text().unwrap_or("")); + + Ok(()) +} diff --git a/src/client.rs b/src/client.rs index 0cfb288e..d3b2e890 100644 --- a/src/client.rs +++ b/src/client.rs @@ -2,10 +2,12 @@ use crate::{ error::{Error, Result}, models::*, }; +use async_stream::try_stream; use bytes::Bytes; use futures::stream::{Stream, StreamExt}; use reqwest::Client; use serde_json::json; +use std::collections::HashMap; use std::pin::Pin; /// Input types accepted by the send method @@ -13,8 +15,40 @@ use std::pin::Pin; pub enum Input { /// Simple text input (converted to a user message) Text(String), - /// Structured input with messages and tools + /// Structured input with messages and tools (advanced mode - manual tool handling) Object(InputObject), + /// Simple text input with executable tools (simple mode - auto tool execution) + SimpleWithTools(SimpleInput), +} + +/// Simple mode input with executable tools +/// +/// Used for automatic tool execution where the SDK handles the agentic loop. 
+#[derive(Debug, Clone)] +pub struct SimpleInput { + /// The user's text input + pub text: String, + /// Executable tools with handlers + pub tools: Vec, + /// Maximum number of tool execution iterations (default: 10) + pub max_iterations: u32, +} + +impl SimpleInput { + /// Create a new simple input with tools + pub fn new(text: impl Into, tools: Vec) -> Self { + Self { + text: text.into(), + tools, + max_iterations: 10, + } + } + + /// Set the maximum number of tool iterations + pub fn with_max_iterations(mut self, max: u32) -> Self { + self.max_iterations = max; + self + } } impl From for Input { @@ -41,6 +75,12 @@ impl From> for Input { } } +impl From for Input { + fn from(input: SimpleInput) -> Self { + Input::SimpleWithTools(input) + } +} + /// Main client for interacting with the Edgee AI Gateway #[derive(Debug, Clone)] pub struct Edgee { @@ -71,11 +111,19 @@ impl Edgee { /// Send a chat completion request (non-streaming) /// + /// This method is polymorphic and handles different input types: + /// - **String/&str**: Simple text input (converted to a user message) + /// - **Vec**: Multi-turn conversation + /// - **InputObject**: Advanced mode with manual tool handling + /// - **SimpleInput**: Simple mode with automatic tool execution + /// /// # Arguments /// * `model` - The model to use (e.g., "gpt-4o", "mistral-large-latest") - /// * `input` - The input (can be a string, InputObject, or `Vec`) + /// * `input` - The input (polymorphic - see examples below) /// - /// # Example + /// # Examples + /// + /// Simple text: /// ```no_run /// # async fn example() -> Result<(), Box> { /// use edgee::Edgee; @@ -86,16 +134,61 @@ impl Edgee { /// # Ok(()) /// # } /// ``` + /// + /// With auto-executed tools (simple mode): + /// ```no_run + /// # async fn example() -> Result<(), Box> { + /// use edgee::{Edgee, SimpleInput, ExecutableTool, JsonSchema}; + /// use serde_json::json; + /// use std::collections::HashMap; + /// + /// let client = Edgee::from_env()?; + /// + 
/// let weather_tool = ExecutableTool::new( + /// "get_weather", "Get weather", JsonSchema { + /// schema_type: "object".to_string(), + /// properties: None, required: None, description: None, + /// }, + /// |_args| async move { json!({"temp": 72}) }, + /// ); + /// + /// // Simple mode: tools are auto-executed + /// let input = SimpleInput::new("What's the weather?", vec![weather_tool]); + /// let response = client.send("gpt-4o", input).await?; + /// # Ok(()) + /// # } + /// ``` pub async fn send( &self, model: impl Into, input: impl Into, ) -> Result { + let model = model.into(); let input = input.into(); + + // Polymorphic dispatch based on input type + match input { + // Simple mode with auto tool execution + Input::SimpleWithTools(simple) => { + self.execute_with_tools( + model, + simple.text, + simple.tools, + simple.max_iterations, + ) + .await + } + // Text or Object mode - standard API call + _ => self.send_standard(model, input).await, + } + } + + /// Standard send without auto tool execution + async fn send_standard(&self, model: String, input: Input) -> Result { let (messages, tools, tool_choice) = self.parse_input(input); let mut body = json!({ - "model": model.into(), + "model": model, "messages": messages, "stream": false, }); @@ -248,7 +341,7 @@ impl Edgee { }) } - /// Parse input into components + /// Parse input into components (for standard mode only) fn parse_input( &self, input: Input, @@ -259,7 +352,511 @@ impl Edgee { (messages, None, None) } Input::Object(obj) => (obj.messages, obj.tools, obj.tool_choice), + // SimpleWithTools is handled separately in send() before reaching here + Input::SimpleWithTools(_) => unreachable!( + "SimpleWithTools should be handled by execute_with_tools, not parse_input" + ), + } + } + + /// Send a request with executable tools that are automatically called + /// + /// This is "simple mode" - the SDK will automatically execute tool calls + /// and feed the results back to the model in an agentic loop until the + 
/// model produces a final response or max iterations is reached. + /// + /// # Arguments + /// * `model` - The model to use (e.g., "gpt-4o", "mistral-large-latest") + /// * `input` - The text input (converted to a user message) + /// * `tools` - Executable tools with handlers + /// + /// # Example + /// ```no_run + /// # async fn example() -> Result<(), Box> { + /// use edgee::{Edgee, ExecutableTool, JsonSchema}; + /// use serde_json::json; + /// use std::collections::HashMap; + /// + /// let client = Edgee::from_env()?; + /// + /// let weather_tool = ExecutableTool::new( + /// "get_weather", + /// "Get weather for a location", + /// JsonSchema { + /// schema_type: "object".to_string(), + /// properties: Some({ + /// let mut props = HashMap::new(); + /// props.insert("location".to_string(), json!({ + /// "type": "string", + /// "description": "The city name" + /// })); + /// props + /// }), + /// required: Some(vec!["location".to_string()]), + /// description: None, + /// }, + /// |args| async move { + /// let location = args["location"].as_str().unwrap_or("Unknown"); + /// json!({"temperature": 72, "unit": "F", "location": location}) + /// }, + /// ); + /// + /// let response = client + /// .send_with_tools("gpt-4o", "What's the weather in Paris?", vec![weather_tool]) + /// .await?; + /// + /// println!("{}", response.text().unwrap_or("")); + /// # Ok(()) + /// # } + /// ``` + pub fn send_with_tools( + &self, + model: impl Into, + input: impl Into, + tools: Vec, + ) -> SendWithToolsBuilder { + SendWithToolsBuilder::new(self.clone(), model.into(), input.into(), tools) + } + + /// Stream a request with executable tools that are automatically called + /// + /// This combines streaming with automatic tool execution. The SDK streams + /// the response and automatically executes tool calls, yielding events + /// for chunks, tool starts, and tool results. 
+ /// + /// # Arguments + /// * `model` - The model to use (e.g., "gpt-4o", "mistral-large-latest") + /// * `input` - The text input (converted to a user message) + /// * `tools` - Executable tools with handlers + /// + /// # Example + /// ```no_run + /// # async fn example() -> Result<(), Box> { + /// use edgee::{Edgee, StreamEvent, tool}; + /// use serde_json::json; + /// use tokio_stream::StreamExt; + /// + /// let client = Edgee::from_env()?; + /// + /// let weather = tool!( + /// "get_weather", + /// "Get weather for a location", + /// { "location" => {"type": "string"} }, + /// required: ["location"], + /// |args| async move { + /// json!({"temperature": 72}) + /// } + /// ); + /// + /// let mut stream = client + /// .stream_with_tools("gpt-4o", "What's the weather in Paris?", vec![weather]) + /// .execute() + /// .await?; + /// + /// while let Some(event) = stream.next().await { + /// match event? { + /// StreamEvent::Chunk(chunk) => { + /// if let Some(text) = chunk.text() { + /// print!("{}", text); + /// } + /// } + /// StreamEvent::ToolStart { tool_call } => { + /// println!("\n[Tool: {}]", tool_call.function.name); + /// } + /// StreamEvent::ToolResult { tool_name, result, .. 
} => { + /// println!("[Result: {} -> {}]", tool_name, result); + /// } + /// StreamEvent::IterationComplete { iteration } => { + /// println!("[Iteration {} complete]", iteration); + /// } + /// } + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn stream_with_tools( + &self, + model: impl Into, + input: impl Into, + tools: Vec, + ) -> StreamWithToolsBuilder { + StreamWithToolsBuilder::new(self.clone(), model.into(), input.into(), tools) + } + + /// Internal method to execute the agentic loop with tools + async fn execute_with_tools( + &self, + model: String, + input: String, + tools: Vec, + max_iterations: u32, + ) -> Result { + // Build initial messages + let mut messages: Vec = vec![Message::user(input)]; + + // Convert ExecutableTool to API format + let api_tools: Vec = tools.iter().map(|t| t.definition.clone()).collect(); + + // Create a map for quick tool lookup + let tool_map: HashMap<&str, &ExecutableTool> = + tools.iter().map(|t| (t.name(), t)).collect(); + + let mut iterations = 0; + let mut total_usage: Option = None; + + // The agentic loop + while iterations < max_iterations { + iterations += 1; + + // Call the API + let response = self.call_api(&model, &messages, Some(&api_tools)).await?; + + // Accumulate usage + if let Some(usage) = &response.usage { + if let Some(ref mut total) = total_usage { + total.prompt_tokens += usage.prompt_tokens; + total.completion_tokens += usage.completion_tokens; + total.total_tokens += usage.total_tokens; + } else { + total_usage = Some(usage.clone()); + } + } + + // Get the first choice + let choice = match response.choices.first() { + Some(c) => c, + None => { + // No choices, return what we have + return Ok(SendResponse { + usage: total_usage, + ..response + }); + } + }; + + // Check for tool calls + let tool_calls = match &choice.message.tool_calls { + Some(calls) if !calls.is_empty() => calls, + _ => { + // No tool calls, we're done - return final response with accumulated usage + return Ok(SendResponse { + 
usage: total_usage, + ..response + }); + } + }; + + // Add assistant's response (with tool_calls) to messages + messages.push(choice.message.clone()); + + // Execute each tool call and add results + for tool_call in tool_calls { + let tool_name = &tool_call.function.name; + + let result = if let Some(tool) = tool_map.get(tool_name.as_str()) { + // Parse arguments and execute + match serde_json::from_str::(&tool_call.function.arguments) { + Ok(args) => { + let result = tool.execute(args).await; + if result.is_string() { + result.as_str().unwrap_or("").to_string() + } else { + serde_json::to_string(&result).unwrap_or_else(|_| "{}".to_string()) + } + } + Err(e) => { + json!({"error": format!("Invalid arguments: {}", e)}).to_string() + } + } + } else { + json!({"error": format!("Unknown tool: {}", tool_name)}).to_string() + }; + + // Add tool result to messages + messages.push(Message::tool(&tool_call.id, result)); + } + + // Loop continues - model will process tool results + } + + // Max iterations reached + Err(Error::MaxIterationsExceeded(max_iterations)) + } + + /// Internal helper to call the API + async fn call_api( + &self, + model: &str, + messages: &[Message], + tools: Option<&[Tool]>, + ) -> Result { + let mut body = json!({ + "model": model, + "messages": messages, + "stream": false, + }); + + if let Some(tools) = tools { + body["tools"] = json!(tools); } + + let response = self + .client + .post(format!("{}/v1/chat/completions", self.config.base_url)) + .header("Authorization", format!("Bearer {}", self.config.api_key)) + .header("Content-Type", "application/json") + .json(&body) + .send() + .await?; + + if !response.status().is_success() { + let status = response.status().as_u16(); + let message = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + return Err(Error::Api { status, message }); + } + + let send_response: SendResponse = response.json().await?; + Ok(send_response) + } +} + +/// Builder for send_with_tools requests 
+/// +/// Allows optional configuration like max_iterations before executing the request. +pub struct SendWithToolsBuilder { + client: Edgee, + model: String, + input: String, + tools: Vec, + max_iterations: u32, +} + +impl SendWithToolsBuilder { + fn new(client: Edgee, model: String, input: String, tools: Vec) -> Self { + Self { + client, + model, + input, + tools, + max_iterations: 10, // Default matching TypeScript SDK + } + } + + /// Set the maximum number of tool execution iterations + /// + /// Default is 10. If the model keeps requesting tool calls beyond this limit, + /// an error is returned. + pub fn max_iterations(mut self, max: u32) -> Self { + self.max_iterations = max; + self + } + + /// Execute the request and return the final response + pub async fn execute(self) -> Result { + self.client + .execute_with_tools(self.model, self.input, self.tools, self.max_iterations) + .await + } +} + +// Implement IntoFuture for ergonomic .await directly on the builder +impl std::future::IntoFuture for SendWithToolsBuilder { + type Output = Result; + type IntoFuture = Pin + Send>>; + + fn into_future(self) -> Self::IntoFuture { + Box::pin(self.execute()) + } +} + +/// Builder for stream_with_tools requests +/// +/// Allows optional configuration like max_iterations before executing the request. +pub struct StreamWithToolsBuilder { + client: Edgee, + model: String, + input: String, + tools: Vec, + max_iterations: u32, +} + +impl StreamWithToolsBuilder { + fn new(client: Edgee, model: String, input: String, tools: Vec) -> Self { + Self { + client, + model, + input, + tools, + max_iterations: 10, + } + } + + /// Set the maximum number of tool execution iterations + /// + /// Default is 10. If the model keeps requesting tool calls beyond this limit, + /// an error is returned. 
+ pub fn max_iterations(mut self, max: u32) -> Self { + self.max_iterations = max; + self + } + + /// Execute the streaming request and return a stream of events + pub async fn execute( + self, + ) -> Result> + Send>>> { + let client = self.client; + let model = self.model; + let input = self.input; + let tools = self.tools; + let max_iterations = self.max_iterations; + + // Build initial messages + let mut messages: Vec = vec![Message::user(input)]; + + // Convert ExecutableTool to API format + let api_tools: Vec = tools.iter().map(|t| t.definition.clone()).collect(); + + // Create a map for quick tool lookup + let tool_map: HashMap = tools + .into_iter() + .map(|t| (t.name().to_string(), t)) + .collect(); + + let stream = try_stream! { + for iteration in 1..=max_iterations { + // Accumulate the full response from stream + let mut role: Option = None; + let mut content = String::new(); + let mut tool_calls_accumulator: HashMap = HashMap::new(); + + // Stream the response + let mut body = json!({ + "model": &model, + "messages": &messages, + "stream": true, + }); + body["tools"] = json!(&api_tools); + + let response = client + .client + .post(format!("{}/v1/chat/completions", client.config.base_url)) + .header("Authorization", format!("Bearer {}", client.config.api_key)) + .header("Content-Type", "application/json") + .json(&body) + .send() + .await?; + + let status = response.status(); + let status_code = status.as_u16(); + + // Must get byte_stream before any potential consumption of response + let byte_stream = if status.is_success() { + Some(response.bytes_stream()) + } else { + let message = response + .text() + .await + .unwrap_or_else(|_| "Unknown error".to_string()); + Err(Error::Api { status: status_code, message })? 
+ }; + + let mut chunk_stream = Edgee::parse_sse_stream(byte_stream.unwrap()); + + while let Some(result) = chunk_stream.next().await { + let chunk = result?; + + // Yield the chunk as an event + yield StreamEvent::Chunk(chunk.clone()); + + // Accumulate role + if let Some(r) = chunk.role() { + role = Some(r.clone()); + } + + // Accumulate content + if let Some(text) = chunk.text() { + content.push_str(text); + } + + // Accumulate tool calls from deltas + if let Some(deltas) = chunk.tool_call_deltas() { + for delta in deltas { + // Use index from the delta, defaulting to parsing from id + let idx = delta.id.parse::().unwrap_or(0); + if let Some(existing) = tool_calls_accumulator.get_mut(&idx) { + // Append arguments to existing tool call + existing.function.arguments.push_str(&delta.function.arguments); + } else { + // Start new tool call + tool_calls_accumulator.insert(idx, delta.clone()); + } + } + } + } + + // Convert accumulated tool calls to vec + let tool_calls: Vec = tool_calls_accumulator.into_values().collect(); + + // No tool calls? 
We're done + if tool_calls.is_empty() { + return; + } + + // Add assistant's message (with tool_calls) to messages + let assistant_msg = Message { + role: role.unwrap_or(Role::Assistant), + content: if content.is_empty() { None } else { Some(content) }, + tool_calls: Some(tool_calls.clone()), + tool_call_id: None, + }; + messages.push(assistant_msg); + + // Execute each tool call and add results + for tool_call in tool_calls { + let tool_name = &tool_call.function.name; + + // Yield tool_start event + yield StreamEvent::ToolStart { tool_call: tool_call.clone() }; + + let result = if let Some(tool) = tool_map.get(tool_name) { + match serde_json::from_str::(&tool_call.function.arguments) { + Ok(args) => tool.execute(args).await, + Err(e) => json!({"error": format!("Invalid arguments: {}", e)}), + } + } else { + json!({"error": format!("Unknown tool: {}", tool_name)}) + }; + + // Yield tool_result event + yield StreamEvent::ToolResult { + tool_call_id: tool_call.id.clone(), + tool_name: tool_name.clone(), + result: result.clone(), + }; + + // Add tool result to messages + let result_str = if result.is_string() { + result.as_str().unwrap_or("").to_string() + } else { + serde_json::to_string(&result).unwrap_or_else(|_| "{}".to_string()) + }; + messages.push(Message::tool(&tool_call.id, result_str)); + } + + // Yield iteration complete event + yield StreamEvent::IterationComplete { iteration }; + + // Loop continues - model will process tool results + } + + // Max iterations reached + Err(Error::MaxIterationsExceeded(max_iterations))?; + }; + + Ok(Box::pin(stream)) } } diff --git a/src/error.rs b/src/error.rs index 5557a326..d0a38faf 100644 --- a/src/error.rs +++ b/src/error.rs @@ -24,6 +24,14 @@ pub enum Error { /// Invalid configuration #[error("Invalid configuration: {0}")] InvalidConfig(String), + + /// Maximum tool iterations exceeded + #[error("Maximum tool iterations ({0}) exceeded")] + MaxIterationsExceeded(u32), + + /// Tool execution error + #[error("Tool 
execution error for '{tool_name}': {message}")] + ToolExecution { tool_name: String, message: String }, } /// Result type alias for Edgee operations diff --git a/src/lib.rs b/src/lib.rs index dc73199f..09629bde 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -106,7 +106,7 @@ pub mod error; pub mod models; // Re-export main types for convenience -pub use client::{Edgee, Input}; +pub use client::{Edgee, Input, SendWithToolsBuilder, SimpleInput, StreamWithToolsBuilder}; pub use error::{Error, Result}; pub use models::*; diff --git a/src/models.rs b/src/models.rs index 38c0c8c2..fbf1f654 100644 --- a/src/models.rs +++ b/src/models.rs @@ -1,5 +1,8 @@ use serde::{Deserialize, Serialize}; use std::collections::HashMap; +use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; /// Configuration for the Edgee client #[derive(Debug, Clone)] @@ -168,6 +171,106 @@ impl Tool { } } +/// Type alias for async tool handler function +pub type ToolHandlerFn = Arc< + dyn Fn(serde_json::Value) -> Pin<Box<dyn Future<Output = serde_json::Value> + Send>> + + Send + + Sync, +>; + +/// A tool with an executable handler for automatic tool calling +/// +/// This is used in "simple mode" where the SDK automatically executes +/// tool calls and feeds results back to the model. 
+/// +/// # Example +/// ```no_run +/// use edgee::{ExecutableTool, JsonSchema}; +/// use serde_json::json; +/// use std::collections::HashMap; +/// +/// let tool = ExecutableTool::new( +/// "get_weather", +/// "Get the weather for a location", +/// JsonSchema { +/// schema_type: "object".to_string(), +/// properties: Some({ +/// let mut props = HashMap::new(); +/// props.insert("location".to_string(), json!({ +/// "type": "string", +/// "description": "The city name" +/// })); +/// props +/// }), +/// required: Some(vec!["location".to_string()]), +/// description: None, +/// }, +/// |args| async move { +/// let location = args["location"].as_str().unwrap_or("Unknown"); +/// json!({"temperature": 72, "location": location}) +/// }, +/// ); +/// ``` +#[derive(Clone)] +pub struct ExecutableTool { + /// The tool definition (sent to the API) + pub definition: Tool, + /// The handler function to execute when this tool is called + handler: ToolHandlerFn, +} + +impl std::fmt::Debug for ExecutableTool { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ExecutableTool") + .field("definition", &self.definition) + .field("handler", &"") + .finish() + } +} + +impl ExecutableTool { + /// Create a new executable tool with an async handler + /// + /// # Arguments + /// * `name` - The name of the tool/function + /// * `description` - A description of what the tool does + /// * `parameters` - JSON schema describing the parameters + /// * `handler` - An async function that takes parsed arguments and returns a result + pub fn new( + name: impl Into, + description: impl Into, + parameters: JsonSchema, + handler: F, + ) -> Self + where + F: Fn(serde_json::Value) -> Fut + Send + Sync + 'static, + Fut: Future + Send + 'static, + { + let handler = Arc::new(handler); + Self { + definition: Tool::function(FunctionDefinition { + name: name.into(), + description: Some(description.into()), + parameters, + }), + handler: Arc::new(move |args| { + let handler 
= handler.clone(); + Box::pin(async move { handler(args).await }) + }), + } + } + + /// Get the name of this tool + pub fn name(&self) -> &str { + &self.definition.function.name + } + + /// Execute the tool handler with the given arguments + pub async fn execute(&self, args: serde_json::Value) -> serde_json::Value { + (self.handler)(args).await + } +} + /// Tool choice configuration #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] @@ -321,4 +424,212 @@ impl StreamChunk { .first() .and_then(|c| c.finish_reason.as_deref()) } + + /// Get tool call deltas from the first choice + pub fn tool_call_deltas(&self) -> Option<&Vec> { + self.choices + .first() + .and_then(|c| c.delta.tool_calls.as_ref()) + } +} + +/// Stream events for tool-enabled streaming +#[derive(Debug, Clone)] +pub enum StreamEvent { + /// A chunk of streamed content + Chunk(StreamChunk), + /// Tool execution is starting + ToolStart { tool_call: ToolCall }, + /// Tool execution completed + ToolResult { + tool_call_id: String, + tool_name: String, + result: serde_json::Value, + }, + /// One iteration of the tool loop completed + IterationComplete { iteration: u32 }, +} + +impl StreamEvent { + /// Get text if this is a chunk event + pub fn text(&self) -> Option<&str> { + match self { + StreamEvent::Chunk(chunk) => chunk.text(), + _ => None, + } + } + + /// Check if this is a chunk event + pub fn is_chunk(&self) -> bool { + matches!(self, StreamEvent::Chunk(_)) + } + + /// Check if this is a tool start event + pub fn is_tool_start(&self) -> bool { + matches!(self, StreamEvent::ToolStart { .. }) + } + + /// Check if this is a tool result event + pub fn is_tool_result(&self) -> bool { + matches!(self, StreamEvent::ToolResult { .. 
}) + } +} + +/// Builder for creating executable tools with a fluent API +/// +/// # Example +/// ```no_run +/// use edgee::ToolBuilder; +/// use serde_json::json; +/// +/// let tool = ToolBuilder::new("get_weather") +/// .description("Get the weather for a location") +/// .param("location", json!({"type": "string", "description": "City name"})) +/// .param("unit", json!({"type": "string", "enum": ["celsius", "fahrenheit"]})) +/// .required(vec!["location"]) +/// .handler(|args| async move { +/// let location = args["location"].as_str().unwrap_or("Unknown"); +/// json!({"temperature": 72, "location": location}) +/// }) +/// .build(); +/// ``` +pub struct ToolBuilder { + name: String, + description: Option, + properties: HashMap, + required: Vec, +} + +impl ToolBuilder { + /// Create a new tool builder with the given name + pub fn new(name: impl Into) -> Self { + Self { + name: name.into(), + description: None, + properties: HashMap::new(), + required: Vec::new(), + } + } + + /// Set the tool description + pub fn description(mut self, desc: impl Into) -> Self { + self.description = Some(desc.into()); + self + } + + /// Add a parameter to the tool + pub fn param(mut self, name: impl Into, schema: serde_json::Value) -> Self { + self.properties.insert(name.into(), schema); + self + } + + /// Set required parameters + pub fn required(mut self, required: Vec<&str>) -> Self { + self.required = required.into_iter().map(String::from).collect(); + self + } + + /// Build the executable tool with the given handler + pub fn handler(self, handler: F) -> ExecutableTool + where + F: Fn(serde_json::Value) -> Fut + Send + Sync + 'static, + Fut: Future + Send + 'static, + { + ExecutableTool::new( + self.name, + self.description.unwrap_or_default(), + JsonSchema { + schema_type: "object".to_string(), + properties: if self.properties.is_empty() { + None + } else { + Some(self.properties) + }, + required: if self.required.is_empty() { + None + } else { + Some(self.required) + }, + 
description: None, + }, + handler, + ) + } +} + +/// Macro to create an ExecutableTool with less boilerplate +/// +/// # Examples +/// +/// Basic usage: +/// ```no_run +/// use edgee::tool; +/// use serde_json::json; +/// +/// let weather = tool!( +/// "get_weather", +/// "Get the weather for a location", +/// { +/// "location" => {"type": "string", "description": "City name"}, +/// "unit" => {"type": "string", "enum": ["celsius", "fahrenheit"]} +/// }, +/// required: ["location"], +/// |args| async move { +/// let location = args["location"].as_str().unwrap_or("Unknown"); +/// json!({"temperature": 72, "location": location}) +/// } +/// ); +/// ``` +/// +/// Without parameters: +/// ```no_run +/// use edgee::tool; +/// use serde_json::json; +/// +/// let time = tool!( +/// "get_time", +/// "Get the current time", +/// |_args| async move { json!({"time": "12:00"}) } +/// ); +/// ``` +#[macro_export] +macro_rules! tool { + // With parameters and required fields + ( + $name:expr, + $description:expr, + { $($param_name:expr => $param_schema:tt),* $(,)? }, + required: [$($required:expr),* $(,)?], + $handler:expr + ) => {{ + $crate::ToolBuilder::new($name) + .description($description) + $(.param($param_name, serde_json::json!($param_schema)))* + .required(vec![$($required),*]) + .handler($handler) + }}; + + // With parameters, no required fields + ( + $name:expr, + $description:expr, + { $($param_name:expr => $param_schema:tt),* $(,)? 
}, + $handler:expr + ) => {{ + $crate::ToolBuilder::new($name) + .description($description) + $(.param($param_name, serde_json::json!($param_schema)))* + .handler($handler) + }}; + + // No parameters + ( + $name:expr, + $description:expr, + $handler:expr + ) => {{ + $crate::ToolBuilder::new($name) + .description($description) + .handler($handler) + }}; } From 6c9003b3749a04d66a96de9444992fd4f86d15bd Mon Sep 17 00:00:00 2001 From: Nicolas Girardot Date: Fri, 16 Jan 2026 11:01:29 +0100 Subject: [PATCH 2/3] feat/better examples --- examples/simple.rs | 25 +++++++++++++++++-- examples/stream_tools.rs | 43 +++++++++++++++++++++++++------- examples/streaming.rs | 37 +++++++++++++++++++++------ examples/tools.rs | 54 ++++++++++++++++++++++++++++++++++------ examples/tools_auto.rs | 44 +++++++++++++++++++++++++------- 5 files changed, 168 insertions(+), 35 deletions(-) diff --git a/examples/simple.rs b/examples/simple.rs index e2d98bef..dec16245 100644 --- a/examples/simple.rs +++ b/examples/simple.rs @@ -1,13 +1,34 @@ //! Simple example demonstrating basic usage of the Edgee SDK +//! +//! This example shows how to: +//! - Create an Edgee client with your API key +//! - Send a simple text prompt to a model +//! - Get the response text +//! +//! 
Run with: cargo run --example simple use edgee::{Edgee, EdgeeConfig}; #[tokio::main] async fn main() -> Result<(), Box> { + // Create the Edgee client with your API key let client = Edgee::new(EdgeeConfig::new("your-api-key")); - let response = client.send("devstral2", "Say 'Hello, Rust!'").await?; - println!("Response: {}", response.text().unwrap_or("")); + // Send a simple text prompt to the model + let response = client.send("devstral2", "What is the capital of France?").await?; + + // Print the response text + println!("Response: {}", response.text().unwrap_or("No response")); + + // You can also access metadata about the response + println!("Model used: {}", response.model); + + if let Some(usage) = &response.usage { + println!( + "Tokens: {} prompt + {} completion = {} total", + usage.prompt_tokens, usage.completion_tokens, usage.total_tokens + ); + } Ok(()) } diff --git a/examples/stream_tools.rs b/examples/stream_tools.rs index e245d807..7556dd33 100644 --- a/examples/stream_tools.rs +++ b/examples/stream_tools.rs @@ -1,4 +1,14 @@ //! Streaming with automatic tool execution example +//! +//! This example shows how to: +//! - Combine streaming with automatic tool execution +//! - Receive real-time events for chunks, tool calls, and results +//! - Display the response progressively while tools are being executed +//! +//! This is useful when you want to show progress to the user while +//! tools are being executed in the background. +//! +//! 
Run with: cargo run --example stream_tools use edgee::{tool, Edgee, EdgeeConfig, StreamEvent}; use serde_json::json; @@ -6,8 +16,10 @@ use tokio_stream::StreamExt; #[tokio::main] async fn main() -> Result<(), Box> { + // Create the Edgee client let client = Edgee::new(EdgeeConfig::new("your-api-key")); + // Define a tool using the `tool!` macro let get_weather = tool!( "get_weather", "Get the current weather for a location", @@ -17,43 +29,56 @@ async fn main() -> Result<(), Box> { required: ["location"], |args| async move { let location = args["location"].as_str().unwrap_or("Unknown"); + + // Simulate an API call json!({ "location": location, "temperature": 22, + "unit": "celsius", "condition": "sunny" }) } ); + println!("Streaming request with auto tool execution...\n"); + + // Start a streaming request with tools let mut stream = client - .stream_with_tools( - "devstral2", - "What's the weather in Paris?", - vec![get_weather], - ) + .stream_with_tools("devstral2", "What's the weather in Paris?", vec![get_weather]) .execute() .await?; + // Process events as they arrive while let Some(event) = stream.next().await { match event? { + // Text chunks from the model StreamEvent::Chunk(chunk) => { if let Some(text) = chunk.text() { print!("{}", text); std::io::Write::flush(&mut std::io::stdout())?; } } + + // A tool is about to be executed StreamEvent::ToolStart { tool_call } => { - println!("\n[Tool: {}]", tool_call.function.name); + println!("\n[Calling tool: {}]", tool_call.function.name); } + + // A tool has finished executing StreamEvent::ToolResult { tool_name, result, .. } => { - println!("[Result: {} -> {}]", tool_name, result); + println!("[Tool result: {} returned {}]", tool_name, result); + } + + // An iteration of the agentic loop is complete + StreamEvent::IterationComplete { iteration } => { + println!("[Iteration {} complete]", iteration); } - StreamEvent::IterationComplete { .. 
} => {} } } - println!(); + println!("\n\nDone!"); + Ok(()) } diff --git a/examples/streaming.rs b/examples/streaming.rs index edabea15..98b59b70 100644 --- a/examples/streaming.rs +++ b/examples/streaming.rs @@ -1,25 +1,46 @@ //! Streaming example demonstrating real-time response processing +//! +//! This example shows how to: +//! - Stream responses from a model in real-time +//! - Process chunks as they arrive +//! - Display text progressively (like a typing effect) +//! +//! Run with: cargo run --example streaming use edgee::{Edgee, EdgeeConfig}; use tokio_stream::StreamExt; #[tokio::main] async fn main() -> Result<(), Box> { - let client = Edgee::new( - EdgeeConfig::new("your-api-key") - ); + // Create the Edgee client + let client = Edgee::new(EdgeeConfig::new("your-api-key")); + println!("Asking the model to count from 1 to 10...\n"); + + // Start a streaming request let mut stream = client.stream("devstral2", "Count from 1 to 10").await?; + // Process each chunk as it arrives while let Some(result) = stream.next().await { - if let Ok(chunk) = result { - if let Some(text) = chunk.text() { - print!("{}", text); - std::io::Write::flush(&mut std::io::stdout())?; + match result { + Ok(chunk) => { + // Print each text chunk as it arrives (no newline, for typing effect) + if let Some(text) = chunk.text() { + print!("{}", text); + std::io::Write::flush(&mut std::io::stdout())?; + } + + // Check if the stream is complete + if let Some(reason) = chunk.finish_reason() { + println!("\n\n[Stream finished: {}]", reason); + } + } + Err(e) => { + eprintln!("\nError during streaming: {}", e); + break; } } } - println!(); Ok(()) } diff --git a/examples/tools.rs b/examples/tools.rs index 96aaa40f..92361ce5 100644 --- a/examples/tools.rs +++ b/examples/tools.rs @@ -1,4 +1,16 @@ -//! Manual tool calling example (without auto-execution) +//! Manual tool calling example +//! +//! This example shows how to: +//! - Define tools manually (without the `tool!` macro) +//! 
- Send a request with tools +//! - Handle tool calls yourself (manual execution) +//! +//! Use this approach when you need full control over tool execution, +//! such as when tools require user confirmation or have side effects. +//! +//! For automatic tool execution, see the `tools_auto` example instead. +//! +//! Run with: cargo run --example tools use edgee::{Edgee, EdgeeConfig, FunctionDefinition, InputObject, JsonSchema, Message, Tool}; use serde_json::json; @@ -6,9 +18,11 @@ use std::collections::HashMap; #[tokio::main] async fn main() -> Result<(), Box> { + // Create the Edgee client let client = Edgee::new(EdgeeConfig::new("your-api-key")); - // Define a weather function + // Define a tool manually using FunctionDefinition + // This gives you full control over the tool schema let get_weather = FunctionDefinition { name: "get_weather".to_string(), description: Some("Get the current weather for a location".to_string()), @@ -16,7 +30,13 @@ async fn main() -> Result<(), Box> { schema_type: "object".to_string(), properties: Some({ let mut props = HashMap::new(); - props.insert("location".to_string(), json!({"type": "string"})); + props.insert( + "location".to_string(), + json!({ + "type": "string", + "description": "The city name, e.g. 
Paris" + }), + ); props }), required: Some(vec!["location".to_string()]), @@ -24,18 +44,38 @@ async fn main() -> Result<(), Box> { }, }; - // Send request with tool + // Create an input with messages and tools let input = InputObject::new(vec![Message::user("What's the weather in Paris?")]) .with_tools(vec![Tool::function(get_weather)]); + println!("Sending request with tools...\n"); + + // Send the request let response = client.send("devstral2", input).await?; - // Handle tool calls manually + // Check if the model requested any tool calls if let Some(tool_calls) = response.tool_calls() { + println!("Model requested {} tool call(s):\n", tool_calls.len()); + for call in tool_calls { - println!("Tool called: {}", call.function.name); - println!("Arguments: {}", call.function.arguments); + println!(" Tool: {}", call.function.name); + println!(" Arguments: {}", call.function.arguments); + println!(" Call ID: {}", call.id); + println!(); + + // Here you would: + // 1. Execute the tool with the provided arguments + // 2. Create a new request with the tool result + // 3. Send it back to the model for a final response + // + // Example: + // let result = execute_my_tool(&call.function.name, &call.function.arguments); + // let tool_message = Message::tool(call.id.clone(), result); + // ... send another request with the tool message } + } else { + // No tool calls - the model responded directly + println!("Response: {}", response.text().unwrap_or("No response")); } Ok(()) diff --git a/examples/tools_auto.rs b/examples/tools_auto.rs index b9cad6af..df0e4fec 100644 --- a/examples/tools_auto.rs +++ b/examples/tools_auto.rs @@ -1,14 +1,30 @@ -//! Auto tool execution example with automatic tool calling +//! Auto tool execution example +//! +//! This example shows how to: +//! - Define tools using the `tool!` macro +//! - Let the SDK automatically execute tools when the model calls them +//! - Get the final response after all tool calls are processed +//! +//! 
The SDK handles the agentic loop automatically: when the model requests +//! a tool call, the SDK executes your handler and sends the result back +//! to the model until a final response is generated. +//! +//! Run with: cargo run --example tools_auto use edgee::{tool, Edgee, EdgeeConfig, SimpleInput}; use serde_json::json; #[tokio::main] async fn main() -> Result<(), Box> { - let client = Edgee::new( - EdgeeConfig::new("your-api-key") - ); + // Create the Edgee client + let client = Edgee::new(EdgeeConfig::new("your-api-key")); + // Define a tool using the `tool!` macro + // The macro creates an ExecutableTool with: + // - name: the function name the model will call + // - description: helps the model understand when to use this tool + // - parameters: JSON schema for the function arguments + // - handler: async function that executes when the tool is called let get_weather = tool!( "get_weather", "Get the current weather for a location", @@ -17,22 +33,32 @@ async fn main() -> Result<(), Box> { }, required: ["location"], |args| async move { + // This handler is called automatically when the model uses this tool let location = args["location"].as_str().unwrap_or("Unknown"); + + // In a real app, you would call an actual weather API here + println!("[Tool executed: get_weather for {}]", location); + json!({ "location": location, "temperature": 22, + "unit": "celsius", "condition": "sunny" }) } ); - let input = SimpleInput::new( - "What's the weather in Paris?", - vec![get_weather], - ); + // Create a SimpleInput with your prompt and tools + // The SDK will automatically handle tool execution + let input = SimpleInput::new("What's the weather in Paris?", vec![get_weather]); + + println!("Sending request with auto tool execution...\n"); + // Send the request - the SDK handles the agentic loop automatically let response = client.send("devstral2", input).await?; - println!("Response: {}", response.text().unwrap_or("")); + + // Print the final response (after all tools 
have been executed) + println!("\nFinal response: {}", response.text().unwrap_or("No response")); Ok(()) } From 690fe377b219954e9afbf08ac655df88a748784b Mon Sep 17 00:00:00 2001 From: Nicolas Girardot Date: Fri, 16 Jan 2026 14:16:23 +0100 Subject: [PATCH 3/3] fix:issues --- examples/simple.rs | 4 +++- examples/stream_tools.rs | 6 +++++- examples/tools_auto.rs | 5 ++++- src/client.rs | 19 +++++-------------- src/models.rs | 3 +-- 5 files changed, 18 insertions(+), 19 deletions(-) diff --git a/examples/simple.rs b/examples/simple.rs index dec16245..7471850c 100644 --- a/examples/simple.rs +++ b/examples/simple.rs @@ -15,7 +15,9 @@ async fn main() -> Result<(), Box> { let client = Edgee::new(EdgeeConfig::new("your-api-key")); // Send a simple text prompt to the model - let response = client.send("devstral2", "What is the capital of France?").await?; + let response = client + .send("devstral2", "What is the capital of France?") + .await?; // Print the response text println!("Response: {}", response.text().unwrap_or("No response")); diff --git a/examples/stream_tools.rs b/examples/stream_tools.rs index 7556dd33..bc34303b 100644 --- a/examples/stream_tools.rs +++ b/examples/stream_tools.rs @@ -44,7 +44,11 @@ async fn main() -> Result<(), Box> { // Start a streaming request with tools let mut stream = client - .stream_with_tools("devstral2", "What's the weather in Paris?", vec![get_weather]) + .stream_with_tools( + "devstral2", + "What's the weather in Paris?", + vec![get_weather], + ) .execute() .await?; diff --git a/examples/tools_auto.rs b/examples/tools_auto.rs index df0e4fec..96e54ce1 100644 --- a/examples/tools_auto.rs +++ b/examples/tools_auto.rs @@ -58,7 +58,10 @@ async fn main() -> Result<(), Box> { let response = client.send("devstral2", input).await?; // Print the final response (after all tools have been executed) - println!("\nFinal response: {}", response.text().unwrap_or("No response")); + println!( + "\nFinal response: {}", + response.text().unwrap_or("No 
response") + ); Ok(()) } diff --git a/src/client.rs b/src/client.rs index d3b2e890..78c69a97 100644 --- a/src/client.rs +++ b/src/client.rs @@ -113,7 +113,7 @@ impl Edgee { /// /// This method is polymorphic and handles different input types: /// - **String/&str**: Simple text input (converted to a user message) - /// - **Vec**: Multi-turn conversation + /// - **`Vec`**: Multi-turn conversation /// - **InputObject**: Advanced mode with manual tool handling /// - **SimpleInput**: Simple mode with automatic tool execution /// @@ -170,13 +170,8 @@ impl Edgee { match input { // Simple mode with auto tool execution Input::SimpleWithTools(simple) => { - self.execute_with_tools( - model, - simple.text, - simple.tools, - simple.max_iterations, - ) - .await + self.execute_with_tools(model, simple.text, simple.tools, simple.max_iterations) + .await } // Text or Object mode - standard API call _ => self.send_standard(model, input).await, @@ -564,9 +559,7 @@ impl Edgee { serde_json::to_string(&result).unwrap_or_else(|_| "{}".to_string()) } } - Err(e) => { - json!({"error": format!("Invalid arguments: {}", e)}).to_string() - } + Err(e) => json!({"error": format!("Invalid arguments: {}", e)}).to_string(), } } else { json!({"error": format!("Unknown tool: {}", tool_name)}).to_string() @@ -704,9 +697,7 @@ impl StreamWithToolsBuilder { } /// Execute the streaming request and return a stream of events - pub async fn execute( - self, - ) -> Result> + Send>>> { + pub async fn execute(self) -> Result> + Send>>> { let client = self.client; let model = self.model; let input = self.input; diff --git a/src/models.rs b/src/models.rs index fbf1f654..80929696 100644 --- a/src/models.rs +++ b/src/models.rs @@ -490,8 +490,7 @@ impl StreamEvent { /// .handler(|args| async move { /// let location = args["location"].as_str().unwrap_or("Unknown"); /// json!({"temperature": 72, "location": location}) -/// }) -/// .build(); +/// }); /// ``` pub struct ToolBuilder { name: String,