Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 9 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ thiserror = "1.0"
# SSE parsing for streaming
futures = "0.3"
bytes = "1.7"
async-stream = "0.3"

[dev-dependencies]
tokio-test = "0.4"
Expand All @@ -45,3 +46,11 @@ path = "examples/streaming.rs"
[[example]]
name = "tools"
path = "examples/tools.rs"

[[example]]
name = "tools_auto"
path = "examples/tools_auto.rs"

[[example]]
name = "stream_tools"
path = "examples/stream_tools.rs"
47 changes: 19 additions & 28 deletions examples/simple.rs
Original file line number Diff line number Diff line change
@@ -1,45 +1,36 @@
//! Simple example demonstrating basic usage of the Edgee SDK
//!
//! This example shows how to:
//! - Create an Edgee client with your API key
//! - Send a simple text prompt to a model
//! - Get the response text
//!
//! Run with: cargo run --example simple

use edgee::{Edgee, InputObject, Message};
use edgee::{Edgee, EdgeeConfig};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Create client from environment variables (EDGEE_API_KEY)
let client = Edgee::from_env()?;
// Create the Edgee client with your API key
let client = Edgee::new(EdgeeConfig::new("your-api-key"));

println!("=== Simple Text Input ===");
let response = client.send("gpt-4o", "Say 'Hello, Rust!'").await?;
println!("Response: {}\n", response.text().unwrap_or(""));
// Send a simple text prompt to the model
let response = client
.send("devstral2", "What is the capital of France?")
.await?;

println!("=== Multi-turn Conversation ===");
let messages = vec![
Message::system("You are a helpful assistant that speaks like a pirate."),
Message::user("What's your name?"),
];
// Print the response text
println!("Response: {}", response.text().unwrap_or("No response"));

let response = client.send("gpt-4o", messages).await?;
println!("Assistant: {}\n", response.text().unwrap_or(""));
// You can also access metadata about the response
println!("Model used: {}", response.model);

println!("=== Using InputObject ===");
let input = InputObject::new(vec![
Message::system("You are a helpful coding assistant."),
Message::user("Write a hello world in Rust"),
]);

let response = client.send("gpt-4o", input).await?;
println!("Assistant: {}\n", response.text().unwrap_or(""));

println!("=== Response Metadata ===");
let response = client.send("gpt-4o", "Count to 5").await?;
println!("Model: {}", response.model);
println!("Finish Reason: {:?}", response.finish_reason());
if let Some(usage) = &response.usage {
println!(
"Token Usage: {} prompt + {} completion = {} total",
"Tokens: {} prompt + {} completion = {} total",
usage.prompt_tokens, usage.completion_tokens, usage.total_tokens
);
}
println!("Response: {}\n", response.text().unwrap_or(""));

Ok(())
}
88 changes: 88 additions & 0 deletions examples/stream_tools.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
//! Streaming with automatic tool execution example
//!
//! This example shows how to:
//! - Combine streaming with automatic tool execution
//! - Receive real-time events for chunks, tool calls, and results
//! - Display the response progressively while tools are being executed
//!
//! This is useful when you want to show progress to the user while
//! tools are being executed in the background.
//!
//! Run with: cargo run --example stream_tools

use edgee::{tool, Edgee, EdgeeConfig, StreamEvent};
use serde_json::json;
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Build a client from an explicit configuration.
    let client = Edgee::new(EdgeeConfig::new("your-api-key"));

    // Declare a weather tool with the `tool!` macro: name, description,
    // JSON-schema properties, required fields, then the async handler
    // that runs when the model calls the tool.
    let weather_tool = tool!(
        "get_weather",
        "Get the current weather for a location",
        {
            "location" => {"type": "string", "description": "The city name"}
        },
        required: ["location"],
        |params| async move {
            let city = params["location"].as_str().unwrap_or("Unknown");

            // Fake payload standing in for a real weather API call.
            json!({
                "location": city,
                "temperature": 22,
                "unit": "celsius",
                "condition": "sunny"
            })
        }
    );

    println!("Streaming request with auto tool execution...\n");

    // Kick off the streaming request, handing the model our tool.
    let mut events = client
        .stream_with_tools(
            "devstral2",
            "What's the weather in Paris?",
            vec![weather_tool],
        )
        .execute()
        .await?;

    // Drain the event stream until it is exhausted.
    loop {
        let ev = match events.next().await {
            Some(ev) => ev?,
            None => break,
        };

        match ev {
            // Incremental text produced by the model: print without a
            // trailing newline and flush so it appears immediately.
            StreamEvent::Chunk(chunk) => {
                if let Some(text) = chunk.text() {
                    print!("{}", text);
                    std::io::Write::flush(&mut std::io::stdout())?;
                }
            }

            // The SDK is about to invoke one of our tools.
            StreamEvent::ToolStart { tool_call } => {
                println!("\n[Calling tool: {}]", tool_call.function.name);
            }

            // A tool invocation finished; its result is fed back to the model.
            StreamEvent::ToolResult {
                tool_name, result, ..
            } => {
                println!("[Tool result: {} returned {}]", tool_name, result);
            }

            // One round of the model -> tool -> model loop completed.
            StreamEvent::IterationComplete { iteration } => {
                println!("[Iteration {} complete]", iteration);
            }
        }
    }

    println!("\n\nDone!");

    Ok(())
}
70 changes: 19 additions & 51 deletions examples/streaming.rs
Original file line number Diff line number Diff line change
@@ -1,78 +1,46 @@
//! Streaming example demonstrating real-time response processing

use edgee::{Edgee, Message};
//!
//! This example shows how to:
//! - Stream responses from a model in real-time
//! - Process chunks as they arrive
//! - Display text progressively (like a typing effect)
//!
//! Run with: cargo run --example streaming

use edgee::{Edgee, EdgeeConfig};
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let client = Edgee::from_env()?;
// Create the Edgee client
let client = Edgee::new(EdgeeConfig::new("your-api-key"));

println!("=== Simple Streaming ===");
println!("Streaming response: ");
println!("Asking the model to count from 1 to 10...\n");

let mut stream = client.stream("gpt-4o", "Count from 1 to 10 slowly").await?;
// Start a streaming request
let mut stream = client.stream("devstral2", "Count from 1 to 10").await?;

// Process each chunk as it arrives
while let Some(result) = stream.next().await {
match result {
Ok(chunk) => {
// Print each text chunk as it arrives (no newline, for typing effect)
if let Some(text) = chunk.text() {
print!("{}", text);
std::io::Write::flush(&mut std::io::stdout())?;
}

// Check if the stream is complete
if let Some(reason) = chunk.finish_reason() {
println!("\n[Finish reason: {}]", reason);
println!("\n\n[Stream finished: {}]", reason);
}
}
Err(e) => {
eprintln!("\nError: {}", e);
eprintln!("\nError during streaming: {}", e);
break;
}
}
}

println!("\n");

println!("=== Streaming with System Message ===");
println!("Streaming response: ");

let messages = vec![
Message::system("You are a poetic assistant. Respond in haiku format."),
Message::user("Describe Rust programming language"),
];

let mut stream = client.stream("gpt-4o", messages).await?;

while let Some(result) = stream.next().await {
match result {
Ok(chunk) => {
if let Some(text) = chunk.text() {
print!("{}", text);
std::io::Write::flush(&mut std::io::stdout())?;
}
}
Err(e) => {
eprintln!("\nError: {}", e);
break;
}
}
}

println!("\n");

println!("=== Collecting Full Response from Stream ===");
let mut stream = client.stream("gpt-4o", "Say hello in 5 languages").await?;

let mut full_text = String::new();
while let Some(result) = stream.next().await {
if let Ok(chunk) = result {
if let Some(text) = chunk.text() {
full_text.push_str(text);
}
}
}

println!("Full response: {}", full_text);

Ok(())
}
Loading