Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions 02-samples/14-agentic-ai-at-the-edge/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@
# Disable FAISS GPU warnings globally - we only use CPU
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

from opentelemetry import baggage, context

from strands import Agent
from strands.models import BedrockModel
from strands.models.llamacpp import LlamaCppModel
Expand Down Expand Up @@ -644,6 +646,13 @@ def display_welcome_message():
print()


def set_session_context(session_id=None):
    """Attach the session ID to OpenTelemetry baggage for trace correlation.

    Stores *session_id* under the ``session.id`` baggage key and attaches
    the resulting context so downstream spans inherit it.

    Args:
        session_id: Value recorded under the ``session.id`` baggage key
            (presumably a string; TODO confirm callers never pass None in
            practice, since baggage values are expected to be strings).

    Returns:
        The token from ``context.attach``; pass it to ``context.detach``
        to restore the previous context when the session ends.
    """
    return context.attach(baggage.set_baggage("session.id", session_id))


def main():
"""Main entry point"""
global USE_API_CLIENT
Expand Down Expand Up @@ -677,6 +686,8 @@ def main():

display_welcome_message()

set_session_context(SESSION_ID)

if USE_RICH_UI and console:
console.print("🚀 [bold green]Assistant is ready![/bold green]")
console.print()
Expand Down
5 changes: 4 additions & 1 deletion 02-samples/14-agentic-ai-at-the-edge/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ classifiers = [

dependencies = [
# Core Framework
"strands-agents @ git+https://github.com/westonbrown/sdk-python.git@main",
"strands-agents[otel] @ git+https://github.com/westonbrown/sdk-python.git@main",

# Configuration & Environment
"python-dotenv>=1.0.0,<2.0.0",
Expand All @@ -52,6 +52,9 @@ dependencies = [
"markdownify>=0.11.0,<1.0.0",
"readabilipy>=0.2.0,<1.0.0",

# Observability
"aws-opentelemetry-distro~=0.10.1",

# System & Utilities
"psutil>=5.9.0,<8.0.0",
"py-cpuinfo>=9.0.0,<10.0.0",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from strands.models.llamacpp import LlamaCppModel
import logging
import json
from config import BEDROCK_MODEL_ID, LLAMACPP_URL
from config import BEDROCK_MODEL_ID, LLAMACPP_URL, SESSION_ID

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -93,7 +93,10 @@ def select_model(query: str, context: Optional[str] = None) -> Dict[str, Any]:

# Create the local analysis agent used to route the query.
# NOTE(review): trace_attributes must be a dict mapping attribute names to
# values. The original `{"session.id", SESSION_ID}` used a comma instead of
# a colon, producing a *set* literal, so the session ID was never attached
# to traces as a keyed attribute.
analysis_agent = Agent(
    model=local_model,
    system_prompt=analysis_prompt,
    callback_handler=None,
    trace_attributes={"session.id": SESSION_ID},
)

# Prepare analysis query
Expand Down