diff --git a/.env_example b/.env_example
index f23e93c..5cd4b4f 100644
--- a/.env_example
+++ b/.env_example
@@ -8,6 +8,8 @@
 # XAI_API_KEY="FILL-IN-YOUR-XAI_API_KEY"
 # OLLAMA_HOST="http://localhost:11434"
 # OLLAMA_MODELS="ollama/llama3.2,ollama/llama3.1,ollama/gemma2,ollama/phi3.5"
+# AWS_ACCESS_KEY_ID="FILL-IN-YOUR-AWS-ACCESS-KEY-ID"
+# AWS_SECRET_ACCESS_KEY="FILL-IN-YOUR-AWS-SECRET-ACCESS-KEY"
 # SERPER_API_KEY="your-serper-api-key"
 # SCRAPFLY_API_KEY="your-scrapfly-api-key"
 # DB_URL=postgresql://crewai_user:secret@db:5432/crewai
diff --git a/app/llms.py b/app/llms.py
index 08c8419..e437759 100644
--- a/app/llms.py
+++ b/app/llms.py
@@ -126,6 +126,12 @@ def create_lmstudio_llm(model, temperature):
         )
     else:
         raise ValueError("LM Studio API base not set in .env file")
+
+def create_bedrock_llm(model, temperature):
+    return LLM(
+        model=model,
+        temperature=temperature
+    )
 
 LLM_CONFIG = {
     "OpenAI": {
@@ -152,6 +158,10 @@
         "models": ["xai/grok-2-1212", "xai/grok-beta"],
         "create_llm": create_xai_llm,
     },
+    "Bedrock": {
+        "models": ["bedrock/amazon.nova-pro-v1:0", "bedrock/amazon.titan-text-express-v1", "bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0", "bedrock/meta.llama3-1-70b-instruct-v1:0", "bedrock/mistral.mixtral-8x7b-instruct-v0:1"],
+        "create_llm": create_bedrock_llm,
+    },
 }
 
 def llm_providers_and_models():
diff --git a/requirements.txt b/requirements.txt
index bd1dbf1..945e99d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -15,3 +15,4 @@ snowflake-connector-python
 markdown
 docling
 duckduckgo-search>=8.0.2
+boto3