From 38981cd6f7ce4d921b9537896caa36721ec173f4 Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Sun, 28 Dec 2025 22:30:01 +0100
Subject: [PATCH 1/7] add wip adding first implementation

---
 src/error/mod.rs |  8 ++++++++
 src/lib.rs       |  2 ++
 src/llm/mod.rs   | 31 +++++++++++++++++++++++++++++++
 src/main.rs      |  6 ++++++
 4 files changed, 47 insertions(+)
 create mode 100644 src/error/mod.rs
 create mode 100644 src/lib.rs
 create mode 100644 src/llm/mod.rs
 create mode 100644 src/main.rs

diff --git a/src/error/mod.rs b/src/error/mod.rs
new file mode 100644
index 0000000..446ad27
--- /dev/null
+++ b/src/error/mod.rs
@@ -0,0 +1,8 @@
+/// This module contains the custom errors for this project.
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum LLMError {
+    #[error("failed to prompt")]
+    PromptError,
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..85e16d1
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,2 @@
+mod error;
+pub mod llm;
diff --git a/src/llm/mod.rs b/src/llm/mod.rs
new file mode 100644
index 0000000..e6cb319
--- /dev/null
+++ b/src/llm/mod.rs
@@ -0,0 +1,31 @@
+/// The `llm` module contains the required logic
+/// to interact with a generalized selection of
+/// supported models.
+use crate::error::LLMError;
+
+use std::env::var;
+
+/// Promptable defines the required functionality
+/// to interact with a language model.
+pub trait Promptable {
+    fn prompt(&self, input: str) -> Result<(), LLMError>;
+}
+
+/// Returns the available models in the current
+/// system context.
+pub fn get_available_models() -> Result<Vec<String>, LLMError> {
+    let mut models = vec![];
+
+    if check_for_anthropic_key() {
+        models.push("anthropic".to_string())
+    }
+
+    Ok(models)
+}
+
+fn check_for_anthropic_key() -> bool {
+    match var("ANTHROPIC_API_KEY") {
+        Ok(v) => v.is_empty(),
+        Err(_) => false,
+    }
+}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..e8e9cf9
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,6 @@
+use parrot::llm::get_available_models;
+
+fn main() {
+    let available_models = get_available_models().expect("failed to get models");
+    available_models.iter().for_each(|m| println!("{}", m))
+}

From 09d0f501bbd8c51c8b4faae68390b22b858ff819 Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Sun, 28 Dec 2025 23:33:51 +0100
Subject: [PATCH 2/7] add basic logic to check for claude cli and key, cursor-agent cli and open ai key

---
 src/llm/mod.rs | 38 ++++++++++++++++++++++++++++++++++++--
 1 file changed, 36 insertions(+), 2 deletions(-)

diff --git a/src/llm/mod.rs b/src/llm/mod.rs
index e6cb319..9a9bdfd 100644
--- a/src/llm/mod.rs
+++ b/src/llm/mod.rs
@@ -3,11 +3,12 @@
 /// supported models.
 use crate::error::LLMError;
 
-use std::env::var;
+use std::{env::var, process::Command};
 
 /// Promptable defines the required functionality
 /// to interact with a language model.
 pub trait Promptable {
+    fn get_name(&self) -> String;
     fn prompt(&self, input: str) -> Result<(), LLMError>;
 }
 
@@ -20,12 +21,45 @@ pub fn get_available_models() -> Result<Vec<String>, LLMError> {
         models.push("anthropic".to_string())
     }
 
+    if check_for_claude() {
+        models.push("claude".to_string())
+    }
+
+    if check_for_cursor_agent() {
+        models.push("cursor-agent".to_string())
+    }
+
+    if check_for_openai_key() {
+        models.push("openai".to_string())
+    }
+
     Ok(models)
 }
 
 fn check_for_anthropic_key() -> bool {
     match var("ANTHROPIC_API_KEY") {
-        Ok(v) => v.is_empty(),
+        Ok(v) => v != "",
         Err(_) => false,
     }
 }
+
+fn check_for_claude() -> bool {
+    match Command::new("claude").arg("-h").output() {
+        Ok(_) => true,
+        Err(_) => false,
+    }
+}
+
+fn check_for_cursor_agent() -> bool {
+    match Command::new("cursor-agent").arg("-h").output() {
+        Ok(_) => true,
+        Err(_) => false,
+    }
+}
+
+fn check_for_openai_key() -> bool {
+    match var("OPENAI_API_KEY") {
+        Ok(v) => v != "",
+        Err(_) => false,
+    }
+}

From 6e3357dd4968b93200b980290861df2dc7700282 Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Mon, 29 Dec 2025 00:28:13 +0100
Subject: [PATCH 3/7] add general wiring for types to implement promptable

---
 src/error/mod.rs     |  7 ++++++-
 src/llm/anthropic.rs | 28 +++++++++++++++++++
 src/llm/claude.rs    | 26 ++++++++++++++++++
 src/llm/cursor.rs    | 26 ++++++++++++++++++
 src/llm/mod.rs       | 64 +++++++++++++++----------------------------
 src/llm/openai.rs    | 24 +++++++++++++++++
 src/main.rs          |  8 +++++-
 7 files changed, 138 insertions(+), 45 deletions(-)
 create mode 100644 src/llm/anthropic.rs
 create mode 100644 src/llm/claude.rs
 create mode 100644 src/llm/cursor.rs
 create mode 100644 src/llm/openai.rs

diff --git a/src/error/mod.rs b/src/error/mod.rs
index 446ad27..e9083b6 100644
--- a/src/error/mod.rs
+++ b/src/error/mod.rs
@@ -1,8 +1,13 @@
 /// This module contains the custom errors for this project.
+use std::env::VarError;
 use thiserror::Error;
 
 #[derive(Debug, Error)]
 pub enum LLMError {
+    #[error("cli not found: {0}")]
+    CLINotFound(String),
+    #[error("missing env variable: {0}")]
+    Env(#[from] VarError),
     #[error("failed to prompt")]
-    PromptError,
+    Prompt,
 }
diff --git a/src/llm/anthropic.rs b/src/llm/anthropic.rs
new file mode 100644
index 0000000..d458de6
--- /dev/null
+++ b/src/llm/anthropic.rs
@@ -0,0 +1,28 @@
+use crate::error::LLMError;
+
+use super::Model;
+
+use std::env::var;
+
+pub struct Anthropic {
+    api_key: String,
+}
+
+impl Anthropic {
+    // TODO: also add a trait for this? e.g. `Initializable`?
+    pub fn init() -> Result<Self, LLMError> {
+        let api_key = var("ANTHROPIC_API_KEY")?;
+
+        Ok(Self { api_key })
+    }
+}
+
+impl Model for Anthropic {
+    fn get_name(&self) -> String {
+        return "Anthropic API".into();
+    }
+
+    fn prompt(&self, _: &str) -> Result<String, LLMError> {
+        unimplemented!("anthropic api")
+    }
+}
diff --git a/src/llm/claude.rs b/src/llm/claude.rs
new file mode 100644
index 0000000..209cb2f
--- /dev/null
+++ b/src/llm/claude.rs
@@ -0,0 +1,26 @@
+use std::process::Command;
+
+use crate::{error::LLMError, llm::Model};
+
+pub struct Claude {}
+
+impl Claude {
+    pub fn init() -> Result<Self, LLMError> {
+        Command::new("claude")
+            .arg("-h")
+            .output()
+            .map_err(|_| LLMError::CLINotFound("claude".into()))?;
+
+        Ok(Self {})
+    }
+}
+
+impl Model for Claude {
+    fn get_name(&self) -> String {
+        return "Claude CLI".into();
+    }
+
+    fn prompt(&self, _: &str) -> Result<String, LLMError> {
+        unimplemented!("claude cli")
+    }
+}
diff --git a/src/llm/cursor.rs b/src/llm/cursor.rs
new file mode 100644
index 0000000..5f8f0e4
--- /dev/null
+++ b/src/llm/cursor.rs
@@ -0,0 +1,26 @@
+use crate::{error::LLMError, llm::Model};
+
+use std::process::Command;
+
+pub struct CursorCLI {}
+
+impl CursorCLI {
+    pub fn init() -> Result<Self, LLMError> {
+        Command::new("cursor-agent")
+            .arg("-h")
+            .output()
+            .map_err(|_| LLMError::CLINotFound("cursor-agent".into()))?;
+
+        Ok(Self {})
+    }
+}
+
+impl Model for CursorCLI {
+    fn get_name(&self) -> String {
+        return "Cursor CLI".into();
+    }
+
+    fn prompt(&self, _: &str) -> Result<String, LLMError> {
+        unimplemented!("cursor cli")
+    }
+}
diff --git a/src/llm/mod.rs b/src/llm/mod.rs
index 9a9bdfd..efbf966 100644
--- a/src/llm/mod.rs
+++ b/src/llm/mod.rs
@@ -1,65 +1,43 @@
 /// The `llm` module contains the required logic
 /// to interact with a generalized selection of
 /// supported models.
-use crate::error::LLMError;
+mod anthropic;
+mod claude;
+mod cursor;
+mod openai;
 
-use std::{env::var, process::Command};
+use crate::{
+    error::LLMError,
+    llm::{anthropic::Anthropic, claude::Claude, cursor::CursorCLI, openai::OpenAI},
+};
 
-/// Promptable defines the required functionality
+/// Prompt defines the required functionality
 /// to interact with a language model.
-pub trait Promptable {
+pub trait Model {
     fn get_name(&self) -> String;
-    fn prompt(&self, input: str) -> Result<(), LLMError>;
+    fn prompt(&self, input: &str) -> Result<String, LLMError>;
 }
 
 /// Returns the available models in the current
 /// system context.
-pub fn get_available_models() -> Result<Vec<String>, LLMError> {
-    let mut models = vec![];
+pub fn get_available_models() -> Result<Vec<Box<dyn Model>>, LLMError> {
+    let mut models: Vec<Box<dyn Model>> = vec![];
 
-    if check_for_anthropic_key() {
-        models.push("anthropic".to_string())
+    if let Ok(m) = Anthropic::init() {
+        models.push(Box::new(m))
     }
 
-    if check_for_claude() {
-        models.push("claude".to_string())
+    if let Ok(m) = Claude::init() {
+        models.push(Box::new(m))
     }
 
-    if check_for_cursor_agent() {
-        models.push("cursor-agent".to_string())
+    if let Ok(m) = CursorCLI::init() {
+        models.push(Box::new(m))
     }
 
-    if check_for_openai_key() {
-        models.push("openai".to_string())
+    if let Ok(m) = OpenAI::init() {
+        models.push(Box::new(m))
     }
 
     Ok(models)
 }
-
-fn check_for_anthropic_key() -> bool {
-    match var("ANTHROPIC_API_KEY") {
-        Ok(v) => v != "",
-        Err(_) => false,
-    }
-}
-
-fn check_for_claude() -> bool {
-    match Command::new("claude").arg("-h").output() {
-        Ok(_) => true,
-        Err(_) => false,
-    }
-}
-
-fn check_for_cursor_agent() -> bool {
-    match Command::new("cursor-agent").arg("-h").output() {
-        Ok(_) => true,
-        Err(_) => false,
-    }
-}
-
-fn check_for_openai_key() -> bool {
-    match var("OPENAI_API_KEY") {
-        Ok(v) => v != "",
-        Err(_) => false,
-    }
-}
diff --git a/src/llm/openai.rs b/src/llm/openai.rs
new file mode 100644
index 0000000..6398925
--- /dev/null
+++ b/src/llm/openai.rs
@@ -0,0 +1,24 @@
+use std::env::var;
+
+use crate::{error::LLMError, llm::Model};
+
+pub struct OpenAI {
+    api_key: String,
+}
+
+impl OpenAI {
+    pub fn init() -> Result<Self, LLMError> {
+        let api_key = var("OPENAI_API_KEY")?.into();
+        Ok(Self { api_key })
+    }
+}
+
+impl Model for OpenAI {
+    fn get_name(&self) -> String {
+        return "OpenAI API".into();
+    }
+
+    fn prompt(&self, _: &str) -> Result<String, LLMError> {
+        unimplemented!("open ai api")
+    }
+}
diff --git a/src/main.rs b/src/main.rs
index e8e9cf9..48f7514 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,5 +2,11 @@ use parrot::llm::get_available_models;
 
 fn main() {
     let available_models = get_available_models().expect("failed to get models");
-    available_models.iter().for_each(|m| println!("{}", m))
+    available_models.iter().for_each(|m| {
+        let out = m
+            .prompt("say hello to my friends")
+            .expect("failed to prompt");
+
+        println!("{} - {}", m.get_name(), out);
+    })
 }

From 218ba1d496157e491dcc22648e5d2045814a2254 Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Mon, 29 Dec 2025 21:17:49 +0100
Subject: [PATCH 4/7] add logic for prompting with claude cli and cursor-agent cli

---
 src/error/mod.rs  | 8 +++++---
 src/llm/claude.rs | 9 +++++++--
 src/llm/cursor.rs | 9 +++++++--
 3 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/src/error/mod.rs b/src/error/mod.rs
index e9083b6..f6b1a55 100644
--- a/src/error/mod.rs
+++ b/src/error/mod.rs
@@ -1,13 +1,15 @@
 /// This module contains the custom errors for this project.
-use std::env::VarError;
+use std::{env::VarError, string::FromUtf8Error};
 use thiserror::Error;
 
 #[derive(Debug, Error)]
 pub enum LLMError {
+    #[error("conversion error: {0}")]
+    BytesConversion(#[from] FromUtf8Error),
     #[error("cli not found: {0}")]
     CLINotFound(String),
     #[error("missing env variable: {0}")]
     Env(#[from] VarError),
-    #[error("failed to prompt")]
-    Prompt,
+    #[error("failed to prompt: {0}")]
+    Prompt(String),
 }
diff --git a/src/llm/claude.rs b/src/llm/claude.rs
index 209cb2f..c492189 100644
--- a/src/llm/claude.rs
+++ b/src/llm/claude.rs
@@ -20,7 +20,12 @@ impl Model for Claude {
         return "Claude CLI".into();
     }
 
-    fn prompt(&self, _: &str) -> Result<String, LLMError> {
-        unimplemented!("claude cli")
+    fn prompt(&self, input: &str) -> Result<String, LLMError> {
+        let out = Command::new("claude")
+            .args([input, "-p"])
+            .output()
+            .map_err(|e| LLMError::Prompt(e.to_string()))?;
+
+        Ok(String::from_utf8(out.stdout)?)
     }
 }
diff --git a/src/llm/cursor.rs b/src/llm/cursor.rs
index 5f8f0e4..61adb8a 100644
--- a/src/llm/cursor.rs
+++ b/src/llm/cursor.rs
@@ -20,7 +20,12 @@ impl Model for CursorCLI {
         return "Cursor CLI".into();
     }
 
-    fn prompt(&self, _: &str) -> Result<String, LLMError> {
-        unimplemented!("cursor cli")
+    fn prompt(&self, input: &str) -> Result<String, LLMError> {
+        let out = Command::new("cursor-agent")
+            .args([input, "-p"])
+            .output()
+            .map_err(|e| LLMError::Prompt(e.to_string()))?;
+
+        Ok(String::from_utf8(out.stdout)?)
     }
 }

From 79db943ae78011456c4bde86e1523096c7fb7397 Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Mon, 29 Dec 2025 21:24:27 +0100
Subject: [PATCH 5/7] introduce constants for cli names

---
 src/llm/claude.rs | 8 +++++---
 src/llm/cursor.rs | 8 +++++---
 2 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/llm/claude.rs b/src/llm/claude.rs
index c492189..5f997fa 100644
--- a/src/llm/claude.rs
+++ b/src/llm/claude.rs
@@ -2,14 +2,16 @@ use std::process::Command;
 
 use crate::{error::LLMError, llm::Model};
 
+const CLAUDE_CLI_NAME: &str = "claude";
+
 pub struct Claude {}
 
 impl Claude {
     pub fn init() -> Result<Self, LLMError> {
-        Command::new("claude")
+        Command::new(CLAUDE_CLI_NAME)
             .arg("-h")
             .output()
-            .map_err(|_| LLMError::CLINotFound("claude".into()))?;
+            .map_err(|_| LLMError::CLINotFound(CLAUDE_CLI_NAME.into()))?;
 
         Ok(Self {})
     }
@@ -21,7 +23,7 @@ impl Model for Claude {
     }
 
     fn prompt(&self, input: &str) -> Result<String, LLMError> {
-        let out = Command::new("claude")
+        let out = Command::new(CLAUDE_CLI_NAME)
             .args([input, "-p"])
             .output()
             .map_err(|e| LLMError::Prompt(e.to_string()))?;
diff --git a/src/llm/cursor.rs b/src/llm/cursor.rs
index 61adb8a..3bc24d8 100644
--- a/src/llm/cursor.rs
+++ b/src/llm/cursor.rs
@@ -2,14 +2,16 @@ use crate::{error::LLMError, llm::Model};
 
 use std::process::Command;
 
+const CURSOR_CLI_NAME: &str = "cursor-agent";
+
 pub struct CursorCLI {}
 
 impl CursorCLI {
     pub fn init() -> Result<Self, LLMError> {
-        Command::new("cursor-agent")
+        Command::new(CURSOR_CLI_NAME)
             .arg("-h")
             .output()
-            .map_err(|_| LLMError::CLINotFound("cursor-agent".into()))?;
+            .map_err(|_| LLMError::CLINotFound(CURSOR_CLI_NAME.into()))?;
 
         Ok(Self {})
     }
@@ -21,7 +23,7 @@ impl Model for CursorCLI {
     }
 
     fn prompt(&self, input: &str) -> Result<String, LLMError> {
-        let out = Command::new("cursor-agent")
+        let out = Command::new(CURSOR_CLI_NAME)
             .args([input, "-p"])
             .output()
             .map_err(|e| LLMError::Prompt(e.to_string()))?;

From ea07130189696a5cf66b62490634c242e7c6fc70 Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Mon, 29 Dec 2025 22:04:22 +0100
Subject: [PATCH 6/7] add model factory trait

---
 src/llm/anthropic.rs |  9 ++++-----
 src/llm/claude.rs    | 11 +++++++----
 src/llm/cursor.rs    | 11 +++++++----
 src/llm/mod.rs       | 17 ++++++++++++++++-
 src/llm/openai.rs    | 13 ++++++++-----
 5 files changed, 42 insertions(+), 19 deletions(-)

diff --git a/src/llm/anthropic.rs b/src/llm/anthropic.rs
index d458de6..a0618c3 100644
--- a/src/llm/anthropic.rs
+++ b/src/llm/anthropic.rs
@@ -1,6 +1,6 @@
 use crate::error::LLMError;
 
-use super::Model;
+use super::{Model, ModelFactory};
 
 use std::env::var;
 
@@ -8,9 +8,8 @@ pub struct Anthropic {
     api_key: String,
 }
 
-impl Anthropic {
-    // TODO: also add a trait for this? e.g. `Initializable`?
-    pub fn init() -> Result<Self, LLMError> {
+impl ModelFactory for Anthropic {
+    fn init() -> Result<Self, LLMError> {
         let api_key = var("ANTHROPIC_API_KEY")?;
 
         Ok(Self { api_key })
@@ -19,7 +18,7 @@ impl Anthropic {
 
 impl Model for Anthropic {
     fn get_name(&self) -> String {
-        return "Anthropic API".into();
+        "Anthropic API".into()
     }
 
     fn prompt(&self, _: &str) -> Result<String, LLMError> {
diff --git a/src/llm/claude.rs b/src/llm/claude.rs
index 5f997fa..5432fc8 100644
--- a/src/llm/claude.rs
+++ b/src/llm/claude.rs
@@ -1,13 +1,16 @@
 use std::process::Command;
 
-use crate::{error::LLMError, llm::Model};
+use crate::{
+    error::LLMError,
+    llm::{Model, ModelFactory},
+};
 
 const CLAUDE_CLI_NAME: &str = "claude";
 
 pub struct Claude {}
 
-impl Claude {
-    pub fn init() -> Result<Self, LLMError> {
+impl ModelFactory for Claude {
+    fn init() -> Result<Self, LLMError> {
         Command::new(CLAUDE_CLI_NAME)
             .arg("-h")
             .output()
@@ -19,7 +22,7 @@ impl Claude {
 
 impl Model for Claude {
     fn get_name(&self) -> String {
-        return "Claude CLI".into();
+        "Claude CLI".into()
     }
 
     fn prompt(&self, input: &str) -> Result<String, LLMError> {
diff --git a/src/llm/cursor.rs b/src/llm/cursor.rs
index 3bc24d8..2a9c7fb 100644
--- a/src/llm/cursor.rs
+++ b/src/llm/cursor.rs
@@ -1,4 +1,7 @@
-use crate::{error::LLMError, llm::Model};
+use crate::{
+    error::LLMError,
+    llm::{Model, ModelFactory},
+};
 
 use std::process::Command;
 
@@ -6,8 +9,8 @@ const CURSOR_CLI_NAME: &str = "cursor-agent";
 
 pub struct CursorCLI {}
 
-impl CursorCLI {
-    pub fn init() -> Result<Self, LLMError> {
+impl ModelFactory for CursorCLI {
+    fn init() -> Result<Self, LLMError> {
         Command::new(CURSOR_CLI_NAME)
             .arg("-h")
             .output()
@@ -19,7 +22,7 @@ impl CursorCLI {
 
 impl Model for CursorCLI {
     fn get_name(&self) -> String {
-        return "Cursor CLI".into();
+        "Cursor CLI".into()
     }
 
     fn prompt(&self, input: &str) -> Result<String, LLMError> {
diff --git a/src/llm/mod.rs b/src/llm/mod.rs
index efbf966..0b1879c 100644
--- a/src/llm/mod.rs
+++ b/src/llm/mod.rs
@@ -11,7 +11,22 @@ use crate::{
     llm::{anthropic::Anthropic, claude::Claude, cursor::CursorCLI, openai::OpenAI},
 };
 
-/// Prompt defines the required functionality
+/// Defines the expected interface for the initialization
+/// of the different supported models.
+///
+/// Note: It's required to separate this from the actual model
+/// trait, which defines the interface for interaction with the
+/// LLMs. This is because we store the actual `Model` instances in a
+/// boxed vector.
+///
+/// To allow this, the `Model` trait has to be `dyn`-compatible, which
+/// requires that it does not demand `Self: Sized`, as described here:
+/// https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility.
+pub trait ModelFactory: Model + Sized {
+    fn init() -> Result<Self, LLMError>;
+}
+
+/// Defines the required functionality
 /// to interact with a language model.
 pub trait Model {
     fn get_name(&self) -> String;
diff --git a/src/llm/openai.rs b/src/llm/openai.rs
index 6398925..383a0b3 100644
--- a/src/llm/openai.rs
+++ b/src/llm/openai.rs
@@ -1,21 +1,24 @@
 use std::env::var;
 
-use crate::{error::LLMError, llm::Model};
+use crate::{
+    error::LLMError,
+    llm::{Model, ModelFactory},
+};
 
 pub struct OpenAI {
     api_key: String,
 }
 
-impl OpenAI {
-    pub fn init() -> Result<Self, LLMError> {
-        let api_key = var("OPENAI_API_KEY")?.into();
+impl ModelFactory for OpenAI {
+    fn init() -> Result<Self, LLMError> {
+        let api_key = var("OPENAI_API_KEY")?;
         Ok(Self { api_key })
     }
 }
 
 impl Model for OpenAI {
     fn get_name(&self) -> String {
-        return "OpenAI API".into();
+        "OpenAI API".into()
     }
 
     fn prompt(&self, _: &str) -> Result<String, LLMError> {

From 63a604edc367ac1bdc2e1c3cb3ab9e46432fb11b Mon Sep 17 00:00:00 2001
From: Malte Herrmann
Date: Tue, 30 Dec 2025 22:31:05 +0100
Subject: [PATCH 7/7] add linter ci

---
 .github/workflows/lint.yaml | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
 create mode 100644 .github/workflows/lint.yaml

diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml
new file mode 100644
index 0000000..ac8aec8
--- /dev/null
+++ b/.github/workflows/lint.yaml
@@ -0,0 +1,18 @@
+name: Lint codebase
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+# Make sure CI fails on all warnings, including Clippy lints
+env:
+  RUSTFLAGS: "-Dwarnings"
+
+jobs:
+  clippy_check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v5
+      - name: Run Clippy
+        run: cargo clippy --all-targets --all-features
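
At the end of this series the API-backed models (Anthropic, OpenAI) still answer prompt() with unimplemented!. A minimal sketch of how the Anthropic side could be filled in inside src/llm/anthropic.rs, assuming the reqwest crate (with the blocking and json features) and serde_json were added as dependencies, and treating the model id below as a placeholder:

// Sketch only, not part of the patches above. It calls the Anthropic Messages
// API with a blocking HTTP request so it fits the synchronous Model::prompt
// signature; errors are funneled into LLMError::Prompt.
impl Model for Anthropic {
    fn get_name(&self) -> String {
        "Anthropic API".into()
    }

    fn prompt(&self, input: &str) -> Result<String, LLMError> {
        // Request body for the Messages API; the model id is an assumption.
        let body = serde_json::json!({
            "model": "claude-sonnet-4-20250514",
            "max_tokens": 1024,
            "messages": [{ "role": "user", "content": input }]
        });

        let response: serde_json::Value = reqwest::blocking::Client::new()
            .post("https://api.anthropic.com/v1/messages")
            .header("x-api-key", self.api_key.as_str())
            .header("anthropic-version", "2023-06-01")
            .header("content-type", "application/json")
            .json(&body)
            .send()
            .map_err(|e| LLMError::Prompt(e.to_string()))?
            .json()
            .map_err(|e| LLMError::Prompt(e.to_string()))?;

        // The generated text sits in the first entry of the `content` array.
        response["content"][0]["text"]
            .as_str()
            .map(str::to_owned)
            .ok_or_else(|| LLMError::Prompt("unexpected response shape".into()))
    }
}

The OpenAI backend could follow the same pattern against its chat completions endpoint, reusing the api_key stored by its ModelFactory::init.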