diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml
new file mode 100644
index 0000000..ac8aec8
--- /dev/null
+++ b/.github/workflows/lint.yaml
@@ -0,0 +1,18 @@
+name: Lint codebase
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+# Make sure CI fails on all warnings, including Clippy lints
+env:
+  RUSTFLAGS: "-Dwarnings"
+
+jobs:
+  clippy_check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v5
+      - name: Run Clippy
+        run: cargo clippy --all-targets --all-features
diff --git a/src/error/mod.rs b/src/error/mod.rs
new file mode 100644
index 0000000..f6b1a55
--- /dev/null
+++ b/src/error/mod.rs
@@ -0,0 +1,15 @@
+//! This module contains the custom errors for this project.
+use std::{env::VarError, string::FromUtf8Error};
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum LLMError {
+    #[error("conversion error: {0}")]
+    BytesConversion(#[from] FromUtf8Error),
+    #[error("cli not found: {0}")]
+    CLINotFound(String),
+    #[error("missing env variable: {0}")]
+    Env(#[from] VarError),
+    #[error("failed to prompt: {0}")]
+    Prompt(String),
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..85e16d1
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,2 @@
+pub mod error;
+pub mod llm;
diff --git a/src/llm/anthropic.rs b/src/llm/anthropic.rs
new file mode 100644
index 0000000..a0618c3
--- /dev/null
+++ b/src/llm/anthropic.rs
@@ -0,0 +1,28 @@
+use crate::error::LLMError;
+
+use super::{Model, ModelFactory};
+
+use std::env::var;
+
+pub struct Anthropic {
+    #[allow(dead_code)] // not read yet; `prompt` is still unimplemented
+    api_key: String,
+}
+
+impl ModelFactory for Anthropic {
+    fn init() -> Result<Self, LLMError> {
+        let api_key = var("ANTHROPIC_API_KEY")?;
+
+        Ok(Self { api_key })
+    }
+}
+
+impl Model for Anthropic {
+    fn get_name(&self) -> String {
+        "Anthropic API".into()
+    }
+
+    fn prompt(&self, _: &str) -> Result<String, LLMError> {
+        unimplemented!("anthropic api")
+    }
+}
diff --git a/src/llm/claude.rs b/src/llm/claude.rs
new file mode 100644
index 0000000..5432fc8
--- /dev/null
+++ b/src/llm/claude.rs
@@ -0,0 +1,36 @@
+use std::process::Command;
+
+use crate::{
+    error::LLMError,
+    llm::{Model, ModelFactory},
+};
+
+const CLAUDE_CLI_NAME: &str = "claude";
+
+pub struct Claude {}
+
+impl ModelFactory for Claude {
+    fn init() -> Result<Self, LLMError> {
+        Command::new(CLAUDE_CLI_NAME)
+            .arg("-h")
+            .output()
+            .map_err(|_| LLMError::CLINotFound(CLAUDE_CLI_NAME.into()))?;
+
+        Ok(Self {})
+    }
+}
+
+impl Model for Claude {
+    fn get_name(&self) -> String {
+        "Claude CLI".into()
+    }
+
+    fn prompt(&self, input: &str) -> Result<String, LLMError> {
+        let out = Command::new(CLAUDE_CLI_NAME)
+            .args([input, "-p"])
+            .output()
+            .map_err(|e| LLMError::Prompt(e.to_string()))?;
+
+        Ok(String::from_utf8(out.stdout)?)
+    }
+}
diff --git a/src/llm/cursor.rs b/src/llm/cursor.rs
new file mode 100644
index 0000000..2a9c7fb
--- /dev/null
+++ b/src/llm/cursor.rs
@@ -0,0 +1,36 @@
+use crate::{
+    error::LLMError,
+    llm::{Model, ModelFactory},
+};
+
+use std::process::Command;
+
+const CURSOR_CLI_NAME: &str = "cursor-agent";
+
+pub struct CursorCLI {}
+
+impl ModelFactory for CursorCLI {
+    fn init() -> Result<Self, LLMError> {
+        Command::new(CURSOR_CLI_NAME)
+            .arg("-h")
+            .output()
+            .map_err(|_| LLMError::CLINotFound(CURSOR_CLI_NAME.into()))?;
+
+        Ok(Self {})
+    }
+}
+
+impl Model for CursorCLI {
+    fn get_name(&self) -> String {
+        "Cursor CLI".into()
+    }
+
+    fn prompt(&self, input: &str) -> Result<String, LLMError> {
+        let out = Command::new(CURSOR_CLI_NAME)
+            .args([input, "-p"])
+            .output()
+            .map_err(|e| LLMError::Prompt(e.to_string()))?;
+
+        Ok(String::from_utf8(out.stdout)?)
+    }
+}
diff --git a/src/llm/mod.rs b/src/llm/mod.rs
new file mode 100644
index 0000000..0b1879c
--- /dev/null
+++ b/src/llm/mod.rs
@@ -0,0 +1,58 @@
+//! The `llm` module contains the required logic
+//! to interact with a generalized selection of
+//! supported models.
+mod anthropic;
+mod claude;
+mod cursor;
+mod openai;
+
+use crate::{
+    error::LLMError,
+    llm::{anthropic::Anthropic, claude::Claude, cursor::CursorCLI, openai::OpenAI},
+};
+
+/// Defines the expected interface for the initialization
+/// of the different supported models.
+///
+/// Note: It's required to separate this from the actual model
+/// trait, that defines the interface for interaction with the
+/// LLMs. This is because we store the actual `Model` in a boxed
+/// vector.
+///
+/// To allow this, the instances of `Model` have to be `dyn`-compatible,
+/// which requires the trait not to be `Sized` as it is described here:
+/// https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility.
+pub trait ModelFactory: Model + Sized {
+    fn init() -> Result<Self, LLMError>;
+}
+
+/// Defines the required functionality
+/// to interact with a language model.
+pub trait Model {
+    fn get_name(&self) -> String;
+    fn prompt(&self, input: &str) -> Result<String, LLMError>;
+}
+
+/// Returns the available models in the current
+/// system context.
+pub fn get_available_models() -> Result<Vec<Box<dyn Model>>, LLMError> {
+    let mut models: Vec<Box<dyn Model>> = vec![];
+
+    if let Ok(m) = Anthropic::init() {
+        models.push(Box::new(m))
+    }
+
+    if let Ok(m) = Claude::init() {
+        models.push(Box::new(m))
+    }
+
+    if let Ok(m) = CursorCLI::init() {
+        models.push(Box::new(m))
+    }
+
+    if let Ok(m) = OpenAI::init() {
+        models.push(Box::new(m))
+    }
+
+    Ok(models)
+}
diff --git a/src/llm/openai.rs b/src/llm/openai.rs
new file mode 100644
index 0000000..383a0b3
--- /dev/null
+++ b/src/llm/openai.rs
@@ -0,0 +1,28 @@
+use std::env::var;
+
+use crate::{
+    error::LLMError,
+    llm::{Model, ModelFactory},
+};
+
+pub struct OpenAI {
+    #[allow(dead_code)] // not read yet; `prompt` is still unimplemented
+    api_key: String,
+}
+
+impl ModelFactory for OpenAI {
+    fn init() -> Result<Self, LLMError> {
+        let api_key = var("OPENAI_API_KEY")?;
+        Ok(Self { api_key })
+    }
+}
+
+impl Model for OpenAI {
+    fn get_name(&self) -> String {
+        "OpenAI API".into()
+    }
+
+    fn prompt(&self, _: &str) -> Result<String, LLMError> {
+        unimplemented!("open ai api")
+    }
+}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..48f7514
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,12 @@
+use parrot::llm::get_available_models;
+
+fn main() {
+    let available_models = get_available_models().expect("failed to get models");
+    available_models.iter().for_each(|m| {
+        let out = m
+            .prompt("say hello to my friends")
+            .expect("failed to prompt");
+
+        println!("{} - {}", m.get_name(), out);
+    })
+}