From fd27d7b3884fd924a2e6b79f97b35b5c4481639e Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:32:04 -0800 Subject: [PATCH 01/10] initial scaffold commands --- auto_dev/commands/scaffold.py | 24 +++ auto_dev/mech/__init__.py | 1 + auto_dev/mech/create_mech_tool.py | 320 ++++++++++++++++++++++++++++++ 3 files changed, 345 insertions(+) create mode 100644 auto_dev/mech/__init__.py create mode 100644 auto_dev/mech/create_mech_tool.py diff --git a/auto_dev/commands/scaffold.py b/auto_dev/commands/scaffold.py index 42edaa38..02c69548 100644 --- a/auto_dev/commands/scaffold.py +++ b/auto_dev/commands/scaffold.py @@ -728,6 +728,30 @@ def dao(ctx, auto_confirm) -> None: logger.exception(f"Failed to scaffold DAO: {e!s}") msg = "Error during DAO scaffolding and test generation" raise ValueError(msg) from e + +@scaffold.command() +@click.option("--type", type=click.Choice(["mech", "other_types"]), required=True, help="Specify the type of scaffold to create") +@click.argument("api_file", type=str, required=False) +@click.argument("tool_name", type=str, required=False) +@click.argument("author_name", type=str, required=False) +@click.argument("gpt_key", type=str, required=False) +@click.pass_context +def custom(ctx, type, api_file, tool_name, author_name, gpt_key): + """Scaffold a custom tool, such as a Mech tool.""" + + if type == "mech": + if not api_file or not tool_name or not author_name or not gpt_key: + raise click.ClickException("For --type mech, you must provide api_file, tool_name, author_name, and gpt_key.") + + from mech.create_mech_tool import main as create_mech_tool + + # Run the Mech tool creator script + create_mech_tool(api_file=api_file, tool_name=tool_name, author_name=author_name, gpt_key=gpt_key) + click.echo(f"Mech tool '{tool_name}' successfully scaffolded!") + + else: + click.echo(f"Scaffolding for type '{type}' is not implemented yet.") + if __name__ == "__main__": diff --git a/auto_dev/mech/__init__.py b/auto_dev/mech/__init__.py new file mode 100644 index 00000000..2d8a55be --- /dev/null +++ b/auto_dev/mech/__init__.py @@ -0,0 +1 @@ +"""Initialisation.""" diff --git a/auto_dev/mech/create_mech_tool.py b/auto_dev/mech/create_mech_tool.py new file mode 100644 index 00000000..92da21e4 --- /dev/null +++ b/auto_dev/mech/create_mech_tool.py @@ -0,0 +1,320 @@ +import os +import sys +import re +import argparse +from openai import OpenAI + +def create_directory_structure(base_path, author_name): + # Create the directory structure for the tool + tool_path = os.path.join(base_path, 'packages', author_name) + if os.path.exists(tool_path): + print(f"Directory for author '{author_name}' already exists. Skipping creation.") + return tool_path + os.makedirs(tool_path, exist_ok=True) + + return tool_path + +def generate_init_file(tool_path): + """ + Generates an __init__.py file with predefined content in the specified tool path + if it does not already exist. + Args: + tool_path (str): The path where the __init__.py file will be created. + Returns: + None + """ + init_file_path = os.path.join(tool_path, '__init__.py') + if os.path.exists(init_file_path): + print(f"__init__.py already exists at {init_file_path}. 
Skipping creation.") + return + + init_content = '''#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2024 Valory AG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +''' + with open(init_file_path, 'w') as f: + f.write(init_content) + print(f"__init__.py created at {init_file_path}") + + +def create_customs_folder(tool_path): + """ + Creates a 'customs' folder within the given tool directory. + Args: + tool_path (str): The path to the tool directory. + Returns: + str: The path to the created 'customs' folder. + """ + customs_path = os.path.join(tool_path, 'customs') + if not os.path.exists(customs_path): + os.makedirs(customs_path, exist_ok=True) + print(f"'customs' folder created at {customs_path}") + else: + print(f"'customs' folder already exists at {customs_path}") + return customs_path + +def create_tool_folder(customs_path, tool_name): + """ + Creates a folder inside the customs folder with the name of the tool. + Args: + customs_path (str): The path to the customs folder. + tool_name (str): The name of the tool. + Returns: + str: The path to the created tool folder. + """ + tool_folder_path = os.path.join(customs_path, tool_name) + if not os.path.exists(tool_folder_path): + os.makedirs(tool_folder_path, exist_ok=True) + print(f"Tool folder '{tool_name}' created at {tool_folder_path}") + else: + print(f"Tool folder '{tool_name}' already exists at {tool_folder_path}") + return tool_folder_path + +def create_component_yaml(tools_folder_path, tool_name, author_name): + yaml_path = os.path.join(tools_folder_path, 'component.yaml') + if os.path.exists(yaml_path): + print(f"component.yaml already exists at {yaml_path}. Skipping creation.") + return + + component_yaml_content = f'''name: {tool_name} +author: {author_name} +version: 0.1.0 +type: custom +description: Custom tool created using the CLI +license: Apache-2.0 +aea_version: '>=1.0.0, <2.0.0' +fingerprint: + __init__.py: bafybeidlhllgpf65xwk357wukpguuaz6hxhkyh7dwplv2xkxlrlk4b7zty + {tool_name}.py: bafybeicytmdkgdehao6obnqoff6fpugr6gpbjw4ztxcsswn5ne76vhboqi +fingerprint_ignore_patterns: [] +entry_point: {tool_name}.py +callable: run +dependencies: {{}} +''' + + with open(os.path.join(tools_folder_path, 'component.yaml'), 'w') as f: + f.write(component_yaml_content) + +def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_key): + """ + Generates and writes the content for .py using GPT. + Args: + tool_path (str): The path where the tool files are stored. + tool_name (str): The name of the tool. + api_file (str): The path to the file containing API logic. + gpt_api_key (str): The API key for OpenAI GPT. + Returns: + None + """ + tool_py_path = os.path.join(tool_folder_path, f"{tool_name}.py") + if os.path.exists(tool_py_path): + user_input = input(f"The file {tool_py_path} already exists. Do you want to override it? 
(yes/no): ").strip().lower() + if user_input != "yes": + print(f"Skipping file generation for {tool_py_path}") + return False + client = OpenAI(api_key=gpt_api_key) + try: + # Read the content of the API logic file + with open(api_file, 'r') as f: + api_logic_content = f.read() + except Exception as e: + print(f"Error reading the API file: {e}") + sys.exit(1) + + # Define the prompt for GPT to adjust the API logic + prompt = f""" + # -*- coding: utf-8 -*- + # ------------------------------------------------------------------------------ + # + # Copyright 2023-2024 Valory AG + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + # ------------------------------------------------------------------------------ + \"\"\"Contains the job definitions\"\"\" + import requests + from typing import Any, Dict, Optional, Tuple + DEFAULT_PERPLEXITY_SETTINGS = {{ + "max_": 1, + "stop_sequences": None, + "max_output_tokens": 500, + "temperature": 0.7, + }} + PREFIX = "llama-" + ENGINES = {{ + "chat": ["3.1-sonar-small-128k-online", "3.1-sonar-large-128k-online", "3.1-sonar-huge-128k-online"], + }} + ALLOWED_TOOLS = [PREFIX + value for value in ENGINES["chat"]] + url = "https://api.perplexity.ai/chat/completions" + # def count_tokens(text: str) -> int: + # \"\"\"Count the number of tokens in a text using the Gemini model's tokenizer.\"\"\" + # return genai.count_message_tokens(prompt=text) + def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: + \"\"\"Run the task\"\"\" + api_key = kwargs["api_keys"]["perplexity"] + tool = kwargs["tool"] + prompt = kwargs["prompt"] + if tool not in ALLOWED_TOOLS: + return ( + f"Model {{tool}} is not in the list of supported models.", + None, + None, + None, + ) + max_tokens = kwargs.get("candidate_count") + stop_sequences = kwargs.get( + "stop_sequences", DEFAULT_GEMINI_SETTINGS["stop_sequences"] + ) + max_output_tokens = kwargs.get( + "max_output_tokens", DEFAULT_GEMINI_SETTINGS["max_output_tokens"] + ) + temperature = kwargs.get("temperature", DEFAULT_GEMINI_SETTINGS["temperature"]) + counter_callback = kwargs.get("counter_callback", None) + genai.configure(api_key=api_key) + engine = genai.GenerativeModel(tool) + try: + response = engine.generate_content( + prompt, + generation_config=genai.types.GenerationConfig( + candidate_count=candidate_count, + stop_sequences=stop_sequences, + max_output_tokens=max_output_tokens, + temperature=temperature, + ), + ) + # Ensure response has a .text attribute + response_text = getattr(response, "text", None) + except Exception as e: + return f"An error occurred: {{str(e)}}", None, None, None + return response.text, prompt, None, counter_callback + ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. + Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. + Also give me a commented out main function to run this code at the bottom of the file for testing. 
+ ..... + {api_logic_content} + """ + + + # Call GPT to generate the content + try: + response = client.chat.completions.create( + model="gpt-4o", + messages=[ + {"role": "user", "content": prompt} + ] + ) + + # Extract GPT's response + gpt_response = response.choices[0].message.content.strip() + except Exception as e: + print(f"Error calling GPT: {e}") + sys.exit(1) + + # Write the generated content into the .py file + tool_py_path = os.path.join(tool_folder_path, f"{tool_name}.py") + try: + with open(tool_py_path, 'w') as f: + f.write(gpt_response) + print(f"Generated content written to {tool_py_path}") + return True + except Exception as e: + print(f"Error writing to {tool_py_path}: {e}") + sys.exit(1) + +def append_comments_to_tool_file(tool_file_path, comments): + """ + Appends comments to the bottom of the specified tool file. + Args: + tool_file_path (str): The path to the tool file. + comments (str): The comments to append. + Returns: + None + """ + try: + with open(tool_file_path, 'a') as f: + f.write("\n\n# " + "\n# ".join(comments.splitlines())) + print(f"Comments successfully appended to {tool_file_path}") + except Exception as e: + print(f"Error appending comments to {tool_file_path}: {e}") + +def main(): + #TODO: Input your GPT KEY HERE + parser = argparse.ArgumentParser(description="CLI tool to create a custom Mech tool") + parser.add_argument("gpt_key", help="Your OpenAi API Key") + parser.add_argument("api_file", help="Python file implementing the API logic") + parser.add_argument("tool_name", help="The name for the new tool") + parser.add_argument("author_name", help="The name of the author") + + + comments= """ + 1. The main() function should only be used for testing purposes. Do NOT push this. + 2. Once main() works as expected run 'autonomy packages lock && autonomy push-all' + 3. Add to API_KEY list in .example.env and adhere to the current structure. Only do this if the API_KEY doesn't already exist for your key. + 4. Next, add all new models to FILE_HASH_TO_TOOLS and use the new hash from packages/packages.json for your tool. + Check this PR for reference. 
https://github.com/valory-xyz/mech/pull/228/files
+    """
+
+    args = parser.parse_args()
+    GPT_KEY = args.gpt_key
+    base_path = os.path.abspath(os.path.join(os.getcwd(), *[".."] * 4))
+
+    print("The base path is")
+    print(base_path)
+
+    # Create the tool's directory structure and necessary files
+    tool_base_path = create_directory_structure(base_path, args.author_name)
+    # Create the init file within the author's folder
+    generate_init_file(tool_base_path)
+
+
+
+    # Create the customs folder
+    customs_path = create_customs_folder(tool_base_path)
+
+    # Create the tool folder
+    tools_folder_path = create_tool_folder(customs_path, args.tool_name)
+
+    # Create the init file within the tool_name folder
+    generate_init_file(tools_folder_path)
+
+    # Create the component.yaml file
+    create_component_yaml(tools_folder_path, args.tool_name, args.author_name)
+
+    # Create the `.py` file
+    file_generated = generate_and_write_tool_file(tools_folder_path, args.tool_name, args.api_file, GPT_KEY)
+
+    # Append instructions to tool_name.py file only if the file was generated
+    if file_generated:
+        tool_py_path = os.path.join(tool_base_path, 'customs', args.tool_name, f"{args.tool_name}.py")
+        append_comments_to_tool_file(tool_py_path, comments)
+
+    print(f"Custom tool '{args.tool_name}' has been created successfully!")
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file

From eda7d12e22e4256dc0139360c58bce664e6be1ae Mon Sep 17 00:00:00 2001
From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com>
Date: Wed, 5 Feb 2025 10:39:01 -0800
Subject: [PATCH 02/10] fix comments

---
 auto_dev/commands/scaffold.py | 55 ++--
 auto_dev/mech/create_mech_tool.py | 320 --------------------
 auto_dev/{ => services}/mech/__init__.py | 0
 auto_dev/services/mech/constants/prompts.py | 128 ++++++++
 auto_dev/services/mech/create_mech_tool.py | 188 ++++++++++++
 5 files changed, 347 insertions(+), 344 deletions(-)
 delete mode 100644 auto_dev/mech/create_mech_tool.py
 rename auto_dev/{ => services}/mech/__init__.py (100%)
 create mode 100644 auto_dev/services/mech/constants/prompts.py
 create mode 100644 auto_dev/services/mech/create_mech_tool.py

diff --git a/auto_dev/commands/scaffold.py b/auto_dev/commands/scaffold.py
index 02c69548..4c48858e 100644
--- a/auto_dev/commands/scaffold.py
+++ b/auto_dev/commands/scaffold.py
@@ -8,6 +8,7 @@
 """
+import enum
 from pathlib import Path
 import yaml
@@ -33,6 +34,9 @@
 from auto_dev.contracts.block_explorer import BlockExplorer
 from auto_dev.contracts.contract_scafolder import ContractScaffolder
+class ScaffoldType(enum.Enum):
+    MECH = "mech"
+    OTHER_TYPES = "other_types"
 cli = build_cli()
@@ -100,6 +104,33 @@ def _process_from_file(ctx, yaml_dict, network, read_functions, write_functions,
     )
+@scaffold.command()
+@click.option("--type", type=click.Choice([e.value for e in ScaffoldType]), required=True, help="Specify the type of scaffold to create")
+@click.argument("gpt_key", type=str)
+@click.argument("api_file", type=str)
+@click.argument("tool_name", type=str)
+@click.argument("author_name", type=str)
+@click.pass_context
+def custom(ctx, type, api_file, tool_name, author_name, gpt_key):
+    """Scaffold a custom tool, such as a Mech tool."""
+
+    if type == "mech":
+        if not api_file or not tool_name or not author_name or not gpt_key:
+            raise click.ClickException("For --type mech, you must provide api_file, tool_name, author_name, and gpt_key.")
+
+        from services.mech.create_mech_tool import main as create_mech_tool
+
+        click.echo(f"Creating Mech tool '{tool_name}' by
{author_name}...") + + # Run the Mech tool creator script + create_mech_tool(api_file=api_file, tool_name=tool_name, author_name=author_name, gpt_key=gpt_key) + + click.echo(f"Mech tool '{tool_name}' successfully scaffolded!") + + else: + click.echo(f"Scaffolding for type '{type}' is not implemented yet.") + + @scaffold.command() @click.argument("public_id", type=PublicId.from_str, default=None, required=False) @click.option("--address", default=DEFAULT_NULL_ADDRESS, required=False, help="The address of the contract.") @@ -728,30 +759,6 @@ def dao(ctx, auto_confirm) -> None: logger.exception(f"Failed to scaffold DAO: {e!s}") msg = "Error during DAO scaffolding and test generation" raise ValueError(msg) from e - -@scaffold.command() -@click.option("--type", type=click.Choice(["mech", "other_types"]), required=True, help="Specify the type of scaffold to create") -@click.argument("api_file", type=str, required=False) -@click.argument("tool_name", type=str, required=False) -@click.argument("author_name", type=str, required=False) -@click.argument("gpt_key", type=str, required=False) -@click.pass_context -def custom(ctx, type, api_file, tool_name, author_name, gpt_key): - """Scaffold a custom tool, such as a Mech tool.""" - - if type == "mech": - if not api_file or not tool_name or not author_name or not gpt_key: - raise click.ClickException("For --type mech, you must provide api_file, tool_name, author_name, and gpt_key.") - - from mech.create_mech_tool import main as create_mech_tool - - # Run the Mech tool creator script - create_mech_tool(api_file=api_file, tool_name=tool_name, author_name=author_name, gpt_key=gpt_key) - click.echo(f"Mech tool '{tool_name}' successfully scaffolded!") - - else: - click.echo(f"Scaffolding for type '{type}' is not implemented yet.") - if __name__ == "__main__": diff --git a/auto_dev/mech/create_mech_tool.py b/auto_dev/mech/create_mech_tool.py deleted file mode 100644 index 92da21e4..00000000 --- a/auto_dev/mech/create_mech_tool.py +++ /dev/null @@ -1,320 +0,0 @@ -import os -import sys -import re -import argparse -from openai import OpenAI - -def create_directory_structure(base_path, author_name): - # Create the directory structure for the tool - tool_path = os.path.join(base_path, 'packages', author_name) - if os.path.exists(tool_path): - print(f"Directory for author '{author_name}' already exists. Skipping creation.") - return tool_path - os.makedirs(tool_path, exist_ok=True) - - return tool_path - -def generate_init_file(tool_path): - """ - Generates an __init__.py file with predefined content in the specified tool path - if it does not already exist. - Args: - tool_path (str): The path where the __init__.py file will be created. - Returns: - None - """ - init_file_path = os.path.join(tool_path, '__init__.py') - if os.path.exists(init_file_path): - print(f"__init__.py already exists at {init_file_path}. Skipping creation.") - return - - init_content = '''#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# ------------------------------------------------------------------------------ -# -# Copyright 2024 Valory AG -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# ------------------------------------------------------------------------------ -''' - with open(init_file_path, 'w') as f: - f.write(init_content) - print(f"__init__.py created at {init_file_path}") - - -def create_customs_folder(tool_path): - """ - Creates a 'customs' folder within the given tool directory. - Args: - tool_path (str): The path to the tool directory. - Returns: - str: The path to the created 'customs' folder. - """ - customs_path = os.path.join(tool_path, 'customs') - if not os.path.exists(customs_path): - os.makedirs(customs_path, exist_ok=True) - print(f"'customs' folder created at {customs_path}") - else: - print(f"'customs' folder already exists at {customs_path}") - return customs_path - -def create_tool_folder(customs_path, tool_name): - """ - Creates a folder inside the customs folder with the name of the tool. - Args: - customs_path (str): The path to the customs folder. - tool_name (str): The name of the tool. - Returns: - str: The path to the created tool folder. - """ - tool_folder_path = os.path.join(customs_path, tool_name) - if not os.path.exists(tool_folder_path): - os.makedirs(tool_folder_path, exist_ok=True) - print(f"Tool folder '{tool_name}' created at {tool_folder_path}") - else: - print(f"Tool folder '{tool_name}' already exists at {tool_folder_path}") - return tool_folder_path - -def create_component_yaml(tools_folder_path, tool_name, author_name): - yaml_path = os.path.join(tools_folder_path, 'component.yaml') - if os.path.exists(yaml_path): - print(f"component.yaml already exists at {yaml_path}. Skipping creation.") - return - - component_yaml_content = f'''name: {tool_name} -author: {author_name} -version: 0.1.0 -type: custom -description: Custom tool created using the CLI -license: Apache-2.0 -aea_version: '>=1.0.0, <2.0.0' -fingerprint: - __init__.py: bafybeidlhllgpf65xwk357wukpguuaz6hxhkyh7dwplv2xkxlrlk4b7zty - {tool_name}.py: bafybeicytmdkgdehao6obnqoff6fpugr6gpbjw4ztxcsswn5ne76vhboqi -fingerprint_ignore_patterns: [] -entry_point: {tool_name}.py -callable: run -dependencies: {{}} -''' - - with open(os.path.join(tools_folder_path, 'component.yaml'), 'w') as f: - f.write(component_yaml_content) - -def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_key): - """ - Generates and writes the content for .py using GPT. - Args: - tool_path (str): The path where the tool files are stored. - tool_name (str): The name of the tool. - api_file (str): The path to the file containing API logic. - gpt_api_key (str): The API key for OpenAI GPT. - Returns: - None - """ - tool_py_path = os.path.join(tool_folder_path, f"{tool_name}.py") - if os.path.exists(tool_py_path): - user_input = input(f"The file {tool_py_path} already exists. Do you want to override it? 
(yes/no): ").strip().lower() - if user_input != "yes": - print(f"Skipping file generation for {tool_py_path}") - return False - client = OpenAI(api_key=gpt_api_key) - try: - # Read the content of the API logic file - with open(api_file, 'r') as f: - api_logic_content = f.read() - except Exception as e: - print(f"Error reading the API file: {e}") - sys.exit(1) - - # Define the prompt for GPT to adjust the API logic - prompt = f""" - # -*- coding: utf-8 -*- - # ------------------------------------------------------------------------------ - # - # Copyright 2023-2024 Valory AG - # - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # - # ------------------------------------------------------------------------------ - \"\"\"Contains the job definitions\"\"\" - import requests - from typing import Any, Dict, Optional, Tuple - DEFAULT_PERPLEXITY_SETTINGS = {{ - "max_": 1, - "stop_sequences": None, - "max_output_tokens": 500, - "temperature": 0.7, - }} - PREFIX = "llama-" - ENGINES = {{ - "chat": ["3.1-sonar-small-128k-online", "3.1-sonar-large-128k-online", "3.1-sonar-huge-128k-online"], - }} - ALLOWED_TOOLS = [PREFIX + value for value in ENGINES["chat"]] - url = "https://api.perplexity.ai/chat/completions" - # def count_tokens(text: str) -> int: - # \"\"\"Count the number of tokens in a text using the Gemini model's tokenizer.\"\"\" - # return genai.count_message_tokens(prompt=text) - def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: - \"\"\"Run the task\"\"\" - api_key = kwargs["api_keys"]["perplexity"] - tool = kwargs["tool"] - prompt = kwargs["prompt"] - if tool not in ALLOWED_TOOLS: - return ( - f"Model {{tool}} is not in the list of supported models.", - None, - None, - None, - ) - max_tokens = kwargs.get("candidate_count") - stop_sequences = kwargs.get( - "stop_sequences", DEFAULT_GEMINI_SETTINGS["stop_sequences"] - ) - max_output_tokens = kwargs.get( - "max_output_tokens", DEFAULT_GEMINI_SETTINGS["max_output_tokens"] - ) - temperature = kwargs.get("temperature", DEFAULT_GEMINI_SETTINGS["temperature"]) - counter_callback = kwargs.get("counter_callback", None) - genai.configure(api_key=api_key) - engine = genai.GenerativeModel(tool) - try: - response = engine.generate_content( - prompt, - generation_config=genai.types.GenerationConfig( - candidate_count=candidate_count, - stop_sequences=stop_sequences, - max_output_tokens=max_output_tokens, - temperature=temperature, - ), - ) - # Ensure response has a .text attribute - response_text = getattr(response, "text", None) - except Exception as e: - return f"An error occurred: {{str(e)}}", None, None, None - return response.text, prompt, None, counter_callback - ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. - Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. - Also give me a commented out main function to run this code at the bottom of the file for testing. 
- ..... - {api_logic_content} - """ - - - # Call GPT to generate the content - try: - response = client.chat.completions.create( - model="gpt-4o", - messages=[ - {"role": "user", "content": prompt} - ] - ) - - # Extract GPT's response - gpt_response = response.choices[0].message.content.strip() - except Exception as e: - print(f"Error calling GPT: {e}") - sys.exit(1) - - # Write the generated content into the .py file - tool_py_path = os.path.join(tool_folder_path, f"{tool_name}.py") - try: - with open(tool_py_path, 'w') as f: - f.write(gpt_response) - print(f"Generated content written to {tool_py_path}") - return True - except Exception as e: - print(f"Error writing to {tool_py_path}: {e}") - sys.exit(1) - -def append_comments_to_tool_file(tool_file_path, comments): - """ - Appends comments to the bottom of the specified tool file. - Args: - tool_file_path (str): The path to the tool file. - comments (str): The comments to append. - Returns: - None - """ - try: - with open(tool_file_path, 'a') as f: - f.write("\n\n# " + "\n# ".join(comments.splitlines())) - print(f"Comments successfully appended to {tool_file_path}") - except Exception as e: - print(f"Error appending comments to {tool_file_path}: {e}") - -def main(): - #TODO: Input your GPT KEY HERE - parser = argparse.ArgumentParser(description="CLI tool to create a custom Mech tool") - parser.add_argument("gpt_key", help="Your OpenAi API Key") - parser.add_argument("api_file", help="Python file implementing the API logic") - parser.add_argument("tool_name", help="The name for the new tool") - parser.add_argument("author_name", help="The name of the author") - - - comments= """ - 1. The main() function should only be used for testing purposes. Do NOT push this. - 2. Once main() works as expected run 'autonomy packages lock && autonomy push-all' - 3. Add to API_KEY list in .example.env and adhere to the current structure. Only do this if the API_KEY doesn't already exist for your key. - 4. Next, add all new models to FILE_HASH_TO_TOOLS and use the new hash from packages/packages.json for your tool. - Check this PR for reference. 
https://github.com/valory-xyz/mech/pull/228/files - """ - - args = parser.parse_args() - GPT_KEY = args.gpt_key - base_path = os.path.abspath(os.path.join(os.getcwd(), *[".."] * 4)) - - print("The base path is") - print(base_path) - - # Create the tool's directory structure and necessary files - tool_base_path = create_directory_structure(base_path, args.author_name) - # Create the init file within the author's folder - generate_init_file(tool_base_path) - - - - # Create the customs folder - customs_path = create_customs_folder(tool_base_path) - - # Create the tool folder - tools_folder_path = create_tool_folder(customs_path, args.tool_name) - - # Create the init file within the tool_name folder - generate_init_file(tools_folder_path) - - # Create the component.yaml file - create_component_yaml(tools_folder_path, args.tool_name, args.author_name) - - # Create the `.py` file - file_generated = generate_and_write_tool_file(tools_folder_path, args.tool_name, args.api_file, GPT_KEY) - - # Append instructions to tool_name.py file only if the file was generated - if file_generated: - tool_py_path = os.path.join(tool_base_path, 'customs', args.tool_name, f"{args.tool_name}.py") - append_comments_to_tool_file(tool_py_path, comments) - - print(f"Custom tool '{args.tool_name}' has been created successfully!") - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/auto_dev/mech/__init__.py b/auto_dev/services/mech/__init__.py similarity index 100% rename from auto_dev/mech/__init__.py rename to auto_dev/services/mech/__init__.py diff --git a/auto_dev/services/mech/constants/prompts.py b/auto_dev/services/mech/constants/prompts.py new file mode 100644 index 00000000..9b482ab0 --- /dev/null +++ b/auto_dev/services/mech/constants/prompts.py @@ -0,0 +1,128 @@ +GENERATE_MECH_TOOL = f""" +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2023-2024 Valory AG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# ------------------------------------------------------------------------------ +\"\"\"Contains the job definitions\"\"\" +import requests +from typing import Any, Dict, Optional, Tuple +DEFAULT_PERPLEXITY_SETTINGS = {{ + "max_": 1, + "stop_sequences": None, + "max_output_tokens": 500, + "temperature": 0.7, +}} +PREFIX = "llama-" +ENGINES = {{ + "chat": ["3.1-sonar-small-128k-online", "3.1-sonar-large-128k-online", "3.1-sonar-huge-128k-online"], +}} +ALLOWED_TOOLS = [PREFIX + value for value in ENGINES["chat"]] +url = "https://api.perplexity.ai/chat/completions" +# def count_tokens(text: str) -> int: +# \"\"\"Count the number of tokens in a text using the Gemini model's tokenizer.\"\"\" +# return genai.count_message_tokens(prompt=text) +def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: + \"\"\"Run the task\"\"\" + api_key = kwargs["api_keys"]["perplexity"] + tool = kwargs["tool"] + prompt = kwargs["prompt"] + if tool not in ALLOWED_TOOLS: + return ( + f"Model {{tool}} is not in the list of supported models.", + None, + None, + None, + ) + max_tokens = kwargs.get("candidate_count") + stop_sequences = kwargs.get( + "stop_sequences", DEFAULT_GEMINI_SETTINGS["stop_sequences"] + ) + max_output_tokens = kwargs.get( + "max_output_tokens", DEFAULT_GEMINI_SETTINGS["max_output_tokens"] + ) + temperature = kwargs.get("temperature", DEFAULT_GEMINI_SETTINGS["temperature"]) + counter_callback = kwargs.get("counter_callback", None) + genai.configure(api_key=api_key) + engine = genai.GenerativeModel(tool) + try: + response = engine.generate_content( + prompt, + generation_config=genai.types.GenerationConfig( + candidate_count=candidate_count, + stop_sequences=stop_sequences, + max_output_tokens=max_output_tokens, + temperature=temperature, + ), + ) + # Ensure response has a .text attribute + response_text = getattr(response, "text", None) + except Exception as e: + return f"An error occurred: {{str(e)}}", None, None, None + return response.text, prompt, None, counter_callback + ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. + Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. + Also give me a commented out main function to run this code at the bottom of the file for testing. + ..... + {api_logic_content} +""" + + +COMPONENT_YAML_CONTENT = f'''name: {tool_name} +author: {author_name} +version: 0.1.0 +type: custom +description: Custom tool created using the CLI +license: Apache-2.0 +aea_version: '>=1.0.0, <2.0.0' +fingerprint: +__init__.py: bafybeidlhllgpf65xwk357wukpguuaz6hxhkyh7dwplv2xkxlrlk4b7zty +{tool_name}.py: bafybeicytmdkgdehao6obnqoff6fpugr6gpbjw4ztxcsswn5ne76vhboqi +fingerprint_ignore_patterns: [] +entry_point: {tool_name}.py +callable: run +dependencies: {{}} +''' + + +COMMENTS = """ +1. The main() function should only be used for testing purposes. Do NOT push this. +2. Once main() works as expected run 'autonomy packages lock && autonomy push-all' +3. Add to API_KEY list in .example.env and adhere to the current structure. Only do this if the API_KEY doesn't already exist for your key. +4. Next, add all new models to FILE_HASH_TO_TOOLS and use the new hash from packages/packages.json for your tool. +Check this PR for reference. 
https://github.com/valory-xyz/mech/pull/228/files +""" + +INIT_CONTENT = '''#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2024 Valory AG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +''' \ No newline at end of file diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py new file mode 100644 index 00000000..fa23e877 --- /dev/null +++ b/auto_dev/services/mech/create_mech_tool.py @@ -0,0 +1,188 @@ +import os +from pathlib import Path +import sys +import re +import argparse +from openai import OpenAI + +from auto_dev import cli +from auto_dev.services.mech.constants.prompts import COMMENTS, COMPONENT_YAML_CONTENT, GENERATE_MECH_TOOL, INIT_CONTENT +from auto_dev.utils import get_logger, write_to_file + +logger = get_logger() + +def create_directory_structure(base_path, author_name): + # Create the directory structure for the tool + tool_path = os.path.join(base_path, 'packages', author_name) + if os.path.exists(tool_path): + print(f"Directory for author '{author_name}' already exists. Skipping creation.") + return tool_path + os.makedirs(tool_path, exist_ok=True) + + return tool_path + +def generate_init_file(tool_path): + """ + Generates an __init__.py file with predefined content in the specified tool path + if it does not already exist. + Args: + tool_path (str): The path where the __init__.py file will be created. + Returns: + None + """ + init_file_path = os.path.join(tool_path, '__init__.py') + if os.path.exists(init_file_path): + logger.info(f"__init__.py already exists at {init_file_path}. Skipping creation.") + return + + write_to_file(init_file_path, INIT_CONTENT) + logger.info(f"__init__.py created at {init_file_path}") + + +def create_customs_folder(tool_path): + """ + Creates a 'customs' folder within the given tool directory. + Args: + tool_path (str): The path to the tool directory. + Returns: + str: The path to the created 'customs' folder. + """ + customs_path = os.path.join(tool_path, 'customs') + if not os.path.exists(customs_path): + os.makedirs(customs_path, exist_ok=True) + logger.info(f"'customs' folder created at {customs_path}") + else: + logger.info(f"'customs' folder already exists at {customs_path}") + return customs_path + +def create_tool_folder(customs_path, tool_name): + """ + Creates a folder inside the customs folder with the name of the tool. + Args: + customs_path (str): The path to the customs folder. + tool_name (str): The name of the tool. + Returns: + str: The path to the created tool folder. 
+ """ + tool_folder_path = os.path.join(customs_path, tool_name) + if not os.path.exists(tool_folder_path): + os.makedirs(tool_folder_path, exist_ok=True) + logger.info(f"Tool folder '{tool_name}' created at {tool_folder_path}") + else: + logger.info(f"Tool folder '{tool_name}' already exists at {tool_folder_path}") + return tool_folder_path + +def create_component_yaml(tools_folder_path, tool_name, author_name): + yaml_path = os.path.join(tools_folder_path, 'component.yaml') + if os.path.exists(yaml_path): + logger.info(f"component.yaml already exists at {yaml_path}. Skipping creation.") + return + + write_to_file(os.path.join(tools_folder_path, 'component.yaml'), COMPONENT_YAML_CONTENT) + + +def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_key): + """ + Generates and writes the content for .py using GPT. + Args: + tool_path (str): The path where the tool files are stored. + tool_name (str): The name of the tool. + api_file (str): The path to the file containing API logic. + gpt_api_key (str): The API key for OpenAI GPT. + Returns: + None + """ + tool_py_path = Path(tool_folder_path) / f"{tool_name}.py" + if os.path.exists(tool_py_path): + user_input = input(f"The file {tool_py_path} already exists. Do you want to override it? (yes/no): ").strip().lower() + if user_input != "yes": + logger.info(f"Skipping file generation for {tool_py_path}") + return False + client = OpenAI(api_key=gpt_api_key) + try: + # Read the content of the API logic file + with open(api_file, 'r') as f: + api_logic_content = f.read() + except Exception as e: + logger.error(f"Error reading the API file: {e}") + sys.exit(1) + # Call GPT to generate the content + + try: + response = client.chat.completions.create( + model="gpt-4o", + messages=[ + {"role": "user", "content": GENERATE_MECH_TOOL} + ] + ) + + # Extract GPT's response + gpt_response = response.choices[0].message.content.strip() + except Exception as e: + logger.error(f"Error calling GPT: {e}") + sys.exit(1) + + # Write the generated content into the .py file + tool_py_path = Path(tool_folder_path) / f"{tool_name}.py" + try: + with open(tool_py_path, 'w') as f: + f.write(gpt_response) + logger.info(f"Generated content written to {tool_py_path}") + return True + except Exception as e: + logger.error(f"Error writing to {tool_py_path}: {e}") + sys.exit(1) + +def append_comments_to_tool_file(tool_file_path, comments): + """ + Appends comments to the bottom of the specified tool file. + Args: + tool_file_path (str): The path to the tool file. + comments (str): The comments to append. 
+ Returns: + None + """ + try: + write_to_file(tool_file_path, "\n\n# " + "\n# ".join(comments.splitlines()), mode="a") + logger.info(f"Comments successfully appended to {tool_file_path}") + except Exception as e: + logger.error(f"Error appending comments to {tool_file_path}: {e}") + +@cli.group() +def main(api_file, tool_name, author_name, gpt_key): + + GPT_KEY = gpt_key + base_path = Path.cwd().parents[3] # Equivalent to going up 4 directories + + logger.info("The base path is" + base_path) + # Create the tool's directory structure and necessary files + tool_base_path = create_directory_structure(base_path, author_name) + # Create the init file within the author's folder + generate_init_file(tool_base_path) + + + + # Create the customs folder + customs_path = create_customs_folder(tool_base_path) + + # Create the tool folder + tools_folder_path = create_tool_folder(customs_path, tool_name) + + # Create the init file within the tool_name folder + generate_init_file(tools_folder_path) + + # Create the component.yaml file + create_component_yaml(tools_folder_path, tool_name, author_name) + + # Create the `.py` file + file_generated = generate_and_write_tool_file(tools_folder_path, tool_name, api_file, GPT_KEY) + + # Append instructions to tool_name.py file only if the file was generated + if file_generated: + tool_py_path = os.path.join(tool_base_path, 'customs', tool_name, f"{tool_name}.py") + append_comments_to_tool_file(tool_py_path, COMMENTS) + + logger.info(f"Custom tool '{tool_name}' has been created successfully!") + +if __name__ == "__main__": + main() \ No newline at end of file From ef4a98cec2eb383c7f1e9b76623bca1441e25abe Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Wed, 5 Feb 2025 11:02:31 -0800 Subject: [PATCH 03/10] add jinja templates --- auto_dev/services/mech/constants/prompts.py | 100 ------------------ auto_dev/services/mech/create_mech_tool.py | 32 +++++- .../services/mech/templates/component.yaml.j2 | 14 +++ .../mech/templates/generate_mech_tool.py.j2 | 79 ++++++++++++++ 4 files changed, 120 insertions(+), 105 deletions(-) create mode 100644 auto_dev/services/mech/templates/component.yaml.j2 create mode 100644 auto_dev/services/mech/templates/generate_mech_tool.py.j2 diff --git a/auto_dev/services/mech/constants/prompts.py b/auto_dev/services/mech/constants/prompts.py index 9b482ab0..03f400aa 100644 --- a/auto_dev/services/mech/constants/prompts.py +++ b/auto_dev/services/mech/constants/prompts.py @@ -1,103 +1,3 @@ -GENERATE_MECH_TOOL = f""" -# -*- coding: utf-8 -*- -# ------------------------------------------------------------------------------ -# -# Copyright 2023-2024 Valory AG -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# ------------------------------------------------------------------------------ -\"\"\"Contains the job definitions\"\"\" -import requests -from typing import Any, Dict, Optional, Tuple -DEFAULT_PERPLEXITY_SETTINGS = {{ - "max_": 1, - "stop_sequences": None, - "max_output_tokens": 500, - "temperature": 0.7, -}} -PREFIX = "llama-" -ENGINES = {{ - "chat": ["3.1-sonar-small-128k-online", "3.1-sonar-large-128k-online", "3.1-sonar-huge-128k-online"], -}} -ALLOWED_TOOLS = [PREFIX + value for value in ENGINES["chat"]] -url = "https://api.perplexity.ai/chat/completions" -# def count_tokens(text: str) -> int: -# \"\"\"Count the number of tokens in a text using the Gemini model's tokenizer.\"\"\" -# return genai.count_message_tokens(prompt=text) -def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: - \"\"\"Run the task\"\"\" - api_key = kwargs["api_keys"]["perplexity"] - tool = kwargs["tool"] - prompt = kwargs["prompt"] - if tool not in ALLOWED_TOOLS: - return ( - f"Model {{tool}} is not in the list of supported models.", - None, - None, - None, - ) - max_tokens = kwargs.get("candidate_count") - stop_sequences = kwargs.get( - "stop_sequences", DEFAULT_GEMINI_SETTINGS["stop_sequences"] - ) - max_output_tokens = kwargs.get( - "max_output_tokens", DEFAULT_GEMINI_SETTINGS["max_output_tokens"] - ) - temperature = kwargs.get("temperature", DEFAULT_GEMINI_SETTINGS["temperature"]) - counter_callback = kwargs.get("counter_callback", None) - genai.configure(api_key=api_key) - engine = genai.GenerativeModel(tool) - try: - response = engine.generate_content( - prompt, - generation_config=genai.types.GenerationConfig( - candidate_count=candidate_count, - stop_sequences=stop_sequences, - max_output_tokens=max_output_tokens, - temperature=temperature, - ), - ) - # Ensure response has a .text attribute - response_text = getattr(response, "text", None) - except Exception as e: - return f"An error occurred: {{str(e)}}", None, None, None - return response.text, prompt, None, counter_callback - ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. - Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. - Also give me a commented out main function to run this code at the bottom of the file for testing. - ..... - {api_logic_content} -""" - - -COMPONENT_YAML_CONTENT = f'''name: {tool_name} -author: {author_name} -version: 0.1.0 -type: custom -description: Custom tool created using the CLI -license: Apache-2.0 -aea_version: '>=1.0.0, <2.0.0' -fingerprint: -__init__.py: bafybeidlhllgpf65xwk357wukpguuaz6hxhkyh7dwplv2xkxlrlk4b7zty -{tool_name}.py: bafybeicytmdkgdehao6obnqoff6fpugr6gpbjw4ztxcsswn5ne76vhboqi -fingerprint_ignore_patterns: [] -entry_point: {tool_name}.py -callable: run -dependencies: {{}} -''' - - COMMENTS = """ 1. The main() function should only be used for testing purposes. Do NOT push this. 2. 
Once main() works as expected run 'autonomy packages lock && autonomy push-all' diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py index fa23e877..0c740c0e 100644 --- a/auto_dev/services/mech/create_mech_tool.py +++ b/auto_dev/services/mech/create_mech_tool.py @@ -3,10 +3,11 @@ import sys import re import argparse +from jinja2 import Environment, FileSystemLoader from openai import OpenAI from auto_dev import cli -from auto_dev.services.mech.constants.prompts import COMMENTS, COMPONENT_YAML_CONTENT, GENERATE_MECH_TOOL, INIT_CONTENT +from auto_dev.services.mech.constants.prompts import COMMENTS, GENERATE_MECH_TOOL, INIT_CONTENT from auto_dev.utils import get_logger, write_to_file logger = get_logger() @@ -73,12 +74,25 @@ def create_tool_folder(customs_path, tool_name): return tool_folder_path def create_component_yaml(tools_folder_path, tool_name, author_name): - yaml_path = os.path.join(tools_folder_path, 'component.yaml') - if os.path.exists(yaml_path): + yaml_path = tools_folder_path / "component.yaml" + if yaml_path.exists(): logger.info(f"component.yaml already exists at {yaml_path}. Skipping creation.") return + + script_dir = Path(__file__).resolve().parent + + templates_path = script_dir / "templates" + + # Load the Jinja environment and template + env = Environment(loader=FileSystemLoader(str(templates_path))) + template = env.get_template("component.yaml.j2") - write_to_file(os.path.join(tools_folder_path, 'component.yaml'), COMPONENT_YAML_CONTENT) + # Render the template with collected variables + component_yaml_content = template.render(tool_name=tool_name, author_name=author_name) + + # Write output to component.yaml + write_to_file(yaml_path, component_yaml_content) + logger.info(f"component.yaml created at {yaml_path}") def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_key): @@ -107,12 +121,20 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ logger.error(f"Error reading the API file: {e}") sys.exit(1) # Call GPT to generate the content + + # Use Jinja to load and render the template + templates_path = Path(__file__).resolve().parent / "templates" + env = Environment(loader=FileSystemLoader(str(templates_path))) + template = env.get_template("generate_mech_tool.py.j2") + + # Render the template with collected variables + generated_code_prompt = template.render(tool_name=tool_name, api_logic_content=api_logic_content) try: response = client.chat.completions.create( model="gpt-4o", messages=[ - {"role": "user", "content": GENERATE_MECH_TOOL} + {"role": "user", "content": generated_code_prompt} ] ) diff --git a/auto_dev/services/mech/templates/component.yaml.j2 b/auto_dev/services/mech/templates/component.yaml.j2 new file mode 100644 index 00000000..2a497846 --- /dev/null +++ b/auto_dev/services/mech/templates/component.yaml.j2 @@ -0,0 +1,14 @@ +name: {{ tool_name }} +author: {{ author_name }} +version: 0.1.0 +type: custom +description: Custom tool created using the CLI +license: Apache-2.0 +aea_version: '>=1.0.0, <2.0.0' +fingerprint: + __init__.py: bafybeidlhllgpf65xwk357wukpguuaz6hxhkyh7dwplv2xkxlrlk4b7zty + {{ tool_name }}.py: bafybeicytmdkgdehao6obnqoff6fpugr6gpbjw4ztxcsswn5ne76vhboqi +fingerprint_ignore_patterns: [] +entry_point: {{ tool_name }}.py +callable: run +dependencies: {} \ No newline at end of file diff --git a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 new file mode 100644 
index 00000000..3fc77449 --- /dev/null +++ b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2023-2024 Valory AG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ +\"\"\"Contains the job definitions\"\"\" +import requests +from typing import Any, Dict, Optional, Tuple +DEFAULT_PERPLEXITY_SETTINGS = {{ + "max_": 1, + "stop_sequences": None, + "max_output_tokens": 500, + "temperature": 0.7, +}} +PREFIX = "llama-" +ENGINES = {{ + "chat": ["3.1-sonar-small-128k-online", "3.1-sonar-large-128k-online", "3.1-sonar-huge-128k-online"], +}} +ALLOWED_TOOLS = [PREFIX + value for value in ENGINES["chat"]] +url = "https://api.perplexity.ai/chat/completions" +# def count_tokens(text: str) -> int: +# \"\"\"Count the number of tokens in a text using the Gemini model's tokenizer.\"\"\" +# return genai.count_message_tokens(prompt=text) +def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: + \"\"\"Run the task\"\"\" + api_key = kwargs["api_keys"]["perplexity"] + tool = kwargs["tool"] + prompt = kwargs["prompt"] + if tool not in ALLOWED_TOOLS: + return ( + f"Model {{tool}} is not in the list of supported models.", + None, + None, + None, + ) + max_tokens = kwargs.get("candidate_count") + stop_sequences = kwargs.get( + "stop_sequences", DEFAULT_GEMINI_SETTINGS["stop_sequences"] + ) + max_output_tokens = kwargs.get( + "max_output_tokens", DEFAULT_GEMINI_SETTINGS["max_output_tokens"] + ) + temperature = kwargs.get("temperature", DEFAULT_GEMINI_SETTINGS["temperature"]) + counter_callback = kwargs.get("counter_callback", None) + genai.configure(api_key=api_key) + engine = genai.GenerativeModel(tool) + try: + response = engine.generate_content( + prompt, + generation_config=genai.types.GenerationConfig( + candidate_count=candidate_count, + stop_sequences=stop_sequences, + max_output_tokens=max_output_tokens, + temperature=temperature, + ), + ) + # Ensure response has a .text attribute + response_text = getattr(response, "text", None) + except Exception as e: + return f"An error occurred: {{str(e)}}", None, None, None + return response.text, prompt, None, counter_callback + ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. + Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. + Also give me a commented out main function to run this code at the bottom of the file for testing. + ..... 
+    {api_logic_content}
\ No newline at end of file

From 25a6e7d737517b68b3ae22042381d59e5a523a36 Mon Sep 17 00:00:00 2001
From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com>
Date: Wed, 5 Feb 2025 11:05:27 -0800
Subject: [PATCH 04/10] fix spacing

---
 auto_dev/services/mech/constants/prompts.py | 2 +-
 auto_dev/services/mech/create_mech_tool.py | 3 ++-
 auto_dev/services/mech/templates/component.yaml.j2 | 2 +-
 auto_dev/services/mech/templates/generate_mech_tool.py.j2 | 3 ++-
 4 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/auto_dev/services/mech/constants/prompts.py b/auto_dev/services/mech/constants/prompts.py
index 03f400aa..44d9e4f4 100644
--- a/auto_dev/services/mech/constants/prompts.py
+++ b/auto_dev/services/mech/constants/prompts.py
@@ -25,4 +25,4 @@
 # limitations under the License.
 #
 # ------------------------------------------------------------------------------
-'''
\ No newline at end of file
+'''
diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py
index 0c740c0e..3fd5e4c3 100644
--- a/auto_dev/services/mech/create_mech_tool.py
+++ b/auto_dev/services/mech/create_mech_tool.py
@@ -207,4 +207,5 @@ def main(api_file, tool_name, author_name, gpt_key):
     logger.info(f"Custom tool '{tool_name}' has been created successfully!")
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
+
\ No newline at end of file
diff --git a/auto_dev/services/mech/templates/component.yaml.j2 b/auto_dev/services/mech/templates/component.yaml.j2
index 2a497846..23018517 100644
--- a/auto_dev/services/mech/templates/component.yaml.j2
+++ b/auto_dev/services/mech/templates/component.yaml.j2
@@ -11,4 +11,4 @@ fingerprint:
 fingerprint_ignore_patterns: []
 entry_point: {{ tool_name }}.py
 callable: run
-dependencies: {}
\ No newline at end of file
+dependencies: {}
diff --git a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 b/auto_dev/services/mech/templates/generate_mech_tool.py.j2
index 3fc77449..7493fda6 100644
--- a/auto_dev/services/mech/templates/generate_mech_tool.py.j2
+++ b/auto_dev/services/mech/templates/generate_mech_tool.py.j2
@@ -76,4 +76,5 @@ def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]:
     Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files.
     Also give me a commented out main function to run this code at the bottom of the file for testing.
     .....
- {api_logic_content} \ No newline at end of file + {api_logic_content} + \ No newline at end of file From 1eb7ad0d2aedbd682c4ab2b91658aa4b1fc99e66 Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Wed, 5 Feb 2025 11:06:58 -0800 Subject: [PATCH 05/10] fix spacing --- auto_dev/services/mech/create_mech_tool.py | 1 - auto_dev/services/mech/templates/generate_mech_tool.py.j2 | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py index 3fd5e4c3..db8950ad 100644 --- a/auto_dev/services/mech/create_mech_tool.py +++ b/auto_dev/services/mech/create_mech_tool.py @@ -208,4 +208,3 @@ def main(api_file, tool_name, author_name, gpt_key): if __name__ == "__main__": main() - \ No newline at end of file diff --git a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 index 7493fda6..3fc77449 100644 --- a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 +++ b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 @@ -76,5 +76,4 @@ def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. Also give me a commented out main function to run this code at the bottom of the file for testing. ..... - {api_logic_content} - \ No newline at end of file + {api_logic_content} \ No newline at end of file From 965edba383824d1a111a16a8a489dab73a4d9908 Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Wed, 5 Feb 2025 11:07:39 -0800 Subject: [PATCH 06/10] fix spacing --- auto_dev/services/mech/templates/generate_mech_tool.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 index 3fc77449..b1ca3657 100644 --- a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 +++ b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 @@ -76,4 +76,4 @@ def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. Also give me a commented out main function to run this code at the bottom of the file for testing. ..... 
- {api_logic_content} \ No newline at end of file + {api_logic_content} From ace4e024a67cf8f215681fba6ca9aedd79d4772b Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Tue, 18 Feb 2025 12:02:51 -0800 Subject: [PATCH 07/10] Fix errors --- auto_dev/commands/scaffold.py | 4 +- auto_dev/services/mech/create_mech_tool.py | 97 +++++++++++-------- .../mech/templates/generate_mech_tool.py.j2 | 92 +++++++++++------- 3 files changed, 117 insertions(+), 76 deletions(-) diff --git a/auto_dev/commands/scaffold.py b/auto_dev/commands/scaffold.py index 4c48858e..ffcd7099 100644 --- a/auto_dev/commands/scaffold.py +++ b/auto_dev/commands/scaffold.py @@ -106,10 +106,10 @@ def _process_from_file(ctx, yaml_dict, network, read_functions, write_functions, @scaffold.command() @click.option("--type", type=click.Choice([e.value for e in ScaffoldType]), required=True, help="Specify the type of scaffold to create") -@click.argument("gpt_key", type=str) @click.argument("api_file", type=str) @click.argument("tool_name", type=str) @click.argument("author_name", type=str) +@click.argument("gpt_key", type=str) @click.pass_context def custom(ctx, type, api_file, tool_name, author_name, gpt_key): """Scaffold a custom tool, such as a Mech tool.""" @@ -118,7 +118,7 @@ def custom(ctx, type, api_file, tool_name, author_name, gpt_key): if not api_file or not tool_name or not author_name or not gpt_key: raise click.ClickException("For --type mech, you must provide api_file, tool_name, author_name, and gpt_key.") - from services.mech.create_mech_tool import main as create_mech_tool + from auto_dev.services.mech.create_mech_tool import main as create_mech_tool click.echo(f"Creating Mech tool '{tool_name}' by {author_name}...") diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py index db8950ad..487e082c 100644 --- a/auto_dev/services/mech/create_mech_tool.py +++ b/auto_dev/services/mech/create_mech_tool.py @@ -7,7 +7,7 @@ from openai import OpenAI from auto_dev import cli -from auto_dev.services.mech.constants.prompts import COMMENTS, GENERATE_MECH_TOOL, INIT_CONTENT +from auto_dev.services.mech.constants.prompts import COMMENTS, INIT_CONTENT from auto_dev.utils import get_logger, write_to_file logger = get_logger() @@ -74,7 +74,7 @@ def create_tool_folder(customs_path, tool_name): return tool_folder_path def create_component_yaml(tools_folder_path, tool_name, author_name): - yaml_path = tools_folder_path / "component.yaml" + yaml_path = Path(tools_folder_path) / "component.yaml" if yaml_path.exists(): logger.info(f"component.yaml already exists at {yaml_path}. Skipping creation.") return @@ -95,59 +95,65 @@ def create_component_yaml(tools_folder_path, tool_name, author_name): logger.info(f"component.yaml created at {yaml_path}") -def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_key): - """ - Generates and writes the content for .py using GPT. - Args: - tool_path (str): The path where the tool files are stored. - tool_name (str): The name of the tool. - api_file (str): The path to the file containing API logic. - gpt_api_key (str): The API key for OpenAI GPT. 
- Returns: - None - """ +def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_key, force=False): + """Generates and writes the content for .py using GPT.""" tool_py_path = Path(tool_folder_path) / f"{tool_name}.py" - if os.path.exists(tool_py_path): + + # Always overwrite unless force=False and user declines + if os.path.exists(tool_py_path) and not force: user_input = input(f"The file {tool_py_path} already exists. Do you want to override it? (yes/no): ").strip().lower() if user_input != "yes": logger.info(f"Skipping file generation for {tool_py_path}") return False + client = OpenAI(api_key=gpt_api_key) + try: - # Read the content of the API logic file with open(api_file, 'r') as f: api_logic_content = f.read() except Exception as e: - logger.error(f"Error reading the API file: {e}") + logger.error(f"Error reading API file: {e}") sys.exit(1) - # Call GPT to generate the content - # Use Jinja to load and render the template + # Render the template templates_path = Path(__file__).resolve().parent / "templates" env = Environment(loader=FileSystemLoader(str(templates_path))) template = env.get_template("generate_mech_tool.py.j2") - - # Render the template with collected variables + logger.info(f"tool_name: {tool_name}") + logger.info(f"api_logic_content (first 500 chars): {api_logic_content[:500]}") generated_code_prompt = template.render(tool_name=tool_name, api_logic_content=api_logic_content) - + # Write the rendered template to a file for debugging + debug_template_path = Path(tool_folder_path) / f"{tool_name}_debug_template.py" + with open(debug_template_path, "w", encoding="utf-8") as debug_file: + debug_file.write(generated_code_prompt) + + + + logger.info(f"Rendered template written to {debug_template_path} for debugging.") try: response = client.chat.completions.create( model="gpt-4o", - messages=[ - {"role": "user", "content": generated_code_prompt} - ] + messages=[{"role": "user", "content": generated_code_prompt}] ) - - # Extract GPT's response gpt_response = response.choices[0].message.content.strip() + + # Remove unwanted triple backticks from GPT response + if gpt_response.startswith("```python"): + gpt_response = gpt_response[9:] # Remove first 9 characters (` ```python `) + if gpt_response.endswith("```"): + gpt_response = gpt_response[:-3] # Remove last 3 characters (` ``` `) + except Exception as e: logger.error(f"Error calling GPT: {e}") sys.exit(1) - # Write the generated content into the .py file - tool_py_path = Path(tool_folder_path) / f"{tool_name}.py" + # Write to file + if not gpt_response or gpt_response.strip() == "": + logger.error("GPT response is empty! Aborting file write.") + sys.exit(1) try: - with open(tool_py_path, 'w') as f: + logger.info(f"Writing generated code to {tool_py_path}") + with open(tool_py_path, 'w', encoding='utf-8') as f: f.write(gpt_response) logger.info(f"Generated content written to {tool_py_path}") return True @@ -155,28 +161,43 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ logger.error(f"Error writing to {tool_py_path}: {e}") sys.exit(1) +def write_to_file(file_path, content, mode="a"): + """Writes content to a file, ensuring safe append mode.""" + with open(file_path, mode, encoding="utf-8") as f: + f.write(content) + + def append_comments_to_tool_file(tool_file_path, comments): """ Appends comments to the bottom of the specified tool file. - Args: - tool_file_path (str): The path to the tool file. - comments (str): The comments to append. 
- Returns: - None + Ensures existing content is not erased. """ try: - write_to_file(tool_file_path, "\n\n# " + "\n# ".join(comments.splitlines()), mode="a") + # ✅ Read existing file content before appending + with open(tool_file_path, "r", encoding="utf-8") as f: + existing_content = f.read() + + # ✅ Append mode ensures content is not erased + with open(tool_file_path, "a", encoding="utf-8") as f: + f.write("\n\n# " + "\n# ".join(comments.splitlines())) + + # ✅ Read file after appending + with open(tool_file_path, "r", encoding="utf-8") as f: + new_content = f.read() + logger.info(f"Comments successfully appended to {tool_file_path}") + except Exception as e: logger.error(f"Error appending comments to {tool_file_path}: {e}") -@cli.group() + + def main(api_file, tool_name, author_name, gpt_key): GPT_KEY = gpt_key - base_path = Path.cwd().parents[3] # Equivalent to going up 4 directories + base_path = Path.cwd() - logger.info("The base path is" + base_path) + logger.info(f"The base path is {base_path}") # Create the tool's directory structure and necessary files tool_base_path = create_directory_structure(base_path, author_name) # Create the init file within the author's folder diff --git a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 index b1ca3657..428f8e59 100644 --- a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 +++ b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 @@ -16,62 +16,82 @@ # limitations under the License. # # ------------------------------------------------------------------------------ -\"\"\"Contains the job definitions\"\"\" +"""Contains the job definitions""" import requests from typing import Any, Dict, Optional, Tuple -DEFAULT_PERPLEXITY_SETTINGS = {{ - "max_": 1, - "stop_sequences": None, - "max_output_tokens": 500, + +DEFAULT_PERPLEXITY_SETTINGS = { + "model": "llama-3.1-sonar-small-128k-online", + "max_tokens": 500, "temperature": 0.7, -}} + "top_p": 0.9, + "search_recency_filter": "month", + "presence_penalty": 0, + "frequency_penalty": 1, + "stream": False, + "return_images": False, + "return_related_questions": False, + "search_domain_filter": ["perplexity.ai"], +} + PREFIX = "llama-" -ENGINES = {{ +ENGINES = { "chat": ["3.1-sonar-small-128k-online", "3.1-sonar-large-128k-online", "3.1-sonar-huge-128k-online"], -}} +} ALLOWED_TOOLS = [PREFIX + value for value in ENGINES["chat"]] + url = "https://api.perplexity.ai/chat/completions" -# def count_tokens(text: str) -> int: -# \"\"\"Count the number of tokens in a text using the Gemini model's tokenizer.\"\"\" -# return genai.count_message_tokens(prompt=text) + def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: - \"\"\"Run the task\"\"\" + """Run the task""" api_key = kwargs["api_keys"]["perplexity"] tool = kwargs["tool"] prompt = kwargs["prompt"] + if tool not in ALLOWED_TOOLS: return ( - f"Model {{tool}} is not in the list of supported models.", + f"Model {tool} is not in the list of supported models.", None, None, None, ) - max_tokens = kwargs.get("candidate_count") - stop_sequences = kwargs.get( - "stop_sequences", DEFAULT_GEMINI_SETTINGS["stop_sequences"] - ) - max_output_tokens = kwargs.get( - "max_output_tokens", DEFAULT_GEMINI_SETTINGS["max_output_tokens"] - ) - temperature = kwargs.get("temperature", DEFAULT_GEMINI_SETTINGS["temperature"]) + counter_callback = kwargs.get("counter_callback", None) - genai.configure(api_key=api_key) - engine = genai.GenerativeModel(tool) + + payload = { + 
"model": tool, + "messages": [ + {"role": "system", "content": kwargs.get("system_prompt", "Be precise and concise.")}, + {"role": "user", "content": prompt} + ], + "max_tokens": kwargs.get("max_tokens", DEFAULT_PERPLEXITY_SETTINGS["max_tokens"]), + "temperature": kwargs.get("temperature", DEFAULT_PERPLEXITY_SETTINGS["temperature"]), + "top_p": kwargs.get("top_p", DEFAULT_PERPLEXITY_SETTINGS["top_p"]), + "search_recency_filter": kwargs.get("search_recency_filter", DEFAULT_PERPLEXITY_SETTINGS["search_recency_filter"]), + "presence_penalty": kwargs.get("presence_penalty", DEFAULT_PERPLEXITY_SETTINGS["presence_penalty"]), + "frequency_penalty": kwargs.get("frequency_penalty", DEFAULT_PERPLEXITY_SETTINGS["frequency_penalty"]), + "stream": kwargs.get("stream", DEFAULT_PERPLEXITY_SETTINGS["stream"]), + "return_images": kwargs.get("return_images", DEFAULT_PERPLEXITY_SETTINGS["return_images"]), + "return_related_questions": kwargs.get("return_related_questions", DEFAULT_PERPLEXITY_SETTINGS["return_related_questions"]), + "search_domain_filter": kwargs.get("search_domain_filter", DEFAULT_PERPLEXITY_SETTINGS["search_domain_filter"]), + } + + headers = { + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json", + } + try: - response = engine.generate_content( - prompt, - generation_config=genai.types.GenerationConfig( - candidate_count=candidate_count, - stop_sequences=stop_sequences, - max_output_tokens=max_output_tokens, - temperature=temperature, - ), - ) - # Ensure response has a .text attribute - response_text = getattr(response, "text", None) + response = requests.post(url, json=payload, headers=headers) + response.raise_for_status() + data = response.json() + message = data.get("choices", [{}])[0].get("message", {}).get("content", None) + if not message: + return "No content received from the assistant.", None, None, None except Exception as e: - return f"An error occurred: {{str(e)}}", None, None, None - return response.text, prompt, None, counter_callback + return f"An error occurred: {str(e)}", None, None, None + + return message, prompt, None, counter_callback ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. Also give me a commented out main function to run this code at the bottom of the file for testing. 
From 92f028a10dc0e5446846f4aa41f7deb1e23ddf5e Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Tue, 18 Feb 2025 12:58:08 -0800 Subject: [PATCH 08/10] fix errors --- auto_dev/services/mech/create_mech_tool.py | 4 ---- auto_dev/services/mech/templates/generate_mech_tool.py.j2 | 4 ++-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py index 487e082c..c56ac08a 100644 --- a/auto_dev/services/mech/create_mech_tool.py +++ b/auto_dev/services/mech/create_mech_tool.py @@ -196,15 +196,11 @@ def main(api_file, tool_name, author_name, gpt_key): GPT_KEY = gpt_key base_path = Path.cwd() - - logger.info(f"The base path is {base_path}") # Create the tool's directory structure and necessary files tool_base_path = create_directory_structure(base_path, author_name) # Create the init file within the author's folder generate_init_file(tool_base_path) - - # Create the customs folder customs_path = create_customs_folder(tool_base_path) diff --git a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 index 428f8e59..85d6182c 100644 --- a/auto_dev/services/mech/templates/generate_mech_tool.py.j2 +++ b/auto_dev/services/mech/templates/generate_mech_tool.py.j2 @@ -92,8 +92,8 @@ def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]: return f"An error occurred: {str(e)}", None, None, None return message, prompt, None, counter_callback - ....Edit this to work for the code I about to give you for {tool_name} based on the documentation and only give the code. + ....Edit this to work for the code I about to give you for {{tool_name}} based on the documentation and only give the code. Output only code, no words. This is being put directly in a Python file. Do not put the coding quotation formatting for python files. Also give me a commented out main function to run this code at the bottom of the file for testing. ..... 
- {api_logic_content} + {{api_logic_content}} From ed718d9c0577602f3eda0c911d8099a7d8c62352 Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Tue, 18 Feb 2025 13:00:19 -0800 Subject: [PATCH 09/10] clean up logging --- auto_dev/services/mech/create_mech_tool.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py index c56ac08a..77056556 100644 --- a/auto_dev/services/mech/create_mech_tool.py +++ b/auto_dev/services/mech/create_mech_tool.py @@ -119,17 +119,8 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ templates_path = Path(__file__).resolve().parent / "templates" env = Environment(loader=FileSystemLoader(str(templates_path))) template = env.get_template("generate_mech_tool.py.j2") - logger.info(f"tool_name: {tool_name}") - logger.info(f"api_logic_content (first 500 chars): {api_logic_content[:500]}") generated_code_prompt = template.render(tool_name=tool_name, api_logic_content=api_logic_content) - # Write the rendered template to a file for debugging - debug_template_path = Path(tool_folder_path) / f"{tool_name}_debug_template.py" - with open(debug_template_path, "w", encoding="utf-8") as debug_file: - debug_file.write(generated_code_prompt) - - - - logger.info(f"Rendered template written to {debug_template_path} for debugging.") + try: response = client.chat.completions.create( model="gpt-4o", From fe9bf3e04d967b670b16b113640b85e6b3aa8397 Mon Sep 17 00:00:00 2001 From: victorpolisetty <52013101+victorpolisetty@users.noreply.github.com> Date: Thu, 20 Feb 2025 13:16:30 -0800 Subject: [PATCH 10/10] fix linter errors --- auto_dev/commands/scaffold.py | 34 ++-- auto_dev/services/mech/constants/__init__.py | 1 + auto_dev/services/mech/constants/prompts.py | 8 +- auto_dev/services/mech/create_mech_tool.py | 171 ++++++++++++----- poetry.lock | 192 ++++++++++++++++++- pyproject.toml | 1 + 6 files changed, 336 insertions(+), 71 deletions(-) create mode 100644 auto_dev/services/mech/constants/__init__.py diff --git a/auto_dev/commands/scaffold.py b/auto_dev/commands/scaffold.py index 3d3cac49..768fcd9c 100644 --- a/auto_dev/commands/scaffold.py +++ b/auto_dev/commands/scaffold.py @@ -8,15 +8,19 @@ """ -import enum import sys +import enum from pathlib import Path import yaml import rich_click as click from web3 import Web3 from jinja2 import Environment, FileSystemLoader -from aea.configurations.constants import DEFAULT_AEA_CONFIG_FILE, PROTOCOL_LANGUAGE_PYTHON, SUPPORTED_PROTOCOL_LANGUAGES +from aea.configurations.constants import ( + DEFAULT_AEA_CONFIG_FILE, + PROTOCOL_LANGUAGE_PYTHON, + SUPPORTED_PROTOCOL_LANGUAGES, +) from aea.configurations.data_types import PublicId from auto_dev.base import build_cli @@ -26,6 +30,7 @@ from auto_dev.cli_executor import CommandExecutor from auto_dev.handlers.base import HandlerTypes, HandlerScaffolder from auto_dev.dao.scaffolder import DAOScaffolder +from auto_dev.workflow_manager import Task from auto_dev.contracts.contract import DEFAULT_NULL_ADDRESS from auto_dev.handler.scaffolder import HandlerScaffoldBuilder from auto_dev.dialogues.scaffolder import DialogueTypes, DialogueScaffolder @@ -34,12 +39,15 @@ from auto_dev.connections.scaffolder import ConnectionScaffolder from auto_dev.contracts.block_explorer import BlockExplorer from auto_dev.contracts.contract_scafolder import ContractScaffolder -from auto_dev.workflow_manager import Task +from 
auto_dev.services.mech.create_mech_tool import main as create_mech_tool + class ScaffoldType(enum.Enum): + """Enum representing different types of scaffolds.""" MECH = "mech" OTHER_TYPES = "other_types" + cli = build_cli() @@ -108,28 +116,32 @@ def _process_from_file(ctx, yaml_dict, network, read_functions, write_functions, @scaffold.command() -@click.option("--type", type=click.Choice([e.value for e in ScaffoldType]), required=True, help="Specify the type of scaffold to create") +@click.option( + "--type", + type=click.Choice([e.value for e in ScaffoldType]), + required=True, + help="Specify the type of scaffold to create", +) @click.argument("api_file", type=str) @click.argument("tool_name", type=str) @click.argument("author_name", type=str) @click.argument("gpt_key", type=str) @click.pass_context -def custom(ctx, type, api_file, tool_name, author_name, gpt_key): +def custom(type, api_file, tool_name, author_name, gpt_key): """Scaffold a custom tool, such as a Mech tool.""" - + if type == "mech": if not api_file or not tool_name or not author_name or not gpt_key: - raise click.ClickException("For --type mech, you must provide api_file, tool_name, author_name, and gpt_key.") + msg = "For --type mech, you must provide api_file, tool_name, author_name, and gpt_key." + raise click.ClickException(msg) - from auto_dev.services.mech.create_mech_tool import main as create_mech_tool - click.echo(f"Creating Mech tool '{tool_name}' by {author_name}...") # Run the Mech tool creator script create_mech_tool(api_file=api_file, tool_name=tool_name, author_name=author_name, gpt_key=gpt_key) - + click.echo(f"Mech tool '{tool_name}' successfully scaffolded!") - + else: click.echo(f"Scaffolding for type '{type}' is not implemented yet.") diff --git a/auto_dev/services/mech/constants/__init__.py b/auto_dev/services/mech/constants/__init__.py new file mode 100644 index 00000000..2d8a55be --- /dev/null +++ b/auto_dev/services/mech/constants/__init__.py @@ -0,0 +1 @@ +"""Initialisation.""" diff --git a/auto_dev/services/mech/constants/prompts.py b/auto_dev/services/mech/constants/prompts.py index 44d9e4f4..8cb21736 100644 --- a/auto_dev/services/mech/constants/prompts.py +++ b/auto_dev/services/mech/constants/prompts.py @@ -1,12 +1,14 @@ +"""Module containing prompt-related constants for Mech services.""" + COMMENTS = """ 1. The main() function should only be used for testing purposes. Do NOT push this. 2. Once main() works as expected run 'autonomy packages lock && autonomy push-all' -3. Add to API_KEY list in .example.env and adhere to the current structure. Only do this if the API_KEY doesn't already exist for your key. +3. Add to API_KEY list in .example.env and adhere to the current structure. 4. Next, add all new models to FILE_HASH_TO_TOOLS and use the new hash from packages/packages.json for your tool. Check this PR for reference. https://github.com/valory-xyz/mech/pull/228/files """ -INIT_CONTENT = '''#!/usr/bin/env python3 +INIT_CONTENT = """#!/usr/bin/env python3 # -*- coding: utf-8 -*- # ------------------------------------------------------------------------------ # @@ -25,4 +27,4 @@ # limitations under the License. 
# # ------------------------------------------------------------------------------ -''' +""" diff --git a/auto_dev/services/mech/create_mech_tool.py b/auto_dev/services/mech/create_mech_tool.py index 77056556..fef4d09e 100644 --- a/auto_dev/services/mech/create_mech_tool.py +++ b/auto_dev/services/mech/create_mech_tool.py @@ -1,38 +1,58 @@ +"""This module provides utilities for generating Mech tools using OpenAI's GPT API. + +It creates directory structures, initializes tool files, and integrates Jinja templates. +""" + import os -from pathlib import Path import sys -import re -import argparse -from jinja2 import Environment, FileSystemLoader +from pathlib import Path + +from jinja2 import Environment, FileSystemLoader, select_autoescape from openai import OpenAI -from auto_dev import cli +from auto_dev.utils import get_logger from auto_dev.services.mech.constants.prompts import COMMENTS, INIT_CONTENT -from auto_dev.utils import get_logger, write_to_file + logger = get_logger() + def create_directory_structure(base_path, author_name): - # Create the directory structure for the tool - tool_path = os.path.join(base_path, 'packages', author_name) - if os.path.exists(tool_path): - print(f"Directory for author '{author_name}' already exists. Skipping creation.") + """Create the directory structure for the tool under the author's namespace. + + Args: + ---- + base_path (Path): The base path where the tool directory should be created. + author_name (str): The name of the author. + + Returns: + ------- + str: The path to the created tool directory. + + """ + tool_path = os.path.join(base_path, "packages", author_name) + if Path.exists(tool_path): return tool_path - os.makedirs(tool_path, exist_ok=True) + Path(tool_path).mkdir(parents=True, exist_ok=True) return tool_path + def generate_init_file(tool_path): - """ - Generates an __init__.py file with predefined content in the specified tool path + """Generates an __init__.py file with predefined content in the specified tool path if it does not already exist. + Args: + ---- tool_path (str): The path where the __init__.py file will be created. + Returns: - None + ------- + None. + """ - init_file_path = os.path.join(tool_path, '__init__.py') - if os.path.exists(init_file_path): + init_file_path = os.path.join(tool_path, "__init__.py") + if Path.exists(init_file_path): logger.info(f"__init__.py already exists at {init_file_path}. Skipping creation.") return @@ -41,50 +61,76 @@ def generate_init_file(tool_path): def create_customs_folder(tool_path): - """ - Creates a 'customs' folder within the given tool directory. + """Creates a 'customs' folder within the given tool directory. + Args: + ---- tool_path (str): The path to the tool directory. + Returns: + ------- str: The path to the created 'customs' folder. + """ - customs_path = os.path.join(tool_path, 'customs') - if not os.path.exists(customs_path): - os.makedirs(customs_path, exist_ok=True) + customs_path = os.path.join(tool_path, "customs") + if not Path.exists(customs_path): + Path(customs_path).mkdir(parents=True, exist_ok=True) logger.info(f"'customs' folder created at {customs_path}") else: logger.info(f"'customs' folder already exists at {customs_path}") return customs_path + def create_tool_folder(customs_path, tool_name): - """ - Creates a folder inside the customs folder with the name of the tool. + """Creates a folder inside the customs folder with the name of the tool. + Args: + ---- customs_path (str): The path to the customs folder. tool_name (str): The name of the tool. 
+ Returns: + ------- str: The path to the created tool folder. + """ tool_folder_path = os.path.join(customs_path, tool_name) - if not os.path.exists(tool_folder_path): - os.makedirs(tool_folder_path, exist_ok=True) + if not Path.exists(tool_folder_path): + Path(tool_folder_path).mkdir(parents=True, exist_ok=True) logger.info(f"Tool folder '{tool_name}' created at {tool_folder_path}") else: logger.info(f"Tool folder '{tool_name}' already exists at {tool_folder_path}") return tool_folder_path + def create_component_yaml(tools_folder_path, tool_name, author_name): + """Create a `component.yaml` file for the specified tool. + + This function generates a `component.yaml` file inside the given tools folder + using a Jinja2 template. If the file already exists, it logs a message and skips creation. + + Args: + ---- + tools_folder_path (str | Path): The path to the folder where `component.yaml` should be created. + tool_name (str): The name of the tool. + author_name (str): The name of the author. + + Returns: + ------- + None + + """ yaml_path = Path(tools_folder_path) / "component.yaml" if yaml_path.exists(): logger.info(f"component.yaml already exists at {yaml_path}. Skipping creation.") return - + script_dir = Path(__file__).resolve().parent templates_path = script_dir / "templates" # Load the Jinja environment and template - env = Environment(loader=FileSystemLoader(str(templates_path))) + env = Environment(loader=FileSystemLoader(str(templates_path)), autoescape=True) template = env.get_template("component.yaml.j2") # Render the template with collected variables @@ -100,8 +146,10 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ tool_py_path = Path(tool_folder_path) / f"{tool_name}.py" # Always overwrite unless force=False and user declines - if os.path.exists(tool_py_path) and not force: - user_input = input(f"The file {tool_py_path} already exists. Do you want to override it? (yes/no): ").strip().lower() + if Path.exists(tool_py_path) and not force: + user_input = input( + f"The file {tool_py_path} already exists. Do you want to override it? 
(yes/no): " + ).strip().lower() if user_input != "yes": logger.info(f"Skipping file generation for {tool_py_path}") return False @@ -109,18 +157,21 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ client = OpenAI(api_key=gpt_api_key) try: - with open(api_file, 'r') as f: + with open(api_file, encoding="utf-8") as f: api_logic_content = f.read() except Exception as e: - logger.error(f"Error reading API file: {e}") + logger.exception(f"Error reading API file: {e}") sys.exit(1) # Render the template templates_path = Path(__file__).resolve().parent / "templates" - env = Environment(loader=FileSystemLoader(str(templates_path))) + env = Environment( + loader=FileSystemLoader(str(templates_path)), + autoescape=select_autoescape(["html", "xml", "j2"]) # Autoescapes HTML, XML, and Jinja2 templates + ) template = env.get_template("generate_mech_tool.py.j2") generated_code_prompt = template.render(tool_name=tool_name, api_logic_content=api_logic_content) - + try: response = client.chat.completions.create( model="gpt-4o", @@ -132,10 +183,10 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ if gpt_response.startswith("```python"): gpt_response = gpt_response[9:] # Remove first 9 characters (` ```python `) if gpt_response.endswith("```"): - gpt_response = gpt_response[:-3] # Remove last 3 characters (` ``` `) - + gpt_response = gpt_response[:-3] # Remove last 3 characters (` ``` `) + except Exception as e: - logger.error(f"Error calling GPT: {e}") + logger.exception(f"Error calling GPT: {e}") sys.exit(1) # Write to file @@ -144,14 +195,15 @@ def generate_and_write_tool_file(tool_folder_path, tool_name, api_file, gpt_api_ sys.exit(1) try: logger.info(f"Writing generated code to {tool_py_path}") - with open(tool_py_path, 'w', encoding='utf-8') as f: + with open(tool_py_path, "w", encoding="utf-8") as f: f.write(gpt_response) logger.info(f"Generated content written to {tool_py_path}") return True except Exception as e: - logger.error(f"Error writing to {tool_py_path}: {e}") + logger.exception(f"Error writing to {tool_py_path}: {e}") sys.exit(1) + def write_to_file(file_path, content, mode="a"): """Writes content to a file, ensuring safe append mode.""" with open(file_path, mode, encoding="utf-8") as f: @@ -159,33 +211,53 @@ def write_to_file(file_path, content, mode="a"): def append_comments_to_tool_file(tool_file_path, comments): - """ - Appends comments to the bottom of the specified tool file. + """Appends comments to the bottom of the specified tool file. Ensures existing content is not erased. """ try: # ✅ Read existing file content before appending - with open(tool_file_path, "r", encoding="utf-8") as f: - existing_content = f.read() + with open(tool_file_path, encoding="utf-8") as f: + f.read() # ✅ Append mode ensures content is not erased with open(tool_file_path, "a", encoding="utf-8") as f: f.write("\n\n# " + "\n# ".join(comments.splitlines())) # ✅ Read file after appending - with open(tool_file_path, "r", encoding="utf-8") as f: - new_content = f.read() + with open(tool_file_path, encoding="utf-8") as f: + f.read() logger.info(f"Comments successfully appended to {tool_file_path}") except Exception as e: - logger.error(f"Error appending comments to {tool_file_path}: {e}") - + logger.exception(f"Error appending comments to {tool_file_path}: {e}") def main(api_file, tool_name, author_name, gpt_key): + """Main function to scaffold a custom Mech tool. 
+ + This function creates the necessary directory structure and files for a Mech tool, + including initialization files, a `component.yaml` configuration file, and the + main tool Python file. It also generates the tool's logic using OpenAI's GPT API. + + Args: + ---- + api_file (str | Path): Path to the API definition file used for tool generation. + tool_name (str): Name of the tool to be created. + author_name (str): Name of the author or organization creating the tool. + gpt_key (str): API key for OpenAI GPT, used to generate the tool's code. - GPT_KEY = gpt_key + Returns: + ------- + None + + Side Effects: + - Creates necessary directories and initialization files. + - Generates a `component.yaml` file using Jinja templates. + - Uses GPT to generate and save the tool's Python script. + - Appends documentation comments to the generated file. + + """ base_path = Path.cwd() # Create the tool's directory structure and necessary files tool_base_path = create_directory_structure(base_path, author_name) @@ -205,14 +277,15 @@ def main(api_file, tool_name, author_name, gpt_key): create_component_yaml(tools_folder_path, tool_name, author_name) # Create the `.py` file - file_generated = generate_and_write_tool_file(tools_folder_path, tool_name, api_file, GPT_KEY) + file_generated = generate_and_write_tool_file(tools_folder_path, tool_name, api_file, gpt_key) # Append instructions to tool_name.py file only if the file was generated if file_generated: - tool_py_path = os.path.join(tool_base_path, 'customs', tool_name, f"{tool_name}.py") + tool_py_path = os.path.join(tool_base_path, "customs", tool_name, f"{tool_name}.py") append_comments_to_tool_file(tool_py_path, COMMENTS) logger.info(f"Custom tool '{tool_name}' has been created successfully!") + if __name__ == "__main__": main() diff --git a/poetry.lock b/poetry.lock index bb8a538c..561da221 100644 --- a/poetry.lock +++ b/poetry.lock @@ -133,6 +133,7 @@ description = "The Python Anchor client." 
optional = false python-versions = ">=3.9,<4.0" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "anchorpy-0.18.0-py3-none-any.whl", hash = "sha256:664672c2de94ed8910173fd3235d918513a927de80612db625f395bc8a0160d1"}, {file = "anchorpy-0.18.0.tar.gz", hash = "sha256:606c49f2dba41046ba60947d19c19eb17587b490b5d1a88642722b42159aa998"}, @@ -167,6 +168,7 @@ description = "Python bindings for Anchor Rust code" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "anchorpy_core-0.1.3-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:c9af073115feaab9a7fd14bc9f0d19a87650042bd430e44e9c7714b18f5aeb3a"}, {file = "anchorpy_core-0.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4511a3a9a0425a84305e56087b81969d2929ac642b2c1d6fb2a500c8f987d8d3"}, @@ -191,7 +193,6 @@ description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"all\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -301,6 +302,7 @@ description = "Function decoration for backoff and retry" optional = false python-versions = ">=3.7,<4.0" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, @@ -344,6 +346,7 @@ description = "A fast Python library for Base58 and Base58Check" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "based58-0.1.1-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:745851792ce5fada615f05ec61d7f360d19c76950d1e86163b2293c63a5d43bc"}, {file = "based58-0.1.1-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f8448a71678bd1edc0a464033695686461ab9d6d0bc3282cb29b94f883583572"}, @@ -370,6 +373,7 @@ description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, @@ -432,6 +436,7 @@ description = "Reference implementation for Bech32 and segwit addresses." 
optional = false python-versions = ">=3.5" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "bech32-1.2.0-py3-none-any.whl", hash = "sha256:990dc8e5a5e4feabbdf55207b5315fdd9b73db40be294a19b3752cde9e79d981"}, {file = "bech32-1.2.0.tar.gz", hash = "sha256:7d6db8214603bd7871fcfa6c0826ef68b85b0abd90fa21c285a9c5e21d2bd899"}, @@ -640,6 +645,7 @@ description = "Python implementation of Borsh serialization, built on the Constr optional = false python-versions = ">=3.8.3,<4.0.0" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "borsh-construct-0.1.0.tar.gz", hash = "sha256:c916758ceba70085d8f456a1cc26991b88cb64233d347767766473b651b37263"}, {file = "borsh_construct-0.1.0-py3-none-any.whl", hash = "sha256:f584c791e2a03f8fc36e6c13011a27bcaf028c9c54ba89cd70f485a7d1c687ed"}, @@ -680,6 +686,7 @@ description = "Extensible memoizing collections and decorators" optional = false python-versions = "~=3.5" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"}, {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"}, @@ -773,7 +780,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {dev = "sys_platform == \"linux\" and platform_python_implementation != \"PyPy\""} +markers = {main = "extra == \"all\"", dev = "sys_platform == \"linux\" and platform_python_implementation != \"PyPy\""} [package.dependencies] pycparser = "*" @@ -941,6 +948,7 @@ description = "A powerful declarative symmetric parser/builder for binary data" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "construct-2.10.68.tar.gz", hash = "sha256:7b2a3fd8e5f597a5aa1d614c3bd516fa065db01704c72a1efaaeec6ef23d8b45"}, ] @@ -955,6 +963,7 @@ description = "Extension for the python package 'construct' that adds typing fea optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "construct-typing-0.5.6.tar.gz", hash = "sha256:0dc501351cd6b308f15ec54e5fe7c0fbc07cc1530a1b77b4303062a0a93c1297"}, {file = "construct_typing-0.5.6-py3-none-any.whl", hash = "sha256:39c948329e880564e33521cba497b21b07967c465b9c9037d6334e2cffa1ced9"}, @@ -970,6 +979,7 @@ description = "A library for interacting with the cosmos networks" optional = false python-versions = ">=3.8,<4.0" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "cosmpy-0.9.2-py3-none-any.whl", hash = "sha256:3591311198b08a0aa75340851ca166669974f17ffaa207a8d2cb26504fb0fa19"}, {file = "cosmpy-0.9.2.tar.gz", hash = "sha256:0f0eb80152f28ef5ee4d846d581d2e34ba2d952900f0e3570cacb84bb376f664"}, @@ -1078,6 +1088,7 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -1088,6 +1099,7 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -1100,7 +1112,7 @@ files = [ {file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"}, {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"}, ] -markers = {dev = "sys_platform == \"linux\""} +markers = {main = "extra == \"all\"", dev = "sys_platform == \"linux\""} [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} @@ -1306,6 +1318,7 @@ description = "Python library to use the pseudo-tty of a docker container" optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "dockerpty-0.4.1.tar.gz", hash = "sha256:69a9d69d573a0daa31bcd1c0774eeed5c15c295fe719c61aca550ed1393156ce"}, ] @@ -1320,6 +1333,7 @@ description = "Pythonic argument parser, that will make you smile" optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] @@ -1622,6 +1636,7 @@ description = "A simple framework for building complex web applications." 
optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"}, {file = "Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb"}, @@ -1764,6 +1779,7 @@ description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, @@ -1782,6 +1798,7 @@ description = "GraphQL client for Python" optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "gql-3.5.0-py2.py3-none-any.whl", hash = "sha256:70dda5694a5b194a8441f077aa5fb70cc94e4ec08016117523f013680901ecb7"}, {file = "gql-3.5.0.tar.gz", hash = "sha256:ccb9c5db543682b28f577069950488218ed65d4ac70bb03b6929aaadaf636de9"}, @@ -1811,6 +1828,7 @@ description = "GraphQL implementation for Python, a port of GraphQL.js, the Java optional = false python-versions = "<4,>=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "graphql_core-3.2.6-py3-none-any.whl", hash = "sha256:78b016718c161a6fb20a7d97bbf107f331cd1afe53e45566c59f776ed7f0b45f"}, {file = "graphql_core-3.2.6.tar.gz", hash = "sha256:c08eec22f9e40f0bd61d805907e3b3b1b9a320bc606e23dc145eebca07c8fbab"}, @@ -2099,6 +2117,7 @@ description = "Safely pass data to untrusted environments and back." optional = false python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -2193,6 +2212,7 @@ files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] +markers = {main = "extra == \"all\""} [package.dependencies] MarkupSafe = ">=2.0" @@ -2200,6 +2220,92 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jiter" +version = "0.8.2" +description = "Fast iterable JSON parser." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = 
"sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, + {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, + {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, + {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, + {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, + {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, + {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, + {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, + {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, + {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = 
"jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, +] + [[package]] name = "jsbeautifier" version = "1.15.1" @@ -2222,6 +2328,7 @@ description = "A microlibrary that defines a Json type alias for Python." optional = false python-versions = ">=3.7,<4.0" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "jsonalias-0.1.1-py3-none-any.whl", hash = "sha256:a56d2888e6397812c606156504e861e8ec00e188005af149f003c787db3d3f18"}, {file = "jsonalias-0.1.1.tar.gz", hash = "sha256:64f04d935397d579fc94509e1fcb6212f2d081235d9d6395bd10baedf760a769"}, @@ -2234,6 +2341,7 @@ description = "Send JSON-RPC requests" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "jsonrpcclient-4.0.3-py3-none-any.whl", hash = "sha256:3cbb9e27e1be29821becf135ea183144a836215422727e1ffe5056a49a670f0d"}, ] @@ -2497,6 +2605,7 @@ files = [ {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] +markers = {main = "extra == \"all\""} [[package]] name = "mdurl" @@ -2727,6 +2836,7 @@ files = [ {file = "more-itertools-8.14.0.tar.gz", hash = "sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750"}, {file = "more_itertools-8.14.0-py3-none-any.whl", hash = "sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2"}, ] +markers = {main = "extra == \"all\""} [[package]] name = "morphys" @@ -2990,6 +3100,7 @@ description = "Python package wrapping the public and private key cryptography a optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "open_aea_ledger_cosmos-1.64.0-py3-none-any.whl", hash = "sha256:1bc4752e696871588c391659c0df5907eccc4d938f3b14a4fa5346cc04ad953f"}, {file = "open_aea_ledger_cosmos-1.64.0.tar.gz", hash = "sha256:c836c753c8a4150ea49f19723dce808e357243bdebceb556dbc46abb0d2a6f14"}, @@ -3027,6 +3138,7 @@ description = "Python package wrapping the public and private key cryptography a optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "open_aea_ledger_solana-1.64.0-py3-none-any.whl", hash = "sha256:96214c7e60c36863e49c052455575840cd06d788d4194a31061cc19ad08d57e9"}, {file = "open_aea_ledger_solana-1.64.0.tar.gz", hash = "sha256:45f97dbba7424d88211ceb38c5d07a198e2c1d74751b0a7a908feabb13b847ee"}, @@ -3065,6 +3177,7 @@ description = "A framework for the creation of autonomous agent services." 
optional = false python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "open_autonomy-0.19.3-py3-none-any.whl", hash = "sha256:5fc8d227d4c816c152ee615bd7128e2094429642a9d5d652cbb9145072c8b421"}, {file = "open_autonomy-0.19.3.tar.gz", hash = "sha256:5e0a6b48ce3a456ea3ccd0ea9e3d43a84bb3070ca3deea2909a47e527c66a145"}, @@ -3096,6 +3209,32 @@ werkzeug = "2.0.3" all = ["click (>=8.1.0,<9)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.64.0)", "pytest (>=7.0.0,<7.3.0)", "python-dotenv (>=0.14.5,<0.22.0)", "texttable (==1.6.7)"] cli = ["click (>=8.1.0,<9)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.64.0)", "pytest (>=7.0.0,<7.3.0)", "python-dotenv (>=0.14.5,<0.22.0)", "texttable (==1.6.7)"] +[[package]] +name = "openai" +version = "1.63.2" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "openai-1.63.2-py3-none-any.whl", hash = "sha256:1f38b27b5a40814c2b7d8759ec78110df58c4a614c25f182809ca52b080ff4d4"}, + {file = "openai-1.63.2.tar.gz", hash = "sha256:aeabeec984a7d2957b4928ceaa339e2ead19c61cfcf35ae62b7c363368d26360"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<15)"] + [[package]] name = "openapi-spec-validator" version = "0.2.8" @@ -3149,6 +3288,7 @@ description = "SSH2 protocol library" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"}, {file = "paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"}, @@ -3361,6 +3501,7 @@ description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, @@ -3482,7 +3623,7 @@ files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {dev = "sys_platform == \"linux\" and platform_python_implementation != \"PyPy\""} +markers = {main = "extra == \"all\"", dev = "sys_platform == \"linux\" and platform_python_implementation != \"PyPy\""} [[package]] name = "pycryptodome" @@ -3533,7 +3674,6 @@ description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"all\"" files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -3555,7 +3695,6 @@ description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"all\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -3705,6 +3844,7 @@ description = "Python bindings for heck, the Rust case conversion library" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "pyheck-0.1.5-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:44caf2b7a49d71fdeb0469e9f35886987ad815a8638b3c5b5c83f351d6aed413"}, {file = "pyheck-0.1.5-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:316a842b94beff6e59a97dbcc590e9be92a932e59126b0faa9ac750384f27eaf"}, @@ -3766,6 +3906,7 @@ description = "Python binding to the Networking and Cryptography (NaCl) library" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, @@ -3949,6 +4090,7 @@ description = "A pytest plugin for managing processes across test runs." 
optional = false python-versions = ">=3.5" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "pytest-xprocess-0.18.1.tar.gz", hash = "sha256:fd9f30ed1584b5833bc34494748adf0fb9de3ca7bacc4e88ad71989c21cba266"}, {file = "pytest_xprocess-0.18.1-py3-none-any.whl", hash = "sha256:6f2aba817d842518d9d9dfb7e9adfe2a6e354a4359f4166bef0822ef4be1c9db"}, @@ -3980,6 +4122,7 @@ files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] +markers = {main = "extra == \"all\""} [package.dependencies] six = ">=1.5" @@ -4018,7 +4161,7 @@ description = "Python for Window Extensions" optional = false python-versions = "*" groups = ["main"] -markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" +markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -4286,6 +4429,7 @@ files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, ] +markers = {main = "extra == \"all\""} [package.dependencies] requests = ">=2.0.1,<3.0.0" @@ -4516,6 +4660,7 @@ description = "Solana Python API" optional = false python-versions = ">=3.7,<4.0" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "solana-0.30.2-py3-none-any.whl", hash = "sha256:d7e8295a1f86982ba51e78a65c16ce55f4a9e9caa8938564922a209ddfb2a01f"}, {file = "solana-0.30.2.tar.gz", hash = "sha256:7b16e76cdd1d3024219679cdb73c20324d6d79e3c9766fe0ca52be79ef5ff691"}, @@ -4537,6 +4682,7 @@ description = "Python bindings for Solana Rust tools" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "solders-0.18.1-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1b20230838626fad26d5bdaf8ebe3db3b660ef9f56cc271feca8970d464ea11f"}, {file = "solders-0.18.1-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3d2503693d0fb0efd37e3f921277327ff664bd04fff551346fad565dd8b9185a"}, @@ -4584,6 +4730,7 @@ description = "Algebraic types for Python (notably providing Sum Types, aka Tagg optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "sumtypes-0.1a6-py2.py3-none-any.whl", hash = "sha256:3e9d71322dd927d25d935072f8be7daec655ea292fd392359a5bb2c1e53dfdc3"}, {file = "sumtypes-0.1a6.tar.gz", hash = "sha256:1a6ff095e06a1885f340ddab803e0f38e3f9bed81f9090164ca9682e04e96b43"}, @@ -4634,6 +4781,7 @@ description = "module to create simple ASCII tables" optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "texttable-1.6.7-py2.py3-none-any.whl", hash = "sha256:b7b68139aa8a6339d2c320ca8b1dc42d13a7831a346b446cb9eb385f0c76310c"}, {file = "texttable-1.6.7.tar.gz", hash = "sha256:290348fb67f7746931bcdfd55ac7584ecd4e5b0846ab164333f0794b121760f2"}, @@ -4753,6 +4901,7 @@ description = "List processing tools and functional utilities" optional = 
false python-versions = ">=3.5" groups = ["main"] +markers = "implementation_name == \"cpython\" or implementation_name == \"pypy\" or extra == \"all\"" files = [ {file = "toolz-0.11.2-py3-none-any.whl", hash = "sha256:a5700ce83414c64514d82d60bcda8aabfde092d1c1a8663f9200c07fdcc6da8f"}, {file = "toolz-0.11.2.tar.gz", hash = "sha256:6b312d5e15138552f1bda8a4e66c30e236c831b612b2bf0005f8a1df10a4bc33"}, @@ -4784,6 +4933,28 @@ virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2, docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)"] +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "twine" version = "5.1.1" @@ -4814,6 +4985,7 @@ description = "Typing stubs for cachetools" optional = false python-versions = "*" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "types-cachetools-4.2.10.tar.gz", hash = "sha256:b1cb18aaff25d2ad47a060413c660c39fadddb01f72012dd1134584b1fdaada5"}, {file = "types_cachetools-4.2.10-py3-none-any.whl", hash = "sha256:48301115189d4879d0960baac5a8a2b2d31ce6129b2ce3b915000ed337284898"}, @@ -4880,6 +5052,7 @@ description = "Multi-container orchestration for Docker" optional = false python-versions = ">=3.4" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "valory-docker-compose-1.29.3.tar.gz", hash = "sha256:77469ff7493e0317ccc8d92264323d62eee23c810536184d705b833e0cbb6a03"}, {file = "valory_docker_compose-1.29.3-py2.py3-none-any.whl", hash = "sha256:7f5d1eceb080d0512e2f5d16b57eaaf9a742b02da0878b71041217ec7cef53fe"}, @@ -4973,6 +5146,7 @@ files = [ {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, ] +markers = {main = "extra == \"all\""} [package.extras] watchmedo = ["PyYAML (>=3.10)"] @@ -5129,6 +5303,7 @@ description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "Werkzeug-2.0.3-py3-none-any.whl", hash = "sha256:1421ebfc7648a39a5c58c601b154165d05cf47a3cd0ccb70857cbdacf6c8f2b8"}, {file = "Werkzeug-2.0.3.tar.gz", hash = "sha256:b863f8ff057c522164b6067c9e28b041161b4be5ba4d0daceeaa50a163822d3c"}, @@ -5261,6 +5436,7 @@ description = "Zstandard bindings for Python" optional = false python-versions = ">=3.6" groups = ["main"] +markers = "extra == \"all\"" files = [ {file = "zstandard-0.18.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef7e8a200e4c8ac9102ed3c90ed2aa379f6b880f63032200909c1be21951f556"}, {file = "zstandard-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2dc466207016564805e56d28375f4f533b525ff50d6776946980dff5465566ac"}, @@ -5320,4 +5496,4 @@ all = ["isort", "open-aea", "open-aea-ledger-cosmos", "open-aea-ledger-ethereum" [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.12" -content-hash = "4b9893df28bf3bfe97487bc183840f597a46918e88bb42265d864c3ce34f0fa3" +content-hash = "506ad95e535c2f447cf892758dde25c62e9eeced2ce7448c858d1ad5c0915016" diff --git a/pyproject.toml b/pyproject.toml index e34a2aac..2228b35e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,7 @@ isort = "^5.13.2" openapi-spec-validator = "0.2.8" disutils = "^1.4.32.post2" setuptools = "^75.8.0" +openai = "^1.63.2" [tool.poetry.group.dev.dependencies]