From 7db6a3befbe6164c4f9315e3b4f6d0616185f322 Mon Sep 17 00:00:00 2001
From: fishmingyu <1661342068@qq.com>
Date: Wed, 26 Feb 2025 20:55:21 -0800
Subject: [PATCH 1/5] [Infra] point to mage

---
 tests/.gitignore            |  1 +
 tests/test_llm_chat.py      |  9 +++++----
 tests/test_rtl_generator.py |  8 ++++----
 tests/test_single_agent.py  |  8 ++++----
 tests/test_top_agent.py     | 14 +++++++-------
 5 files changed, 21 insertions(+), 19 deletions(-)
 create mode 100644 tests/.gitignore

diff --git a/tests/.gitignore b/tests/.gitignore
new file mode 100644
index 0000000..dc32033
--- /dev/null
+++ b/tests/.gitignore
@@ -0,0 +1 @@
+output_*
\ No newline at end of file
diff --git a/tests/test_llm_chat.py b/tests/test_llm_chat.py
index 677be13..8d2bc2d 100644
--- a/tests/test_llm_chat.py
+++ b/tests/test_llm_chat.py
@@ -1,20 +1,21 @@
 import argparse

-from mage_rtl.gen_config import Config, get_llm
-from mage_rtl.log_utils import get_logger
+from mage.gen_config import Config, get_llm
+from mage.log_utils import get_logger

 logger = get_logger(__name__)

 args_dict = {
     "model": "claude-3-5-sonnet-20241022",
+    "provider" : "anthropic",
 }


 def main():
     args = argparse.Namespace(**args_dict)
     cfg = Config("./key.cfg")
-    get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192)
-    get_llm(model="gpt-4o", api_key=cfg["OPENAI_API_KEY"], max_tokens=8192)
+    get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192, cfg_path="./key.cfg")
+    get_llm(model="gpt-4o", api_key=cfg["OPENAI_API_KEY"], max_tokens=8192, cfg_path="./key.cfg")


 if __name__ == "__main__":
diff --git a/tests/test_rtl_generator.py b/tests/test_rtl_generator.py
index da4a300..e49db16 100644
--- a/tests/test_rtl_generator.py
+++ b/tests/test_rtl_generator.py
@@ -1,13 +1,13 @@
 import argparse

-from mage_rtl.benchmark_read_helper import (
+from mage.benchmark_read_helper import (
     TypeBenchmark,
     TypeBenchmarkFile,
     get_benchmark_contents,
 )
-from mage_rtl.gen_config import Config, get_llm
-from mage_rtl.log_utils import get_logger
-from mage_rtl.rtl_generator import RTLGenerator
+from mage.gen_config import Config, get_llm
+from mage.log_utils import get_logger
+from mage.rtl_generator import RTLGenerator

 logger = get_logger(__name__)

diff --git a/tests/test_single_agent.py b/tests/test_single_agent.py
index 2122dd2..c4b95d1 100644
--- a/tests/test_single_agent.py
+++ b/tests/test_single_agent.py
@@ -6,14 +6,14 @@
 import openai
 import pandas as pd

-from mage_rtl.benchmark_read_helper import (
+from mage.benchmark_read_helper import (
     TypeBenchmark,
     TypeBenchmarkFile,
     get_benchmark_contents,
 )
-from mage_rtl.gen_config import Config, get_llm
-from mage_rtl.log_utils import get_logger
-from mage_rtl.rtl_generator import RTLGenerator
+from mage.gen_config import Config, get_llm
+from mage.log_utils import get_logger
+from mage.rtl_generator import RTLGenerator

 # Configuration and Constants
 # model = "gpt-4-0314"
diff --git a/tests/test_top_agent.py b/tests/test_top_agent.py
index 4c4e04c..6728987 100644
--- a/tests/test_top_agent.py
+++ b/tests/test_top_agent.py
@@ -6,16 +6,16 @@

 from llama_index.core.llms import LLM

-from mage_rtl.agent import TopAgent
-from mage_rtl.benchmark_read_helper import (
+from mage.agent import TopAgent
+from mage.benchmark_read_helper import (
     TypeBenchmark,
     TypeBenchmarkFile,
     get_benchmark_contents,
 )
-from mage_rtl.gen_config import get_llm, set_exp_setting
-from mage_rtl.log_utils import get_logger
-from mage_rtl.sim_reviewer import sim_review_golden_benchmark
-from mage_rtl.token_counter import TokenCount
+from mage.gen_config import get_llm, set_exp_setting
+from mage.log_utils import get_logger
+from mage.sim_reviewer import sim_review_golden_benchmark
+from mage.token_counter import TokenCount

 logger = get_logger(__name__)

@@ -37,7 +37,7 @@
     "top_p": 0.95,
     "max_token": 8192,
     "use_golden_tb_in_mage": True,
-    "key_cfg_path": "./key.cfg",
+    "key_cfg_path": "../key.cfg",
 }


From cdea9852b27d9dc33df00b81a6c4f8815799a717 Mon Sep 17 00:00:00 2001
From: fishmingyu <1661342068@qq.com>
Date: Wed, 26 Feb 2025 21:06:06 -0800
Subject: [PATCH 2/5] [TP] add verilog-eval as third party

---
 .gitignore               |  4 +++-
 .gitmodules              |  3 +++
 README.md                |  6 +++---
 pyproject.toml           | 38 +++++++++++++++++++++++++++++++++++
 requirements.txt         |  9 ---------
 setup.py                 | 43 ----------------------------------------
 src/__init__.py          |  1 +
 src/mage_rtl/__init__.py |  0
 verilog-eval             |  1 +
 9 files changed, 49 insertions(+), 56 deletions(-)
 create mode 100644 .gitmodules
 create mode 100644 pyproject.toml
 delete mode 100644 requirements.txt
 delete mode 100644 setup.py
 create mode 100644 src/__init__.py
 create mode 100644 src/mage_rtl/__init__.py
 create mode 160000 verilog-eval

diff --git a/.gitignore b/.gitignore
index 60c6984..760532b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
-*egg-info/
+*.egg-info/
 *.vcd
 __pycache__/
 key.cfg
@@ -10,3 +10,5 @@ data/
 *.o
 *.d
 *.vvp
+dist/
+build/
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..e2431a2
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "verilog-eval"]
+	path = verilog-eval
+	url = https://github.com/NVlabs/verilog-eval
diff --git a/README.md b/README.md
index 009431e..c46fc43 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@ cd MAGE
 # Continue after successfully installed conda
 conda create -n mage python=3.11
 conda activate mage
-pip install -Ue .
+pip install .
 ```

 ### 2.>To set api key:
@@ -153,7 +153,7 @@ args_dict = {
     "top_p": 0.95,
     "max_token": 8192,
     "use_golden_tb_in_mage": True,
-    "key_cfg_path": os.path.join(os.path.dirname(os.path.abspath(__file__)), "key.cfg"),
+    "key_cfg_path": "key.cfg",
 }
 ```
 Where each argument means:
@@ -167,7 +167,7 @@ Where each argument means:
 8. temperature: Argument for LLM generation randomness. Usually between [0, 1]
 9. top_p: Argument for LLM generation randomness. Usually between [0, 1]
 10. max_token: Maximum number of tokens the model is allowed to generate in its output.
-11. key_cfg_path: Path to your key.cfg file. Defaulted to be under MAGE/tests
+11. key_cfg_path: Path to your key.cfg file. Defaulted to be under MAGE

 ## Development Guide

diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..4ce5a27
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,38 @@
+[build-system]
+requires = ["setuptools>=61.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "mage"
+version = "1.0.1"
+description = "MAGE: Open-source multi-agent LLM RTL code generator"
+readme = { file = "README.md", content-type = "text/markdown" }
+requires-python = ">=3.11"
+dependencies = [
+    "config",
+    "fsspec[http]<=2024.9.0,>=2023.1.0",
+    "httpx<1,>=0.23.0",
+    "llama-index-core",
+    "llama-index-llms-anthropic",
+    "llama-index-llms-openai",
+    "llama-index-llms-vertex",
+    "pre-commit",
+    "pydantic",
+    "rich",
+    "tiktoken"
+]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: Apache Software License",
+    "Operating System :: POSIX",
+    "Programming Language :: Python :: 3"
+]
+
+[tool.setuptools]
+include-package-data = true
+zip-safe = false
+packages = ["mage"]
+
+[tool.setuptools.package-dir]
+mage = "src/mage_rtl"
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index dff87b5..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-config
-httpx
-llama-index-core
-llama-index-llms-anthropic
-llama-index-llms-openai
-pre-commit
-pydantic
-rich
-tiktoken
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 7005dbb..0000000
--- a/setup.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import os
-
-from setuptools import find_packages, setup
-
-
-def read_requirements(filename="requirements.txt"):
-    try:
-        with open(filename, "r") as f:
-            return [
-                line.strip() for line in f if line.strip() and not line.startswith("#")
-            ]
-    except FileNotFoundError:
-        return []
-
-
-setup(
-    name="mage_rtl",
-    version="1.0.0",
-    packages=find_packages(where="src"),
-    package_dir={"": "src"},
-    # Metadata
-    description="MAGE: Open-source multi-agent LLM RTL code generator",
-    long_description=open("README.md").read() if os.path.exists("README.md") else "",
-    long_description_content_type="text/markdown",
-    # Requirements
-    install_requires=read_requirements(),
-    # Python version requirement
-    python_requires=">=3.11",
-    # Entry points for CLI
-    # entry_points={
-    #     'console_scripts': [
-    #         'soyo=soyo.cli:main',
-    #     ],
-    # },
-    # Project classification
-    classifiers=[
-        "Development Status :: 3 - Alpha",
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: Apache Software License",
-        "Operating System :: POSIX",
-        "Programming Language :: Python :: 3",
-    ],
-)
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000..cb2120f
--- /dev/null
+++ b/src/__init__.py
@@ -0,0 +1 @@
+from mage_rtl import *
diff --git a/src/mage_rtl/__init__.py b/src/mage_rtl/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/verilog-eval b/verilog-eval
new file mode 160000
index 0000000..c498220
--- /dev/null
+++ b/verilog-eval
@@ -0,0 +1 @@
+Subproject commit c498220d0a52248f8e3fdffe279075215bde2da6

From 79dde3ec4b29a45c97893d8deaa84c1c0c73d3d9 Mon Sep 17 00:00:00 2001
From: fishmingyu <1661342068@qq.com>
Date: Wed, 26 Feb 2025 21:06:48 -0800
Subject: [PATCH 3/5] [Infra] point to mage

---
 tests/.gitignore           |  2 +-
 tests/test_llm_chat.py     | 16 +++++++++++++---
 tests/test_single_agent.py |  1 -
 tests/test_top_agent.py    |  1 -
 4 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/tests/.gitignore b/tests/.gitignore
index dc32033..5c7e730 100644
--- a/tests/.gitignore
+++ b/tests/.gitignore
@@ -1 +1 @@
-output_*
\ No newline at end of file
+output_*
diff --git a/tests/test_llm_chat.py b/tests/test_llm_chat.py
index 8d2bc2d..26a5184 100644
--- a/tests/test_llm_chat.py
+++ b/tests/test_llm_chat.py
@@ -7,15 +7,25 @@

 args_dict = {
     "model": "claude-3-5-sonnet-20241022",
-    "provider" : "anthropic",
+    "provider": "anthropic",
 }


 def main():
     args = argparse.Namespace(**args_dict)
     cfg = Config("./key.cfg")
-    get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192, cfg_path="./key.cfg")
-    get_llm(model="gpt-4o", api_key=cfg["OPENAI_API_KEY"], max_tokens=8192, cfg_path="./key.cfg")
+    get_llm(
+        model=args.model,
+        api_key=cfg["ANTHROPIC_API_KEY"],
+        max_tokens=8192,
+        cfg_path="./key.cfg",
+    )
+    get_llm(
+        model="gpt-4o",
+        api_key=cfg["OPENAI_API_KEY"],
+        max_tokens=8192,
+        cfg_path="./key.cfg",
+    )


 if __name__ == "__main__":
diff --git a/tests/test_single_agent.py b/tests/test_single_agent.py
index c4b95d1..08f3524 100644
--- a/tests/test_single_agent.py
+++ b/tests/test_single_agent.py
@@ -5,7 +5,6 @@
 import backoff
 import openai
 import pandas as pd
-
 from mage.benchmark_read_helper import (
     TypeBenchmark,
     TypeBenchmarkFile,
diff --git a/tests/test_top_agent.py b/tests/test_top_agent.py
index 6728987..1fdd61d 100644
--- a/tests/test_top_agent.py
+++ b/tests/test_top_agent.py
@@ -5,7 +5,6 @@
 from typing import Any, Dict

 from llama_index.core.llms import LLM
-
 from mage.agent import TopAgent
 from mage.benchmark_read_helper import (
     TypeBenchmark,

From 288ce6f1a7628f9578b8bed478e697e12d4bd235 Mon Sep 17 00:00:00 2001
From: fishmingyu <1661342068@qq.com>
Date: Wed, 26 Feb 2025 21:07:52 -0800
Subject: [PATCH 4/5] [Infra] pyproject toml

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4ce5a27..4714ab4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,4 +35,4 @@ zip-safe = false
 packages = ["mage"]

 [tool.setuptools.package-dir]
-mage = "src/mage_rtl"
\ No newline at end of file
+mage = "src/mage_rtl"

From fbec00c44e15a6baaa7f30f28ebf808db6745cef Mon Sep 17 00:00:00 2001
From: fishmingyu <1661342068@qq.com>
Date: Wed, 26 Feb 2025 21:08:12 -0800
Subject: [PATCH 5/5] revise readme

---
 .gitignore      |  2 +-
 README.md       | 12 +++++-----
 src/__init__.py |  1 -
 3 files changed, 8 insertions(+), 7 deletions(-)
 delete mode 100644 src/__init__.py

diff --git a/.gitignore b/.gitignore
index 760532b..5c461c2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,4 @@ data/
 *.d
 *.vvp
 dist/
-build/
\ No newline at end of file
+build/
diff --git a/README.md b/README.md
index c46fc43..fc5e977 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,8 @@ MAGE is an open-source multi-agent LLM RTL code generator.
 ### 1.> To install the repo itself:
 ```
 git clone https://github.com/stable-lab/MAGE.git
+# To get submodules at the same time
+git clone --recursive https://github.com/stable-lab/MAGE.git
 cd MAGE

 # Install conda first if it's not on your machine like "apt install conda"
@@ -26,10 +28,10 @@ You can either:
 2. Create key.cfg file.
 The file should be in format of:
 ```
-OPENAI_API_KEY: 'xxxxxxx'
-ANTHROPIC_API_KEY: 'xxxxxxx'
-VERTEX_SERVICE_ACCOUNT_PATH: 'xxxxxxx'
-VERTEX_REGION: 'xxxxxxx'
+OPENAI_API_KEY= 'xxxxxxx'
+ANTHROPIC_API_KEY= 'xxxxxxx'
+VERTEX_SERVICE_ACCOUNT_PATH= 'xxxxxxx'
+VERTEX_REGION= 'xxxxxxx'
 ```

 ### To install iverilog {.tabset}
@@ -97,7 +99,7 @@ python3 setup.py install --user
 ```

 ```
-git clone https://github.com/NVlabs/verilog-eval
+git submodule update --init --recursive
 ```

 ## File structure
diff --git a/src/__init__.py b/src/__init__.py
deleted file mode 100644
index cb2120f..0000000
--- a/src/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from mage_rtl import *