From 077fc338b016e13ed74b4656041ad81113740efe Mon Sep 17 00:00:00 2001 From: YongshiZhan Date: Wed, 19 Feb 2025 07:21:23 +0800 Subject: [PATCH 1/4] 25.2.18 updated key.cfg path and set provider --- src/mage_rtl/gen_config.py | 30 +++++++++++++++++++++--------- tests/test_top_agent.py | 12 +++++++++--- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/src/mage_rtl/gen_config.py b/src/mage_rtl/gen_config.py index 7fbf60f..1188aee 100644 --- a/src/mage_rtl/gen_config.py +++ b/src/mage_rtl/gen_config.py @@ -35,18 +35,30 @@ def __getitem__(self, index): def get_llm(**kwargs) -> LLM: err_msgs = [] - for LLM_func in [OpenAI, Anthropic]: - try: - llm: LLM = LLM_func(**kwargs) - _ = llm.complete("Say 'Hi'") - break - except Exception as e: - err_msgs.append(str(e)) - else: + args_d = kwargs["args_d"] + cfg = Config(args_d["key_cfg_path"]) + + LLM_func = Anthropic + if args_d["provider"] == "anthropic": + LLM_func = Anthropic + api_key_cfg = cfg["ANTHROPIC_API_KEY"] + elif args_d["provider"] == "openai": + LLM_func = OpenAI + api_key_cfg = cfg["OPENAI_API_KEY"] + # add more providers if needed + + #for LLM_func in [OpenAI, Anthropic]: + try: + llm: LLM = LLM_func(model=args_d["model"], api_key=api_key_cfg, max_tokens=8192) + _ = llm.complete("Say 'Hi'") + #break + except Exception as e: + err_msgs.append(str(e)) + """ else: raise Exception( f"gen_config: Failed to get LLM. 
Error msgs include:\n" + "\n".join(err_msgs) - ) + ) """ return llm diff --git a/tests/test_top_agent.py b/tests/test_top_agent.py index f017d17..cd98120 100644 --- a/tests/test_top_agent.py +++ b/tests/test_top_agent.py @@ -1,6 +1,7 @@ import argparse import json import time +import os from datetime import timedelta from typing import Any, Dict @@ -21,8 +22,9 @@ args_dict = { + "provider" : "anthropic", "model": "claude-3-5-sonnet-20241022", - # "model": "gpt-4o-2024-08-06", + #"model": "gpt-4o-2024-08-06", # "filter_instance": "^(Prob070_ece241_2013_q2|Prob151_review2015_fsm)$", "filter_instance": "^(Prob011_norgate)$", # "filter_instance": "^(.*)$", @@ -32,7 +34,9 @@ "n": 1, "temperature": 0.85, "top_p": 0.95, + "max_token": 8192, "use_golden_tb_in_mage": True, + "key_cfg_path": os.path.join(os.path.dirname(os.path.abspath(__file__)), 'key.cfg') } @@ -148,8 +152,10 @@ def run_round(args: argparse.Namespace, llm: LLM): def main(): args = argparse.Namespace(**args_dict) - cfg = Config("./key.cfg") - llm = get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192) + #cfg = Config(args.key_cfg_path) + #cfg = Config("./key.cfg") + #llm = get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192) + llm = get_llm(args_d=args_dict) identifier_head = args.run_identifier n = args.n set_exp_setting(temperature=args.temperature, top_p=args.top_p) From 1462df63fd24920a53290f18f55a7df43e6d19cf Mon Sep 17 00:00:00 2001 From: YongshiZhan Date: Wed, 19 Feb 2025 09:09:24 +0800 Subject: [PATCH 2/4] Fixed gen_config --- README.md | 22 ++++++++++++++-------- src/mage_rtl/gen_config.py | 21 ++++++++++----------- tests/test_top_agent.py | 30 ++++++++++++++++++++++-------- 3 files changed, 46 insertions(+), 27 deletions(-) diff --git a/README.md b/README.md index 0177d3f..d6161c0 100644 --- a/README.md +++ b/README.md @@ -137,6 +137,7 @@ Run arguments can be set in the file like: ``` args_dict = { + "provider": "anthropic", "model": 
"claude-3-5-sonnet-20241022", # "model": "gpt-4o-2024-08-06", # "filter_instance": "^(Prob070_ece241_2013_q2|Prob151_review2015_fsm)$", @@ -148,18 +149,23 @@ args_dict = { "n": 1, "temperature": 0.85, "top_p": 0.95, + "max_token": 8192, "use_golden_tb_in_mage": True, + "key_cfg_path": os.path.join(os.path.dirname(os.path.abspath(__file__)), "key.cfg"), } ``` Where each argument means: -1. model: The LLM model used. Support for gpt-4o and claude has been verified. -2. filter_instance: A RegEx style instance name filter. -3. type_benchmark: Support running verilog_eval_v1 or verilog_eval_v2 -4. path_benchmark: Where the benchmark repo is cloned -5. run_identifier: Unique name to disguish different runs -6. n: Number of repeated run to execute -7. temperature: Argument for LLM generation randomness. Usually between [0, 1] -8. top_p: Argument for LLM generation randomness. Usually between [0, 1] +1. provider: The api provider of the LLM model used. e.g. anthropic-->claude, openai-->gpt-4o +2. model: The LLM model used. Support for gpt-4o and claude has been verified. +3. filter_instance: A RegEx style instance name filter. +4. type_benchmark: Support running verilog_eval_v1 or verilog_eval_v2 +5. path_benchmark: Where the benchmark repo is cloned +6. run_identifier: Unique name to distinguish different runs +7. n: Number of repeated run to execute +8. temperature: Argument for LLM generation randomness. Usually between [0, 1] +9. top_p: Argument for LLM generation randomness. Usually between [0, 1] +10. max_token: Maximum number of tokens the model is allowed to generate in its output. +11. key_cfg_path: Path to your key.cfg file. 
Defaulted to be under MAGE/tests ## Development Guide diff --git a/src/mage_rtl/gen_config.py b/src/mage_rtl/gen_config.py index 1188aee..fccdf8d 100644 --- a/src/mage_rtl/gen_config.py +++ b/src/mage_rtl/gen_config.py @@ -35,23 +35,22 @@ def __getitem__(self, index): def get_llm(**kwargs) -> LLM: err_msgs = [] - args_d = kwargs["args_d"] - cfg = Config(args_d["key_cfg_path"]) - + LLM_func = Anthropic - if args_d["provider"] == "anthropic": + if kwargs["provider"] == "anthropic": LLM_func = Anthropic - api_key_cfg = cfg["ANTHROPIC_API_KEY"] - elif args_d["provider"] == "openai": + elif kwargs["provider"] == "openai": LLM_func = OpenAI - api_key_cfg = cfg["OPENAI_API_KEY"] # add more providers if needed - - #for LLM_func in [OpenAI, Anthropic]: + try: - llm: LLM = LLM_func(model=args_d["model"], api_key=api_key_cfg, max_tokens=8192) + llm: LLM = LLM_func( + model=kwargs["model"], + api_key=kwargs["api_key"], + max_tokens=kwargs["max_token"], + ) _ = llm.complete("Say 'Hi'") - #break + except Exception as e: err_msgs.append(str(e)) """ else: diff --git a/tests/test_top_agent.py b/tests/test_top_agent.py index cd98120..a1c3d73 100644 --- a/tests/test_top_agent.py +++ b/tests/test_top_agent.py @@ -1,7 +1,7 @@ import argparse import json -import time import os +import time from datetime import timedelta from typing import Any, Dict @@ -22,9 +22,9 @@ args_dict = { - "provider" : "anthropic", + "provider": "anthropic", "model": "claude-3-5-sonnet-20241022", - #"model": "gpt-4o-2024-08-06", + # "model": "gpt-4o-2024-08-06", # "filter_instance": "^(Prob070_ece241_2013_q2|Prob151_review2015_fsm)$", "filter_instance": "^(Prob011_norgate)$", # "filter_instance": "^(.*)$", @@ -36,7 +36,7 @@ "top_p": 0.95, "max_token": 8192, "use_golden_tb_in_mage": True, - "key_cfg_path": os.path.join(os.path.dirname(os.path.abspath(__file__)), 'key.cfg') + "key_cfg_path": os.path.join(os.path.dirname(os.path.abspath(__file__)), "key.cfg"), } @@ -152,10 +152,24 @@ def run_round(args: 
argparse.Namespace, llm: LLM): def main(): args = argparse.Namespace(**args_dict) - #cfg = Config(args.key_cfg_path) - #cfg = Config("./key.cfg") - #llm = get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192) - llm = get_llm(args_d=args_dict) + cfg = Config(args.key_cfg_path) + # cfg = Config("./key.cfg") + # llm = get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192) + # llm = get_llm(args_d=args_dict) + + api_key_cfg = "" + if args.provider == "anthropic": + api_key_cfg = cfg["ANTHROPIC_API_KEY"] + elif args.provider == "openai": + api_key_cfg = cfg["OPENAI_API_KEY"] + # add more providers if needed + + llm = get_llm( + model=args.model, + api_key=api_key_cfg, + max_token=args.max_token, + provider=args.provider, + ) identifier_head = args.run_identifier n = args.n set_exp_setting(temperature=args.temperature, top_p=args.top_p) From cb5fae1d756c37ca1c25066841489863a424cabb Mon Sep 17 00:00:00 2001 From: YongshiZhan Date: Thu, 20 Feb 2025 14:13:01 +0800 Subject: [PATCH 3/4] 25.2.19 fixed pull request comments from hejia --- src/mage_rtl/gen_config.py | 16 +++++++--------- tests/test_top_agent.py | 13 +------------ 2 files changed, 8 insertions(+), 21 deletions(-) diff --git a/src/mage_rtl/gen_config.py b/src/mage_rtl/gen_config.py index fccdf8d..e0f6e7d 100644 --- a/src/mage_rtl/gen_config.py +++ b/src/mage_rtl/gen_config.py @@ -34,30 +34,28 @@ def __getitem__(self, index): def get_llm(**kwargs) -> LLM: - err_msgs = [] - LLM_func = Anthropic + cfg = Config(kwargs["cfg_path"]) + api_key_cfg = "" if kwargs["provider"] == "anthropic": LLM_func = Anthropic + api_key_cfg = cfg["ANTHROPIC_API_KEY"] elif kwargs["provider"] == "openai": LLM_func = OpenAI + api_key_cfg = cfg["OPENAI_API_KEY"] # add more providers if needed try: llm: LLM = LLM_func( model=kwargs["model"], - api_key=kwargs["api_key"], + api_key=api_key_cfg, max_tokens=kwargs["max_token"], ) _ = llm.complete("Say 'Hi'") except Exception as e: - 
err_msgs.append(str(e)) - """ else: - raise Exception( - f"gen_config: Failed to get LLM. Error msgs include:\n" - + "\n".join(err_msgs) - ) """ + raise Exception("gen_config: Failed to get LLM") from e + return llm diff --git a/tests/test_top_agent.py b/tests/test_top_agent.py index a1c3d73..2a1390c 100644 --- a/tests/test_top_agent.py +++ b/tests/test_top_agent.py @@ -152,21 +152,10 @@ def run_round(args: argparse.Namespace, llm: LLM): def main(): args = argparse.Namespace(**args_dict) - cfg = Config(args.key_cfg_path) - # cfg = Config("./key.cfg") - # llm = get_llm(model=args.model, api_key=cfg["ANTHROPIC_API_KEY"], max_tokens=8192) - # llm = get_llm(args_d=args_dict) - - api_key_cfg = "" - if args.provider == "anthropic": - api_key_cfg = cfg["ANTHROPIC_API_KEY"] - elif args.provider == "openai": - api_key_cfg = cfg["OPENAI_API_KEY"] - # add more providers if needed llm = get_llm( model=args.model, - api_key=api_key_cfg, + cfg_path=args.key_cfg_path, max_token=args.max_token, provider=args.provider, ) From a83f874d2936f6399f20c8449b378c0c22ef8d37 Mon Sep 17 00:00:00 2001 From: YongshiZhan Date: Thu, 20 Feb 2025 14:19:24 +0800 Subject: [PATCH 4/4] ran pre-commit --- src/mage_rtl/gen_config.py | 2 +- tests/test_top_agent.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mage_rtl/gen_config.py b/src/mage_rtl/gen_config.py index e0f6e7d..85abdeb 100644 --- a/src/mage_rtl/gen_config.py +++ b/src/mage_rtl/gen_config.py @@ -55,7 +55,7 @@ def get_llm(**kwargs) -> LLM: except Exception as e: raise Exception("gen_config: Failed to get LLM") from e - + return llm diff --git a/tests/test_top_agent.py b/tests/test_top_agent.py index 2a1390c..0b91392 100644 --- a/tests/test_top_agent.py +++ b/tests/test_top_agent.py @@ -13,7 +13,7 @@ TypeBenchmarkFile, get_benchmark_contents, ) -from mage_rtl.gen_config import Config, get_llm, set_exp_setting +from mage_rtl.gen_config import get_llm, set_exp_setting from mage_rtl.log_utils import get_logger from 
mage_rtl.sim_reviewer import sim_review_golden_benchmark from mage_rtl.token_counter import TokenCount