Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion openad/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ def __init__(self, name="No Name"):
# import openad.app.main as main_app

self.main_app = self._load_main()
self.main_app.GLOBAL_SETTINGS["VERBOSE"] = False
self.name = name

def _load_main(self):
Expand Down
5 changes: 0 additions & 5 deletions openad/app/global_var_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,6 @@
# - web: not yet used
"display": None,
#
# Setting verbose to false will hide spinners
# and some other non-essential output
# Todo: this is diplocate of "display" is "notebook" (to be confirmed)
"VERBOSE": True,
#
# The width in characters we limit the printed output to.
"max_print_width": 150,
#
Expand Down
11 changes: 6 additions & 5 deletions openad/core/grammar.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,8 @@

# Helpers
from openad.helpers.general import is_notebook_mode
from openad.helpers.output import output_error, output_text
from openad.helpers.output import output_error
from openad.helpers.output_msgs import msg
from openad.helpers.spinner import spinner
from openad.openad_model_plugin.openad_model_toolkit import service_grammar_add

from openad.openad_model_plugin.catalog_model_services import get_cataloged_service_defs, service_catalog_grammar
Expand Down Expand Up @@ -1031,8 +1030,6 @@ def create_statements(cmd_pointer):

# cmd_pointer.current_statements.extend(service_statements)

spinner.stop() # Spinner may be started from within get_cataloged_service_defs -> get_short_status -> maybe_refresh_auth

cmd_pointer.current_help.help_model_services.clear()
cmd_pointer.current_help.help_model_services.extend(temp_help)
cmd_pointer.current_help.reset_help()
Expand Down Expand Up @@ -1410,7 +1407,11 @@ def output_train_statements(cmd_pointer):
for training_file in glob.glob(
os.path.expanduser(str(os.path.expanduser(cmd_pointer.home_dir + "/prompt_train/")) + "/*")
):
os.remove(training_file)
        # Fail silently when running concurrent sessions
try:
os.remove(training_file)
except Exception: # pylint: disable=broad-except
pass

while i < len(grammar_help):
training_statements.append(
Expand Down
27 changes: 13 additions & 14 deletions openad/helpers/spinner.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from time import sleep
from openad.helpers.general import is_notebook_mode
from openad.helpers.output import output_text
from openad.app.global_var_lib import GLOBAL_SETTINGS


if is_notebook_mode():
Expand All @@ -25,13 +26,11 @@


class Spinner(Halo):
verbose = True

def __init__(self, verbose=True):
self.verbose = verbose
def __init__(self):

# Fancy spinner, but requires more CPU, blocking the main thread
# To do: see is separating thread for spinner resolves this
# To do: see if separating thread for spinner resolves this
wave_spinner = {
"interval": 700,
"frames": [
Expand All @@ -47,7 +46,7 @@ def __init__(self, verbose=True):
],
}

if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
super().__init__(spinner="triangle", color="white", interval=700)

# Fancy spinner
Expand All @@ -57,7 +56,7 @@ def __init__(self, verbose=True):
# https://github.com/sindresorhus/cli-spinners/blob/dac4fc6571059bb9e9bc204711e9dfe8f72e5c6f/spinners.json

def start(self, text=None, no_format=False):
if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
if no_format:
text = output_text(text, return_val=True, jup_return_format="plain") if text else None
else:
Expand All @@ -67,24 +66,24 @@ def start(self, text=None, no_format=False):
super().start(text)

def succeed(self, *args, **kwargs):
if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
return super().succeed(*args, **kwargs)

def info(self, *args, **kwargs):
if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
super().info(*args, **kwargs)
return super().start(*args, **kwargs)

def warn(self, *args, **kwargs):
if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
return super().warn(*args, **kwargs)

def fail(self, *args, **kwargs):
if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
return super().fail(*args, **kwargs)

def stop(self):
if self.verbose is True:
if GLOBAL_SETTINGS["display"] != "api":
return super().stop()

def countdown(
Expand All @@ -108,15 +107,15 @@ def countdown(
"""

msg = msg or "Waiting {sec} seconds before retrying"
spinner.start(msg.format(sec=seconds))
self.start(msg.format(sec=seconds))
sleep(1)
if seconds > 1:
self.countdown(seconds - 1, msg, stop_msg)
else:
if stop_msg:
spinner.start(stop_msg)
self.start(stop_msg)
else:
spinner.stop()
self.stop()
return True


Expand Down
9 changes: 4 additions & 5 deletions openad/llm_assist/llm_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

from openad.helpers.credentials import write_credentials, get_credentials
from openad.app.global_var_lib import GLOBAL_SETTINGS
from openad.helpers.spinner import Spinner
from openad.helpers.spinner import spinner

# Constants
TRAINING_LLM_DIR = "/prompt_train/"
Expand Down Expand Up @@ -127,23 +127,22 @@ def how_do_i(cmd_pointer, parser):
else:
from halo import Halo # pylint: disable=import-outside-toplevel

newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])
newspin.start("Processing Request ")
spinner.start("Processing Request ")
# Now we are asking the prompt a Question

try:
# text = cmd_pointer.llm_handle.how_to_search(CHAT_PRIMER + " ".join(parser["Chat_String"]) + CHAT_PRIMER_SUFFIX)
text = cmd_pointer.llm_handle.how_to_search(CHAT_PRIMER + " ".join(parser["Chat_String"]))
except Exception as e:
newspin.fail("Running Request Failed")
spinner.fail("Running Request Failed")
output_text(
"Unable to Execute request. check LLM credentials and or Connectivity",
return_val=False,
pad=1,
edge=True,
)
return False
newspin.succeed("See Answer Below.")
spinner.succeed("See Answer Below.")
text = clean_up_llm_text(cmd_pointer, text)

if GLOBAL_SETTINGS["display"] == "notebook":
Expand Down
2 changes: 1 addition & 1 deletion openad/openad_model_plugin/catalog_model_services.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ def get_cataloged_service_defs() -> Dict[str, dict]:
logger.warning(f"remote service defs not found, sevice not available | {name=}")
service_definitions[name] = remote_definitions

spinner.stop()
return service_definitions


Expand Down Expand Up @@ -1217,7 +1218,6 @@ def service_catalog_grammar(statements: list, help: list):
- <cmd>model service demo</cmd>
- <cmd>model service demo restart</cmd>
- <cmd>model service demo debug</cmd>

""",
)
)
4 changes: 1 addition & 3 deletions openad/openad_model_plugin/openad_model_toolkit.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,7 @@
from openad.openad_model_plugin.catalog_model_services import Dispatcher
from openad.app.global_var_lib import GLOBAL_SETTINGS
from openad.smols.smol_batch_files import merge_molecule_property_data
from openad.helpers.spinner import Spinner

spinner = Spinner(GLOBAL_SETTINGS["VERBOSE"])
from openad.helpers.spinner import spinner

from pyparsing import ( # replaceWith,; Combine,; pyparsing_test,; ParseException,
CaselessKeyword,
Expand Down
2 changes: 1 addition & 1 deletion openad/openad_model_plugin/services.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,6 @@ def get_remote_service_definitions(self, name: str) -> list | None:
# @auth
def refresh_remote_service(self, service_name, endpoint, auth_token) -> bool:
"""Refresh remote service with new auth token"""
output_warning(f"Refreshing remote service: {service_name}")
logger.debug(f"refreshing remote service | {service_name=}")
if service_name not in self.list():
output_error(f"Service <yellow>{service_name}</yellow> not found in catalog")
Expand Down Expand Up @@ -447,6 +446,7 @@ def maybe_refresh_auth(self, service_name, service_data):
update_lookup_table(auth_group=auth_group_name, service=service_name, api_key=auth_token)
elif "authorization" in params_lower:
self.refresh_remote_service(service_name, endpoint, auth_token)
spinner.stop()

def get_service_cache(self) -> LruCache[dict]:
return REMOTE_SERVICES_CACHE
Expand Down
3 changes: 1 addition & 2 deletions openad/smols/smol_batch_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from openad.app.global_var_lib import GLOBAL_SETTINGS
from openad.helpers.output import output_error, output_warning, output_success, output_text
from openad.helpers.output_msgs import msg
from openad.helpers.spinner import Spinner
from openad.helpers.spinner import spinner
from openad.helpers.paths import parse_path
from openad.plugins.style_parser import style

Expand Down Expand Up @@ -235,7 +235,6 @@ def _enrich_with_pubchem_data(cmd_pointer, molset):

output_molset = []

spinner = Spinner(GLOBAL_SETTINGS["VERBOSE"])
spinner.start("Fetching from PubChem")

for i, smol in enumerate(molset):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ def predict_reaction(inputs: dict, cmd_pointer):

output_text("<green>Reaction:</green> " + sources + " ----> " + x_y, return_val=False)
output_text("<green>Confidence:</green> " + str(confidence), return_val=False)
if GLOBAL_SETTINGS["display"] == "notebook" or GLOBAL_SETTINGS["VERBOSE"] == False:
if GLOBAL_SETTINGS["display"] == "notebook" or GLOBAL_SETTINGS["display"] == "api":
return get_reaction_from_smiles(predict_reaction_results["response"]["payload"]["attempts"][0]["smiles"])
else:
output_text("", return_val=False)
Expand Down
23 changes: 11 additions & 12 deletions openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from openad.helpers.output import output_text, output_error, output_warning, output_table
from openad.helpers.output_msgs import msg
from openad.helpers.general import load_tk_module
from openad.helpers.spinner import Spinner
from openad.helpers.spinner import spinner


def get_reaction_from_smiles(reaction_smiles: str) -> Chem.rdChemReactions.ChemicalReaction:
Expand Down Expand Up @@ -74,7 +74,6 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
except Exception: # pylint: disable=broad-exception-caught
output_error("Could not load valid list from file column 'reactions' ", return_val=False)
return True
newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])

    ### Setting up default values... note: to put into json metadata file in future

Expand Down Expand Up @@ -138,7 +137,7 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
display(get_reaction_from_smiles(reaction_prediction["smiles"]))

if len(new_from_list) > 0:
newspin.start("Starting Prediction")
spinner.start("Starting Prediction")
from_list = new_from_list
rxn4chemistry_wrapper = cmd_pointer.login_settings["client"][
cmd_pointer.login_settings["toolkits"].index("RXN")
Expand All @@ -148,24 +147,24 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
while status == False:
try:
if retries == 0:
newspin.info("Processing Prediction")
spinner.info("Processing Prediction")

predict_reaction_batch_response = rxn4chemistry_wrapper.predict_reaction_batch(from_list)
sleep(2)
status = True
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 4:
newspin.fail("Unable to Process")
newspin.stop()
spinner.fail("Unable to Process")
spinner.stop()
raise Exception("Server unresponsive" + str(e)) from e # pylint: disable=broad-exception-raised

retries = 0

reaction_predictions = {}
while "predictions" not in reaction_predictions:
try:
newspin.text = "Processing Prediction"
spinner.text = "Processing Prediction"

reaction_predictions = rxn4chemistry_wrapper.get_predict_reaction_batch_results(
predict_reaction_batch_response["task_id"]
Expand All @@ -175,11 +174,11 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 10:
newspin.fail("Unable to Process")
newspin.stop()
spinner.fail("Unable to Process")
spinner.stop()
raise BaseException("Server unresponsive" + str(e)) from e # pylint: disable=broad-exception-raised
newspin.succeed("Finished Processing")
newspin.stop()
spinner.succeed("Finished Processing")
spinner.stop()
if GLOBAL_SETTINGS["display"] == "notebook":
from IPython.display import display # pylint: disable=import-outside-toplevel
for reaction_prediction in reaction_predictions["predictions"]:
Expand Down Expand Up @@ -215,7 +214,7 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
display(get_reaction_from_smiles(reaction_prediction["smiles"]))

output_text(" ", return_val=False)
if not GLOBAL_SETTINGS["VERBOSE"]:
if GLOBAL_SETTINGS["display"] == "api":
return reaction_predictions["predictions"]
else:
return True
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from openad.helpers.output import output_text, output_warning, output_error, output_table
from openad.helpers.output_msgs import msg
from openad.helpers.general import load_tk_module
from openad.helpers.spinner import Spinner
from openad.helpers.spinner import spinner


def get_reaction_from_smiles(
Expand Down Expand Up @@ -142,19 +142,18 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
if len(new_from_list) > 0:
val = "val"
from_list = new_from_list
newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])
retries = 0
status = False
rxn4chemistry_wrapper = cmd_pointer.login_settings["client"][
cmd_pointer.login_settings["toolkits"].index("RXN")
]

newspin.start("Starting Prediction")
spinner.start("Starting Prediction")

while status is False:
try:
if retries == 0:
newspin.info("Processing Prediction")
spinner.info("Processing Prediction")
sleep(2)
predict_rection_batch_response = rxn4chemistry_wrapper.predict_reaction_batch_topn(
precursors_lists=new_from_list,
Expand All @@ -165,8 +164,8 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 4:
newspin.fail("Unable to Process")
newspin.stop()
spinner.fail("Unable to Process")
spinner.stop()
raise Exception("Server unresponsive" + str(e)) from e # pylint: disable=broad-exception-raised

x = {}
Expand All @@ -186,14 +185,14 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 10:
newspin.fail("Unable to Process")
newspin.stop()
spinner.fail("Unable to Process")
spinner.stop()
raise Exception("Server unresponsive " + str(e)) from e # pylint: disable=broad-exception-raised

reaction_no = 0
newspin.succeed("Finished Processing")
newspin.start()
newspin.stop()
spinner.succeed("Finished Processing")
spinner.start()
spinner.stop()
for i, reaction_predictions in enumerate(x["predictions"], 1):
output_text("\n", return_val=False)
output_text(
Expand All @@ -216,7 +215,7 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
)
output_text(" ", return_val=False)

if not GLOBAL_SETTINGS["VERBOSE"]:
if GLOBAL_SETTINGS["display"] == "api":
return x
else:
return True
Loading
Loading