diff --git a/openad/api.py b/openad/api.py
index 1acbaa5b..b9a3fd6c 100644
--- a/openad/api.py
+++ b/openad/api.py
@@ -22,7 +22,6 @@ def __init__(self, name="No Name"):
# import openad.app.main as main_app
self.main_app = self._load_main()
- self.main_app.GLOBAL_SETTINGS["VERBOSE"] = False
self.name = name
def _load_main(self):
diff --git a/openad/app/global_var_lib.py b/openad/app/global_var_lib.py
index a176f4db..01499c47 100644
--- a/openad/app/global_var_lib.py
+++ b/openad/app/global_var_lib.py
@@ -47,11 +47,6 @@
# - web: not yet used
"display": None,
#
- # Setting verbose to false will hide spinners
- # and some other non-essential output
- # Todo: this is diplocate of "display" is "notebook" (to be confirmed)
- "VERBOSE": True,
- #
# The width in characters we limit the printed output to.
"max_print_width": 150,
#
diff --git a/openad/core/grammar.py b/openad/core/grammar.py
index 143d55c2..d980e911 100644
--- a/openad/core/grammar.py
+++ b/openad/core/grammar.py
@@ -45,9 +45,8 @@
# Helpers
from openad.helpers.general import is_notebook_mode
-from openad.helpers.output import output_error, output_text
+from openad.helpers.output import output_error
from openad.helpers.output_msgs import msg
-from openad.helpers.spinner import spinner
from openad.openad_model_plugin.openad_model_toolkit import service_grammar_add
from openad.openad_model_plugin.catalog_model_services import get_cataloged_service_defs, service_catalog_grammar
@@ -1031,8 +1030,6 @@ def create_statements(cmd_pointer):
# cmd_pointer.current_statements.extend(service_statements)
- spinner.stop() # Spinner may be started from within get_cataloged_service_defs -> get_short_status -> maybe_refresh_auth
-
cmd_pointer.current_help.help_model_services.clear()
cmd_pointer.current_help.help_model_services.extend(temp_help)
cmd_pointer.current_help.reset_help()
@@ -1410,7 +1407,11 @@ def output_train_statements(cmd_pointer):
for training_file in glob.glob(
os.path.expanduser(str(os.path.expanduser(cmd_pointer.home_dir + "/prompt_train/")) + "/*")
):
- os.remove(training_file)
+    # Fail silently when running concurrent sessions
+ try:
+ os.remove(training_file)
+ except Exception: # pylint: disable=broad-except
+ pass
while i < len(grammar_help):
training_statements.append(
diff --git a/openad/helpers/spinner.py b/openad/helpers/spinner.py
index 6ee06c88..b8f95bd3 100644
--- a/openad/helpers/spinner.py
+++ b/openad/helpers/spinner.py
@@ -16,6 +16,7 @@
from time import sleep
from openad.helpers.general import is_notebook_mode
from openad.helpers.output import output_text
+from openad.app.global_var_lib import GLOBAL_SETTINGS
if is_notebook_mode():
@@ -25,13 +26,11 @@
class Spinner(Halo):
- verbose = True
- def __init__(self, verbose=True):
- self.verbose = verbose
+ def __init__(self):
# Fancy spinner, but requires more CPU, blocking the main thread
- # To do: see is separating thread for spinner resolves this
+ # To do: see if separating thread for spinner resolves this
wave_spinner = {
"interval": 700,
"frames": [
@@ -47,7 +46,7 @@ def __init__(self, verbose=True):
],
}
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
super().__init__(spinner="triangle", color="white", interval=700)
# Fancy spinner
@@ -57,7 +56,7 @@ def __init__(self, verbose=True):
# https://github.com/sindresorhus/cli-spinners/blob/dac4fc6571059bb9e9bc204711e9dfe8f72e5c6f/spinners.json
def start(self, text=None, no_format=False):
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
if no_format:
text = output_text(text, return_val=True, jup_return_format="plain") if text else None
else:
@@ -67,24 +66,24 @@ def start(self, text=None, no_format=False):
super().start(text)
def succeed(self, *args, **kwargs):
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
return super().succeed(*args, **kwargs)
def info(self, *args, **kwargs):
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
super().info(*args, **kwargs)
return super().start(*args, **kwargs)
def warn(self, *args, **kwargs):
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
return super().warn(*args, **kwargs)
def fail(self, *args, **kwargs):
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
return super().fail(*args, **kwargs)
def stop(self):
- if self.verbose is True:
+ if GLOBAL_SETTINGS["display"] != "api":
return super().stop()
def countdown(
@@ -108,15 +107,15 @@ def countdown(
"""
msg = msg or "Waiting {sec} seconds before retrying"
- spinner.start(msg.format(sec=seconds))
+ self.start(msg.format(sec=seconds))
sleep(1)
if seconds > 1:
self.countdown(seconds - 1, msg, stop_msg)
else:
if stop_msg:
- spinner.start(stop_msg)
+ self.start(stop_msg)
else:
- spinner.stop()
+ self.stop()
return True
diff --git a/openad/llm_assist/llm_interface.py b/openad/llm_assist/llm_interface.py
index 6bc2b74a..eb621e19 100644
--- a/openad/llm_assist/llm_interface.py
+++ b/openad/llm_assist/llm_interface.py
@@ -16,7 +16,7 @@
from openad.helpers.credentials import write_credentials, get_credentials
from openad.app.global_var_lib import GLOBAL_SETTINGS
-from openad.helpers.spinner import Spinner
+from openad.helpers.spinner import spinner
# Constants
TRAINING_LLM_DIR = "/prompt_train/"
@@ -127,15 +127,14 @@ def how_do_i(cmd_pointer, parser):
else:
from halo import Halo # pylint: disable=import-outside-toplevel
- newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])
- newspin.start("Processing Request ")
+ spinner.start("Processing Request ")
# Now we are asking the prompt a Question
try:
# text = cmd_pointer.llm_handle.how_to_search(CHAT_PRIMER + " ".join(parser["Chat_String"]) + CHAT_PRIMER_SUFFIX)
text = cmd_pointer.llm_handle.how_to_search(CHAT_PRIMER + " ".join(parser["Chat_String"]))
except Exception as e:
- newspin.fail("Running Request Failed")
+ spinner.fail("Running Request Failed")
output_text(
"Unable to Execute request. check LLM credentials and or Connectivity",
return_val=False,
@@ -143,7 +142,7 @@ def how_do_i(cmd_pointer, parser):
edge=True,
)
return False
- newspin.succeed("See Answer Below.")
+ spinner.succeed("See Answer Below.")
text = clean_up_llm_text(cmd_pointer, text)
if GLOBAL_SETTINGS["display"] == "notebook":
diff --git a/openad/openad_model_plugin/catalog_model_services.py b/openad/openad_model_plugin/catalog_model_services.py
index b35f2654..d01dfdad 100644
--- a/openad/openad_model_plugin/catalog_model_services.py
+++ b/openad/openad_model_plugin/catalog_model_services.py
@@ -143,6 +143,7 @@ def get_cataloged_service_defs() -> Dict[str, dict]:
logger.warning(f"remote service defs not found, sevice not available | {name=}")
service_definitions[name] = remote_definitions
+ spinner.stop()
return service_definitions
@@ -1217,7 +1218,6 @@ def service_catalog_grammar(statements: list, help: list):
- model service demo
- model service demo restart
- model service demo debug
-
""",
)
)
diff --git a/openad/openad_model_plugin/openad_model_toolkit.py b/openad/openad_model_plugin/openad_model_toolkit.py
index 60431af8..79676cf3 100644
--- a/openad/openad_model_plugin/openad_model_toolkit.py
+++ b/openad/openad_model_plugin/openad_model_toolkit.py
@@ -19,9 +19,7 @@
from openad.openad_model_plugin.catalog_model_services import Dispatcher
from openad.app.global_var_lib import GLOBAL_SETTINGS
from openad.smols.smol_batch_files import merge_molecule_property_data
-from openad.helpers.spinner import Spinner
-
-spinner = Spinner(GLOBAL_SETTINGS["VERBOSE"])
+from openad.helpers.spinner import spinner
from pyparsing import ( # replaceWith,; Combine,; pyparsing_test,; ParseException,
CaselessKeyword,
diff --git a/openad/openad_model_plugin/services.py b/openad/openad_model_plugin/services.py
index 68d9916d..940ff4f6 100644
--- a/openad/openad_model_plugin/services.py
+++ b/openad/openad_model_plugin/services.py
@@ -369,7 +369,6 @@ def get_remote_service_definitions(self, name: str) -> list | None:
# @auth
def refresh_remote_service(self, service_name, endpoint, auth_token) -> bool:
"""Refresh remote service with new auth token"""
- output_warning(f"Refreshing remote service: {service_name}")
logger.debug(f"refreshing remote service | {service_name=}")
if service_name not in self.list():
output_error(f"Service {service_name} not found in catalog")
@@ -447,6 +446,7 @@ def maybe_refresh_auth(self, service_name, service_data):
update_lookup_table(auth_group=auth_group_name, service=service_name, api_key=auth_token)
elif "authorization" in params_lower:
self.refresh_remote_service(service_name, endpoint, auth_token)
+ spinner.stop()
def get_service_cache(self) -> LruCache[dict]:
return REMOTE_SERVICES_CACHE
diff --git a/openad/smols/smol_batch_files.py b/openad/smols/smol_batch_files.py
index 20650513..504c54b1 100644
--- a/openad/smols/smol_batch_files.py
+++ b/openad/smols/smol_batch_files.py
@@ -19,7 +19,7 @@
from openad.app.global_var_lib import GLOBAL_SETTINGS
from openad.helpers.output import output_error, output_warning, output_success, output_text
from openad.helpers.output_msgs import msg
-from openad.helpers.spinner import Spinner
+from openad.helpers.spinner import spinner
from openad.helpers.paths import parse_path
from openad.plugins.style_parser import style
@@ -235,7 +235,6 @@ def _enrich_with_pubchem_data(cmd_pointer, molset):
output_molset = []
- spinner = Spinner(GLOBAL_SETTINGS["VERBOSE"])
spinner.start("Fetching from PubChem")
for i, smol in enumerate(molset):
diff --git a/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction.py b/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction.py
index 9520049c..4da622fa 100644
--- a/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction.py
+++ b/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction.py
@@ -164,7 +164,7 @@ def predict_reaction(inputs: dict, cmd_pointer):
output_text("Reaction: " + sources + " ----> " + x_y, return_val=False)
output_text("Confidence: " + str(confidence), return_val=False)
- if GLOBAL_SETTINGS["display"] == "notebook" or GLOBAL_SETTINGS["VERBOSE"] == False:
+ if GLOBAL_SETTINGS["display"] == "notebook" or GLOBAL_SETTINGS["display"] == "api":
return get_reaction_from_smiles(predict_reaction_results["response"]["payload"]["attempts"][0]["smiles"])
else:
output_text("", return_val=False)
diff --git a/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch.py b/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch.py
index d0732fe6..3bf9dbe5 100644
--- a/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch.py
+++ b/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch.py
@@ -12,7 +12,7 @@
from openad.helpers.output import output_text, output_error, output_warning, output_table
from openad.helpers.output_msgs import msg
from openad.helpers.general import load_tk_module
-from openad.helpers.spinner import Spinner
+from openad.helpers.spinner import spinner
def get_reaction_from_smiles(reaction_smiles: str) -> Chem.rdChemReactions.ChemicalReaction:
@@ -74,7 +74,6 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
except Exception: # pylint: disable=broad-exception-caught
output_error("Could not load valid list from file column 'reactions' ", return_val=False)
return True
- newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])
### setting up default values... note to put into json metdata file in future
@@ -138,7 +137,7 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
display(get_reaction_from_smiles(reaction_prediction["smiles"]))
if len(new_from_list) > 0:
- newspin.start("Starting Prediction")
+ spinner.start("Starting Prediction")
from_list = new_from_list
rxn4chemistry_wrapper = cmd_pointer.login_settings["client"][
cmd_pointer.login_settings["toolkits"].index("RXN")
@@ -148,7 +147,7 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
while status == False:
try:
if retries == 0:
- newspin.info("Processing Prediction")
+ spinner.info("Processing Prediction")
predict_reaction_batch_response = rxn4chemistry_wrapper.predict_reaction_batch(from_list)
sleep(2)
@@ -156,8 +155,8 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 4:
- newspin.fail("Unable to Process")
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.stop()
raise Exception("Server unresponsive" + str(e)) from e # pylint: disable=broad-exception-raised
retries = 0
@@ -165,7 +164,7 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
reaction_predictions = {}
while "predictions" not in reaction_predictions:
try:
- newspin.text = "Processing Prediction"
+ spinner.text = "Processing Prediction"
reaction_predictions = rxn4chemistry_wrapper.get_predict_reaction_batch_results(
predict_reaction_batch_response["task_id"]
@@ -175,11 +174,11 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 10:
- newspin.fail("Unable to Process")
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.stop()
raise BaseException("Server unresponsive" + str(e)) from e # pylint: disable=broad-exception-raised
- newspin.succeed("Finished Processing")
- newspin.stop()
+ spinner.succeed("Finished Processing")
+ spinner.stop()
if GLOBAL_SETTINGS["display"] == "notebook":
from IPython.display import display # pylint: disable=import-outside-toplevel
for reaction_prediction in reaction_predictions["predictions"]:
@@ -215,7 +214,7 @@ def predict_reaction_batch(inputs: dict, cmd_pointer):
display(get_reaction_from_smiles(reaction_prediction["smiles"]))
output_text(" ", return_val=False)
- if not GLOBAL_SETTINGS["VERBOSE"]:
+ if GLOBAL_SETTINGS["display"] == "api":
return reaction_predictions["predictions"]
else:
return True
diff --git a/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch_topn.py b/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch_topn.py
index 488749ee..0a793813 100644
--- a/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch_topn.py
+++ b/openad/user_toolkits/RXN/fn_reactions/fn_predict_reaction_batch_topn.py
@@ -13,7 +13,7 @@
from openad.helpers.output import output_text, output_warning, output_error, output_table
from openad.helpers.output_msgs import msg
from openad.helpers.general import load_tk_module
-from openad.helpers.spinner import Spinner
+from openad.helpers.spinner import spinner
def get_reaction_from_smiles(
@@ -142,19 +142,18 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
if len(new_from_list) > 0:
val = "val"
from_list = new_from_list
- newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])
retries = 0
status = False
rxn4chemistry_wrapper = cmd_pointer.login_settings["client"][
cmd_pointer.login_settings["toolkits"].index("RXN")
]
- newspin.start("Starting Prediction")
+ spinner.start("Starting Prediction")
while status is False:
try:
if retries == 0:
- newspin.info("Processing Prediction")
+ spinner.info("Processing Prediction")
sleep(2)
predict_rection_batch_response = rxn4chemistry_wrapper.predict_reaction_batch_topn(
precursors_lists=new_from_list,
@@ -165,8 +164,8 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 4:
- newspin.fail("Unable to Process")
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.stop()
raise Exception("Server unresponsive" + str(e)) from e # pylint: disable=broad-exception-raised
x = {}
@@ -186,14 +185,14 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
except Exception as e: # pylint: disable=broad-exception-caught
retries = retries + 1
if retries > 10:
- newspin.fail("Unable to Process")
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.stop()
raise Exception("Server unresponsive " + str(e)) from e # pylint: disable=broad-exception-raised
reaction_no = 0
- newspin.succeed("Finished Processing")
- newspin.start()
- newspin.stop()
+ spinner.succeed("Finished Processing")
+ spinner.start()
+ spinner.stop()
for i, reaction_predictions in enumerate(x["predictions"], 1):
output_text("\n", return_val=False)
output_text(
@@ -216,7 +215,7 @@ def predict_reaction_batch_topn(inputs: dict, cmd_pointer):
)
output_text(" ", return_val=False)
- if not GLOBAL_SETTINGS["VERBOSE"]:
+ if GLOBAL_SETTINGS["display"] == "api":
return x
else:
return True
diff --git a/openad/user_toolkits/RXN/fn_reactions/fn_predict_retro.py b/openad/user_toolkits/RXN/fn_reactions/fn_predict_retro.py
index 7db3a33d..1e2abd8a 100644
--- a/openad/user_toolkits/RXN/fn_reactions/fn_predict_retro.py
+++ b/openad/user_toolkits/RXN/fn_reactions/fn_predict_retro.py
@@ -13,7 +13,7 @@
from openad.smols.smol_cache import create_analysis_record, save_result
from openad.smols.smol_functions import canonicalize, valid_smiles
from openad.helpers.general import load_tk_module
-from openad.helpers.spinner import Spinner
+from openad.helpers.spinner import spinner
def get_reaction_from_smiles(reaction_smiles: str) -> Chem.rdChemReactions.ChemicalReaction:
@@ -145,8 +145,7 @@ def predict_retro(inputs: dict, cmd_pointer):
# Prepare the data query
print("\n")
- newspin = Spinner(GLOBAL_SETTINGS["VERBOSE"])
- newspin.start("Starting Retrosynthesis")
+ spinner.start("Starting Retrosynthesis")
try:
retries = 0
status = False
@@ -155,7 +154,7 @@ def predict_retro(inputs: dict, cmd_pointer):
while retries < 10 and status is False:
try:
if retries == 0:
- newspin.info("Submitting Retrosynthesis ")
+ spinner.info("Submitting Retrosynthesis ")
predict_retro_response = rxn4chemistry_wrapper.predict_automatic_retrosynthesis(
product_smiles,
availability_pricing_threshold=availability_pricing_threshold,
@@ -174,9 +173,9 @@ def predict_retro(inputs: dict, cmd_pointer):
sleep(2)
retries = retries + 1
if retries >= 10:
- newspin.fail("Unable to Process")
- newspin.start()
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.start()
+ spinner.stop()
raise Exception(
"Server unresponsive: Unable to submit for processing after 10 retires" + str(e)
) from e # pylint: disable=broad-exception-raised
@@ -195,7 +194,7 @@ def predict_retro(inputs: dict, cmd_pointer):
while status != "SUCCESS":
try:
if status != previous_status:
- newspin.info("Processing Retrosynthesis :" + status)
+ spinner.info("Processing Retrosynthesis :" + status)
previous_status = status
predict_automatic_retrosynthesis_results = (
@@ -220,7 +219,7 @@ def predict_retro(inputs: dict, cmd_pointer):
retries = retries + 1
sleep(15)
status = "Waiting"
- newspin.info("Processing Retrosynthesis: Waiting")
+ spinner.info("Processing Retrosynthesis: Waiting")
if retries > 20:
raise Exception(
"Server unresponsive: Unable to complete processing for prediction id:'"
@@ -229,9 +228,9 @@ def predict_retro(inputs: dict, cmd_pointer):
+ str(e)
) from e # pylint: disable=broad-exception-raised
except Exception as e:
- newspin.fail("Unable to Process")
- newspin.start()
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.start()
+ spinner.stop()
raise Exception("Unable to complete processing " + str(e)) from e # pylint: disable=broad-exception-raised
reactions_text = []
# print(predict_automatic_retrosynthesis_results)
@@ -243,17 +242,17 @@ def predict_retro(inputs: dict, cmd_pointer):
# print("inner")
except Exception as e: # pylint: disable=broad-exception-caught
- newspin.fail("Unable to Process")
- newspin.stop()
+ spinner.fail("Unable to Process")
+ spinner.stop()
raise Exception(
"The following Error message was received while trying to process results:" + str(e)
) from e # pylint: disable=broad-exception-raised
num_results = 0
# print(reactions_text)
try:
- newspin.succeed("Finished Processing")
- newspin.start()
- newspin.stop()
+ spinner.succeed("Finished Processing")
+ spinner.start()
+ spinner.stop()
results = {}
i = 0
for index, tree in enumerate(predict_automatic_retrosynthesis_results["retrosynthetic_paths"]):
@@ -262,7 +261,7 @@ def predict_retro(inputs: dict, cmd_pointer):
"",
return_val=False,
)
- if num_results < 4 or GLOBAL_SETTINGS["VERBOSE"] == False:
+        if num_results < 4 or GLOBAL_SETTINGS["display"] == "api":
results[str(index)] = {"confidence": tree["confidence"], "reactions": []}
output_text(
@@ -274,7 +273,7 @@ def predict_retro(inputs: dict, cmd_pointer):
)
for reaction in collect_reactions_from_retrosynthesis(tree):
- if num_results < 4 or GLOBAL_SETTINGS["VERBOSE"] == False:
+            if num_results < 4 or GLOBAL_SETTINGS["display"] == "api":
results[str(index)]["reactions"].append(reactions_text[i])
output_text(" Reaction: " + reactions_text[i], return_val=False)
i = i + 1
@@ -295,7 +294,7 @@ def predict_retro(inputs: dict, cmd_pointer):
return False
i = 0
- if GLOBAL_SETTINGS["VERBOSE"] == False:
+ if GLOBAL_SETTINGS["display"] == "api":
return results
else:
return True