diff --git a/pyproject.toml b/pyproject.toml
index 67da7435..06dbae8a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 
 [tool.poetry]
 name = "together"
-version = "1.3.9"
+version = "1.3.10"
 authors = [
     "Together AI <support@together.ai>"
 ]
diff --git a/src/together/cli/api/finetune.py b/src/together/cli/api/finetune.py
index 0eb2b00c..7bc02744 100644
--- a/src/together/cli/api/finetune.py
+++ b/src/together/cli/api/finetune.py
@@ -197,10 +197,7 @@ def create(
             "batch_size": model_limits.lora_training.max_batch_size,
             "learning_rate": 1e-3,
         }
-        log_warn_once(
-            f"The default LoRA rank for {model} has been changed to {default_values['lora_r']} as the max available.\n"
-            f"Also, the default learning rate for LoRA fine-tuning has been changed to {default_values['learning_rate']}."
-        )
+
         for arg in default_values:
             arg_source = ctx.get_parameter_source("arg")  # type: ignore[attr-defined]
             if arg_source == ParameterSource.DEFAULT: