From e5e740b54d3b8114021875dcdff40299d8764f8e Mon Sep 17 00:00:00 2001
From: hiyouga
Date: Sun, 15 Oct 2023 16:18:25 +0800
Subject: [PATCH] disable tqdm in webui mode

Former-commit-id: 0d63584c036a65cc81e5db504274bdf9808d1849
---
 src/llmtuner/webui/runner.py | 1 +
 src/llmtuner/webui/utils.py  | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/llmtuner/webui/runner.py b/src/llmtuner/webui/runner.py
index 89cb56a0..ed423978 100644
--- a/src/llmtuner/webui/runner.py
+++ b/src/llmtuner/webui/runner.py
@@ -118,6 +118,7 @@ class Runner:
             output_dir=output_dir
         )
         args[get("train.compute_type")] = True
+        args["disable_tqdm"] = True
 
         if TRAINING_STAGES[get("train.training_stage")] in ["rm", "ppo", "dpo"]:
             args["resume_lora_training"] = (args["quantization_bit"] is not None)
diff --git a/src/llmtuner/webui/utils.py b/src/llmtuner/webui/utils.py
index 181f0810..b178e79a 100644
--- a/src/llmtuner/webui/utils.py
+++ b/src/llmtuner/webui/utils.py
@@ -72,8 +72,8 @@ def can_quantize(finetuning_type: str) -> Dict[str, Any]:
 
 
 def gen_cmd(args: Dict[str, Any]) -> str:
-    if args.get("do_train", None):
-        args["plot_loss"] = True
+    args.pop("disable_tqdm", None)
+    args["plot_loss"] = args.get("do_train", None)
     cmd_lines = ["CUDA_VISIBLE_DEVICES=0 python src/train_bash.py "]
     for k, v in args.items():
         if v is not None and v != "":
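
A minimal sketch of what this change amounts to, assuming only the standard
transformers.TrainingArguments API (disable_tqdm is a real field there); the
dictionary values and the command rendering below are illustrative, not the
project's actual gen_cmd implementation.

    from transformers import TrainingArguments

    # Webui side: the runner adds disable_tqdm=True so tqdm progress bars do not
    # clutter the captured console output shown in the web interface.
    webui_args = {"output_dir": "saves/demo", "do_train": True, "disable_tqdm": True}
    training_args = TrainingArguments(**webui_args)  # progress bars suppressed

    # gen_cmd side: drop the webui-only flag and derive plot_loss from do_train,
    # so the command offered for copy-paste keeps the default CLI behavior.
    cli_args = dict(webui_args)
    cli_args.pop("disable_tqdm", None)
    cli_args["plot_loss"] = cli_args.get("do_train", None)
    cmd = "CUDA_VISIBLE_DEVICES=0 python src/train_bash.py " + " ".join(
        "--{} {}".format(k, v) for k, v in cli_args.items() if v is not None and v != ""
    )
    print(cmd)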