diff --git a/src/llamafactory/webui/runner.py b/src/llamafactory/webui/runner.py
index 13dbba03..6cd21b07 100644
--- a/src/llamafactory/webui/runner.py
+++ b/src/llamafactory/webui/runner.py
@@ -24,7 +24,7 @@ from ..extras.misc import is_gpu_or_npu_available, torch_gc
 from ..extras.packages import is_gradio_available
 from .common import DEFAULT_CACHE_DIR, DEFAULT_CONFIG_DIR, get_save_dir, load_config
 from .locales import ALERTS, LOCALES
-from .utils import abort_leaf_process, gen_cmd, get_eval_results, get_trainer_info, load_args, save_args, save_cmd
+from .utils import abort_process, gen_cmd, get_eval_results, get_trainer_info, load_args, save_args, save_cmd
 
 
 if is_gradio_available():
@@ -52,7 +52,7 @@ class Runner:
     def set_abort(self) -> None:
         self.aborted = True
         if self.trainer is not None:
-            abort_leaf_process(self.trainer.pid)
+            abort_process(self.trainer.pid)
 
     def _initialize(self, data: Dict["Component", Any], do_train: bool, from_preview: bool) -> str:
         get = lambda elem_id: data[self.manager.get_elem_by_id(elem_id)]
diff --git a/src/llamafactory/webui/utils.py b/src/llamafactory/webui/utils.py
index 6ce2a8e7..a616bcba 100644
--- a/src/llamafactory/webui/utils.py
+++ b/src/llamafactory/webui/utils.py
@@ -33,16 +33,16 @@ if is_gradio_available():
     import gradio as gr
 
 
-def abort_leaf_process(pid: int) -> None:
+def abort_process(pid: int) -> None:
     r"""
-    Aborts the leaf processes.
+    Aborts the processes recursively in a bottom-up way.
     """
     children = psutil.Process(pid).children()
     if children:
         for child in children:
-            abort_leaf_process(child.pid)
-    else:
-        os.kill(pid, signal.SIGABRT)
+            abort_process(child.pid)
+
+    os.kill(pid, signal.SIGABRT)
 
 
 def can_quantize(finetuning_type: str) -> "gr.Dropdown":
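
Note on the change: the old abort_leaf_process sent SIGABRT only to leaf processes (the else branch), so intermediate parents such as the trainer launcher were never signaled and kept running. The renamed abort_process recurses into the children first and then unconditionally signals the current pid, terminating the entire tree bottom-up. Below is a minimal standalone sketch of that traversal, assuming only that psutil is installed and a POSIX system (os.kill with SIGABRT); the __main__ demo and its nested sleep commands are hypothetical, for illustration, and not part of the patch.

# Minimal sketch of the bottom-up abort; the demo spawner is hypothetical.
import os
import signal
import subprocess
import sys
import time

import psutil


def abort_process(pid: int) -> None:
    """Signal the descendants of `pid` first, then `pid` itself, so no
    process in the tree is left running after the abort."""
    for child in psutil.Process(pid).children():
        abort_process(child.pid)  # depth-first: grandchildren die before children
    os.kill(pid, signal.SIGABRT)  # the root of the subtree is signaled last


if __name__ == "__main__":
    # Spawn a parent that spawns a child of its own, then abort the whole tree.
    proc = subprocess.Popen([
        sys.executable, "-c",
        "import subprocess, sys, time; "
        "subprocess.Popen([sys.executable, '-c', 'import time; time.sleep(60)']); "
        "time.sleep(60)",
    ])
    time.sleep(1)  # give the tree a moment to build up
    abort_process(proc.pid)  # kills the grandchild, then the child
    proc.wait()

Killing children before their parent matters because signaling a parent first would reparent its surviving children, and the recursion could no longer reach them through psutil.Process(pid).children().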