From 07aa7b71a36adb60e75e8403327a7df1a2e8b205 Mon Sep 17 00:00:00 2001
From: hoshi-hiyouga
Date: Thu, 13 Feb 2025 00:58:10 +0800
Subject: [PATCH] [misc] update readme (#6917)

Former-commit-id: 499ea45d1f1ea7704ee82f58c35af123a6c2632b
---
 .gitignore            |  3 +++
 README.md             | 16 ++++++++++++++++
 README_zh.md          | 17 +++++++++++++++++
 examples/README_zh.md |  2 +-
 4 files changed, 37 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 3e1f97dd..603806e0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -162,6 +162,9 @@ cython_debug/
 # vscode
 .vscode/
 
+# uv
+uv.lock
+
 # custom .gitignore
 ms_cache/
 hf_cache/
diff --git a/README.md b/README.md
index 00aa05a4..3452a5c5 100644
--- a/README.md
+++ b/README.md
@@ -436,6 +436,22 @@ Extra dependencies available: torch, torch-npu, metrics, deepspeed, liger-kernel
 > [!TIP]
 > Use `pip install --no-deps -e .` to resolve package conflicts.
 
+<details><summary>Setting up a virtual environment with uv</summary>
+
+Create an isolated Python environment with uv:
+
+```bash
+uv sync --extra torch --extra metrics --prerelease=allow
+```
+
+Run LLaMA-Factory in the isolated environment:
+
+```bash
+uv run --prerelease=allow llamafactory-cli train examples/train_lora/llama3_lora_pretrain.yaml
+```
+
+</details>
+
 <details><summary>For Windows users</summary>
 
 #### Install BitsAndBytes
diff --git a/README_zh.md b/README_zh.md
index b369bcb2..ef36870a 100644
--- a/README_zh.md
+++ b/README_zh.md
@@ -438,6 +438,23 @@ pip install -e ".[torch,metrics]"
 > [!TIP]
 > 遇到包冲突时,可使用 `pip install --no-deps -e .` 解决。
 
+<details><summary>使用 uv 构建虚拟环境</summary>
+
+创建隔离的 Python 环境:
+
+```bash
+uv sync --extra torch --extra metrics --prerelease=allow
+```
+
+在环境中运行 LLaMA-Factory:
+
+```bash
+uv run --prerelease=allow llamafactory-cli train examples/train_lora/llama3_lora_pretrain.yaml
+```
+
+</details>
+
+
 <details><summary>Windows 用户指南</summary>
 
 #### 安装 BitsAndBytes
diff --git a/examples/README_zh.md b/examples/README_zh.md
index 316013f7..4899e279 100644
--- a/examples/README_zh.md
+++ b/examples/README_zh.md
@@ -98,7 +98,7 @@ FORCE_TORCHRUN=1 llamafactory-cli train examples/train_lora/llama3_lora_sft_ds3.
 #### 使用 Ray 在 4 张 GPU 上微调
 
 ```bash
-USE_RAY=1 llamafactory-cli train examples/train_full/llama3_lora_sft_ray.yaml
+USE_RAY=1 llamafactory-cli train examples/train_lora/llama3_lora_sft_ray.yaml
 ```
 
 ### QLoRA 微调
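
As a quick way to exercise what this patch documents, the sketch below chains the new uv workflow with the corrected Ray recipe. It is only a suggestion: the `llamafactory-cli version` sanity check and running the Ray config through `uv run` are assumptions, not steps taken from the diff.

```bash
# Build the isolated environment with the extras named in the new README section.
uv sync --extra torch --extra metrics --prerelease=allow

# Assumed sanity check: confirm the CLI resolves inside the uv-managed environment.
uv run --prerelease=allow llamafactory-cli version

# LoRA pre-training recipe exactly as documented in the added README section.
uv run --prerelease=allow llamafactory-cli train examples/train_lora/llama3_lora_pretrain.yaml

# Ray recipe with the path corrected by this patch, run through uv (assumed combination).
USE_RAY=1 uv run --prerelease=allow llamafactory-cli train examples/train_lora/llama3_lora_sft_ray.yaml
```

The commands assume uv is installed and are run from the repository root, where the `examples/` configs referenced above live.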