[doc] add no build isolation (#8103)

This commit is contained in:
hoshi-hiyouga 2025-05-19 19:25:13 +08:00 committed by GitHub
parent a0b4b91577
commit beae231af6
5 changed files with 8 additions and 7 deletions

View File

@@ -483,13 +483,13 @@ huggingface-cli login
```bash ```bash
git clone --depth 1 https://github.com/hiyouga/LLaMA-Factory.git git clone --depth 1 https://github.com/hiyouga/LLaMA-Factory.git
cd LLaMA-Factory cd LLaMA-Factory
pip install -e ".[torch,metrics]" pip install -e ".[torch,metrics]" --no-build-isolation
``` ```
Extra dependencies available: torch, torch-npu, metrics, deepspeed, liger-kernel, bitsandbytes, hqq, eetq, gptq, aqlm, vllm, sglang, galore, apollo, badam, adam-mini, qwen, minicpm_v, modelscope, openmind, swanlab, quality Extra dependencies available: torch, torch-npu, metrics, deepspeed, liger-kernel, bitsandbytes, hqq, eetq, gptq, aqlm, vllm, sglang, galore, apollo, badam, adam-mini, qwen, minicpm_v, modelscope, openmind, swanlab, quality
> [!TIP] > [!TIP]
> Use `pip install --no-deps -e .` to resolve package conflicts. > Use `pip install -e . --no-deps --no-build-isolation` to resolve package conflicts.
<details><summary>Setting up a virtual environment with <b>uv</b></summary> <details><summary>Setting up a virtual environment with <b>uv</b></summary>

View File

@@ -470,13 +470,13 @@ huggingface-cli login
```bash ```bash
git clone --depth 1 https://github.com/hiyouga/LLaMA-Factory.git git clone --depth 1 https://github.com/hiyouga/LLaMA-Factory.git
cd LLaMA-Factory cd LLaMA-Factory
pip install -e ".[torch,metrics]" pip install -e ".[torch,metrics]" --no-build-isolation
``` ```
可选的额外依赖项torch、torch-npu、metrics、deepspeed、liger-kernel、bitsandbytes、hqq、eetq、gptq、aqlm、vllm、sglang、galore、apollo、badam、adam-mini、qwen、minicpm_v、modelscope、openmind、swanlab、quality 可选的额外依赖项torch、torch-npu、metrics、deepspeed、liger-kernel、bitsandbytes、hqq、eetq、gptq、aqlm、vllm、sglang、galore、apollo、badam、adam-mini、qwen、minicpm_v、modelscope、openmind、swanlab、quality
> [!TIP] > [!TIP]
> 遇到包冲突时,可使用 `pip install --no-deps -e .` 解决。 > 遇到包冲突时,可使用 `pip install -e . --no-deps --no-build-isolation` 解决。
<details><summary>使用 <b>uv</b> 构建虚拟环境</summary> <details><summary>使用 <b>uv</b> 构建虚拟环境</summary>

View File

@@ -17,8 +17,8 @@ import json
from typing import Optional from typing import Optional
import fire import fire
from transformers import Seq2SeqTrainingArguments
from tqdm import tqdm from tqdm import tqdm
from transformers import Seq2SeqTrainingArguments
from llamafactory.data import get_dataset, get_template_and_fix_tokenizer from llamafactory.data import get_dataset, get_template_and_fix_tokenizer
from llamafactory.extras.constants import IGNORE_INDEX from llamafactory.extras.constants import IGNORE_INDEX

View File

@@ -63,8 +63,8 @@ if is_transformers_version_greater_than("4.49.0"):
except ImportError: except ImportError:
try: try:
# If that fails, try importing from the new location # If that fails, try importing from the new location
from transformers.video_utils import make_batched_videos
from transformers.image_utils import make_flat_list_of_images from transformers.image_utils import make_flat_list_of_images
from transformers.video_utils import make_batched_videos
except ImportError: except ImportError:
raise ImportError( raise ImportError(
"Could not import make_batched_videos and make_flat_list_of_images. " "Could not import make_batched_videos and make_flat_list_of_images. "

View File

@@ -84,14 +84,15 @@ def run_pt(
perplexity = math.exp(metrics[f"eval_{key}_loss"]) perplexity = math.exp(metrics[f"eval_{key}_loss"])
except OverflowError: except OverflowError:
perplexity = float("inf") perplexity = float("inf")
metrics[f"eval_{key}_perplexity"] = perplexity metrics[f"eval_{key}_perplexity"] = perplexity
else: else:
try: try:
perplexity = math.exp(metrics["eval_loss"]) perplexity = math.exp(metrics["eval_loss"])
except OverflowError: except OverflowError:
perplexity = float("inf") perplexity = float("inf")
metrics["eval_perplexity"] = perplexity
metrics["eval_perplexity"] = perplexity
trainer.log_metrics("eval", metrics) trainer.log_metrics("eval", metrics)
trainer.save_metrics("eval", metrics) trainer.save_metrics("eval", metrics)