From f61a000e73aec61ce1b63bd183e2f0d053ab5c27 Mon Sep 17 00:00:00 2001
From: hiyouga
Date: Thu, 28 Sep 2023 01:03:04 +0800
Subject: [PATCH] tiny fix

Former-commit-id: 5d4118b09639ea4ee46d3d750cdd542c30555a03
---
 src/llmtuner/extras/patches/llama_patch.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/llmtuner/extras/patches/llama_patch.py b/src/llmtuner/extras/patches/llama_patch.py
index cc22041d..42691194 100644
--- a/src/llmtuner/extras/patches/llama_patch.py
+++ b/src/llmtuner/extras/patches/llama_patch.py
@@ -1,7 +1,3 @@
-# coding=utf-8
-# Modified from:
-# [1] https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/modeling_llama.py
-
 import math
 import torch
 import torch.nn as nn
@@ -19,6 +15,7 @@ except ImportError:
 logger = logging.get_logger(__name__)
 
 
+# Modified from: https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/modeling_llama.py
 class LlamaShiftShortAttention(LlamaAttention):
 
     def forward(