mirror of https://github.com/hiyouga/LLaMA-Factory.git (synced 2025-11-04 18:02:19 +08:00)
	use robust envs
Former-commit-id: f3e194c3b3c40a3e6c3c5397ec0d859e6db614b5
This commit is contained in:
parent 84ff56c3a0
commit 6670b36c49
@@ -51,7 +51,7 @@ def create_app(chat_model: "ChatModel") -> "FastAPI":
         allow_methods=["*"],
         allow_headers=["*"],
     )
-    api_key = os.environ.get("API_KEY", None)
+    api_key = os.environ.get("API_KEY")
     security = HTTPBearer(auto_error=False)
 
     async def verify_api_key(auth: Annotated[Optional[HTTPAuthorizationCredentials], Depends(security)]):
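
The first hunk is a pure simplification: os.environ.get already returns None when a key is missing, so the explicit None default was redundant. A minimal standalone check, assuming API_KEY is unset in the current environment:

    import os

    # os.environ is a plain mapping, so .get() defaults to None on a missing key;
    # dropping the explicit None argument changes nothing.
    assert os.environ.get("API_KEY") == os.environ.get("API_KEY", None)
    print(os.environ.get("API_KEY"))  # -> None while the variable is unset
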
@@ -53,7 +53,7 @@ class LogCallback(TrainerCallback):
         self.aborted = False
         self.do_train = False
         """ Web UI """
-        self.webui_mode = bool(int(os.environ.get("LLAMABOARD_ENABLED", "0")))
+        self.webui_mode = os.environ.get("LLAMABOARD_ENABLED", "0").lower() in ["true", "1"]
         if self.webui_mode:
             signal.signal(signal.SIGABRT, self._set_abort)
             self.logger_handler = LoggerHandler(output_dir)
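
This hunk shows the robustness fix the commit title refers to: bool(int(...)) only understands numeric strings and raises ValueError on values such as "true", while the new case-insensitive membership test accepts the common truthy spellings and treats everything else as disabled. A small sketch of the difference, separate from the project code:

    # Old parsing: crashes on any non-numeric value a user might export,
    # e.g. LLAMABOARD_ENABLED=true.
    try:
        bool(int("true"))
    except ValueError as err:
        print("old parsing fails:", err)  # invalid literal for int() with base 10: 'true'

    # New parsing: "true"/"True"/"1" enable the flag, anything else is False.
    for value in ["1", "true", "True", "0", "yes", ""]:
        print(repr(value), value.lower() in ["true", "1"])
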
@@ -58,7 +58,7 @@ class AverageMeter:
 
 
 def check_dependencies() -> None:
-    if int(os.environ.get("DISABLE_VERSION_CHECK", "0")):
+    if os.environ.get("DISABLE_VERSION_CHECK", "0").lower() in ["true", "1"]:
         logger.warning("Version checking has been disabled, may lead to unexpected behaviors.")
     else:
         require_version("transformers>=4.37.2", "To fix: pip install transformers>=4.37.2")
@@ -71,12 +71,12 @@ def create_web_demo() -> gr.Blocks:
 
 
 def run_web_ui() -> None:
-    gradio_share = bool(int(os.environ.get("GRADIO_SHARE", "0")))
+    gradio_share = os.environ.get("GRADIO_SHARE", "0").lower() in ["true", "1"]
     server_name = os.environ.get("GRADIO_SERVER_NAME", "0.0.0.0")
     create_ui().queue().launch(share=gradio_share, server_name=server_name)
 
 
 def run_web_demo() -> None:
-    gradio_share = bool(int(os.environ.get("GRADIO_SHARE", "0")))
+    gradio_share = os.environ.get("GRADIO_SHARE", "0").lower() in ["true", "1"]
     server_name = os.environ.get("GRADIO_SERVER_NAME", "0.0.0.0")
     create_web_demo().queue().launch(share=gradio_share, server_name=server_name)
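
For the Web UI entry points this means the share flag can now be switched on with either spelling before launch; a short usage sketch (the values are illustrative, not defaults from this commit except where noted):

    import os

    os.environ["GRADIO_SHARE"] = "true"             # "1" works as well
    os.environ["GRADIO_SERVER_NAME"] = "127.0.0.1"  # the code above defaults to "0.0.0.0"
    # run_web_ui() / run_web_demo() would then launch with share=True.
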
@@ -4,7 +4,7 @@ from llmtuner.webui.interface import create_ui
 
 
 def main():
-    gradio_share = bool(int(os.environ.get("GRADIO_SHARE", "0")))
+    gradio_share = os.environ.get("GRADIO_SHARE", "0").lower() in ["true", "1"]
     server_name = os.environ.get("GRADIO_SERVER_NAME", "0.0.0.0")
     create_ui().queue().launch(share=gradio_share, server_name=server_name)
 
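
Since the same membership test now appears in the API server, the trainer callback, the dependency check, and the Web UI launchers, it could be factored into a single helper. The sketch below is hypothetical and not part of this commit; the name get_bool_env is invented for illustration:

    import os

    def get_bool_env(name: str, default: str = "0") -> bool:
        # Mirrors the parsing introduced in this commit: "true"/"1"
        # (case-insensitive) enable the flag, anything else disables it.
        return os.environ.get(name, default).lower() in ["true", "1"]

    # Equivalent to the lines changed above:
    gradio_share = get_bool_env("GRADIO_SHARE")
    webui_mode = get_bool_env("LLAMABOARD_ENABLED")
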