diff --git a/fastdeploy/utils.py b/fastdeploy/utils.py index d59de85df5..668073298a 100644 --- a/fastdeploy/utils.py +++ b/fastdeploy/utils.py @@ -532,7 +532,15 @@ def retrive_model_from_server(model_name_or_path, revision="master"): aistudio_download(repo_id=repo_id, revision=revision, local_dir=local_path) model_name_or_path = local_path except Exception: - raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.") + if os.path.exists(local_path): + llm_logger.error( + f"Failed to connect to aistudio, but detected that the model directory {local_path} exists. Attempting to start." + ) + return local_path + else: + raise Exception( + f"The {revision} of {model_name_or_path} does not exist. Please check the model name or revision." + ) elif model_source == "MODELSCOPE": try: from modelscope.hub.snapshot_download import ( @@ -547,7 +555,9 @@ def retrive_model_from_server(model_name_or_path, revision="master"): modelscope_download(repo_id=repo_id, revision=revision, local_dir=local_path) model_name_or_path = local_path except Exception: - raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.") + raise Exception( + f"The {revision} of {model_name_or_path} does not exist. Please check the model name or revision." + ) elif model_source == "HUGGINGFACE": try: from huggingface_hub._snapshot_download import ( @@ -565,7 +575,9 @@ def retrive_model_from_server(model_name_or_path, revision="master"): huggingface_download(repo_id=repo_id, revision=revision, local_dir=local_path) model_name_or_path = local_path except Exception: - raise Exception(f"The setting model_name_or_path:{model_name_or_path} is not exist.") + raise Exception( + f"The {revision} of {model_name_or_path} does not exist. Please check the model name or revision."
+ ) else: raise ValueError( f"Unsupported model source: {model_source}, please choose one of ['MODELSCOPE', 'AISTUDIO', 'HUGGINGFACE']" diff --git a/requirements.txt b/requirements.txt index f9166c8c28..4717f532a7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -30,6 +30,7 @@ use-triton-in-paddle crcmod fastsafetensors==0.1.14 msgpack +modelscope opentelemetry-api>=1.24.0 opentelemetry-sdk>=1.24.0 opentelemetry-instrumentation-redis diff --git a/scripts/coverage_run.sh b/scripts/coverage_run.sh index 6b6cbbf850..73f51c32a6 100644 --- a/scripts/coverage_run.sh +++ b/scripts/coverage_run.sh @@ -38,7 +38,6 @@ disabled_tests=( operators/test_fused_moe.py layers/test_repetition_early_stopper.py operators/test_stop_generation_multi_ends.py - utils/test_download.py graph_optimization/test_cuda_graph.py ) is_disabled() { diff --git a/test/utils/test_download.py b/test/utils/test_download.py index f479c693f1..44be39cd5e 100644 --- a/test/utils/test_download.py +++ b/test/utils/test_download.py @@ -5,19 +5,14 @@ class TestAistudioDownload(unittest.TestCase): - def test_retrive_model_from_server_MODELSCOPE(self): - os.environ["FD_MODEL_SOURCE"] = "MODELSCOPE" - os.environ["FD_MODEL_CACHE"] = "./models" - - model_name_or_path = "baidu/ERNIE-4.5-0.3B-PT" - revision = "master" - expected_path = f"./models/PaddlePaddle/ERNIE-4.5-0.3B-PT/{revision}" - result = retrive_model_from_server(model_name_or_path, revision) - self.assertEqual(expected_path, result) - - os.environ.clear() + """ + Test cases for downloading models from different sources using FastDeploy utilities. + """ def test_retrive_model_from_server_unsupported_source(self): + """ + Test case for retrieving a model from an unsupported source. 
+ """ os.environ["FD_MODEL_SOURCE"] = "UNSUPPORTED_SOURCE" os.environ["FD_MODEL_CACHE"] = "./models" @@ -27,17 +22,49 @@ def test_retrive_model_from_server_unsupported_source(self): os.environ.clear() - def test_retrive_model_from_server_model_not_exist(self): + def test_retrive_model_from_modelscope_server_model_not_exist(self): + """ + Test case for retrieving a model from ModelScope server when it doesn't exist. + """ os.environ["FD_MODEL_SOURCE"] = "MODELSCOPE" + os.environ["FD_MODEL_CACHE"] = "./model" + + model_name_or_path = "non_existing_model_modelscope" + + with self.assertRaises(Exception): + retrive_model_from_server(model_name_or_path) + + os.environ.clear() + + def test_retrive_model_from_huggingface_server_model_not_exist(self): + """ + Test case for retrieving a model from Hugging Face server when it doesn't exist. + """ + os.environ["FD_MODEL_SOURCE"] = "HUGGINGFACE" os.environ["FD_MODEL_CACHE"] = "./models" - model_name_or_path = "non_existing_model" + model_name_or_path = "non_existing_model_hf" with self.assertRaises(Exception): retrive_model_from_server(model_name_or_path) os.environ.clear() + def test_retrive_model_from_aistudio_server_(self): + """ + Test case for retrieving a model from AI Studio server. + """ + os.environ["FD_MODEL_SOURCE"] = "AISTUDIO" + os.environ["FD_MODEL_CACHE"] = "./models" + + model_name_or_path = "baidu/ERNIE-4.5-0.3B-PT" + revision = "aaa" + expected_path = "./models/PaddlePaddle/ERNIE-4.5-0.3B-PT" + result = retrive_model_from_server(model_name_or_path, revision) + self.assertEqual(expected_path, result) + + os.environ.clear() + if __name__ == "__main__": unittest.main()