diff --git a/fastchat/model/model_adapter.py b/fastchat/model/model_adapter.py
index 1c214f0b3..cbf18e4aa 100644
--- a/fastchat/model/model_adapter.py
+++ b/fastchat/model/model_adapter.py
@@ -48,7 +48,6 @@
     os.environ.get("PEFT_SHARE_BASE_WEIGHTS", "false").lower() == "true"
 )
 
-
 ANTHROPIC_MODEL_LIST = (
     "claude-1",
     "claude-2",
@@ -79,11 +78,17 @@ def load_model(self, model_path: str, from_pretrained_kwargs: dict):
             )
         try:
             model = AutoModelForCausalLM.from_pretrained(
-                model_path, low_cpu_mem_usage=True, **from_pretrained_kwargs
+                model_path,
+                low_cpu_mem_usage=True,
+                trust_remote_code=True,
+                **from_pretrained_kwargs,
             )
         except NameError:
             model = AutoModel.from_pretrained(
-                model_path, low_cpu_mem_usage=True, **from_pretrained_kwargs
+                model_path,
+                low_cpu_mem_usage=True,
+                trust_remote_code=True,
+                **from_pretrained_kwargs,
             )
         return model, tokenizer
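
For context on the change: `trust_remote_code=True` tells the Hugging Face `from_pretrained` loaders to import any custom modeling code bundled with the checkpoint repository, which some architectures require before they can be instantiated through the `Auto*` classes. A minimal sketch of the resulting behavior, using a placeholder checkpoint path (not a real repo):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder for a checkpoint whose config points at repo-provided
# model/tokenizer classes (custom code shipped alongside the weights).
model_path = "org-name/model-with-custom-code"

# Without trust_remote_code=True, transformers refuses to execute the
# repo-supplied classes and the load fails or prompts for confirmation.
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    low_cpu_mem_usage=True,
    trust_remote_code=True,
)
```

Since the patched `load_model` now passes this flag unconditionally in the base adapter, checkpoints with remote code are executed implicitly; that convenience-versus-trust trade-off is the main thing to weigh in review.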