Skip to content
Prev Previous commit
Fixed test error caused by using isinstance() rather than type() for a typecheck.
  • Loading branch information
wangzhen263 committed Sep 1, 2023
commit 061e69a2a5b1f2e3a508144d843f1c56d37a9efd
2 changes: 1 addition & 1 deletion fastchat/model/compression.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ def load_compress_model(model_path, device, torch_dtype, use_fast, revision="mai
# such as chatglm, chatglm2
try:
# google/flan-* models are based on an AutoModelForSeq2SeqLM.
if 'T5Config' in str(type(config))
if 'T5Config' in str(type(config)):
model = AutoModelForSeq2SeqLM.from_config(config, trust_remote_code=True)
else:
model = AutoModelForCausalLM.from_config(config, trust_remote_code=True)
Expand Down