
Commit 3d9a9bd
fixed qwen convert issues.
Zhenzhong1 committed Mar 6, 2024
1 parent 98b5358 commit 3d9a9bd
Showing 1 changed file with 6 additions and 6 deletions.
neural_speed/convert/convert_quantized_qwen.py (12 changes: 6 additions & 6 deletions)
@@ -121,6 +121,8 @@ def main(args_in: Optional[List[str]] = None) -> None:
     model, hparams, quantize_config = load_quantized_safetensors(model_path)
     list_vars = model
 
+    print(hparams)
+
     # orinal QWEN
     # model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True)
     # hparams = model.config.to_dict()
@@ -176,11 +178,11 @@ def main(args_in: Optional[List[str]] = None) -> None:
     f.write(struct.pack("i", 0)) # params["rope_scaling"]["type"] =="yarn" else 0))
 
     f.write(
-        struct.pack("i",
-                    hparams["bos_token_id"] if hparams["bos_token_id"] else tokenizer.special_tokens['<|endoftext|>']))
+        struct.pack(
+            "i", hparams["bos_token_id"] if "bos_token_id" in hparams else tokenizer.special_tokens['<|endoftext|>']))
     f.write(
-        struct.pack("i",
-                    hparams["eos_token_id"] if hparams["eos_token_id"] else tokenizer.special_tokens['<|endoftext|>']))
+        struct.pack(
+            "i", hparams["eos_token_id"] if "eos_token_id" in hparams else tokenizer.special_tokens['<|endoftext|>']))
     f.write(struct.pack("i", tokenizer.pad_token_id if tokenizer.pad_token_id is not None else -1))
     f.write(struct.pack("i", tokenizer.sep_token_id if tokenizer.sep_token_id is not None else -1))

@@ -197,8 +199,6 @@ def main(args_in: Optional[List[str]] = None) -> None:
         f.write(text)
         f.write(struct.pack("f", -10000))
 
-    print(hparams)
-
 def convert_qwen_to_fp32_tensor(src_name, dst_name, model, fout):
     # qwen-gptq is torch.bfloat16 mostly.
     if model[src_name].dtype == torch.float32:
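The substantive fix is in the bos/eos fallback: the old code indexed `hparams["bos_token_id"]` unconditionally, which raises a `KeyError` when a quantized config omits the key (and a legitimate id of 0 is falsy, so it would also hit the fallback), while the new membership test only falls back when the key is genuinely missing. A minimal standalone sketch of the difference, using a hypothetical `FALLBACK_ID` in place of `tokenizer.special_tokens['<|endoftext|>']`:

```python
# Minimal sketch (assumed values, not the real Qwen config) of why the
# membership test is the safer guard when writing the bos/eos token ids.
FALLBACK_ID = 151643  # hypothetical stand-in for tokenizer.special_tokens['<|endoftext|>']

def bos_id_old(hparams: dict) -> int:
    # Old behaviour: indexing the key first raises KeyError when it is absent,
    # and an id of 0 evaluates as falsy, so it would also trigger the fallback.
    return hparams["bos_token_id"] if hparams["bos_token_id"] else FALLBACK_ID

def bos_id_new(hparams: dict) -> int:
    # New behaviour: fall back only when the key is genuinely missing.
    return hparams["bos_token_id"] if "bos_token_id" in hparams else FALLBACK_ID

print(bos_id_new({"bos_token_id": 0}))  # 0, kept as-is
print(bos_id_new({}))                   # 151643, fallback used
# bos_id_old({}) would raise KeyError before the fallback could apply
```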
