[Qwen] support qwen
chuxiaoyi2023 committed Feb 8, 2024
1 parent d736824 · commit c1ccd53
Showing 28 changed files with 162,089 additions and 8 deletions.
5 changes: 5 additions & 0 deletions .gitignore
@@ -0,0 +1,5 @@
tmp*/
build/
*.bmodel
.vscode
*.npz
6 changes: 6 additions & 0 deletions .gitmodules
@@ -0,0 +1,6 @@
[submodule "models/Qwen/demo/third_party/abseil-cpp"]
path = models/Qwen/demo/third_party/abseil-cpp
url = https://github.com/abseil/abseil-cpp.git
[submodule "models/Qwen/demo/third_party/re2"]
path = models/Qwen/demo/third_party/re2
url = https://github.com/google/re2.git
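
The demo pins abseil-cpp and re2 as git submodules, so a plain clone leaves those directories empty; they are fetched with `git submodule update --init --recursive` after cloning (or by cloning with `--recursive` in the first place).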
37 changes: 37 additions & 0 deletions models/Qwen/compile/files/Qwen-14B-Chat/config.json
@@ -0,0 +1,37 @@
{
"architectures": [
"QWenLMHeadModel"
],
"auto_map": {
"AutoConfig": "configuration_qwen.QWenConfig",
"AutoModelForCausalLM": "modeling_qwen.QWenLMHeadModel"
},
"attn_dropout_prob": 0.0,
"bf16": true,
"emb_dropout_prob": 0.0,
"fp16": false,
"fp32": false,
"hidden_size": 5120,
"intermediate_size": 27392,
"initializer_range": 0.02,
"kv_channels": 128,
"layer_norm_epsilon": 1e-06,
"max_position_embeddings": 8192,
"model_type": "qwen",
"no_bias": true,
"num_attention_heads": 40,
"num_hidden_layers": 40,
"onnx_safe": null,
"rotary_emb_base": 10000,
"rotary_pct": 1.0,
"scale_attn_weights": true,
"seq_length": 2048,
"tie_word_embeddings": false,
"tokenizer_class": "QWenTokenizer",
"transformers_version": "4.32.0",
"use_cache": true,
"use_dynamic_ntk": true,
"use_flash_attn": "auto",
"use_logn_attn": true,
"vocab_size": 152064
}
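
The config is internally consistent: hidden_size / num_attention_heads = 5120 / 40 = 128, which matches the declared kv_channels. A minimal Python sketch that loads the file added in this commit and checks this (the path assumes you run it from the repository root):

```python
import json

# Load the Qwen-14B-Chat config shipped for compilation in this commit.
# Adjust the path if running from outside the repository root.
with open("models/Qwen/compile/files/Qwen-14B-Chat/config.json") as f:
    cfg = json.load(f)

# Per-head dimension: 5120 hidden units across 40 attention heads -> 128,
# which should equal the declared kv_channels.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
assert head_dim == cfg["kv_channels"]

print(f"layers={cfg['num_hidden_layers']} heads={cfg['num_attention_heads']} "
      f"head_dim={head_dim} seq_length={cfg['seq_length']} vocab={cfg['vocab_size']}")
```

Because of the `auto_map` entries, loading this config through Hugging Face `transformers` (e.g. `AutoConfig.from_pretrained(path, trust_remote_code=True)`) would dispatch to the custom `configuration_qwen.QWenConfig` class rather than a built-in config type.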
(remaining 25 changed files not shown)
