[Preset] Add Phi-3.5-mini to preset (mlc-ai#2845)
This PR adds `phi-3_5` to the model preset, allowing CI to test `longrope`.
The `phi-3` entry is commented out to save CI time.

Additionally, we add token id `2` (i.e. `</s>`) to the stop token ids of the
`phi-3` conv template, for the same reason as mlc-ai#2455.
CharlieFRuan authored Aug 24, 2024
1 parent 9336aab commit 83d0fe3
Showing 2 changed files with 143 additions and 7 deletions.
2 changes: 1 addition & 1 deletion python/mlc_llm/conversation_template/phi.py
@@ -32,7 +32,7 @@
role_empty_sep="\n",
system_prefix_token_ids=[1],
stop_str=["<|endoftext|>"],
stop_token_ids=[32000, 32001, 32007],
stop_token_ids=[2, 32000, 32001, 32007],
)
)
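
For context, the conv-template change above only affects when decoding halts: generation stops as soon as the model emits any id listed in `stop_token_ids`, so adding id `2` (`</s>`) means a bare end-of-sentence token now also ends the turn. Below is a minimal sketch of that behaviour; the `generate_until_stop` helper and the `sample_next_token` callback are hypothetical stand-ins for illustration, not MLC's actual API.

```python
from typing import Callable, List, Sequence

def generate_until_stop(
    sample_next_token: Callable[[List[int]], int],              # hypothetical: tokens so far -> next token id
    prompt_ids: List[int],
    stop_token_ids: Sequence[int] = (2, 32000, 32001, 32007),   # now includes id 2, i.e. </s>
    max_new_tokens: int = 512,
) -> List[int]:
    """Append sampled tokens until a stop id appears or the budget runs out."""
    out = list(prompt_ids)
    for _ in range(max_new_tokens):
        tok = sample_next_token(out)
        if tok in stop_token_ids:  # stop ids end the turn and are not appended
            break
        out.append(tok)
    return out
```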

148 changes: 142 additions & 6 deletions python/mlc_llm/model/model_preset.py
@@ -439,8 +439,41 @@
"transformers_version": "4.35.2",
"vocab_size": 51200,
},
"phi-3": {
"_name_or_path": "Phi-3-mini-4k-instruct",
# "phi-3": {
# "_name_or_path": "Phi-3-mini-4k-instruct",
# "architectures": ["Phi3ForCausalLM"],
# "attention_dropout": 0.0,
# "auto_map": {
# "AutoConfig": "configuration_phi3.Phi3Config",
# "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM",
# },
# "bos_token_id": 1,
# "embd_pdrop": 0.0,
# "eos_token_id": 32000,
# "hidden_act": "silu",
# "hidden_size": 3072,
# "initializer_range": 0.02,
# "intermediate_size": 8192,
# "max_position_embeddings": 4096,
# "model_type": "phi3",
# "num_attention_heads": 32,
# "num_hidden_layers": 32,
# "num_key_value_heads": 32,
# "original_max_position_embeddings": 4096,
# "pad_token_id": 32000,
# "resid_pdrop": 0.0,
# "rms_norm_eps": 1e-05,
# "rope_scaling": None,
# "rope_theta": 10000.0,
# "sliding_window": 2047,
# "tie_word_embeddings": False,
# "torch_dtype": "bfloat16",
# "transformers_version": "4.39.3",
# "use_cache": True,
# "vocab_size": 32064,
# },
"phi-3_5": {
"_name_or_path": "Phi-3.5-mini-instruct",
"architectures": ["Phi3ForCausalLM"],
"attention_dropout": 0.0,
"auto_map": {
@@ -454,7 +454,7 @@
"hidden_size": 3072,
"initializer_range": 0.02,
"intermediate_size": 8192,
"max_position_embeddings": 4096,
"max_position_embeddings": 131072,
"model_type": "phi3",
"num_attention_heads": 32,
"num_hidden_layers": 32,
@@ -463,13 +463,116 @@
"pad_token_id": 32000,
"resid_pdrop": 0.0,
"rms_norm_eps": 1e-05,
"rope_scaling": None,
"rope_scaling": {
"long_factor": [
1.0800000429153442,
1.1100000143051147,
1.1399999856948853,
1.340000033378601,
1.5899999141693115,
1.600000023841858,
1.6200000047683716,
2.620000123977661,
3.2300000190734863,
3.2300000190734863,
4.789999961853027,
7.400000095367432,
7.700000286102295,
9.09000015258789,
12.199999809265137,
17.670000076293945,
24.46000099182129,
28.57000160217285,
30.420001983642578,
30.840002059936523,
32.590003967285156,
32.93000411987305,
42.320003509521484,
44.96000289916992,
50.340003967285156,
50.45000457763672,
57.55000305175781,
57.93000411987305,
58.21000289916992,
60.1400032043457,
62.61000442504883,
62.62000274658203,
62.71000289916992,
63.1400032043457,
63.1400032043457,
63.77000427246094,
63.93000411987305,
63.96000289916992,
63.970001220703125,
64.02999877929688,
64.06999969482422,
64.08000183105469,
64.12000274658203,
64.41000366210938,
64.4800033569336,
64.51000213623047,
64.52999877929688,
64.83999633789062,
],
"short_factor": [
1.0,
1.0199999809265137,
1.0299999713897705,
1.0299999713897705,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0699999332427979,
1.0999999046325684,
1.1099998950958252,
1.1599998474121094,
1.1599998474121094,
1.1699998378753662,
1.2899998426437378,
1.339999794960022,
1.679999828338623,
1.7899998426437378,
1.8199998140335083,
1.8499997854232788,
1.8799997568130493,
1.9099997282028198,
1.9399996995925903,
1.9899996519088745,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0799996852874756,
2.0899996757507324,
2.189999580383301,
2.2199995517730713,
2.5899994373321533,
2.729999542236328,
2.749999523162842,
2.8399994373321533,
],
"type": "longrope",
},
"rope_theta": 10000.0,
"sliding_window": 2047,
"sliding_window": 262144,
"tie_word_embeddings": False,
"torch_dtype": "bfloat16",
"transformers_version": "4.39.3",
"transformers_version": "4.43.3",
"use_cache": True,
"attention_bias": False,
"vocab_size": 32064,
},
"qwen": {
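
The `rope_scaling` block added above is the `longrope` configuration the commit message refers to: instead of one global scaling factor, each rotary frequency gets its own rescale factor, with `short_factor` used for sequences up to `original_max_position_embeddings` and `long_factor` beyond it (that field is collapsed out of this diff view; it is 4096 in the phi-3 entry and in the upstream Phi-3.5 config). Below is a minimal sketch of how such factors are typically applied, following the Phi-3 reference implementation in Hugging Face `transformers` rather than MLC's own kernels, and assuming the preset dict carries the fields shown in this diff.

```python
import math
import numpy as np

def longrope_inv_freq(config: dict, seq_len: int) -> tuple[np.ndarray, float]:
    """Per-dimension inverse RoPE frequencies plus an attention scaling factor."""
    head_dim = config["hidden_size"] // config["num_attention_heads"]   # 3072 // 32 = 96
    rope = config["rope_scaling"]
    orig_max = config["original_max_position_embeddings"]               # assumed 4096
    # Long sequences use long_factor; short ones use short_factor.
    # Each list has head_dim // 2 = 48 entries, one per rotary frequency.
    factors = np.asarray(
        rope["long_factor"] if seq_len > orig_max else rope["short_factor"],
        dtype=np.float64,
    )
    base = config["rope_theta"]                                         # 10000.0
    inv_freq = 1.0 / (factors * base ** (np.arange(0, head_dim, 2) / head_dim))
    # Extra scaling applied to the cos/sin cache so attention stays calibrated
    # when stretching the 4k training context to 128k.
    scale = config["max_position_embeddings"] / orig_max                # 131072 / 4096 = 32
    attn_factor = 1.0 if scale <= 1.0 else math.sqrt(1.0 + math.log(scale) / math.log(orig_max))
    return inv_freq, attn_factor
```

With a sequence length above 4096 the long factors take over, which is the code path the new `phi-3_5` preset lets CI exercise per the commit message.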
