@@ -92,6 +92,10 @@ const char * llm_type_name(llm_type type) {
         case LLM_TYPE_290B:      return "290B";
         case LLM_TYPE_17B_16E:   return "17Bx16E (Scout)";
         case LLM_TYPE_17B_128E:  return "17Bx128E (Maverick)";
+        case LLM_TYPE_0_6B:      return "0.6B";
+        case LLM_TYPE_1_7B:      return "1.7B";
+        case LLM_TYPE_30B_A3B:   return "30B.A3B";
+        case LLM_TYPE_235B_A22B: return "235B.A22B";
         default:                 return "?B";
     }
 }
@@ -793,6 +797,10 @@ void llama_model::load_hparams(llama_model_loader & ml) {
             {
                 ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps);
                 switch (hparams.n_layer) {
+                    case 28: type = hparams.n_embd == 1024 ? LLM_TYPE_0_6B : LLM_TYPE_1_7B; break;
+                    case 36: type = hparams.n_embd == 2560 ? LLM_TYPE_4B : LLM_TYPE_8B; break;
+                    case 40: type = LLM_TYPE_14B; break;
+                    case 64: type = LLM_TYPE_32B; break;
                     default: type = LLM_TYPE_UNKNOWN;
                 }
             } break;
@@ -802,6 +810,8 @@ void llama_model::load_hparams(llama_model_loader & ml) {
 
                 ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps);
                 switch (hparams.n_layer) {
+                    case 48: type = LLM_TYPE_30B_A3B; break;
+                    case 94: type = LLM_TYPE_235B_A22B; break;
                     default: type = LLM_TYPE_UNKNOWN;
                 }
             } break;
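
Note on the hunks above: the size label is derived purely from hyperparameters read from the model metadata. Below is a minimal standalone sketch of that classification logic, assuming hypothetical helper names (`qwen3_dense_size`, `qwen3_moe_size`) that return the display string directly; in the actual code the switch lives inside `llama_model::load_hparams` and assigns an `llm_type` enum instead.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical mirror of the dense Qwen3 detection added in the diff:
// layer count picks the size, and where two checkpoints share a layer
// count (28 layers: 0.6B vs 1.7B, 36 layers: 4B vs 8B) the embedding
// width is used as the tie-breaker.
static const char * qwen3_dense_size(uint32_t n_layer, uint32_t n_embd) {
    switch (n_layer) {
        case 28: return n_embd == 1024 ? "0.6B" : "1.7B";
        case 36: return n_embd == 2560 ? "4B"   : "8B";
        case 40: return "14B";
        case 64: return "32B";
        default: return "?B";
    }
}

// The MoE variants are unambiguous from the layer count alone.
static const char * qwen3_moe_size(uint32_t n_layer) {
    switch (n_layer) {
        case 48: return "30B.A3B";
        case 94: return "235B.A22B";
        default: return "?B";
    }
}

int main() {
    std::printf("%s\n", qwen3_dense_size(28, 1024)); // prints "0.6B"
    std::printf("%s\n", qwen3_moe_size(94));         // prints "235B.A22B"
    return 0;
}
```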