Skip to content

Commit 8545522

Browse files
committed
modellist: revert a buggy change from #2086
Signed-off-by: Jared Van Bortel <[email protected]>
1 parent cd100c8 commit 8545522

File tree

1 file changed

+31
-15
lines changed

1 file changed

+31
-15
lines changed

gpt4all-chat/modellist.cpp

Lines changed: 31 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -903,7 +903,22 @@ void ModelList::updateData(const QString &id, const QVector<QPair<int, QVariant>
903903
break;
904904
}
905905
case IsEmbeddingModelRole:
906-
info->isEmbeddingModel = value.toBool(); break;
906+
{
907+
if (value.isValid()) {
908+
info->isEmbeddingModel = value.toBool();
909+
} else if (!info->checkedEmbeddingModel) {
910+
auto filename = info->filename();
911+
if (!info->installed || info->isOnline) {
912+
info->isEmbeddingModel = false; // can only check installed offline models
913+
} else {
914+
// read GGUF and decide based on model architecture
915+
auto path = (info->dirpath + filename).toStdString();
916+
info->isEmbeddingModel = LLModel::Implementation::isEmbeddingModel(path);
917+
}
918+
}
919+
info->checkedEmbeddingModel = true;
920+
break;
921+
}
907922
case TemperatureRole:
908923
info->setTemperature(value.toDouble()); break;
909924
case TopPRole:
@@ -956,21 +971,11 @@ void ModelList::updateData(const QString &id, const QVector<QPair<int, QVariant>
956971
}
957972

958973
// Extra guarantee that these always remains in sync with filesystem
959-
QString modelPath = info->dirpath + info->filename();
960-
const QFileInfo fileInfo(modelPath);
974+
const QFileInfo fileInfo(info->dirpath + info->filename());
961975
info->installed = fileInfo.exists();
962976
const QFileInfo incompleteInfo(incompleteDownloadPath(info->filename()));
963977
info->isIncomplete = incompleteInfo.exists();
964978

965-
// check installed, discovered/sideloaded models only (including clones)
966-
if (!info->checkedEmbeddingModel && !info->isEmbeddingModel && info->installed
967-
&& (info->isDiscovered() || info->description().isEmpty()))
968-
{
969-
// read GGUF and decide based on model architecture
970-
info->isEmbeddingModel = LLModel::Implementation::isEmbeddingModel(modelPath.toStdString());
971-
info->checkedEmbeddingModel = true;
972-
}
973-
974979
if (shouldSort) {
975980
auto s = m_discoverSort;
976981
auto d = m_discoverSortDirection;
@@ -999,9 +1004,6 @@ void ModelList::resortModel()
9991004

10001005
void ModelList::updateDataByFilename(const QString &filename, QVector<QPair<int, QVariant>> data)
10011006
{
1002-
if (data.isEmpty())
1003-
return; // no-op
1004-
10051007
QVector<QString> modelsById;
10061008
{
10071009
QMutexLocker locker(&m_mutex);
@@ -1015,6 +1017,12 @@ void ModelList::updateDataByFilename(const QString &filename, QVector<QPair<int,
10151017
return;
10161018
}
10171019

1020+
if (data.isEmpty())
1021+
return;
1022+
1023+
if (data.constLast().first != IsEmbeddingModelRole)
1024+
data.append({ IsEmbeddingModelRole, QVariant() });
1025+
10181026
for (const QString &id : modelsById)
10191027
updateData(id, data);
10201028
}
@@ -1071,6 +1079,7 @@ QString ModelList::clone(const ModelInfo &model)
10711079
{ ModelList::RepeatPenaltyTokensRole, model.repeatPenaltyTokens() },
10721080
{ ModelList::PromptTemplateRole, model.promptTemplate() },
10731081
{ ModelList::SystemPromptRole, model.systemPrompt() },
1082+
{ ModelList::IsEmbeddingModelRole, QVariant() },
10741083
};
10751084
updateData(id, data);
10761085
return id;
@@ -1491,6 +1500,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
14911500
data.append({ ModelList::PromptTemplateRole, obj["promptTemplate"].toString() });
14921501
if (obj.contains("systemPrompt"))
14931502
data.append({ ModelList::SystemPromptRole, obj["systemPrompt"].toString() });
1503+
data.append({ ModelList::IsEmbeddingModelRole, QVariant() });
14941504
updateData(id, data);
14951505
}
14961506

@@ -1521,6 +1531,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
15211531
{ ModelList::QuantRole, "NA" },
15221532
{ ModelList::TypeRole, "GPT" },
15231533
{ ModelList::UrlRole, "https://api.openai.com/v1/chat/completions"},
1534+
{ ModelList::IsEmbeddingModelRole, QVariant() },
15241535
};
15251536
updateData(id, data);
15261537
}
@@ -1549,6 +1560,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
15491560
{ ModelList::QuantRole, "NA" },
15501561
{ ModelList::TypeRole, "GPT" },
15511562
{ ModelList::UrlRole, "https://api.openai.com/v1/chat/completions"},
1563+
{ ModelList::IsEmbeddingModelRole, QVariant() },
15521564
};
15531565
updateData(id, data);
15541566
}
@@ -1580,6 +1592,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
15801592
{ ModelList::QuantRole, "NA" },
15811593
{ ModelList::TypeRole, "Mistral" },
15821594
{ ModelList::UrlRole, "https://api.mistral.ai/v1/chat/completions"},
1595+
{ ModelList::IsEmbeddingModelRole, QVariant() },
15831596
};
15841597
updateData(id, data);
15851598
}
@@ -1605,6 +1618,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
16051618
{ ModelList::QuantRole, "NA" },
16061619
{ ModelList::TypeRole, "Mistral" },
16071620
{ ModelList::UrlRole, "https://api.mistral.ai/v1/chat/completions"},
1621+
{ ModelList::IsEmbeddingModelRole, QVariant() },
16081622
};
16091623
updateData(id, data);
16101624
}
@@ -1631,6 +1645,7 @@ void ModelList::parseModelsJsonFile(const QByteArray &jsonData, bool save)
16311645
{ ModelList::QuantRole, "NA" },
16321646
{ ModelList::TypeRole, "Mistral" },
16331647
{ ModelList::UrlRole, "https://api.mistral.ai/v1/chat/completions"},
1648+
{ ModelList::IsEmbeddingModelRole, QVariant() },
16341649
};
16351650
updateData(id, data);
16361651
}
@@ -1766,6 +1781,7 @@ void ModelList::updateModelsFromSettings()
17661781
const QString systemPrompt = settings.value(g + "/systemPrompt").toString();
17671782
data.append({ ModelList::SystemPromptRole, systemPrompt });
17681783
}
1784+
data.append({ ModelList::IsEmbeddingModelRole, QVariant() });
17691785
updateData(id, data);
17701786
}
17711787
}

0 commit comments

Comments (0)