This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit db93ded

Merge pull request #197 from janhq/196-bug-issue-with-model-loading
bug: fix the model loaded result in error
2 parents: 710342e + 2835f90 · commit db93ded

1 file changed: 19 additions, 0 deletions

controllers/llamaCPP.cc

@@ -149,6 +149,15 @@ void llamaCPP::chatCompletion(
     const HttpRequestPtr &req,
     std::function<void(const HttpResponsePtr &)> &&callback) {
 
+  if (!model_loaded) {
+    Json::Value jsonResp;
+    jsonResp["message"] =
+        "Model has not been loaded, please load model into nitro";
+    auto resp = nitro_utils::nitroHttpJsonResponse(jsonResp);
+    resp->setStatusCode(drogon::k409Conflict);
+    callback(resp);
+  }
+
   const auto &jsonBody = req->getJsonObject();
   std::string formatted_output = pre_prompt;
 
@@ -338,6 +347,16 @@ void llamaCPP::loadModel(
     const HttpRequestPtr &req,
     std::function<void(const HttpResponsePtr &)> &&callback) {
 
+  if (model_loaded) {
+    LOG_INFO << "model loaded";
+    Json::Value jsonResp;
+    jsonResp["message"] = "Model already loaded";
+    auto resp = nitro_utils::nitroHttpJsonResponse(jsonResp);
+    resp->setStatusCode(drogon::k409Conflict);
+    callback(resp);
+    return;
+  }
+
   const auto &jsonBody = req->getJsonObject();
 
   gpt_params params;
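
For context, below is a minimal sketch of the guard pattern these two hunks introduce, written as standalone Drogon handlers rather than the actual llamaCPP controller methods. The makeConflictResponse helper, the free functions, and the final "Model loaded" success path are illustrative assumptions; the real code builds its response with nitro_utils::nitroHttpJsonResponse inside the class. The sketch also places an explicit return after callback(resp) in both guards, which the loadModel hunk has but the chatCompletion hunk does not.

// Sketch of the guard pattern added in this commit, assuming Drogon and
// jsoncpp are available. Handler and helper names are illustrative, not the
// repository's actual API.
#include <atomic>
#include <functional>
#include <string>

#include <drogon/HttpRequest.h>
#include <drogon/HttpResponse.h>
#include <json/json.h>

using drogon::HttpRequestPtr;
using drogon::HttpResponsePtr;

static std::atomic<bool> model_loaded{false};

// Illustrative stand-in for nitro_utils::nitroHttpJsonResponse: build a JSON
// response carrying a single "message" field with a 409 status code.
static HttpResponsePtr makeConflictResponse(const std::string &message) {
  Json::Value jsonResp;
  jsonResp["message"] = message;
  auto resp = drogon::HttpResponse::newHttpJsonResponse(jsonResp);
  resp->setStatusCode(drogon::k409Conflict);
  return resp;
}

// Reject inference requests until a model has been loaded.
void chatCompletion(const HttpRequestPtr &req,
                    std::function<void(const HttpResponsePtr &)> &&callback) {
  if (!model_loaded) {
    callback(makeConflictResponse(
        "Model has not been loaded, please load model into nitro"));
    return;  // early return keeps the handler from falling through
  }
  // ... normal chat-completion path would continue here ...
}

// Make loading idempotent: a second load request is answered with 409.
void loadModel(const HttpRequestPtr &req,
               std::function<void(const HttpResponsePtr &)> &&callback) {
  if (model_loaded) {
    callback(makeConflictResponse("Model already loaded"));
    return;
  }
  // ... actual model loading would happen here, then mark success ...
  model_loaded = true;
  Json::Value ok;
  ok["message"] = "Model loaded";
  callback(drogon::HttpResponse::newHttpJsonResponse(ok));
}

Returning 409 Conflict fits here because the request conflicts with the server's current state (no model loaded yet, or a model already loaded), which lets clients tell this apart from a malformed request (400) or a server fault (500).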

0 commit comments