
Commit

Merge pull request #197 from janhq/196-bug-issue-with-model-loading
bug: return an error when the model is not loaded (chatCompletion) or already loaded (loadModel)
tikikun authored Nov 27, 2023
2 parents 710342e + 2835f90 commit db93ded
controllers/llamaCPP.cc
@@ -149,6 +149,15 @@ void llamaCPP::chatCompletion(
    const HttpRequestPtr &req,
    std::function<void(const HttpResponsePtr &)> &&callback) {

  if (!model_loaded) {
    // No model loaded yet: answer with 409 Conflict and stop handling the request.
    Json::Value jsonResp;
    jsonResp["message"] =
        "Model has not been loaded, please load model into nitro";
    auto resp = nitro_utils::nitroHttpJsonResponse(jsonResp);
    resp->setStatusCode(drogon::k409Conflict);
    callback(resp);
    return;
  }

  const auto &jsonBody = req->getJsonObject();
  std::string formatted_output = pre_prompt;

@@ -338,6 +347,16 @@ void llamaCPP::loadModel(
    const HttpRequestPtr &req,
    std::function<void(const HttpResponsePtr &)> &&callback) {

  if (model_loaded) {
    // The model is already loaded; report it and bail out instead of loading it twice.
    LOG_INFO << "model loaded";
    Json::Value jsonResp;
    jsonResp["message"] = "Model already loaded";
    auto resp = nitro_utils::nitroHttpJsonResponse(jsonResp);
    resp->setStatusCode(drogon::k409Conflict);
    callback(resp);
    return;
  }

  const auto &jsonBody = req->getJsonObject();

  gpt_params params;

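Both hunks apply the same pattern: check the `model_loaded` flag, send a 409 Conflict JSON response through the Drogon callback, and return so the rest of the handler never runs. The snippet below is a minimal, framework-free sketch of that guard for illustration only; the `Callback` alias, `chatCompletionSketch` function, `respond`-style status constants, and the `main` driver are hypothetical stand-ins, not nitro or Drogon APIs.

```cpp
// Sketch of the early-return guard used in both handlers above.
#include <functional>
#include <iostream>
#include <string>

constexpr int kConflict = 409;
constexpr int kOk = 200;

using Callback = std::function<void(int status, const std::string &body)>;

void chatCompletionSketch(bool model_loaded, const Callback &callback) {
  if (!model_loaded) {
    // Answer immediately and stop; without the return the handler would
    // keep running and try to build a completion with no model available.
    callback(kConflict, "Model has not been loaded, please load model into nitro");
    return;
  }
  callback(kOk, "completion goes here");
}

int main() {
  auto print = [](int status, const std::string &body) {
    std::cout << status << ": " << body << "\n";
  };
  chatCompletionSketch(false, print); // 409: Model has not been loaded, ...
  chatCompletionSketch(true, print);  // 200: completion goes here
}
```

The guard runs before the request body is even parsed, so a client that skips the load step gets a clear 409 instead of an undefined result from a handler operating on an unloaded model.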