From af3bdb99ea090c04279ebc36a102a25dff8691be Mon Sep 17 00:00:00 2001
From: nhu anh thu
Date: Thu, 2 Nov 2023 18:12:43 +0700
Subject: [PATCH] fix wrong api endpoint - handler mapping

---
 controllers/llamaCPP.cc | 2 +-
 controllers/llamaCPP.h  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/controllers/llamaCPP.cc b/controllers/llamaCPP.cc
index cd167c326..49fc290ff 100644
--- a/controllers/llamaCPP.cc
+++ b/controllers/llamaCPP.cc
@@ -348,12 +348,12 @@ void llamaCPP::loadModel(
 void inferences::llamaCPP::unloadModel(const HttpRequestPtr &req,
                                        std::function<void(const HttpResponsePtr &)> &&callback) {
   Json::Value jsonResp;
+  jsonResp["message"] = "No model loaded";
   if (model_loaded) {
     llama.unloadModel();
     model_loaded = false;
     jsonResp["message"] = "Model unloaded successfully";
   }
-  jsonResp["message"] = "No model loaded";
   auto resp = nitro_utils::nitroHttpJsonResponse(jsonResp);
   callback(resp);
 }
diff --git a/controllers/llamaCPP.h b/controllers/llamaCPP.h
index fbbc1842a..67b5d979c 100644
--- a/controllers/llamaCPP.h
+++ b/controllers/llamaCPP.h
@@ -1279,7 +1279,7 @@ class llamaCPP : public drogon::HttpController<llamaCPP> {
   METHOD_ADD(llamaCPP::chatCompletion, "chat_completion", Post);
   METHOD_ADD(llamaCPP::embedding, "embedding", Post);
   METHOD_ADD(llamaCPP::loadModel, "loadmodel", Post);
-  METHOD_ADD(llamaCPP::loadModel, "unloadmodel", Delete);
+  METHOD_ADD(llamaCPP::unloadModel, "unloadmodel", Delete);
   // PATH_ADD("/llama/chat_completion", Post);
   METHOD_LIST_END
   void chatCompletion(const HttpRequestPtr &req,
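
For context, below is a minimal standalone Drogon sketch, not taken from the Nitro sources, of the two behaviours this patch corrects: the "No model loaded" default is set before the conditional so a successful unload can overwrite it, and METHOD_ADD routes the "unloadmodel" endpoint to unloadModel instead of loadModel. The controller name demoCtrl, its model_loaded flag, the response text, and the use of Drogon's stock HttpResponse::newHttpJsonResponse in place of the repo's nitro_utils::nitroHttpJsonResponse helper are all illustrative assumptions.

// Hypothetical controller, for illustration only; it mirrors the handler
// signature and METHOD_ADD mapping used in controllers/llamaCPP.h.
#include <drogon/HttpController.h>
#include <json/json.h>

#include <functional>

using namespace drogon;

class demoCtrl : public drogon::HttpController<demoCtrl> {
public:
  METHOD_LIST_BEGIN
  METHOD_ADD(demoCtrl::loadModel, "loadmodel", Post);
  // After the fix: the DELETE "unloadmodel" route points at unloadModel.
  METHOD_ADD(demoCtrl::unloadModel, "unloadmodel", Delete);
  METHOD_LIST_END

  void loadModel(const HttpRequestPtr &req,
                 std::function<void(const HttpResponsePtr &)> &&callback) {
    model_loaded = true;  // stand-in for the real model loading logic
    Json::Value jsonResp;
    jsonResp["message"] = "Model loaded successfully";
    callback(HttpResponse::newHttpJsonResponse(jsonResp));
  }

  void unloadModel(const HttpRequestPtr &req,
                   std::function<void(const HttpResponsePtr &)> &&callback) {
    Json::Value jsonResp;
    // Set the fallback first; it is only overwritten when a model is loaded,
    // matching the reordering in the first hunk.
    jsonResp["message"] = "No model loaded";
    if (model_loaded) {
      model_loaded = false;
      jsonResp["message"] = "Model unloaded successfully";
    }
    callback(HttpResponse::newHttpJsonResponse(jsonResp));
  }

private:
  bool model_loaded = false;
};

int main() {
  // Drogon prefixes METHOD_ADD paths with the controller name, so the routes
  // become /demoCtrl/loadmodel (POST) and /demoCtrl/unloadmodel (DELETE).
  drogon::app().addListener("127.0.0.1", 8080).run();
  return 0;
}

With this sketch running, a DELETE to /demoCtrl/unloadmodel before any load returns {"message":"No model loaded"}, and after a POST to /demoCtrl/loadmodel it returns {"message":"Model unloaded successfully"}, which is the behaviour the reordered assignment restores.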