From c434ac284ebee4beb5d0727c64a8951e7f181e60 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Tue, 22 Oct 2024 15:16:57 +0700
Subject: [PATCH] fix: improve error message when load model failed (#1533)

* fix: error for models start

* fix: add unit tests

---------

Co-authored-by: vansangpfiev
---
 engine/cli/commands/model_start_cmd.cc     |  6 ++--
 engine/services/inference_service.cc       |  6 ++--
 engine/services/model_service.cc           |  5 ++--
 engine/test/components/test_json_helper.cc | 35 ++++++++++++++++++++++
 engine/utils/json_helper.h                 | 11 +++++++
 5 files changed, 56 insertions(+), 7 deletions(-)
 create mode 100644 engine/test/components/test_json_helper.cc
 create mode 100644 engine/utils/json_helper.h

diff --git a/engine/cli/commands/model_start_cmd.cc b/engine/cli/commands/model_start_cmd.cc
index 8f2549dcb..d806a4f13 100644
--- a/engine/cli/commands/model_start_cmd.cc
+++ b/engine/cli/commands/model_start_cmd.cc
@@ -6,6 +6,7 @@
 #include "run_cmd.h"
 #include "server_start_cmd.h"
 #include "utils/cli_selection_utils.h"
+#include "utils/json_helper.h"
 #include "utils/logging_utils.h"
 
 namespace commands {
@@ -14,7 +15,7 @@ bool ModelStartCmd::Exec(const std::string& host, int port,
   std::optional<std::string> model_id =
       SelectLocalModel(model_service_, model_handle);
 
-  if(!model_id.has_value()) {
+  if (!model_id.has_value()) {
     return false;
   }
 
@@ -41,7 +42,8 @@ bool ModelStartCmd::Exec(const std::string& host, int port,
                 << *model_id << "` for interactive chat shell");
     return true;
   } else {
-    CTL_ERR("Model failed to load with status code: " << res->status);
+    auto root = json_helper::ParseJsonString(res->body);
+    CLI_LOG(root["message"].asString());
     return false;
   }
 } else {
diff --git a/engine/services/inference_service.cc b/engine/services/inference_service.cc
index aff72f802..35cdab098 100644
--- a/engine/services/inference_service.cc
+++ b/engine/services/inference_service.cc
@@ -140,9 +140,9 @@ InferResult InferenceService::LoadModel(
   if (IsEngineLoaded(kLlamaRepo) && ne == kTrtLlmRepo) {
     // Remove llamacpp dll directory
     if (!RemoveDllDirectory(engines_[kLlamaRepo].cookie)) {
-      LOG_INFO << "Could not remove dll directory: " << kLlamaRepo;
+      LOG_WARN << "Could not remove dll directory: " << kLlamaRepo;
     } else {
-      LOG_WARN << "Removed dll directory: " << kLlamaRepo;
+      LOG_INFO << "Removed dll directory: " << kLlamaRepo;
     }
 
     add_dll(ne, abs_path);
@@ -158,7 +158,7 @@ InferResult InferenceService::LoadModel(
     LOG_ERROR << "Could not load engine: " << e.what();
     engines_.erase(ne);
 
-    r["message"] = "Could not load engine " + ne;
+    r["message"] = "Could not load engine " + ne + ": " + e.what();
     stt["status_code"] = k500InternalServerError;
     return std::make_pair(stt, r);
   }
diff --git a/engine/services/model_service.cc b/engine/services/model_service.cc
index 57a26c1be..fb6875119 100644
--- a/engine/services/model_service.cc
+++ b/engine/services/model_service.cc
@@ -14,6 +14,7 @@
 #include "utils/logging_utils.h"
 #include "utils/result.hpp"
 #include "utils/string_utils.h"
+#include "utils/json_helper.h"
 
 namespace {
 void ParseGguf(const DownloadItem& ggufDownloadItem,
@@ -622,9 +623,9 @@ cpp::result<bool, std::string> ModelService::StartModel(
       CTL_INF("Model '" + model_handle + "' is already loaded");
       return true;
     } else {
+      auto root = json_helper::ParseJsonString(res->body);
       CTL_ERR("Model failed to load with status code: " << res->status);
-      return cpp::fail("Model failed to load with status code: " +
-                       std::to_string(res->status));
+      return cpp::fail("Model failed to start: " + root["message"].asString());
     }
   } else {
     auto err = res.error();
diff --git a/engine/test/components/test_json_helper.cc b/engine/test/components/test_json_helper.cc
new file mode 100644
index 000000000..cb3f4683a
--- /dev/null
+++ b/engine/test/components/test_json_helper.cc
@@ -0,0 +1,35 @@
+#include <gtest/gtest.h>
+#include <json/json.h>
+#include <string>
+#include "utils/json_helper.h"
+
+// Test Suite
+TEST(ParseJsonStringTest, ValidJsonObject) {
+  std::string json_str = R"({"name": "John", "age": 30})";
+  Json::Value result = json_helper::ParseJsonString(json_str);
+
+  EXPECT_EQ(result["name"].asString(), "John");
+  EXPECT_EQ(result["age"].asInt(), 30);
+}
+
+TEST(ParseJsonStringTest, ValidJsonArray) {
+  std::string json_str = R"([1, 2, 3, 4, 5])";
+  Json::Value result = json_helper::ParseJsonString(json_str);
+
+  EXPECT_EQ(result.size(), 5);
+  EXPECT_EQ(result[0].asInt(), 1);
+}
+
+TEST(ParseJsonStringTest, InvalidJson) {
+  std::string json_str = R"({"name": "John", "age": )";
+  Json::Value result = json_helper::ParseJsonString(json_str);
+
+  EXPECT_TRUE(result["age"].isNull());
+}
+
+TEST(ParseJsonStringTest, EmptyString) {
+  std::string json_str = "";
+  Json::Value result = json_helper::ParseJsonString(json_str);
+
+  EXPECT_TRUE(result.isNull());
+}
diff --git a/engine/utils/json_helper.h b/engine/utils/json_helper.h
new file mode 100644
index 000000000..3a2023fef
--- /dev/null
+++ b/engine/utils/json_helper.h
@@ -0,0 +1,11 @@
+#pragma once
+#include <json/json.h>
+#include <string>
+namespace json_helper {
+inline Json::Value ParseJsonString(const std::string& json_str) {
+  Json::Value root;
+  Json::Reader reader;
+  reader.parse(json_str, root);
+  return root;
+}
+}
\ No newline at end of file
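
Note (not part of the patch): the change routes the server's JSON error body through json_helper::ParseJsonString and surfaces only its "message" field, so a failed start reports the underlying cause (e.g. "Could not load engine " + ne + ": " + e.what()) instead of a bare HTTP status code. Below is a minimal, standalone usage sketch of the new helper; the payload string is a hypothetical example, not output captured from the engine.

    #include <iostream>
    #include <string>
    #include "utils/json_helper.h"

    int main() {
      // Hypothetical error body as the server might return it when a model fails to load.
      std::string body = R"({"message": "Could not load engine: model file not found"})";

      // Parse the body and print only the human-readable message,
      // mirroring what ModelStartCmd::Exec now logs via CLI_LOG.
      Json::Value root = json_helper::ParseJsonString(body);
      std::cout << root["message"].asString() << std::endl;
      return 0;
    }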