From b8ca665652fd272a156859f1224d1dddf3755fd3 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Sat, 8 Jun 2024 15:59:12 +0700
Subject: [PATCH 1/3] test: CI

---
 cortex-cpp/controllers/server.cc | 1 +
 1 file changed, 1 insertion(+)

diff --git a/cortex-cpp/controllers/server.cc b/cortex-cpp/controllers/server.cc
index 8bdab4757..6d7c20941 100644
--- a/cortex-cpp/controllers/server.cc
+++ b/cortex-cpp/controllers/server.cc
@@ -59,6 +59,7 @@ void server::ChatCompletion(
     ProcessNonStreamRes(std::move(callback), *q);
   }
   LOG_TRACE << "Done chat completion";
+  LOG_DEBUG << "Test";
 }
 
 void server::Embedding(const HttpRequestPtr& req,

From 7ff1063f991705eb70b8dc98d53744443aa6ac65 Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Sat, 8 Jun 2024 16:23:44 +0700
Subject: [PATCH 2/3] test: change windows agent

---
 .github/workflows/cortex-cpp-quality-gate.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/cortex-cpp-quality-gate.yml b/.github/workflows/cortex-cpp-quality-gate.yml
index e84efa4e7..f5d686845 100644
--- a/.github/workflows/cortex-cpp-quality-gate.yml
+++ b/.github/workflows/cortex-cpp-quality-gate.yml
@@ -80,28 +80,28 @@ jobs:
 
           - os: "windows"
             name: "amd64-avx2"
-            runs-on: "windows-latest"
+            runs-on: "windows-cuda-12-0"
             cmake-flags: "-DLLAMA_AVX2=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
             run-e2e: true
             run-python-e2e: true
 
           - os: "windows"
             name: "amd64-avx"
-            runs-on: "windows-latest"
+            runs-on: "windows-cuda-12-0"
             cmake-flags: "-DLLAMA_AVX2=OFF -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
             run-e2e: false
             run-python-e2e: false
 
           - os: "windows"
             name: "amd64-avx512"
-            runs-on: "windows-latest"
+            runs-on: "windows-cuda-12-0"
             cmake-flags: "-DLLAMA_AVX512=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
             run-e2e: false
             run-python-e2e: false
 
           - os: "windows"
             name: "amd64-vulkan"
-            runs-on: "windows-latest"
+            runs-on: "windows-cuda-12-0"
             cmake-flags: "-DLLAMA_VULKAN=ON -DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DBUILD_SHARED_LIBS=OFF -DCMAKE_BUILD_TYPE=RELEASE"
             run-e2e: false
             run-python-e2e: false

From 9ef51358f99293c2e0efa9594b0addeeabb638cf Mon Sep 17 00:00:00 2001
From: vansangpfiev
Date: Sat, 8 Jun 2024 16:46:50 +0700
Subject: [PATCH 3/3] fix: print logs windows

---
 .github/scripts/e2e-test-llama-windows.bat | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/scripts/e2e-test-llama-windows.bat b/.github/scripts/e2e-test-llama-windows.bat
index c54c741d4..ac7481b98 100644
--- a/.github/scripts/e2e-test-llama-windows.bat
+++ b/.github/scripts/e2e-test-llama-windows.bat
@@ -31,7 +31,7 @@ set /a range=max-min+1
 set /a PORT=%min% + %RANDOM% %% %range%
 
 rem Start the binary file
-start /B "" "%BINARY_PATH%" 1 "127.0.0.1" %PORT% > %TEMP%\cortex-cpp.log 2>&1
+start /B "" "%BINARY_PATH%" 1 "127.0.0.1" %PORT%
 
 ping -n 6 127.0.0.1 %PORT% > nul
 