Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update CI & fix failures #232

Merged
merged 1 commit into from
Jul 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .clang-tidy
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ Checks: "*,\
-llvmlibc-restrict-system-libc-headers,\
-llvmlibc-callee-namespace,\
-llvmlibc-implementation-in-namespace,\
-llvmlibc-inline-function-decl,\
-altera-*,\
-fuchsia-*,\
-google-readability-namespace-comments,\
Expand All @@ -14,6 +15,7 @@ Checks: "*,\
-modernize-deprecated-headers,\
-modernize-use-trailing-return-type,\
-modernize-concat-nested-namespaces,\
-modernize-type-traits,\
-hicpp-special-member-functions,\
-hicpp-vararg,\
-hicpp-no-malloc,\
Expand All @@ -36,15 +38,20 @@ Checks: "*,\
-cppcoreguidelines-pro-type-union-access,\
-misc-non-private-member-variables-in-classes,\
-misc-no-recursion,\
-misc-include-cleaner,\
-readability-magic-numbers,\
-readability-implicit-bool-conversion,\
-readability-braces-around-statements,\
-readability-isolate-declaration,\
-readability-identifier-length,\
-readability-function-cognitive-complexity,\
-readability-avoid-nested-conditional-operator,\
-bugprone-unused-return-value,\
-bugprone-easily-swappable-parameters,\
-bugprone-exception-escape,\
-bugprone-optional-value-conversion,\
-cert-err58-cpp,\
-performance-avoid-endl,\
-performance-enum-size,\
-clang-analyzer-optin.performance.Padding"

2 changes: 1 addition & 1 deletion .github/workflows/kafka_api_bazel_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:
env:
KAFKA_SRC_LINK: https://archive.apache.org/dist/kafka/3.3.1/kafka_2.13-3.3.1.tgz
CPU_CORE_NUM: 2
LIBRDKAFKA_TAG: v2.0.2
LIBRDKAFKA_TAG: v2.4.0

jobs:
kafka-api-bazel-build:
Expand Down
43 changes: 22 additions & 21 deletions .github/workflows/kafka_api_ci_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:
env:
KAFKA_SRC_LINK: https://archive.apache.org/dist/kafka/3.3.1/kafka_2.13-3.3.1.tgz
CPU_CORE_NUM: 2
LIBRDKAFKA_TAG: v2.0.2
LIBRDKAFKA_TAG: v2.4.0
BUILD_SUB_DIR: builds/sub-build

jobs:
Expand Down Expand Up @@ -49,48 +49,48 @@ jobs:
test-labels: unit|integration
enable-ut-stubs: true

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: g++
build-type: Release
test-labels: robustness

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: g++
build-type: Release
cxx-standard: 14
test-labels: unit|integration

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: g++
check-option: asan
test-labels: unit|integration

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: g++
check-option: tsan
test-labels: unit|integration

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: g++
check-option: ubsan
test-labels: unit|integration

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: clang++
test-labels: unit|integration
generate-doc: true
with-installation: true

- os: ubuntu-22.04
- os: ubuntu-24.04
build-cxx: clang++
check-option: clang-tidy
enable-ut-stubs: true

- os: ubuntu-20.04
- os: ubuntu-22.04
build-cxx: g++
test-labels: unit|integration

- os: ubuntu-20.04
- os: ubuntu-22.04
build-cxx: clang++
test-labels: robustness

Expand Down Expand Up @@ -155,8 +155,7 @@ jobs:
# 6. Install tools to generate document
if [ ${GENERATE_DOC} ]; then
sudo apt install -y python3-pip
sudo pip3 install markdown
sudo apt install -y python3-markdown
sudo apt install -y doxygen
fi
Expand Down Expand Up @@ -300,15 +299,17 @@ jobs:
# Install googletest
vcpkg install gtest
cp -v "C:\VCPKG\INSTALLED\x86-windows\lib\manual-link\gtest_main*" "C:\VCPKG\INSTALLED\x86-windows\lib\"
cp -v "C:\VCPKG\INSTALLED\x86-windows\lib\manual-link\gtest_main*" "C:\VCPKG\INSTALLED\x86-windows\lib\"
cp -v "C:\VCPKG\INSTALLED\x64-windows\lib\manual-link\gtest_main*" "C:\VCPKG\INSTALLED\x64-windows\lib\"
cp -v "C:\VCPKG\INSTALLED\x64-windows\lib\manual-link\gtest_main*" "C:\VCPKG\INSTALLED\x64-windows\lib\"
# Install boost headers/libraries
vcpkg install boost-optional
vcpkg install boost-algorithm
vcpkg install boost-program-options
cp -v "C:\VCPKG\INSTALLED\x86-windows\lib\boost_program_options-vc140-mt.lib" "C:\VCPKG\INSTALLED\x86-windows\lib\boost_program_options.lib"
ls "C:\VCPKG\INSTALLED\x64-windows\lib"
cp -v "C:\VCPKG\INSTALLED\x64-windows\lib\boost_program_options-vc144-mt-x64-1_85.lib" "C:\VCPKG\INSTALLED\x64-windows\lib\boost_program_options.lib"
# Install rapidjson
vcpkg install rapidjson
Expand All @@ -319,13 +320,13 @@ jobs:
run: |
cd $Env:BUILD_SUB_DIR
$Env:GTEST_ROOT='C:\VCPKG\INSTALLED\x86-windows\'
$Env:BOOST_ROOT='C:\VCPKG\INSTALLED\x86-windows\'
$Env:LIBRDKAFKA_INCLUDE_DIR='C:\VCPKG\INSTALLED\x86-windows\include\'
$Env:LIBRDKAFKA_LIBRARY_DIR='C:\VCPKG\INSTALLED\x86-windows\lib\'
$Env:RAPIDJSON_INCLUDE_DIRS='C:\VCPKG\INSTALLED\x86-windows\include\'
$Env:GTEST_ROOT='C:\VCPKG\INSTALLED\x64-windows\'
$Env:BOOST_ROOT='C:\VCPKG\INSTALLED\x64-windows\'
$Env:LIBRDKAFKA_INCLUDE_DIR='C:\VCPKG\INSTALLED\x64-windows\include\'
$Env:LIBRDKAFKA_LIBRARY_DIR='C:\VCPKG\INSTALLED\x64-windows\lib\'
$Env:RAPIDJSON_INCLUDE_DIRS='C:\VCPKG\INSTALLED\x64-windows\include\'
cmake -B ./ -A Win32 -S ../.. "-DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake"
cmake -B ./ -A x64 -S ../.. "-DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake"
- name: Build
run: |
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/kafka_api_demo_conan_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ jobs:

- name: Prepare
run: |
pip3 install conan==1.59.0
pip3 install conan==1.64.1
- name: Build (non-windows)
if: ${{!contains(matrix.os, 'windows')}}
Expand Down
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ if (NOT parent_directory)
set(cppkafka_master_project ON)
# Use Strict Options
if ((CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR (CMAKE_CXX_COMPILER_ID STREQUAL "GNU"))
add_compile_options("-Wall" "-Werror" "-Wextra" "-Wshadow" "-Wno-unused-result")
add_compile_options("-Wall" "-Werror" "-Wextra" "-Wshadow" "-Wno-unused-result" "-Wno-array-bounds")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
endif ()
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ About the *Modern C++ Kafka API*

The [modern-cpp-kafka API](http://opensource.morganstanley.com/modern-cpp-kafka/doxygen/annotated.html) is a layer of ***C++*** wrapper based on [librdkafka](https://github.com/confluentinc/librdkafka) (the ***C*** part only), with high quality, but more friendly to users.

- By now, [modern-cpp-kafka](https://github.com/morganstanley/modern-cpp-kafka) is compatible with [librdkafka v2.0.2](https://github.com/confluentinc/librdkafka/releases/tag/v2.0.2).
- By now, [modern-cpp-kafka](https://github.com/morganstanley/modern-cpp-kafka) is compatible with [librdkafka v2.4.0](https://github.com/confluentinc/librdkafka/releases/tag/v2.4.0).


```
Expand Down
4 changes: 3 additions & 1 deletion examples/example_ProducerRecordHeaders.cc
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
#include <kafka/KafkaProducer.h>
#include <kafka/ProducerRecord.h>
#include <kafka/Types.h>

#include <cstddef>
#include <iostream>
#include <string>

Expand Down
1 change: 1 addition & 0 deletions examples/kafka_auto_commit_consumer.cc
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
#include "kafka/KafkaConsumer.h"

#include <chrono>
#include <iostream>
#include <string>

Expand Down
6 changes: 3 additions & 3 deletions include/kafka/Log.h
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@ template <std::size_t MAX_CAPACITY>
class LogBuffer
{
public:
LogBuffer():_wptr(_buf.data()) { _buf[0] = 0; } // NOLINT
LogBuffer() { clear(); }

LogBuffer& clear()
{
_wptr = _buf.data();
_buf[0] = 0;
_wptr = _buf.data();
return *this;
}

Expand All @@ -72,7 +72,7 @@ class LogBuffer

private:
std::array<char, MAX_CAPACITY> _buf;
char* _wptr;
char* _wptr = nullptr;
};


Expand Down
1 change: 1 addition & 0 deletions include/kafka/addons/KafkaMetrics.h
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
#include <rapidjson/writer.h>

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <sstream>
#include <stdexcept>
Expand Down
2 changes: 1 addition & 1 deletion scripts/start-local-kafka-cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ def main():
cmd = 'lsof -nP -iTCP:{0} | grep LISTEN'.format(brokerPort)
cmdCall = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
(out, err) = cmdCall.communicate();
matched = re.search('[^\s-]+ +([0-9]+) +.*', out.decode('utf-8'))
matched = re.search(r'[^\s-]+ +([0-9]+) +.*', out.decode('utf-8'))
if matched:
kafkaBrokerPids.append(matched.group(1))

Expand Down
2 changes: 0 additions & 2 deletions tests/integration/TestKafkaConsumer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@

#include "gtest/gtest.h"

#include <boost/algorithm/string.hpp>

#include <atomic>
#include <chrono>
#include <cstring>
Expand Down
8 changes: 4 additions & 4 deletions tests/integration/TestKafkaRecoverableProducer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -104,11 +104,11 @@ TEST(KafkaRecoverableProducer, MockFatalError)
{
auto toSend = messagesToSend.front();
{
std::lock_guard<std::mutex> lock(messagesMutex);
const std::lock_guard<std::mutex> lock(messagesMutex);
messagesToSend.pop_front();
}

std::shared_ptr<std::string> payload = std::make_shared<std::string>(std::to_string(toSend));
const std::shared_ptr<std::string> payload = std::make_shared<std::string>(std::to_string(toSend));
auto record = kafka::clients::producer::ProducerRecord(topic, partition,
kafka::NullKey,
kafka::Value(payload->c_str(), payload->size()),
Expand All @@ -121,7 +121,7 @@ TEST(KafkaRecoverableProducer, MockFatalError)

// Would resend the message
if (error) {
std::lock_guard<std::mutex> lock(messagesMutex);
const std::lock_guard<std::mutex> lock(messagesMutex);
messagesToSend.push_front(static_cast<kafka::clients::producer::ProducerRecord::Id>(std::stoi(*payload)));
}

Expand Down Expand Up @@ -156,7 +156,7 @@ TEST(KafkaRecoverableProducer, MockFatalError)
std::map<kafka::clients::producer::ProducerRecord::Id, int> countMap;
for (const auto& record: records)
{
std::string payload(static_cast<const char*>(record.value().data()), record.value().size());
const std::string payload(static_cast<const char*>(record.value().data()), record.value().size());
++countMap[static_cast<kafka::clients::producer::ProducerRecord::Id>(std::stoi(payload))];
}

Expand Down
2 changes: 2 additions & 0 deletions tests/robustness/TestAdminClient.cc
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
#include "../utils/TestUtility.h"

#include "kafka/AdminClient.h"
#include "kafka/Types.h"
#include "kafka/Utility.h"

#include "gtest/gtest.h"

Expand Down
5 changes: 5 additions & 0 deletions tests/unit/TestBrokerMetadata.cc
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
#include "kafka/BrokerMetadata.h"
#include "kafka/Types.h"

#include "gtest/gtest.h"

#include <cstddef>
#include <string>
#include <vector>


TEST(BrokerMetadata, Node)
{
Expand Down
8 changes: 4 additions & 4 deletions tests/unit/TestKafkaMetrics.cc
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ TEST(KafkaMetrics, FailureCases)
const kafka::KafkaMetrics invalidMetrics("{invalid: 3}");
EXPECT_FALSE(true);
}
catch (const std::runtime_error& e) {}
catch (const std::runtime_error& e) { std::cout << "Exception std::runtime_error(" << e.what() << ") caught as expected!" << std::endl; }
catch (...) { EXPECT_FALSE(true); }

const kafka::KafkaMetrics metrics(consumerMetricsSample);
Expand All @@ -343,7 +343,7 @@ TEST(KafkaMetrics, FailureCases)
metrics.getInt({"*", "127.0.0.1:29003/2", "stateage"});
EXPECT_FALSE(true);
}
catch (const std::invalid_argument& e) {}
catch (const std::invalid_argument& e) { std::cout << "Exception std::invalid_argument(" << e.what() << ") caught as expected!" << std::endl; }
catch (...) { EXPECT_FALSE(true); }

// Try invalid inputs (end with "*")
Expand All @@ -352,7 +352,7 @@ TEST(KafkaMetrics, FailureCases)
metrics.getInt({"brokers", "127.0.0.1:29003/2", "*"});
EXPECT_FALSE(true);
}
catch (const std::invalid_argument& e) {}
catch (const std::invalid_argument& e) { std::cout << "Exception std::invalid_argument(" << e.what() << ") caught as expected!" << std::endl; }
catch (...) { EXPECT_FALSE(true); }

// Try invalid inputs (no keys)
Expand All @@ -361,7 +361,7 @@ TEST(KafkaMetrics, FailureCases)
metrics.getInt({});
EXPECT_FALSE(true);
}
catch (const std::invalid_argument& e) {}
catch (const std::invalid_argument& e) { std::cout << "Exception std::invalid_argument(" << e.what() << ") caught as expected!" << std::endl; }
catch (...) { EXPECT_FALSE(true); }

// Try non-exist keys
Expand Down
2 changes: 1 addition & 1 deletion tests/utils/TestUtility.h
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ CreateKafkaTopic(const kafka::Topic& topic, int numPartitions, int replicationFa
class JoiningThread {
public:
template <typename F, typename... Args>
explicit JoiningThread(F&& f, Args&&... args): _t(f, args...) {}
explicit JoiningThread(F&& f, Args&&... args): _t(std::forward<F>(f), args...) {}
~JoiningThread() { if (_t.joinable()) _t.join(); }
private:
std::thread _t;
Expand Down
1 change: 1 addition & 0 deletions tools/console_clients/KafkaConsoleConsumer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

#include <atomic>
#include <iostream>
#include <map>
#include <signal.h>
#include <string>
#include <vector>
Expand Down
Loading