
Commit

Merge branch 'master' into vasil.pashov/batch_read
vasil-pashov committed Dec 19, 2024
2 parents 3a9cec2 + 6931d3f commit 27cca12
Showing 55 changed files with 2,236 additions and 410 deletions.
8 changes: 4 additions & 4 deletions .github/actions/setup_deps/action.yml
@@ -7,10 +7,10 @@ runs:
shell: bash -l {0}
run: |
dnf update -y
dnf remove -y 'gcc-toolset-13-*'
dnf remove -y 'gcc-toolset-*'
dnf install -y zip flex bison gcc-toolset-10 gcc-toolset-10-gdb gcc-toolset-10-libatomic-devel krb5-devel cyrus-sasl-devel openssl-devel \
unzip tar epel-release jq wget libcurl-devel python3 \
python3-devel python3-pip perl-IPC-Cmd
unzip tar epel-release jq wget libcurl-devel \
python3.11-devel python3.11-pip perl-IPC-Cmd
dnf groupinstall -y 'Development Tools'
@@ -19,7 +19,7 @@ runs:
echo "CXX=/opt/rh/gcc-toolset-10/root/bin/g++" | tee -a $GITHUB_ENV
echo "CMAKE_CXX_COMPILER=/opt/rh/gcc-toolset-10/root/bin/g++" | tee -a $GITHUB_ENV
echo "LD_LIBRARY_PATH=/opt/rh/gcc-toolset-10/root/usr/lib64:/opt/rh/gcc-toolset-10/root/usr/lib:/opt/rh/gcc-toolset-10/root/usr/lib64/dyninst" | tee -a $GITHUB_ENV
echo "/opt/rh/devtoolset-10/root/usr/bin" | tee -a $GITHUB_PATH
echo "/opt/rh/devtoolset-10/root/usr/bin:/opt/python/cp311-cp311/bin" | tee -a $GITHUB_PATH
echo $GITHUB_ENV
2 changes: 1 addition & 1 deletion .github/workflows/analysis_workflow.yml
@@ -6,7 +6,7 @@ on:
type: boolean
default: false

schedule: # Schdeule the job to run at 12 a.m. daily
schedule: # Schedule the job to run at 12 a.m. daily
- cron: '0 0 * * *'

pull_request_target:
21 changes: 14 additions & 7 deletions .github/workflows/benchmark_commits.yml
@@ -31,8 +31,14 @@ jobs:
defaults:
run: {shell: bash}
steps:
- name: Initialize LFS
shell: bash -l {0}
run: |
dnf install -y git-lfs
- uses: actions/[email protected]
with:
lfs: 'true'
fetch-depth: 0
submodules: recursive
token: ${{ secrets.ARCTICDB_TEST_PAT }}
@@ -46,14 +52,15 @@ jobs:
- name: Install deps
uses: ./.github/actions/setup_deps

# We are changing the python here because we want to use the default python to build (it is devel version)
# and this python for the rest of the testing
- name: Select Python (Linux)
shell: bash -el {0}
- name: Extra envs
shell: bash -l {0}
run: |
ls /opt/python
echo /opt/python/cp36-cp36m/bin >> $GITHUB_PATH
. build_tooling/vcpkg_caching.sh # Linux follower needs another call in CIBW
echo -e "VCPKG_BINARY_SOURCES=$VCPKG_BINARY_SOURCES
VCPKG_ROOT=$PLATFORM_VCPKG_ROOT" | tee -a $GITHUB_ENV
cmake -P cpp/CMake/CpuCount.cmake | sed 's/^-- //' | tee -a $GITHUB_ENV
env:
CMAKE_BUILD_PARALLEL_LEVEL: ${{vars.CMAKE_BUILD_PARALLEL_LEVEL}}

- name: Set persistent storage variables
uses: ./.github/actions/set_persistent_storage_env_vars
2 changes: 1 addition & 1 deletion .github/workflows/docs_build.yml
@@ -38,7 +38,7 @@ jobs:
- id: download-wheel-artifact
name: Download wheel artifact from last successful build
if: ${{!inputs.version}}
uses: dawidd6/action-download-artifact@v2.28.0
uses: dawidd6/action-download-artifact@v6
with:
name: wheel-${{env.PY_IMPL}}-manylinux_x86_64
workflow: build.yml
20 changes: 20 additions & 0 deletions .github/workflows/failure_notification.yaml
@@ -0,0 +1,20 @@
name: Check for master failure
on:
workflow_run:
workflows: ["Build and Test", "Build with conda", "Build with analysis tools", "Coverity Static Analysis"]
types: [completed]
branches: [master]

jobs:
on-failure:
runs-on: ubuntu-latest
if: github.event.workflow_run.conclusion == 'failure' || github.event.workflow_run.conclusion == 'timed_out'
steps:
- uses: ravsamhq/notify-slack-action@be814b201e233b2dc673608aa46e5447c8ab13f2
with:
status: ${{ github.event.workflow_run.conclusion }}
notification_title: " ${{github.event.workflow_run.name}} - ${{github.event.workflow_run.conclusion}} on ${{github.event.workflow_run.head_branch}} - <${{github.server_url}}/${{github.repository}}/actions/runs/${{github.event.workflow_run.id}}|View Failure>"
message_format: ":fire: *${{github.event.workflow_run.name}}* ${{github.event.workflow_run.conclusion}} in <${{github.server_url}}/${{github.repository}}/${{github.event.workflow_run.head_branch}}|${{github.repository}}>"
footer: "Linked Repo <${{github.server_url}}/${{github.repository}}|${{github.repository}}> | <${{github.server_url}}/${{github.repository}}/actions/runs/${{github.event.workflow_run.id}}|View Failure>"
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
4 changes: 4 additions & 0 deletions build_tooling/transform_asv_results.py
@@ -5,7 +5,11 @@
As of the Change Date specified in that file, in accordance with the Business Source License, use of this software will be governed by the Apache License, version 2.0.
"""

import pandas as pd

# Importing inf and nan so they can be evaluated correctly during extraction
from numpy import inf, nan
from arcticdb.storage_fixtures.s3 import real_s3_from_environment_variables
import json
from pathlib import Path
3 changes: 3 additions & 0 deletions cpp/arcticdb/CMakeLists.txt
@@ -308,6 +308,7 @@ set(arcticdb_srcs
storage/storage.hpp
storage/storage_override.hpp
storage/store.hpp
storage/storage_utils.hpp
stream/aggregator.hpp
stream/aggregator-inl.hpp
stream/append_map.hpp
@@ -328,6 +329,7 @@ set(arcticdb_srcs
stream/stream_utils.hpp
stream/stream_writer.hpp
toolbox/library_tool.hpp
toolbox/storage_mover.hpp
util/allocator.hpp
util/bitset.hpp
util/buffer.hpp
@@ -486,6 +488,7 @@ set(arcticdb_srcs
storage/s3/s3_storage.cpp
storage/s3/s3_storage_tool.cpp
storage/storage_factory.cpp
storage/storage_utils.cpp
stream/aggregator.cpp
stream/append_map.cpp
stream/index.cpp
4 changes: 2 additions & 2 deletions cpp/arcticdb/async/async_store.hpp
@@ -51,11 +51,11 @@ class AsyncStore : public Store {
public:
AsyncStore(
std::shared_ptr<storage::Library> library,
const arcticdb::proto::encoding::VariantCodec &codec,
const proto::encoding::VariantCodec &codec,
EncodingVersion encoding_version
) :
library_(std::move(library)),
codec_(std::make_shared<arcticdb::proto::encoding::VariantCodec>(codec)),
codec_(std::make_shared<proto::encoding::VariantCodec>(codec)),
encoding_version_(encoding_version) {
}

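The edit above simply drops a redundant arcticdb:: qualifier, since this code already lives inside that namespace. For context, the constructor's pattern of taking the codec settings by const reference and keeping a shared copy can be sketched in isolation like this (Library and Config are hypothetical stand-ins for storage::Library and proto::encoding::VariantCodec, not the real types):

```cpp
#include <memory>
#include <string>
#include <utility>

// Hypothetical stand-ins for storage::Library and proto::encoding::VariantCodec.
struct Library {};
struct Config { std::string codec; };

class Store {
public:
    Store(std::shared_ptr<Library> library, const Config& config)
        : library_(std::move(library)),
          // Copy the caller's config into shared ownership so the store and
          // any asynchronous tasks it spawns can outlive the caller's object.
          config_(std::make_shared<Config>(config)) {}

private:
    std::shared_ptr<Library> library_;
    std::shared_ptr<Config> config_;
};

int main() {
    Store store(std::make_shared<Library>(), Config{"lz4"});  // "lz4" is illustrative
}
```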
4 changes: 2 additions & 2 deletions cpp/arcticdb/async/task_scheduler.hpp
@@ -300,13 +300,13 @@ inline auto& io_executor() {
}

template <typename Task>
inline auto submit_cpu_task(Task&& task) {
auto submit_cpu_task(Task&& task) {
return TaskScheduler::instance()->submit_cpu_task(std::forward<decltype(task)>(task));
}


template <typename Task>
inline auto submit_io_task(Task&& task) {
auto submit_io_task(Task&& task) {
return TaskScheduler::instance()->submit_io_task(std::forward<decltype(task)>(task));
}

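Dropping inline here is purely cosmetic: a function template may be defined in multiple translation units without violating the one-definition rule, so the keyword added nothing. A rough, self-contained sketch of the forwarding-wrapper shape (Scheduler is a hypothetical stand-in for the real TaskScheduler):

```cpp
#include <future>
#include <utility>

// Hypothetical singleton scheduler standing in for TaskScheduler.
struct Scheduler {
    static Scheduler& instance() {
        static Scheduler s;
        return s;
    }
    template <typename Task>
    auto submit(Task&& task) {
        return std::async(std::launch::async, std::forward<Task>(task));
    }
};

// No 'inline' needed: function templates are already exempt from the
// usual single-definition requirement across translation units.
template <typename Task>
auto submit_cpu_task(Task&& task) {
    return Scheduler::instance().submit(std::forward<Task>(task));
}

int main() {
    auto fut = submit_cpu_task([] { return 42; });
    return fut.get() == 42 ? 0 : 1;  // exits 0
}
```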
4 changes: 2 additions & 2 deletions cpp/arcticdb/entity/atom_key.hpp
@@ -91,8 +91,8 @@ class AtomKeyImpl {
}

friend bool operator<(const AtomKeyImpl &l, const AtomKeyImpl &r) {
auto lt = std::tie(l.id_, l.version_id_, l.index_start_, l.index_end_, l.creation_ts_);
auto rt = std::tie(r.id_, r.version_id_, r.index_start_, r.index_end_, r.creation_ts_);
const auto lt = std::tie(l.id_, l.version_id_, l.index_start_, l.index_end_, l.creation_ts_);
const auto rt = std::tie(r.id_, r.version_id_, r.index_start_, r.index_end_, r.creation_ts_);
return lt < rt;
}

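The operator< change above only makes the two tuples const; the underlying idiom is lexicographic comparison via std::tie, which builds tuples of references (no copies) and lets tuple's operator< compare members in the listed order. A minimal sketch with a hypothetical Key struct rather than the real AtomKeyImpl:

```cpp
#include <cstdint>
#include <string>
#include <tuple>

// Hypothetical key type illustrating the std::tie comparison idiom.
struct Key {
    std::string id;
    std::uint64_t version_id;
    std::int64_t index_start;
    std::int64_t index_end;
    std::int64_t creation_ts;
};

// The first field that differs decides the ordering; later fields are
// only consulted on ties.
bool operator<(const Key& l, const Key& r) {
    const auto lt = std::tie(l.id, l.version_id, l.index_start, l.index_end, l.creation_ts);
    const auto rt = std::tie(r.id, r.version_id, r.index_start, r.index_end, r.creation_ts);
    return lt < rt;
}

int main() {
    Key a{"sym", 1, 0, 10, 100};
    Key b{"sym", 2, 0, 10, 100};
    return a < b ? 0 : 1;  // version_id breaks the tie, so exits 0
}
```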
4 changes: 4 additions & 0 deletions cpp/arcticdb/entity/key.cpp
@@ -90,6 +90,10 @@ KeyClass key_class_from_key_type(KeyType key_type) {
return get_key_data(key_type).key_class_;
}

const char* get_key_description(KeyType key_type) {
return get_key_data(key_type).description_;
}

bool is_string_key_type(KeyType key_type){
return variant_type_from_key_type(key_type) == VariantType::STRING_TYPE;
}
34 changes: 17 additions & 17 deletions cpp/arcticdb/entity/key.hpp
@@ -16,6 +16,10 @@
#include <memory>
#include <algorithm>
#include <variant>
#include <array>
#include <ranges>

namespace rng = std::ranges;

namespace arcticdb::entity {

@@ -193,10 +197,10 @@ enum class KeyType : int {
UNDEFINED
};

inline std::vector<KeyType> key_types_write_precedence() {
consteval auto key_types_write_precedence() {
// TOMBSTONE[_ALL] keys are not included because they're not written to the storage,
// they just exist inside version keys
return {
return std::array {
KeyType::LIBRARY_CONFIG,
KeyType::TABLE_DATA,
KeyType::TABLE_INDEX,
@@ -215,9 +219,9 @@ inline std::vector<KeyType> key_types_read_precedence() {
};
}

inline std::vector<KeyType> key_types_read_precedence() {
consteval auto key_types_read_precedence() {
auto output = key_types_write_precedence();
std::reverse(std::begin(output), std::end(output));
rng::reverse(output);
return output;
}
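The rewrite above swaps a runtime std::vector for consteval functions returning std::array, so both precedence lists exist purely at compile time and the reversal runs during constant evaluation. A minimal sketch of the pattern with a toy enum (not the real KeyType):

```cpp
#include <algorithm>
#include <array>
#include <ranges>

// Toy enum standing in for KeyType.
enum class Kind { A, B, C };

// consteval forces compile-time evaluation; std::array's size is deduced
// from the initializer, and no heap allocation is involved.
consteval auto write_order() {
    return std::array{Kind::A, Kind::B, Kind::C};
}

consteval auto read_order() {
    auto out = write_order();
    std::ranges::reverse(out);  // ranges algorithms are constexpr-callable in C++20
    return out;
}

static_assert(read_order().front() == Kind::C);  // checked at compile time

int main() {}
```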

@@ -247,7 +251,7 @@ enum class VariantType : char {

VariantType variant_type_from_key_type(KeyType key_type);

inline bool is_index_key_type(KeyType key_type) {
constexpr bool is_index_key_type(KeyType key_type) {
// TODO: Change name probably.
return (key_type == KeyType::TABLE_INDEX) || (key_type == KeyType::MULTI_KEY);
}
@@ -258,30 +262,26 @@ bool is_ref_key_class(KeyType k);

bool is_block_ref_key_class(KeyType k);

inline KeyType get_key_type_for_data_stream(const StreamId &) {
constexpr KeyType get_key_type_for_data_stream(const StreamId &) {
return KeyType::TABLE_DATA;
}

inline KeyType get_key_type_for_index_stream(const StreamId &) {
constexpr KeyType get_key_type_for_index_stream(const StreamId &) {
return KeyType::TABLE_INDEX;
}

const char* get_key_description(KeyType type);

template <typename Function>
auto foreach_key_type_read_precedence(Function&& func) {
auto types = key_types_read_precedence();
for(auto type : types) {
func(KeyType(type));
}
constexpr auto foreach_key_type_read_precedence(Function&& func) {
rng::for_each(key_types_read_precedence(), func);
}

template <typename Function>
auto foreach_key_type_write_precedence(Function&& func) {
auto types = key_types_write_precedence();
for(auto type : types) {
func(KeyType(type));
}
constexpr auto foreach_key_type_write_precedence(Function&& func) {
rng::for_each(key_types_write_precedence(), func);
}

inline KeyType key_type_from_int(int type_num) {
util::check(type_num > 0 && type_num < int(KeyType::UNDEFINED), "Unrecognized key type number {}", type_num);
return KeyType(type_num);
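With the precedence lists available as compile-time arrays, the foreach_* helpers above collapse to a single std::ranges::for_each call that hands every element to the callback. A self-contained usage sketch, again with a toy enum standing in for KeyType:

```cpp
#include <algorithm>
#include <array>
#include <iostream>
#include <ranges>

// Toy enum standing in for KeyType.
enum class Kind { A, B, C };

consteval auto read_order() { return std::array{Kind::C, Kind::B, Kind::A}; }

// Mirrors foreach_key_type_read_precedence: forward each element of the
// compile-time array to the supplied callback.
template <typename Function>
constexpr void foreach_kind_read_precedence(Function&& func) {
    std::ranges::for_each(read_order(), func);
}

int main() {
    foreach_kind_read_precedence(
        [](Kind k) { std::cout << static_cast<int>(k) << '\n'; });  // prints 2, 1, 0
}
```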
4 changes: 2 additions & 2 deletions cpp/arcticdb/entity/metrics.hpp
@@ -32,8 +32,8 @@ namespace arcticdb {

const std::string MONGO_INSTANCE_LABEL = "mongo_instance";
const std::string PROMETHEUS_ENV_LABEL = "env";
const int SUMMARY_MAX_AGE = 30;
const int SUMMARY_AGE_BUCKETS = 5;
constexpr int SUMMARY_MAX_AGE = 30;
constexpr int SUMMARY_AGE_BUCKETS = 5;

class MetricsConfig {
public:
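Turning const into constexpr here is a small tightening: for integral constants the two behave almost identically, but constexpr makes the compile-time guarantee explicit and turns a non-constant initializer into a hard error. A toy illustration (names are illustrative, not taken from the ArcticDB sources):

```cpp
// Toy illustration; names are not from the codebase.
const int max_age = 30;        // usable in constant expressions only because its
                               // initializer happens to be a constant expression
constexpr int age_buckets = 5; // compile-time constant by contract

static_assert(max_age == 30 && age_buckets == 5);
int histogram[age_buckets];    // valid array bound, zero-initialized

int main() { return histogram[0]; }  // exits 0
```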