Skip to content

Commit

Permalink
redis
Browse files Browse the repository at this point in the history
  • Loading branch information
appad committed Mar 8, 2024
1 parent f223751 commit b05b93a
Show file tree
Hide file tree
Showing 6 changed files with 78 additions and 8 deletions.
2 changes: 2 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -69,5 +69,7 @@ RUN vcpkg install asio-grpc
RUN vcpkg install libunifex
RUN vcpkg install catch2
RUN vcpkg install spdlog
RUN vcpkg install hiredis
RUN vcpkg install redis-plus-plus


9 changes: 8 additions & 1 deletion api/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ find_package(asio-grpc CONFIG REQUIRED)
find_package(Threads REQUIRED)
find_package(unifex CONFIG REQUIRED)
find_package(spdlog CONFIG REQUIRED)
find_package(hiredis CONFIG REQUIRED)
find_package(redis++ CONFIG REQUIRED)

# example helper
add_library(asio-grpc-helper INTERFACE)

Expand Down Expand Up @@ -34,10 +37,14 @@ target_link_libraries(
unifex::unifex
solver_lib
spdlog::spdlog
hiredis::hiredis
redis++::redis++_static
-static
-static-libgcc
-static-libstdc++
)

target_include_directories(asio-grpc-server PRIVATE ${CPP_BASE64_INCLUDE_DIRS})

# set release
set(CMAKE_BUILD_TYPE Release)
#set(CMAKE_BUILD_TYPE Release)
49 changes: 43 additions & 6 deletions api/solver/solvera.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

#include "solvera.h"

#include <sw/redis++/redis++.h>
#include <unifex/timed_single_thread_context.hpp>
unifex::timed_single_thread_context timer;

Expand All @@ -15,7 +16,8 @@ auto build_response(const SharedData &shared_data, double elapsed_seconds) -> So
response.set_hash_table_size(shared_data.hashes.size());
response.set_time(elapsed_seconds);
response.set_boards_per_second(static_cast<double>(shared_data.board_count) / elapsed_seconds);
response.set_hashes_per_second(static_cast<double>(shared_data.hashes.size()) / elapsed_seconds);
response.set_hashes_per_second(
static_cast<double>(shared_data.hashes.size() - shared_data.redis_hash_count) / elapsed_seconds);
response.set_hash_table_hits(shared_data.hash_hit_count);

for (auto &board_piece : shared_data.max_board.board)
Expand Down Expand Up @@ -72,6 +74,24 @@ auto delay(std::chrono::milliseconds ms) -> unifex::_timed_single_thread_context
{
return unifex::schedule_after(timer.get_scheduler(), ms);
}

// Looks up the environment variable `key` and returns its value,
// falling back to `default_value` when the variable is not set.
auto get_env_var(std::string const &key, std::string const &default_value) -> std::string
{
    if (char const *value = std::getenv(key.c_str()); value != nullptr)
    {
        return std::string(value);
    }
    return default_value;
}

auto hash_pieces(const std::vector<Piece> &pieces) -> std::string
{
std::string hash;
for (const Piece &piece : pieces)
{
// zfill this bother piece is a ull
hash += std::bitset<64>(piece).to_string();
}
return hash;
}

auto handle_server_solver_request(agrpc::GrpcContext &grpc_context,
solver::v1::Solver::AsyncService &service1) -> unifex::any_sender_of<>
{
Expand All @@ -85,21 +105,35 @@ auto handle_server_solver_request(agrpc::GrpcContext &grpc_context,
std::vector<std::thread> threads;
int max_thread_count = std::max(4, static_cast<int>(std::thread::hardware_concurrency()));
int thread_count = std::min(max_thread_count, static_cast<int>(request.threads()));

auto redis = sw::redis::Redis(get_env_var("REDIS_URL", "redis://localhost:6379"));
spdlog::info("Starting solver with board size: {}", board.size);
spdlog::info("Using {} threads", thread_count);
spdlog::info("Pieces: {}", pieces.size());
spdlog::info("Timebetween: {}", request.wait_time());
spdlog::info("Hash length threshold: {}", request.hash_threshold());

threads.reserve(thread_count);
std::unordered_set<BoardHash> hashes;
SharedData shared_data = {max_board, max_count, mutex, hashes};
shared_data.hash_length_threshold = request.hash_threshold();
auto start = std::chrono::high_resolution_clock::now();
std::unordered_set<BoardHash> hashes = {};
SharedData shared_data = {max_board, max_count, mutex, hashes};
shared_data.hash_length_threshold = request.hash_threshold();
auto start = std::chrono::high_resolution_clock::now();
auto pieces_hash = hash_pieces(pieces);
// load hashes from redis if they exist
auto temp = std::unordered_set<BoardHash>{};
redis.smembers(pieces_hash, std::inserter(temp, temp.end()));
// copy the temp set into the shared data
for (const auto &hash : temp)
{
shared_data.hashes.insert(hash);
}
shared_data.redis_hash_count = shared_data.hashes.size();
spdlog::info("Loaded {} hashes from redis", shared_data.redis_hash_count);

for (int i = 0; i < thread_count; i++)
{
threads.emplace_back(thread_function, board, pieces, std::ref(shared_data));
}

// every 2 seconds, print the current max count
while (true)
{
Expand All @@ -110,6 +144,7 @@ auto handle_server_solver_request(agrpc::GrpcContext &grpc_context,
std::chrono::high_resolution_clock::now() - start)
.count())
/ 1000.0;

if (max_count == board.size * board.size)
{
spdlog::info("Found solution");
Expand Down Expand Up @@ -142,6 +177,8 @@ auto handle_server_solver_request(agrpc::GrpcContext &grpc_context,
co_await rpc.finish(grpc::Status::CANCELLED);
co_return;
}
// insert into redis
redis.sadd(pieces_hash, shared_data.hashes.begin(), shared_data.hashes.end());
}
});
}
Expand Down
21 changes: 21 additions & 0 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,30 @@ services:
build:
context: .
dockerfile: api/Dockerfile
environment:
- REDIS_URL=tcp://redis:6379

envoy:
build:
context: envoy
ports:
- '50052:50052'


redis:
image: redis:7-alpine
restart: always
ports:
- "6379:6379"
environment:
# NOTE(review): the official `redis` image does not read REDIS_PASSWORD —
# this variable is silently ignored and the server starts without auth.
# To require a password, use: command: ["redis-server", "--requirepass", "secret"]
REDIS_PASSWORD: secret
redis-insight:
image: redis/redisinsight:latest
restart: always
ports:
- "5540:5540"
volumes:
- redis-insight:/data

volumes:
redis-insight:
1 change: 1 addition & 0 deletions solver/solver/solver.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ struct SharedData
// callback that is called when you enter the recursive function, takes in a Board
std::function<void(const Board &)> on_board_update = [](const Board &board) {};
unsigned int hash_length_threshold = 7;
unsigned long redis_hash_count = 0;
};

auto possible_pieces(const Board &board, const std::vector<PieceWAvailability> &pieces, Index index)
Expand Down
4 changes: 3 additions & 1 deletion vcpkg.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
"protobuf",
"asio-grpc",
"libunifex",
"spdlog"
"spdlog",
"hiredis",
"redis-plus-plus"
]
}

0 comments on commit b05b93a

Please sign in to comment.