diff --git a/src/batchproof.cpp b/src/batchproof.cpp
index 439db31..99bf9e9 100644
--- a/src/batchproof.cpp
+++ b/src/batchproof.cpp
@@ -56,7 +56,7 @@ bool BatchProof::Unserialize(const std::vector& bytes)
         return false;
     }
 
-    int data_offset = 0;
+    uint32_t data_offset = 0;
     uint32_t num_targets = ReadBE32(bytes.data());
     data_offset += 4;
     uint32_t num_hashes = ReadBE32(bytes.data() + data_offset);
@@ -162,7 +162,7 @@ bool UndoBatch::Unserialize(const std::vector& bytes)
         return false;
     }
 
-    int data_offset = 0;
+    uint32_t data_offset = 0;
     m_num_additions = static_cast(ReadBE32(bytes.data()));
     data_offset += 4;
     uint32_t num_targets = ReadBE32(bytes.data() + data_offset);
diff --git a/src/pollard.cpp b/src/pollard.cpp
index 6fd0ccc..4aa1586 100644
--- a/src/pollard.cpp
+++ b/src/pollard.cpp
@@ -133,7 +133,7 @@ Pollard::Pollard(const std::vector& roots, uint64_t num_leaves)
     assert(root_positions.size() == roots.size());
 
     // Restore roots
-    for (int i = 0; i < roots.size(); ++i) {
+    for (size_t i = 0; i < roots.size(); ++i) {
         auto int_node = MakeNodePtr(nullptr, nullptr, roots.at(i));
         m_roots.push_back(MakeNodePtr(int_node, int_node, nullptr,
                                       m_num_leaves, root_positions.at(i)));
diff --git a/src/ram_forest.cpp b/src/ram_forest.cpp
index bd638ca..273fcc9 100644
--- a/src/ram_forest.cpp
+++ b/src/ram_forest.cpp
@@ -173,7 +173,7 @@ bool RamForest::Commit()
     uint64_t num_hashes = m_num_leaves;
     for (uint8_t i = 0; i <= state.NumRows(); ++i) {
         assert(num_hashes <= m_data[i].size());
-        for (int j = 0; j < num_hashes; ++j) {
+        for (size_t j = 0; j < num_hashes; ++j) {
             m_file.write(reinterpret_cast(m_data[i][j].data()), 32);
         }
         num_hashes >>= 1;
@@ -324,7 +324,7 @@ bool RamForest::Prove(BatchProof& proof, const std::vector& targetHashes)
     // Read proof hashes from the forest using the proof positions
     auto proof_positions = ForestState(m_num_leaves).ProofPositions(sorted_targets);
     std::vector proof_hashes(proof_positions.first.size());
-    for (int i = 0; i < proof_hashes.size(); i++) {
+    for (size_t i = 0; i < proof_hashes.size(); i++) {
         proof_hashes[i] = Read(proof_positions.first[i]);
     }
 
@@ -401,7 +401,7 @@ bool RamForest::BuildUndoBatch(UndoBatch& undo, uint64_t num_adds, const std::ve
     ForestState prev_state(m_num_leaves + targets.size());
 
     std::vector deleted_hashes;
-    for (int i = 0; i < targets.size(); ++i) {
+    for (size_t i = 0; i < targets.size(); ++i) {
         uint64_t pos = m_num_leaves + static_cast(i);
         if (m_data.size() == 0 || pos >= m_data[0].size()) return false;
         deleted_hashes.push_back(Read(prev_state, pos));
@@ -502,7 +502,7 @@ bool RamForest::Undo(const UndoBatch& undo)
     CHECK_SAFE(m_data[0].size() == m_posmap.size());
     CHECK_SAFE([](const std::unordered_map& posmap,
                   const std::vector>& data) {
-        int pos = 0;
+        size_t pos = 0;
         for (const Hash& hash : data[0]) {
             auto it = posmap.find(hash);
             if (it == posmap.end()) return false;
diff --git a/src/state.cpp b/src/state.cpp
index 96b1788..cb32dab 100644
--- a/src/state.cpp
+++ b/src/state.cpp
@@ -420,7 +420,7 @@ std::vector ForestState::UndoTransform(const std::vector
 
     std::vector undo_swaps;
     auto prev_swaps = Transform(targets);
-    for (int r = 0; r < prev_swaps.size(); ++r) {
+    for (size_t r = 0; r < prev_swaps.size(); ++r) {
         auto row = prev_swaps[r];
         for (const ForestState::Swap& swap : row) {
             if (swap.m_from == swap.m_to) continue;
diff --git a/src/test/accumulator_tests.cpp b/src/test/accumulator_tests.cpp
index 7eb96ee..e46de59 100644
--- a/src/test/accumulator_tests.cpp
+++ b/src/test/accumulator_tests.cpp
@@ -60,9 +60,9 @@ Hash HashFromStr(const std::string& hex)
 
     int digits = 64;
     for (int i = 31; i >= 0;) {
-        h[i] = p_util_hexdigit[hex[--digits]];
+        h[i] = p_util_hexdigit[(uint8_t)hex[--digits]];
        if (digits > 0) {
-            h[i] |= p_util_hexdigit[hex[--digits]] << 4;
+            h[i] |= p_util_hexdigit[(uint8_t)hex[--digits]] << 4;
             i--;
         }
     }
@@ -368,7 +368,6 @@ BOOST_AUTO_TEST_CASE(simple_blockchain)
     Pollard pruned(0);
     int num_blocks = 1000;
     int num_max_adds = 128;
-    int num_max_dels = 128;
     int unique_hash = 0;
 
     std::default_random_engine generator;