Prefer the use of FxHash over Hash (#692)
This commit adjusts more of our codebase to use FxHash instead of the randomly
seeded std Hash{Map|Set}. We want to avoid sources of non-determinism. We
believe many of our dependencies still use randomly seeded HashMaps, and this
commit does nothing to address that.

REF SMP-687

Signed-off-by: Brian L. Troutwine <[email protected]>
blt authored Aug 28, 2023
1 parent 3dade50 commit 8c14b17
Showing 17 changed files with 66 additions and 57 deletions.
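
Before the per-file diffs, a minimal sketch of the substitution this commit applies throughout the codebase. It assumes the rustc-hash dependency added to Cargo.toml below; the map name and contents are invented for illustration and do not appear in lading:

```rust
// Sketch only: swap the std HashMap for rustc_hash's FxHashMap.
use rustc_hash::FxHashMap;

fn main() {
    // FxHashMap is std's HashMap with a fixed (non-random) Fx hasher, so it
    // has no `new()` constructor tied to RandomState; build it with `default()`.
    let mut labels: FxHashMap<String, String> = FxHashMap::default();
    labels.insert("replicas".to_string(), "4".to_string());
    labels.insert("series".to_string(), "demo".to_string());

    // Hashing is now deterministic across runs, though iteration order is
    // still unspecified by the HashMap API.
    for (key, value) in &labels {
        println!("{key}={value}");
    }
}
```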
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -18,6 +18,7 @@ tokio = { version = "1.32" }
tracing = { version = "0.1" }
uuid = { version = "1.2", default-features = false, features = ["v4", "serde"] }
prost = "0.11"
rustc-hash = { version = "1.1.0" }

[profile.release]
lto = true # Optimize our binary at link stage.
2 changes: 1 addition & 1 deletion lading/Cargo.toml
@@ -31,7 +31,7 @@ num_cpus = { version = "1.16" }
once_cell = "1.18"
rand = { workspace = true, default-features = false, features = ["small_rng", "std", "std_rng" ]}
reqwest = { version = "0.11", default-features = false, features = ["json"] }
rustc-hash = { version = "1.1.0" }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = {workspace = true }
serde_qs = "0.12"
11 changes: 4 additions & 7 deletions lading/src/bin/lading.rs
@@ -1,5 +1,4 @@
use std::{
collections::HashMap,
env,
fmt::{self, Display},
io::Read,
@@ -13,17 +12,15 @@ use lading::{
blackhole,
captures::CaptureManager,
config::{Config, Telemetry},
generator::{
self,
process_tree::{self},
},
generator::{self, process_tree},
inspector, observer,
signals::Shutdown,
target::{self, Behavior, Output},
target_metrics,
};
use metrics_exporter_prometheus::PrometheusBuilder;
use rand::{rngs::StdRng, SeedableRng};
use rustc_hash::FxHashMap;
use tokio::{
runtime::Builder,
signal,
@@ -42,7 +39,7 @@ fn default_target_behavior() -> Behavior {

#[derive(Default, Clone)]
struct CliKeyValues {
inner: HashMap<String, String>,
inner: FxHashMap<String, String>,
}

impl Display for CliKeyValues {
@@ -59,7 +56,7 @@ impl FromStr for CliKeyValues {

fn from_str(input: &str) -> Result<Self, Self::Err> {
let pair_err = String::from("pairs must be separated by '='");
let mut labels = HashMap::new();
let mut labels = FxHashMap::default();
for kv in input.split(',') {
if kv.is_empty() {
continue;
4 changes: 2 additions & 2 deletions lading/src/blackhole/splunk_hec.rs
@@ -7,7 +7,6 @@
//!

use std::{
collections::HashMap,
net::SocketAddr,
sync::{
atomic::{AtomicU64, Ordering},
@@ -22,6 +21,7 @@ use hyper::{
service::{make_service_fn, service_fn},
Body, Method, Request, Response, Server, StatusCode,
};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tower::ServiceBuilder;
use tracing::{error, info};
@@ -60,7 +60,7 @@ struct HecAckRequest {

#[derive(Serialize)]
struct HecAckResponse {
acks: HashMap<u64, bool>,
acks: FxHashMap<u64, bool>,
}

impl From<HecAckRequest> for HecAckResponse {
6 changes: 3 additions & 3 deletions lading/src/captures.rs
@@ -9,7 +9,6 @@

use std::{
borrow::Cow,
collections::HashMap,
ffi::OsStr,
io,
path::PathBuf,
@@ -19,6 +18,7 @@ use std::{

use lading_capture::json;
use metrics_util::registry::{AtomicStorage, Registry};
use rustc_hash::FxHashMap;
use tokio::{
fs::File,
io::{AsyncWriteExt, BufWriter},
@@ -46,7 +46,7 @@ pub struct CaptureManager {
capture_path: PathBuf,
shutdown: Shutdown,
inner: Arc<Inner>,
global_labels: HashMap<String, String>,
global_labels: FxHashMap<String, String>,
}

impl CaptureManager {
@@ -66,7 +66,7 @@ impl CaptureManager {
inner: Arc::new(Inner {
registry: Registry::atomic(),
}),
global_labels: HashMap::new(),
global_labels: FxHashMap::default(),
}
}

9 changes: 5 additions & 4 deletions lading/src/config.rs
@@ -1,8 +1,9 @@
//! This module controls configuration parsing from the end user, providing a
//! convenience mechanism for the rest of the program. Crashes are most likely
//! to originate from this code, intentionally.
use std::{collections::HashMap, net::SocketAddr, path::PathBuf};
use std::{net::SocketAddr, path::PathBuf};

use rustc_hash::FxHashMap;
use serde::Deserialize;

use crate::{blackhole, generator, inspector, observer, target, target_metrics};
@@ -46,23 +47,23 @@ pub enum Telemetry {
/// Address and port for prometheus exporter
prometheus_addr: SocketAddr,
/// Additional labels to include in every metric
global_labels: HashMap<String, String>,
global_labels: FxHashMap<String, String>,
},
/// In log mode lading will emit its internal telemetry to a structured log
/// file, the "capture" file.
Log {
/// Location on disk to write captures
path: PathBuf,
/// Additional labels to include in every metric
global_labels: HashMap<String, String>,
global_labels: FxHashMap<String, String>,
},
}

impl Default for Telemetry {
fn default() -> Self {
Self::Prometheus {
prometheus_addr: "0.0.0.0:9000".parse().unwrap(),
global_labels: HashMap::default(),
global_labels: FxHashMap::default(),
}
}
}
19 changes: 10 additions & 9 deletions lading/src/generator/process_tree.rs
@@ -22,9 +22,10 @@ use rand::{
rngs::StdRng,
seq::SliceRandom,
};
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use std::{
collections::{vec_deque, HashMap, HashSet, VecDeque},
collections::{vec_deque, VecDeque},
env, error, fmt,
iter::Peekable,
num::{NonZeroU32, NonZeroUsize},
@@ -173,8 +174,8 @@ impl StaticEnvs {
impl StaticEnvs {
#[must_use]
/// return environment variables as a hashmap
pub fn to_hash(&self) -> HashMap<String, String> {
let mut envs: HashMap<String, String> = HashMap::new();
pub fn to_map(&self) -> FxHashMap<String, String> {
let mut envs: FxHashMap<String, String> = FxHashMap::default();
for env in &self.values {
if let Some(kv) = env.split_once('=') {
envs.insert(kv.0.to_string(), kv.1.to_string());
@@ -343,11 +344,11 @@ fn gen_rnd_args(rng: &mut StdRng, len: usize, max: u32) -> Vec<String> {
}

#[inline]
fn gen_rnd_envs(rng: &mut StdRng, len: usize, max: u32) -> HashMap<String, String> {
fn gen_rnd_envs(rng: &mut StdRng, len: usize, max: u32) -> FxHashMap<String, String> {
let key_size = len / 2;
let value_size = len - key_size;

let mut envs = HashMap::new();
let mut envs = FxHashMap::default();
for _ in 0..max {
let key = rnd_str(rng, key_size);
let value = rnd_str(rng, value_size);
@@ -361,7 +362,7 @@ fn gen_rnd_envs(rng: &mut StdRng, len: usize, max: u32) -> HashMap<String, Strin
pub struct Exec {
executable: String,
args: Vec<String>,
envs: HashMap<String, String>,
envs: FxHashMap<String, String>,
}

impl Exec {
@@ -374,7 +375,7 @@ impl Exec {
};

let envs = match &exec.envs {
Envs::Static(params) => params.to_hash(),
Envs::Static(params) => params.to_map(),
Envs::Generate(params) => gen_rnd_envs(rng, params.length.get(), params.count.get()),
};

@@ -407,7 +408,7 @@ impl Process {
///
pub fn spawn_tree(nodes: &VecDeque<Process>, sleep_ns: u32) {
let mut iter = nodes.iter().peekable();
let mut pids_to_wait: HashSet<Pid> = HashSet::new();
let mut pids_to_wait: FxHashSet<Pid> = FxHashSet::default();
let mut depth = 0;

loop {
@@ -458,7 +459,7 @@ pub fn spawn_tree(nodes: &VecDeque<Process>, sleep_ns: u32) {
}

#[inline]
fn try_wait_pid(pids: &mut HashSet<Pid>) {
fn try_wait_pid(pids: &mut FxHashSet<Pid>) {
let mut exited: Option<Pid> = None;

for pid in pids.iter() {
9 changes: 5 additions & 4 deletions lading/src/generator/splunk_hec/acknowledgements.rs
@@ -1,10 +1,11 @@
use core::slice;
use std::{collections::HashMap, time::Duration};
use std::time::Duration;

use futures::Future;
use http::{header::AUTHORIZATION, Method, Request, StatusCode, Uri};
use hyper::{client::HttpConnector, Body, Client};
use metrics::counter;
use rustc_hash::FxHashMap;
use serde::Deserialize;
use tokio::{
sync::mpsc::{self, Receiver, Sender},
@@ -130,7 +131,7 @@ impl AckService {
/// to check on a particular Splunk channel's ack id statuses. The task
/// receives new ack ids from [`super::worker::Worker`]
pub(crate) async fn spin<'a>(self, channel_id: String, mut ack_rx: Receiver<AckId>) {
let mut ack_ids: HashMap<AckId, u64> = HashMap::new();
let mut ack_ids: FxHashMap<AckId, u64> = FxHashMap::default();
let mut interval = tokio::time::interval(Duration::from_secs(
self.ack_settings.ack_query_interval_seconds,
));
@@ -182,7 +183,7 @@ async fn ack_request(
client: Client<HttpConnector>,
request: Request<Body>,
channel_id: String,
ack_ids: &mut HashMap<AckId, u64>,
ack_ids: &mut FxHashMap<AckId, u64>,
) {
match client.request(request).await {
Ok(response) => {
@@ -229,5 +230,5 @@ async fn ack_request(

#[derive(Deserialize, Debug)]
struct HecAckStatusResponse {
acks: HashMap<AckId, bool>,
acks: FxHashMap<AckId, bool>,
}
4 changes: 2 additions & 2 deletions lading/src/inspector.rs
@@ -9,7 +9,6 @@
//! run an appropriate shell script, or take samples of the target's CPU use.

use std::{
collections::HashMap,
io,
path::PathBuf,
process::{ExitStatus, Stdio},
@@ -20,6 +19,7 @@ use nix::{
sys::signal::{kill, SIGTERM},
unistd::Pid,
};
use rustc_hash::FxHashMap;
use serde::Deserialize;
use tokio::process::Command;
use tracing::{error, info};
@@ -49,7 +49,7 @@ pub struct Config {
pub arguments: Vec<String>,
/// Environment variables to set for the inspector sub-process. Lading's own
/// environment variables are not propagated to the sub-process.
pub environment_variables: HashMap<String, String>,
pub environment_variables: FxHashMap<String, String>,
/// Manages stderr, stdout of the inspector sub-process.
pub output: Output,
}
4 changes: 2 additions & 2 deletions lading/src/target.rs
@@ -16,7 +16,6 @@
//! watched process terminates early.

use std::{
collections::HashMap,
io,
num::NonZeroU32,
path::PathBuf,
@@ -30,6 +29,7 @@ use nix::{
sys::signal::{kill, SIGTERM},
unistd::Pid,
};
use rustc_hash::FxHashMap;
use tokio::process::Command;
use tracing::{error, info};

@@ -138,7 +138,7 @@ pub struct BinaryConfig {
/// Environment variables to set for the target sub-process. Lading's own
/// environment variables are only propagated to the target sub-process if
/// `inherit_environment` is set.
pub environment_variables: HashMap<String, String>,
pub environment_variables: FxHashMap<String, String>,
/// Manages stderr, stdout of the target sub-process.
pub output: Output,
}
5 changes: 3 additions & 2 deletions lading/src/target_metrics/prometheus.rs
@@ -4,9 +4,10 @@
//! software.
//!

use std::{collections::HashMap, str::FromStr, time::Duration};
use std::{str::FromStr, time::Duration};

use metrics::{counter, gauge};
use rustc_hash::FxHashMap;
use serde::Deserialize;
use tracing::{error, info, trace, warn};

@@ -107,7 +108,7 @@ impl Prometheus {
};

// remember the type for each metric across lines
let mut typemap = HashMap::new();
let mut typemap = FxHashMap::default();

// this deserves a real parser, but this will do for now.
// Format doc: https://github.com/prometheus/docs/blob/main/content/docs/instrumenting/exposition_formats.md
1 change: 1 addition & 0 deletions lading_capture/Cargo.toml
@@ -11,6 +11,7 @@ description = "A tool for load testing daemons."

[dependencies]
prost = "0.11"
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = {workspace = true }
uuid = { workspace = true, features = ["serde", "v4"] }
5 changes: 3 additions & 2 deletions lading_capture/src/json.rs
@@ -1,8 +1,9 @@
//! JSON form of a Lading capture Line, meant to be read line by line from a
//! file.

use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, collections::HashMap};
use std::borrow::Cow;
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
@@ -59,7 +60,7 @@ pub struct Line<'a> {
pub value: LineValue,
#[serde(flatten)]
/// The labels associated with this metric.
pub labels: HashMap<String, String>,
pub labels: FxHashMap<String, String>,
}

impl<'a> Line<'a> {
Expand Down