From dedc8306f68e397dd51d0e9055eab2c04e373cdf Mon Sep 17 00:00:00 2001
From: Matt Corallo
Date: Thu, 18 Jan 2024 22:39:26 +0000
Subject: [PATCH] Bump `hashbrown` dependency to 0.13

While this isn't expected to materially improve performance, it does get
us ahash 0.8, which allows us to reduce fuzzing randomness, making our
fuzzers much happier.

Sadly, by default `ahash` no longer tries to autodetect a randomness
source, so we cannot simply rely on `hashbrown` to do randomization for
us, but rather have to also explicitly depend on `ahash`.
---
 .github/workflows/build.yml        |   2 +-
 bench/Cargo.toml                   |   2 +-
 ci/check-cfg-flags.py              |   2 +
 fuzz/Cargo.toml                    |   2 +-
 lightning-invoice/Cargo.toml       |   2 +-
 lightning/Cargo.toml               |  15 ++++-
 lightning/src/chain/onchaintx.rs   |   2 +-
 lightning/src/lib.rs               | 100 +++++++++++++++++++++++------
 lightning/src/ln/channelmanager.rs |   6 +-
 lightning/src/routing/router.rs    |   8 +--
 lightning/src/routing/scoring.rs   |   4 +-
 11 files changed, 109 insertions(+), 36 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index cff105470..ab10b36d2 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -110,7 +110,7 @@ jobs:
         run: |
           cd lightning
           RUSTFLAGS="--cfg=require_route_graph_test" cargo test
-          RUSTFLAGS="--cfg=require_route_graph_test" cargo test --features hashbrown
+          RUSTFLAGS="--cfg=require_route_graph_test" cargo test --features hashbrown,ahash
           cd ..
       - name: Run benchmarks on Rust ${{ matrix.toolchain }}
         run: |
diff --git a/bench/Cargo.toml b/bench/Cargo.toml
index 05354890c..ff254e1d9 100644
--- a/bench/Cargo.toml
+++ b/bench/Cargo.toml
@@ -9,7 +9,7 @@ name = "bench"
 harness = false
 
 [features]
-hashbrown = ["lightning/hashbrown"]
+hashbrown = ["lightning/hashbrown", "lightning/ahash"]
 
 [dependencies]
 lightning = { path = "../lightning", features = ["_test_utils", "criterion"] }
diff --git a/ci/check-cfg-flags.py b/ci/check-cfg-flags.py
index 277ae1077..d6e8e0a90 100755
--- a/ci/check-cfg-flags.py
+++ b/ci/check-cfg-flags.py
@@ -13,6 +13,8 @@ def check_feature(feature):
         pass
     elif feature == "no-std":
         pass
+    elif feature == "ahash":
+        pass
     elif feature == "hashbrown":
         pass
     elif feature == "backtrace":
diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index 0c279a015..34c8704a7 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -22,7 +22,7 @@ lightning = { path = "../lightning", features = ["regex", "hashbrown", "_test_ut
 lightning-rapid-gossip-sync = { path = "../lightning-rapid-gossip-sync" }
 bitcoin = { version = "0.30.2", features = ["secp-lowmemory"] }
 hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
-hashbrown = "0.8"
+hashbrown = "0.13"
 afl = { version = "0.12", optional = true }
 honggfuzz = { version = "0.5", optional = true, default-features = false }
diff --git a/lightning-invoice/Cargo.toml b/lightning-invoice/Cargo.toml
index e0d3560c1..4376ac0dc 100644
--- a/lightning-invoice/Cargo.toml
+++ b/lightning-invoice/Cargo.toml
@@ -24,7 +24,7 @@
 bech32 = { version = "0.9.0", default-features = false }
 lightning = { version = "0.0.121", path = "../lightning", default-features = false }
 secp256k1 = { version = "0.27.0", default-features = false, features = ["recovery", "alloc"] }
 num-traits = { version = "0.2.8", default-features = false }
-hashbrown = { version = "0.8", optional = true }
+hashbrown = { version = "0.13", optional = true }
 serde = { version = "1.0.118", optional = true }
 bitcoin = { version = "0.30.2", default-features = false }
diff --git a/lightning/Cargo.toml b/lightning/Cargo.toml
index 1fe0f0dd1..8214193cc 100644
--- a/lightning/Cargo.toml
+++ b/lightning/Cargo.toml
@@ -31,7 +31,7 @@ unsafe_revoked_tx_signing = []
 # Override signing to not include randomness when generating signatures for test vectors.
 _test_vectors = []
 
-no-std = ["hashbrown", "bitcoin/no-std", "core2/alloc", "libm"]
+no-std = ["hashbrown", "ahash", "bitcoin/no-std", "core2/alloc", "libm"]
 std = ["bitcoin/std"]
 
 # Generates low-r bitcoin signatures, which saves 1 byte in 50% of the cases
@@ -42,7 +42,8 @@ default = ["std", "grind_signatures"]
 
 [dependencies]
 bitcoin = { version = "0.30.2", default-features = false, features = ["secp-recovery"] }
-hashbrown = { version = "0.8", optional = true }
+hashbrown = { version = "0.13", optional = true }
+ahash = { version = "0.8", optional = true, default-features = false }
 hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
 regex = { version = "1.5.6", optional = true }
 backtrace = { version = "0.3", optional = true }
@@ -50,6 +51,16 @@ backtrace = { version = "0.3", optional = true }
 core2 = { version = "0.3.0", optional = true, default-features = false }
 libm = { version = "0.2", optional = true, default-features = false }
 
+# Because ahash no longer (kinda poorly) does it for us, (roughly) list out the targets that
+# getrandom supports and turn on ahash's `runtime-rng` feature for them.
+[target.'cfg(not(any(target_os = "unknown", target_os = "none")))'.dependencies]
+ahash = { version = "0.8", optional = true, default-features = false, features = ["runtime-rng"] }
+
+# Not sure what target_os gets set to for sgx, so to be safe always enable runtime-rng for x86_64
+# platforms (assuming LDK isn't being used on embedded x86-64 running directly on metal).
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+ahash = { version = "0.8", optional = true, default-features = false, features = ["runtime-rng"] }
+
 [dev-dependencies]
 regex = "1.5.6"
diff --git a/lightning/src/chain/onchaintx.rs b/lightning/src/chain/onchaintx.rs
index 5fe4bd2c1..05d59431f 100644
--- a/lightning/src/chain/onchaintx.rs
+++ b/lightning/src/chain/onchaintx.rs
@@ -686,7 +686,7 @@ impl OnchainTxHandler
         if let Some(claim_id) = claim_id {
             if let Some(claim) = self.pending_claim_requests.remove(&claim_id) {
                 for outpoint in claim.outpoints() {
-                    self.claimable_outpoints.remove(&outpoint);
+                    self.claimable_outpoints.remove(outpoint);
                 }
             }
         } else {
diff --git a/lightning/src/lib.rs b/lightning/src/lib.rs
index 77b1f91cb..9bc158328 100644
--- a/lightning/src/lib.rs
+++ b/lightning/src/lib.rs
@@ -167,9 +167,19 @@ mod io_extras {
 mod prelude {
     #[cfg(feature = "hashbrown")]
     extern crate hashbrown;
+    #[cfg(feature = "ahash")]
+    extern crate ahash;
 
     pub use alloc::{vec, vec::Vec, string::String, collections::VecDeque, boxed::Box};
 
+    pub use alloc::borrow::ToOwned;
+    pub use alloc::string::ToString;
+
+    // For no-std builds, we need to use hashbrown, however, by default, it doesn't randomize the
+    // hashing and is vulnerable to HashDoS attacks. Thus, when not fuzzing, we use its default
+    // ahash hashing algorithm but randomize, opting to not randomize when fuzzing to avoid false
+    // positive branch coverage.
+
     #[cfg(not(feature = "hashbrown"))]
     mod std_hashtables {
         pub(crate) use std::collections::{HashMap, HashSet, hash_map};
@@ -183,35 +193,85 @@ mod prelude {
     pub(crate) use std_hashtables::*;
 
     #[cfg(feature = "hashbrown")]
-    mod hashbrown_tables {
-        pub(crate) use hashbrown::{HashMap, HashSet, hash_map};
+    pub(crate) use self::hashbrown::hash_map;
+
+    #[cfg(all(feature = "hashbrown", fuzzing))]
+    mod nonrandomized_hashbrown {
+        pub(crate) use hashbrown::{HashMap, HashSet};
 
         pub(crate) type OccupiedHashMapEntry<'a, K, V> =
-            hashbrown::hash_map::OccupiedEntry<'a, K, V, hash_map::DefaultHashBuilder>;
+            hashbrown::hash_map::OccupiedEntry<'a, K, V, hashbrown::hash_map::DefaultHashBuilder>;
         pub(crate) type VacantHashMapEntry<'a, K, V> =
-            hashbrown::hash_map::VacantEntry<'a, K, V, hash_map::DefaultHashBuilder>;
+            hashbrown::hash_map::VacantEntry<'a, K, V, hashbrown::hash_map::DefaultHashBuilder>;
     }
-    #[cfg(feature = "hashbrown")]
-    pub(crate) use hashbrown_tables::*;
+    #[cfg(all(feature = "hashbrown", fuzzing))]
+    pub(crate) use nonrandomized_hashbrown::*;
 
-    pub(crate) fn new_hash_map<K, V>() -> HashMap<K, V> { HashMap::new() }
-    pub(crate) fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
-        HashMap::with_capacity(cap)
-    }
-    pub(crate) fn hash_map_from_iter<K: core::hash::Hash + Eq, V, I: IntoIterator<Item=(K, V)>>(iter: I) -> HashMap<K, V> {
-        HashMap::from_iter(iter)
-    }
-    pub(crate) fn new_hash_set<K>() -> HashSet<K> { HashSet::new() }
-    pub(crate) fn hash_set_with_capacity<K>(cap: usize) -> HashSet<K> {
-        HashSet::with_capacity(cap)
+    #[cfg(all(feature = "hashbrown", not(fuzzing)))]
+    mod randomized_hashtables {
+        use super::*;
+        use ahash::RandomState;
+
+        pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, RandomState>;
+        pub(crate) type HashSet<K> = hashbrown::HashSet<K, RandomState>;
+
+        pub(crate) type OccupiedHashMapEntry<'a, K, V> =
+            hashbrown::hash_map::OccupiedEntry<'a, K, V, RandomState>;
+        pub(crate) type VacantHashMapEntry<'a, K, V> =
+            hashbrown::hash_map::VacantEntry<'a, K, V, RandomState>;
+
+        pub(crate) fn new_hash_map<K, V>() -> HashMap<K, V> {
+            HashMap::with_hasher(RandomState::new())
+        }
+        pub(crate) fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
+            HashMap::with_capacity_and_hasher(cap, RandomState::new())
+        }
+        pub(crate) fn hash_map_from_iter<K: core::hash::Hash + Eq, V, I: IntoIterator<Item=(K, V)>>(iter: I) -> HashMap<K, V> {
+            let iter = iter.into_iter();
+            let min_size = iter.size_hint().0;
+            let mut res = HashMap::with_capacity_and_hasher(min_size, RandomState::new());
+            res.extend(iter);
+            res
+        }
+
+        pub(crate) fn new_hash_set<K>() -> HashSet<K> {
+            HashSet::with_hasher(RandomState::new())
+        }
+        pub(crate) fn hash_set_with_capacity<K>(cap: usize) -> HashSet<K> {
+            HashSet::with_capacity_and_hasher(cap, RandomState::new())
+        }
+        pub(crate) fn hash_set_from_iter<K: core::hash::Hash + Eq, I: IntoIterator<Item=K>>(iter: I) -> HashSet<K> {
+            let iter = iter.into_iter();
+            let min_size = iter.size_hint().0;
+            let mut res = HashSet::with_capacity_and_hasher(min_size, RandomState::new());
+            res.extend(iter);
+            res
+        }
     }
-    pub(crate) fn hash_set_from_iter<K: core::hash::Hash + Eq, I: IntoIterator<Item=K>>(iter: I) -> HashSet<K> {
-        HashSet::from_iter(iter)
+
+    #[cfg(any(not(feature = "hashbrown"), fuzzing))]
+    mod randomized_hashtables {
+        use super::*;
+
+        pub(crate) fn new_hash_map<K, V>() -> HashMap<K, V> { HashMap::new() }
+        pub(crate) fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
+            HashMap::with_capacity(cap)
+        }
+        pub(crate) fn hash_map_from_iter<K: core::hash::Hash + Eq, V, I: IntoIterator<Item=(K, V)>>(iter: I) -> HashMap<K, V> {
+            HashMap::from_iter(iter)
+        }
+
+        pub(crate) fn new_hash_set<K>() -> HashSet<K> { HashSet::new() }
+        pub(crate) fn hash_set_with_capacity<K>(cap: usize) -> HashSet<K> {
+            HashSet::with_capacity(cap)
+        }
+        pub(crate) fn hash_set_from_iter<K: core::hash::Hash + Eq, I: IntoIterator<Item=K>>(iter: I) -> HashSet<K> {
+            HashSet::from_iter(iter)
+        }
     }
 
-    pub use alloc::borrow::ToOwned;
-    pub use alloc::string::ToString;
+    pub(crate) use randomized_hashtables::*;
 }
 
 #[cfg(all(not(ldk_bench), feature = "backtrace", feature = "std", test))]
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 4d7be7d92..1a4f6035a 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -5951,7 +5951,7 @@ where
             // TODO: Once we can rely on the counterparty_node_id from the
             // monitor event, this and the outpoint_to_peer map should be removed.
             let outpoint_to_peer = self.outpoint_to_peer.lock().unwrap();
-            match outpoint_to_peer.get(&funding_txo) {
+            match outpoint_to_peer.get(funding_txo) {
                 Some(cp_id) => cp_id.clone(),
                 None => return,
             }
@@ -5964,7 +5964,7 @@ where
             peer_state_lock = peer_state_mutex_opt.unwrap().lock().unwrap();
             let peer_state = &mut *peer_state_lock;
             let channel =
-                if let Some(ChannelPhase::Funded(chan)) = peer_state.channel_by_id.get_mut(&channel_id) {
+                if let Some(ChannelPhase::Funded(chan)) = peer_state.channel_by_id.get_mut(channel_id) {
                     chan
                 } else {
                     let update_actions = peer_state.monitor_update_blocked_actions
@@ -11085,7 +11085,7 @@ where
                 downstream_counterparty_and_funding_outpoint:
                     Some((blocked_node_id, _blocked_channel_outpoint, blocked_channel_id, blocking_action)), ..
             } = action {
-                if let Some(blocked_peer_state) = per_peer_state.get(&blocked_node_id) {
+                if let Some(blocked_peer_state) = per_peer_state.get(blocked_node_id) {
                     log_trace!(logger,
                         "Holding the next revoke_and_ack from {} until the preimage is durably persisted in the inbound edge's ChannelMonitor",
                         blocked_channel_id);
diff --git a/lightning/src/routing/router.rs b/lightning/src/routing/router.rs
index 4214a1821..a429bf1eb 100644
--- a/lightning/src/routing/router.rs
+++ b/lightning/src/routing/router.rs
@@ -2566,9 +2566,9 @@ where L::Target: Logger {
             let mut aggregate_path_contribution_msat = path_value_msat;
 
             for (idx, (hop, prev_hop_id)) in hop_iter.zip(prev_hop_iter).enumerate() {
-                let target = private_hop_key_cache.get(&prev_hop_id).unwrap();
+                let target = private_hop_key_cache.get(prev_hop_id).unwrap();
 
-                if let Some(first_channels) = first_hop_targets.get(&target) {
+                if let Some(first_channels) = first_hop_targets.get(target) {
                     if first_channels.iter().any(|d| d.outbound_scid_alias == Some(hop.short_channel_id)) {
                         log_trace!(logger, "Ignoring route hint with SCID {} (and any previous) due to it being a direct channel of ours.",
                             hop.short_channel_id);
@@ -2578,7 +2578,7 @@ where L::Target: Logger {
 
                 let candidate = network_channels
                     .get(&hop.short_channel_id)
-                    .and_then(|channel| channel.as_directed_to(&target))
+                    .and_then(|channel| channel.as_directed_to(target))
                     .map(|(info, _)| CandidateRouteHop::PublicHop(PublicHopCandidate {
                         info,
                         short_channel_id: hop.short_channel_id,
@@ -2619,7 +2619,7 @@ where L::Target: Logger {
                     .saturating_add(1);
 
                 // Searching for a direct channel between last checked hop and first_hop_targets
-                if let Some(first_channels) = first_hop_targets.get_mut(&target) {
+                if let Some(first_channels) = first_hop_targets.get_mut(target) {
                     sort_first_hop_channels(first_channels, &used_liquidities, recommended_value_msat,
                         our_node_pubkey);
                     for details in first_channels {
diff --git a/lightning/src/routing/scoring.rs b/lightning/src/routing/scoring.rs
index ec4921812..bb043164a 100644
--- a/lightning/src/routing/scoring.rs
+++ b/lightning/src/routing/scoring.rs
@@ -1330,7 +1330,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreLookUp for Probabilistic
             _ => return 0,
         };
         let source = candidate.source();
-        if let Some(penalty) = score_params.manual_node_penalties.get(&target) {
+        if let Some(penalty) = score_params.manual_node_penalties.get(target) {
             return *penalty;
         }
 
@@ -1360,7 +1360,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreLookUp for Probabilistic
         let amount_msat = usage.amount_msat.saturating_add(usage.inflight_htlc_msat);
         let capacity_msat = usage.effective_capacity.as_msat();
         self.channel_liquidities
-            .get(&scid)
+            .get(scid)
             .unwrap_or(&ChannelLiquidity::new(Duration::ZERO))
             .as_directed(&source, &target, capacity_msat)
             .penalty_msat(amount_msat, score_params)
-- 
2.39.5
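
Note (not part of the commit above): the `randomized_hashtables` module added in
lightning/src/lib.rs boils down to seeding each `hashbrown` map with its own
`ahash::RandomState`, so an attacker cannot precompute colliding keys (HashDoS),
while `cfg(fuzzing)` builds keep hashbrown's fixed `DefaultHashBuilder` so hashing
does not add extra randomness to fuzz coverage. A minimal standalone sketch of that
pattern, assuming `hashbrown = "0.13"` and
`ahash = { version = "0.8", features = ["runtime-rng"] }` as dependencies; the
helper name and key/value types below are illustrative, not LDK API:

    // Illustrative sketch only, not LDK code.
    use ahash::RandomState;
    use hashbrown::HashMap;

    // Mirrors the patch's `new_hash_map()` helper: every map instance gets a
    // freshly seeded hasher, so hash collisions cannot be precomputed.
    fn new_randomized_map<K, V>() -> HashMap<K, V, RandomState> {
        HashMap::with_hasher(RandomState::new())
    }

    fn main() {
        let mut fee_by_scid: HashMap<u64, u64, RandomState> = new_randomized_map();
        fee_by_scid.insert(42, 1_000);
        assert_eq!(fee_by_scid.get(&42), Some(&1_000));
    }

Centralizing construction behind helpers like this is what lets the patch switch
hashers per build configuration without touching any call sites.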