use crate::routing::router::{Path, CandidateRouteHop};
use crate::util::ser::{Readable, ReadableArgs, Writeable, Writer};
use crate::util::logger::Logger;
-use crate::util::time::Time;
use crate::prelude::*;
use core::{cmp, fmt};
}
}
-#[cfg(not(feature = "no-std"))]
-type ConfiguredTime = crate::util::time::MonotonicTime;
-#[cfg(feature = "no-std")]
-use crate::util::time::Eternity;
-#[cfg(feature = "no-std")]
-type ConfiguredTime = Eternity;
-
/// [`ScoreLookUp`] implementation using channel success probability distributions.
///
/// Channels are tracked with upper and lower liquidity bounds - when an HTLC fails at a channel,
/// [`liquidity_offset_half_life`]: ProbabilisticScoringDecayParameters::liquidity_offset_half_life
/// [`historical_liquidity_penalty_multiplier_msat`]: ProbabilisticScoringFeeParameters::historical_liquidity_penalty_multiplier_msat
/// [`historical_liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringFeeParameters::historical_liquidity_penalty_amount_multiplier_msat
-pub type ProbabilisticScorer<G, L> = ProbabilisticScorerUsingTime::<G, L, ConfiguredTime>;
-
-/// Probabilistic [`ScoreLookUp`] implementation.
-///
-/// This is not exported to bindings users generally all users should use the [`ProbabilisticScorer`] type alias.
-pub struct ProbabilisticScorerUsingTime<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time>
+pub struct ProbabilisticScorer<G: Deref<Target = NetworkGraph<L>>, L: Deref>
where L::Target: Logger {
decay_params: ProbabilisticScoringDecayParameters,
network_graph: G,
logger: L,
channel_liquidities: HashMap<u64, ChannelLiquidity>,
- _unused_time: core::marker::PhantomData<T>,
}
/// Parameters for configuring [`ProbabilisticScorer`].
///
/// Default value: 14 days
///
- /// [`historical_estimated_channel_liquidity_probabilities`]: ProbabilisticScorerUsingTime::historical_estimated_channel_liquidity_probabilities
+ /// [`historical_estimated_channel_liquidity_probabilities`]: ProbabilisticScorer::historical_estimated_channel_liquidity_probabilities
pub historical_no_updates_half_life: Duration,
/// Whenever this amount of time elapses since the last update to a channel's liquidity bounds,
decay_params: ProbabilisticScoringDecayParameters,
}
-impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerUsingTime<G, L, T> where L::Target: Logger {
+impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ProbabilisticScorer<G, L> where L::Target: Logger {
/// Creates a new scorer using the given scoring parameters for sending payments from a node
/// through a network graph.
pub fn new(decay_params: ProbabilisticScoringDecayParameters, network_graph: G, logger: L) -> Self {
network_graph,
logger,
channel_liquidities: HashMap::new(),
- _unused_time: core::marker::PhantomData,
}
}
/// Returns the lower bound of the channel liquidity balance in this direction.
#[inline(always)]
fn min_liquidity_msat(&self) -> u64 {
- self.decayed_offset_msat(*self.min_liquidity_offset_msat)
+ *self.min_liquidity_offset_msat
}
/// Returns the upper bound of the channel liquidity balance in this direction.
#[inline(always)]
fn max_liquidity_msat(&self) -> u64 {
self.capacity_msat
- .saturating_sub(self.decayed_offset_msat(*self.max_liquidity_offset_msat))
- }
-
- fn decayed_offset_msat(&self, offset_msat: u64) -> u64 {
- offset_msat
+ .saturating_sub(*self.max_liquidity_offset_msat)
}
}
self.liquidity_history.min_liquidity_offset_history.time_decay_data(half_lives);
self.liquidity_history.max_liquidity_offset_history.time_decay_data(half_lives);
- let min_liquidity_offset_msat = self.decayed_offset_msat(*self.min_liquidity_offset_msat);
self.liquidity_history.min_liquidity_offset_history.track_datapoint(
- min_liquidity_offset_msat + bucket_offset_msat, self.capacity_msat
+ *self.min_liquidity_offset_msat + bucket_offset_msat, self.capacity_msat
);
- let max_liquidity_offset_msat = self.decayed_offset_msat(*self.max_liquidity_offset_msat);
self.liquidity_history.max_liquidity_offset_history.track_datapoint(
- max_liquidity_offset_msat.saturating_sub(bucket_offset_msat), self.capacity_msat
+ self.max_liquidity_offset_msat.saturating_sub(bucket_offset_msat), self.capacity_msat
);
*self.offset_history_last_updated = duration_since_epoch;
}
}
}
-impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ScoreLookUp for ProbabilisticScorerUsingTime<G, L, T> where L::Target: Logger {
+impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreLookUp for ProbabilisticScorer<G, L> where L::Target: Logger {
type ScoreParams = ProbabilisticScoringFeeParameters;
fn channel_penalty_msat(
&self, candidate: &CandidateRouteHop, usage: ChannelUsage, score_params: &ProbabilisticScoringFeeParameters
}
}
-impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ScoreUpdate for ProbabilisticScorerUsingTime<G, L, T> where L::Target: Logger {
+impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreUpdate for ProbabilisticScorer<G, L> where L::Target: Logger {
fn payment_path_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
let amount_msat = path.final_value_msat();
log_trace!(self.logger, "Scoring path through to SCID {} as having failed at {} msat", short_channel_id, amount_msat);
}
#[cfg(c_bindings)]
-impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> Score for ProbabilisticScorerUsingTime<G, L, T>
+impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> Score for ProbabilisticScorer<G, L>
where L::Target: Logger {}
#[cfg(feature = "std")]
}
use bucketed_history::{LegacyHistoricalBucketRangeTracker, HistoricalBucketRangeTracker, HistoricalMinMaxBuckets};
-impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> Writeable for ProbabilisticScorerUsingTime<G, L, T> where L::Target: Logger {
+impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> Writeable for ProbabilisticScorer<G, L> where L::Target: Logger {
#[inline]
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
write_tlv_fields!(w, {
}
}
-impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time>
-ReadableArgs<(ProbabilisticScoringDecayParameters, G, L)> for ProbabilisticScorerUsingTime<G, L, T> where L::Target: Logger {
+impl<G: Deref<Target = NetworkGraph<L>>, L: Deref>
+ReadableArgs<(ProbabilisticScoringDecayParameters, G, L)> for ProbabilisticScorer<G, L> where L::Target: Logger {
#[inline]
fn read<R: Read>(
r: &mut R, args: (ProbabilisticScoringDecayParameters, G, L)
network_graph,
logger,
channel_liquidities,
- _unused_time: core::marker::PhantomData,
})
}
}
#[cfg(test)]
mod tests {
- use super::{ChannelLiquidity, HistoricalBucketRangeTracker, ProbabilisticScoringFeeParameters, ProbabilisticScoringDecayParameters, ProbabilisticScorerUsingTime};
+ use super::{ChannelLiquidity, HistoricalBucketRangeTracker, ProbabilisticScoringFeeParameters, ProbabilisticScoringDecayParameters, ProbabilisticScorer};
use crate::blinded_path::{BlindedHop, BlindedPath};
use crate::util::config::UserConfig;
use crate::util::time::tests::SinceEpoch;
// `ProbabilisticScorer` tests
- /// A probabilistic scorer for testing with time that can be manually advanced.
- type ProbabilisticScorer<'a> = ProbabilisticScorerUsingTime::<&'a NetworkGraph<&'a TestLogger>, &'a TestLogger, SinceEpoch>;
-
fn sender_privkey() -> SecretKey {
SecretKey::from_slice(&[41; 32]).unwrap()
}
let mut serialized_scorer = io::Cursor::new(&serialized_scorer);
let deserialized_scorer =
- <ProbabilisticScorer>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
+ <ProbabilisticScorer<_, _>>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
}
let mut serialized_scorer = io::Cursor::new(&serialized_scorer);
let mut deserialized_scorer =
- <ProbabilisticScorer>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
+ <ProbabilisticScorer<_, _>>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
if !decay_before_reload {
SinceEpoch::advance(Duration::from_secs(10));
scorer.decay_liquidity_certainty(Duration::from_secs(10));
Some(0.0));
}
}
+
+#[cfg(ldk_bench)]
+pub mod benches {
+	use super::*;
+	use criterion::Criterion;
+	use crate::routing::router::{bench_utils, RouteHop};
+	use crate::util::test_utils::TestLogger;
+	use crate::ln::features::{ChannelFeatures, NodeFeatures};
+
+	/// Benchmarks liquidity-bound decay on a scorer pre-populated with 100k
+	/// pseudo-random probe results against a real network graph.
+	pub fn decay_100k_channel_bounds(bench: &mut Criterion) {
+		let logger = TestLogger::new();
+		let network_graph = bench_utils::read_network_graph(&logger).unwrap();
+		let mut scorer = ProbabilisticScorer::new(Default::default(), &network_graph, &logger);
+		// Score a number of random channels, using an LCG (Knuth's MMIX
+		// constants) for cheap deterministic channel/amount selection.
+		let mut seed: u64 = 0xdeadbeef;
+		for _ in 0..100_000 {
+			seed = seed.overflowing_mul(6364136223846793005).0.overflowing_add(1).0;
+			let (victim, victim_dst, amt) = {
+				let rong = network_graph.read_only();
+				let channels = rong.channels();
+				let chan = channels.unordered_iter()
+					.nth((seed as usize) % channels.len()).unwrap();
+				seed = seed.overflowing_mul(6364136223846793005).0.overflowing_add(1).0;
+				// Pick an amount bounded by the channel's capacity, falling back
+				// to its directional HTLC maximum, then to an arbitrary 1M sats.
+				let amt = seed % chan.1.capacity_sats.map(|c| c * 1000)
+					.or(chan.1.one_to_two.as_ref().map(|info| info.htlc_maximum_msat))
+					.or(chan.1.two_to_one.as_ref().map(|info| info.htlc_maximum_msat))
+					.unwrap_or(1_000_000_000).saturating_add(1);
+				(*chan.0, chan.1.node_two, amt)
+			};
+			let path = Path {
+				hops: vec![RouteHop {
+					pubkey: victim_dst.as_pubkey().unwrap(),
+					node_features: NodeFeatures::empty(),
+					short_channel_id: victim,
+					channel_features: ChannelFeatures::empty(),
+					fee_msat: amt,
+					cltv_expiry_delta: 42,
+					maybe_announced_channel: true,
+				}],
+				blinded_tail: None
+			};
+			seed = seed.overflowing_mul(6364136223846793005).0.overflowing_add(1).0;
+			// Split probes roughly evenly between failures and successes so both
+			// liquidity bounds get populated. (Was `seed % 1 == 0`, which is
+			// always true and left the success branch as dead code.)
+			if seed % 2 == 0 {
+				scorer.probe_failed(&path, victim, Duration::ZERO);
+			} else {
+				scorer.probe_successful(&path, Duration::ZERO);
+			}
+		}
+		// Advance time once outside the measured loop so the first measured
+		// iteration doesn't pay a one-off full-decay cost.
+		let mut cur_time = Duration::ZERO;
+		cur_time += Duration::from_millis(1);
+		scorer.decay_liquidity_certainty(cur_time);
+		bench.bench_function("decay_100k_channel_bounds", |b| b.iter(|| {
+			cur_time += Duration::from_millis(1);
+			scorer.decay_liquidity_certainty(cur_time);
+		}));
+	}
+}