1 // This file is Copyright its original authors, visible in version control
4 // This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
5 // or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
6 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
7 // You may not use this file except in accordance with one or both of these
10 //! Utilities for scoring payment channels.
12 //! [`ProbabilisticScorer`] may be given to [`find_route`] to score payment channels during path
13 //! finding when a custom [`ScoreLookUp`] implementation is not needed.
18 //! # extern crate bitcoin;
20 //! # use lightning::routing::gossip::NetworkGraph;
21 //! # use lightning::routing::router::{RouteParameters, find_route};
22 //! # use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters, ProbabilisticScoringDecayParameters};
23 //! # use lightning::sign::KeysManager;
24 //! # use lightning::util::logger::{Logger, Record};
25 //! # use bitcoin::secp256k1::PublicKey;
27 //! # struct FakeLogger {};
28 //! # impl Logger for FakeLogger {
29 //! # fn log(&self, record: Record) { unimplemented!() }
31 //! # fn find_scored_route(payer: PublicKey, route_params: RouteParameters, network_graph: NetworkGraph<&FakeLogger>) {
32 //! # let logger = FakeLogger {};
34 //! // Use the default channel penalties.
35 //! let params = ProbabilisticScoringFeeParameters::default();
36 //! let decay_params = ProbabilisticScoringDecayParameters::default();
37 //! let scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
39 //! // Or use custom channel penalties.
40 //! let params = ProbabilisticScoringFeeParameters {
41 //! liquidity_penalty_multiplier_msat: 2 * 1000,
42 //! ..ProbabilisticScoringFeeParameters::default()
44 //! let decay_params = ProbabilisticScoringDecayParameters::default();
45 //! let scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
46 //! # let random_seed_bytes = [42u8; 32];
48 //! let route = find_route(&payer, &route_params, &network_graph, None, &logger, &scorer, &params, &random_seed_bytes);
54 //! Persisting when built with feature `no-std` and restoring without it, or vice versa, uses
55 //! different types and thus is undefined.
57 //! [`find_route`]: crate::routing::router::find_route
59 use crate::ln::msgs::DecodeError;
60 use crate::routing::gossip::{EffectiveCapacity, NetworkGraph, NodeId};
61 use crate::routing::router::{Path, CandidateRouteHop};
62 use crate::routing::log_approx;
63 use crate::util::ser::{Readable, ReadableArgs, Writeable, Writer};
64 use crate::util::logger::Logger;
66 use crate::prelude::*;
68 use core::cell::{RefCell, RefMut, Ref};
69 use core::convert::TryInto;
70 use core::ops::{Deref, DerefMut};
71 use core::time::Duration;
72 use crate::io::{self, Read};
73 use crate::sync::{Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard};
75 /// We define Score ever-so-slightly differently based on whether we are being built for C bindings
76 /// or not. For users, `LockableScore` must somehow be writeable to disk. For Rust users, this is
77 /// no problem - you move a `Score` that implements `Writeable` into a `Mutex`, lock it, and now
78 /// you have the original, concrete, `Score` type, which presumably implements `Writeable`.
80 /// For C users, once you've moved the `Score` into a `LockableScore` all you have after locking it
81 /// is an opaque trait object with an opaque pointer with no type info. Users could take the unsafe
82 /// approach of blindly casting that opaque pointer to a concrete type and calling `Writeable` from
83 /// there, but other languages downstream of the C bindings (e.g. Java) can't even do that.
84 /// Instead, we really want `Score` and `LockableScore` to implement `Writeable` directly, which we
85 /// do here by defining `Score` differently for `cfg(c_bindings)`.
86 macro_rules! define_score { ($($supertrait: path)*) => {
// The macro body defines `ScoreLookUp`, `ScoreUpdate`, the combined `Score` trait, and
// (outside of `c_bindings`) blanket/deref forwarding impls. The `$supertrait` list allows
// the `c_bindings` build to require `Writeable` directly on `Score` (see comment above).
87 /// An interface used to score payment channels for path finding.
89 /// `ScoreLookUp` is used to determine the penalty for a given channel.
91 /// Scoring is in terms of fees willing to be paid in order to avoid routing through a channel.
92 pub trait ScoreLookUp {
93 /// A configurable type which should contain various passed-in parameters for configuring the scorer,
94 /// on a per-routefinding-call basis through to the scorer methods,
95 /// which are used to determine the parameters for the suitability of channels for use.
97 /// Returns the fee in msats willing to be paid to avoid routing `send_amt_msat` through the
98 /// given channel in the direction from `source` to `target`.
100 /// The channel's capacity (less any other MPP parts that are also being considered for use in
101 /// the same payment) is given by `capacity_msat`. It may be determined from various sources
102 /// such as a chain data, network gossip, or invoice hints. For invoice hints, a capacity near
103 /// [`u64::max_value`] is given to indicate sufficient capacity for the invoice's full amount.
104 /// Thus, implementations should be overflow-safe.
105 fn channel_penalty_msat(
106 &self, candidate: &CandidateRouteHop, usage: ChannelUsage, score_params: &Self::ScoreParams
110 /// `ScoreUpdate` is used to update the scorer's internal state after a payment attempt.
111 pub trait ScoreUpdate {
112 /// Handles updating channel penalties after failing to route through a channel.
113 fn payment_path_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration);
115 /// Handles updating channel penalties after successfully routing along a path.
116 fn payment_path_successful(&mut self, path: &Path, duration_since_epoch: Duration);
118 /// Handles updating channel penalties after a probe over the given path failed.
119 fn probe_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration);
121 /// Handles updating channel penalties after a probe over the given path succeeded.
122 fn probe_successful(&mut self, path: &Path, duration_since_epoch: Duration);
124 /// Scorers may wish to reduce their certainty of channel liquidity information over time.
125 /// Thus, this method is provided to allow scorers to observe the passage of time - the holder
126 /// of this object should call this method regularly (generally via the
127 /// `lightning-background-processor` crate).
128 fn decay_liquidity_certainty(&mut self, duration_since_epoch: Duration);
131 /// A trait which can both lookup and update routing channel penalty scores.
133 /// This is used in places where both bounds are required and implemented for all types which
134 /// implement [`ScoreLookUp`] and [`ScoreUpdate`].
136 /// Bindings users may need to manually implement this for their custom scoring implementations.
137 pub trait Score : ScoreLookUp + ScoreUpdate $(+ $supertrait)* {}
// Blanket impl: any type implementing both halves (plus the supertraits) is a `Score`.
139 #[cfg(not(c_bindings))]
140 impl<T: ScoreLookUp + ScoreUpdate $(+ $supertrait)*> Score for T {}
// Forward `ScoreLookUp` through any `Deref` (e.g. smart pointers and lock guards).
142 #[cfg(not(c_bindings))]
143 impl<S: ScoreLookUp, T: Deref<Target=S>> ScoreLookUp for T {
144 type ScoreParams = S::ScoreParams;
145 fn channel_penalty_msat(
146 &self, candidate: &CandidateRouteHop, usage: ChannelUsage, score_params: &Self::ScoreParams
148 self.deref().channel_penalty_msat(candidate, usage, score_params)
// Likewise forward `ScoreUpdate` through any `DerefMut`, delegating each update method.
152 #[cfg(not(c_bindings))]
153 impl<S: ScoreUpdate, T: DerefMut<Target=S>> ScoreUpdate for T {
154 fn payment_path_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
155 self.deref_mut().payment_path_failed(path, short_channel_id, duration_since_epoch)
158 fn payment_path_successful(&mut self, path: &Path, duration_since_epoch: Duration) {
159 self.deref_mut().payment_path_successful(path, duration_since_epoch)
162 fn probe_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
163 self.deref_mut().probe_failed(path, short_channel_id, duration_since_epoch)
166 fn probe_successful(&mut self, path: &Path, duration_since_epoch: Duration) {
167 self.deref_mut().probe_successful(path, duration_since_epoch)
170 fn decay_liquidity_certainty(&mut self, duration_since_epoch: Duration) {
171 self.deref_mut().decay_liquidity_certainty(duration_since_epoch)
// In non-`c_bindings` builds the concrete `Score` must itself be `Writeable` for persistence.
177 define_score!(Writeable);
179 #[cfg(not(c_bindings))]
// NOTE(review): the item this cfg-attribute applies to (presumably a second
// `define_score!()` invocation without the `Writeable` supertrait) is not visible in
// this excerpt — confirm against the full file.
182 /// A scorer that is accessed under a lock.
184 /// Needed so that calls to [`ScoreLookUp::channel_penalty_msat`] in [`find_route`] can be made while
185 /// having shared ownership of a scorer but without requiring internal locking in [`ScoreUpdate`]
186 /// implementations. Internal locking would be detrimental to route finding performance and could
187 /// result in [`ScoreLookUp::channel_penalty_msat`] returning a different value for the same channel.
189 /// [`find_route`]: crate::routing::router::find_route
190 pub trait LockableScore<'a> {
191 /// The [`ScoreUpdate`] type.
192 type ScoreUpdate: 'a + ScoreUpdate;
193 /// The [`ScoreLookUp`] type.
194 type ScoreLookUp: 'a + ScoreLookUp;
// The write guard must hand out mutable access to the updater half, the read guard
// shared access to the lookup half.
196 /// The write locked [`ScoreUpdate`] type.
197 type WriteLocked: DerefMut<Target = Self::ScoreUpdate> + Sized;
199 /// The read locked [`ScoreLookUp`] type.
200 type ReadLocked: Deref<Target = Self::ScoreLookUp> + Sized;
202 /// Returns read locked scorer.
203 fn read_lock(&'a self) -> Self::ReadLocked;
205 /// Returns write locked scorer.
206 fn write_lock(&'a self) -> Self::WriteLocked;
209 /// Refers to a scorer that is accessible under lock and also writeable to disk
211 /// We need this trait to be able to pass in a scorer to `lightning-background-processor` that will enable us to
212 /// use the Persister to persist it.
213 pub trait WriteableScore<'a>: LockableScore<'a> + Writeable {}
// Outside of `c_bindings`, `WriteableScore` is blanket-implemented for anything that is
// both lockable and writeable, so users never implement it by hand.
215 #[cfg(not(c_bindings))]
216 impl<'a, T> WriteableScore<'a> for T where T: LockableScore<'a> + Writeable {}
217 #[cfg(not(c_bindings))]
// `Mutex` provides no read/write distinction, so both locked types are the same guard and
// both lock methods take the one exclusive lock. Panics if the lock is poisoned.
218 impl<'a, T: Score + 'a> LockableScore<'a> for Mutex<T> {
219 type ScoreUpdate = T;
220 type ScoreLookUp = T;
222 type WriteLocked = MutexGuard<'a, Self::ScoreUpdate>;
223 type ReadLocked = MutexGuard<'a, Self::ScoreLookUp>;
225 fn read_lock(&'a self) -> Self::ReadLocked {
226 Mutex::lock(self).unwrap()
229 fn write_lock(&'a self) -> Self::WriteLocked {
230 Mutex::lock(self).unwrap()
234 #[cfg(not(c_bindings))]
// `RefCell` variant for single-threaded use: "locking" is runtime borrow-checking with no
// thread synchronization. (Method bodies are elided in this excerpt; presumably
// `self.borrow_mut()` / `self.borrow()` — confirm against the full file.)
235 impl<'a, T: Score + 'a> LockableScore<'a> for RefCell<T> {
236 type ScoreUpdate = T;
237 type ScoreLookUp = T;
239 type WriteLocked = RefMut<'a, Self::ScoreUpdate>;
240 type ReadLocked = Ref<'a, Self::ScoreLookUp>;
242 fn write_lock(&'a self) -> Self::WriteLocked {
246 fn read_lock(&'a self) -> Self::ReadLocked {
251 #[cfg(not(c_bindings))]
// `RwLock`-backed scorer: reads (scoring) may proceed concurrently, writes (updates) are
// exclusive. Panics if the lock is poisoned, matching the other `LockableScore` impls.
252 impl<'a, T: Score + 'a> LockableScore<'a> for RwLock<T> {
253 type ScoreUpdate = T;
254 type ScoreLookUp = T;
// Fixed: the guard types previously named the swapped associated types
// (`RwLockWriteGuard<'a, Self::ScoreLookUp>` / `RwLockReadGuard<'a, Self::ScoreUpdate>`).
// Both alias `T` here so that type-checked, but it contradicted the trait's bounds
// (`WriteLocked: DerefMut<Target = Self::ScoreUpdate>`,
//  `ReadLocked: Deref<Target = Self::ScoreLookUp>`) and the `Mutex` /
// `MultiThreadedLockableScore` impls, which pair write-with-update and read-with-lookup.
256 type WriteLocked = RwLockWriteGuard<'a, Self::ScoreUpdate>;
257 type ReadLocked = RwLockReadGuard<'a, Self::ScoreLookUp>;
259 fn read_lock(&'a self) -> Self::ReadLocked {
260 RwLock::read(self).unwrap()
263 fn write_lock(&'a self) -> Self::WriteLocked {
264 RwLock::write(self).unwrap()
269 /// A concrete implementation of [`LockableScore`] which supports multi-threading.
// Wraps the underlying scorer in a `RwLock` (the field is elided from this excerpt; see
// `MultiThreadedLockableScore::new` below, which constructs `score: RwLock::new(score)`).
270 pub struct MultiThreadedLockableScore<T: Score> {
275 impl<'a, T: Score + 'a> LockableScore<'a> for MultiThreadedLockableScore<T> {
276 type ScoreUpdate = T;
277 type ScoreLookUp = T;
// Newtype guards (rather than the raw `RwLock` guards) are returned so additional traits
// (e.g. `Writeable`, `ScoreLookUp`, `ScoreUpdate`) can be implemented on the locked forms.
278 type WriteLocked = MultiThreadedScoreLockWrite<'a, Self::ScoreUpdate>;
279 type ReadLocked = MultiThreadedScoreLockRead<'a, Self::ScoreLookUp>;
281 fn read_lock(&'a self) -> Self::ReadLocked {
282 MultiThreadedScoreLockRead(self.score.read().unwrap())
285 fn write_lock(&'a self) -> Self::WriteLocked {
286 MultiThreadedScoreLockWrite(self.score.write().unwrap())
291 impl<T: Score> Writeable for MultiThreadedLockableScore<T> {
292 fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
// Takes the read lock so the scorer cannot be mutated mid-serialization.
293 self.score.read().unwrap().write(writer)
// NOTE(review): likely gated on `c_bindings` by an attribute elided from this excerpt, as
// it would otherwise overlap the `#[cfg(not(c_bindings))]` blanket impl above — confirm.
298 impl<'a, T: Score + 'a> WriteableScore<'a> for MultiThreadedLockableScore<T> {}
301 impl<T: Score> MultiThreadedLockableScore<T> {
302 /// Creates a new [`MultiThreadedLockableScore`] given an underlying [`Score`].
303 pub fn new(score: T) -> Self {
// A reader/writer lock lets concurrent route-finding reads proceed in parallel.
304 MultiThreadedLockableScore { score: RwLock::new(score) }
309 /// A read-locked `MultiThreadedLockableScore`, obtained via [`LockableScore::read_lock`].
310 pub struct MultiThreadedScoreLockRead<'a, T: Score>(RwLockReadGuard<'a, T>);
313 /// A write-locked `MultiThreadedLockableScore`, obtained via [`LockableScore::write_lock`].
314 pub struct MultiThreadedScoreLockWrite<'a, T: Score>(RwLockWriteGuard<'a, T>);
// NOTE(review): `type Target` and the method body are elided in this excerpt; presumably
// forwards to the guard's inner `T` — confirm against the full file.
317 impl<'a, T: 'a + Score> Deref for MultiThreadedScoreLockRead<'a, T> {
320 fn deref(&self) -> &Self::Target {
326 impl<'a, T: Score> ScoreLookUp for MultiThreadedScoreLockRead<'a, T> {
327 type ScoreParams = T::ScoreParams;
// Delegates scoring directly to the guarded inner scorer.
328 fn channel_penalty_msat(&self, candidate:&CandidateRouteHop, usage: ChannelUsage, score_params: &Self::ScoreParams
330 self.0.channel_penalty_msat(candidate, usage, score_params)
// Allows persisting the scorer while the write lock is already held.
// NOTE(review): the body is elided in this excerpt; presumably `self.0.write(writer)`.
335 impl<'a, T: Score> Writeable for MultiThreadedScoreLockWrite<'a, T> {
336 fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
// NOTE(review): `type Target` and the method body are elided in this excerpt; presumably
// forwards to the guard's inner `T` — confirm against the full file.
342 impl<'a, T: 'a + Score> Deref for MultiThreadedScoreLockWrite<'a, T> {
345 fn deref(&self) -> &Self::Target {
// Mutable access to the inner scorer, required for the `ScoreUpdate` delegation below.
// NOTE(review): the body is elided in this excerpt; presumably `&mut self.0` (or
// equivalent) — confirm against the full file.
351 impl<'a, T: 'a + Score> DerefMut for MultiThreadedScoreLockWrite<'a, T> {
352 fn deref_mut(&mut self) -> &mut Self::Target {
358 impl<'a, T: Score> ScoreUpdate for MultiThreadedScoreLockWrite<'a, T> {
// Every update method simply delegates to the guarded inner scorer.
359 fn payment_path_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
360 self.0.payment_path_failed(path, short_channel_id, duration_since_epoch)
363 fn payment_path_successful(&mut self, path: &Path, duration_since_epoch: Duration) {
364 self.0.payment_path_successful(path, duration_since_epoch)
367 fn probe_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
368 self.0.probe_failed(path, short_channel_id, duration_since_epoch)
371 fn probe_successful(&mut self, path: &Path, duration_since_epoch: Duration) {
372 self.0.probe_successful(path, duration_since_epoch)
375 fn decay_liquidity_certainty(&mut self, duration_since_epoch: Duration) {
376 self.0.decay_liquidity_certainty(duration_since_epoch)
381 /// Proposed use of a channel passed as a parameter to [`ScoreLookUp::channel_penalty_msat`].
// Small plain-old-data struct; `Copy` so it can be passed by value cheaply.
382 #[derive(Clone, Copy, Debug, PartialEq)]
383 pub struct ChannelUsage {
384 /// The amount to send through the channel, denominated in millisatoshis.
385 pub amount_msat: u64,
387 /// Total amount, denominated in millisatoshis, already allocated to send through the channel
388 /// as part of a multi-path payment.
389 pub inflight_htlc_msat: u64,
391 /// The effective capacity of the channel.
392 pub effective_capacity: EffectiveCapacity,
396 /// [`ScoreLookUp`] implementation that uses a fixed penalty.
// Holds just the fixed `penalty_msat` (field elided in this excerpt; see `with_penalty`
// and `ReadableArgs<u64>::read`, which both construct `Self { penalty_msat }`).
397 pub struct FixedPenaltyScorer {
401 impl FixedPenaltyScorer {
402 /// Creates a new scorer applying the same fixed `penalty_msat` to every channel.
403 pub fn with_penalty(penalty_msat: u64) -> Self {
404 Self { penalty_msat }
408 impl ScoreLookUp for FixedPenaltyScorer {
// No per-call configuration is needed, hence the unit params type.
409 type ScoreParams = ();
// Ignores the candidate hop and usage entirely; every channel costs the same.
410 fn channel_penalty_msat(&self, _: &CandidateRouteHop, _: ChannelUsage, _score_params: &Self::ScoreParams) -> u64 {
415 impl ScoreUpdate for FixedPenaltyScorer {
// The fixed penalty never changes, so all feedback and decay callbacks are no-ops.
416 fn payment_path_failed(&mut self, _path: &Path, _short_channel_id: u64, _duration_since_epoch: Duration) {}
418 fn payment_path_successful(&mut self, _path: &Path, _duration_since_epoch: Duration) {}
420 fn probe_failed(&mut self, _path: &Path, _short_channel_id: u64, _duration_since_epoch: Duration) {}
422 fn probe_successful(&mut self, _path: &Path, _duration_since_epoch: Duration) {}
424 fn decay_liquidity_certainty(&mut self, _duration_since_epoch: Duration) {}
427 impl Writeable for FixedPenaltyScorer {
429 fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
// Serializes no fields: the penalty is re-supplied on read (see `ReadableArgs<u64>`).
430 write_tlv_fields!(w, {});
435 impl ReadableArgs<u64> for FixedPenaltyScorer {
437 fn read<R: Read>(r: &mut R, penalty_msat: u64) -> Result<Self, DecodeError> {
// Consumes the (empty) TLV stream for forward compatibility, then rebuilds the scorer
// from the caller-provided penalty.
438 read_tlv_fields!(r, {});
439 Ok(Self { penalty_msat })
443 /// [`ScoreLookUp`] implementation using channel success probability distributions.
445 /// Channels are tracked with upper and lower liquidity bounds - when an HTLC fails at a channel,
446 /// we learn that the upper-bound on the available liquidity is lower than the amount of the HTLC.
447 /// When a payment is forwarded through a channel (but fails later in the route), we learn the
448 /// lower-bound on the channel's available liquidity must be at least the value of the HTLC.
450 /// These bounds are then used to determine a success probability using the formula from
451 /// *Optimally Reliable & Cheap Payment Flows on the Lightning Network* by Rene Pickhardt
452 /// and Stefan Richter [[1]] (i.e. `(upper_bound - payment_amount) / (upper_bound - lower_bound)`).
454 /// This probability is combined with the [`liquidity_penalty_multiplier_msat`] and
455 /// [`liquidity_penalty_amount_multiplier_msat`] parameters to calculate a concrete penalty in
456 /// milli-satoshis. The penalties, when added across all hops, have the property of being linear in
457 /// terms of the entire path's success probability. This allows the router to directly compare
458 /// penalties for different paths. See the documentation of those parameters for the exact formulas.
460 /// The liquidity bounds are decayed by halving them every [`liquidity_offset_half_life`].
462 /// Further, we track the history of our upper and lower liquidity bounds for each channel,
463 /// allowing us to assign a second penalty (using [`historical_liquidity_penalty_multiplier_msat`]
464 /// and [`historical_liquidity_penalty_amount_multiplier_msat`]) based on the same probability
465 /// formula, but using the history of a channel rather than our latest estimates for the liquidity
468 /// [1]: https://arxiv.org/abs/2107.05322
469 /// [`liquidity_penalty_multiplier_msat`]: ProbabilisticScoringFeeParameters::liquidity_penalty_multiplier_msat
470 /// [`liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringFeeParameters::liquidity_penalty_amount_multiplier_msat
471 /// [`liquidity_offset_half_life`]: ProbabilisticScoringDecayParameters::liquidity_offset_half_life
472 /// [`historical_liquidity_penalty_multiplier_msat`]: ProbabilisticScoringFeeParameters::historical_liquidity_penalty_multiplier_msat
473 /// [`historical_liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringFeeParameters::historical_liquidity_penalty_amount_multiplier_msat
474 pub struct ProbabilisticScorer<G: Deref<Target = NetworkGraph<L>>, L: Deref>
475 where L::Target: Logger {
476 decay_params: ProbabilisticScoringDecayParameters,
// Per-channel liquidity-bound tracking, keyed by short channel ID. NOTE(review): further
// fields (e.g. handles for the `G`/`L` type parameters) are elided from this excerpt.
479 channel_liquidities: HashMap<u64, ChannelLiquidity>,
482 /// Parameters for configuring [`ProbabilisticScorer`].
484 /// Used to configure base, liquidity, and amount penalties, the sum of which comprises the channel
485 /// penalty (i.e., the amount in msats willing to be paid to avoid routing through the channel).
487 /// The penalty applied to any channel by the [`ProbabilisticScorer`] is the sum of each of the
490 pub struct ProbabilisticScoringFeeParameters {
// All of the penalty terms below are summed together to form a channel's total penalty.
491 /// A fixed penalty in msats to apply to each channel.
493 /// Default value: 500 msat
494 pub base_penalty_msat: u64,
496 /// A multiplier used with the total amount flowing over a channel to calculate a fixed penalty
497 /// applied to each channel, in excess of the [`base_penalty_msat`].
499 /// The purpose of the amount penalty is to avoid having fees dominate the channel cost (i.e.,
500 /// fees plus penalty) for large payments. The penalty is computed as the product of this
501 /// multiplier and `2^30`ths of the total amount flowing over a channel (i.e. the payment
502 /// amount plus the amount of any other HTLCs flowing we sent over the same channel).
504 /// ie `base_penalty_amount_multiplier_msat * amount_msat / 2^30`
506 /// Default value: 8,192 msat
508 /// [`base_penalty_msat`]: Self::base_penalty_msat
509 pub base_penalty_amount_multiplier_msat: u64,
511 /// A multiplier used in conjunction with the negative `log10` of the channel's success
512 /// probability for a payment, as determined by our latest estimates of the channel's
513 /// liquidity, to determine the liquidity penalty.
515 /// The penalty is based in part on the knowledge learned from prior successful and unsuccessful
516 /// payments. This knowledge is decayed over time based on [`liquidity_offset_half_life`]. The
517 /// penalty is effectively limited to `2 * liquidity_penalty_multiplier_msat` (corresponding to
518 /// lower bounding the success probability to `0.01`) when the amount falls within the
519 /// uncertainty bounds of the channel liquidity balance. Amounts above the upper bound will
520 /// result in a `u64::max_value` penalty, however.
522 /// `-log10(success_probability) * liquidity_penalty_multiplier_msat`
524 /// Default value: 30,000 msat
526 /// [`liquidity_offset_half_life`]: ProbabilisticScoringDecayParameters::liquidity_offset_half_life
527 pub liquidity_penalty_multiplier_msat: u64,
529 /// A multiplier used in conjunction with the total amount flowing over a channel and the
530 /// negative `log10` of the channel's success probability for the payment, as determined by our
531 /// latest estimates of the channel's liquidity, to determine the amount penalty.
533 /// The purpose of the amount penalty is to avoid having fees dominate the channel cost (i.e.,
534 /// fees plus penalty) for large payments. The penalty is computed as the product of this
535 /// multiplier and `2^20`ths of the amount flowing over this channel, weighted by the negative
536 /// `log10` of the success probability.
538 /// `-log10(success_probability) * liquidity_penalty_amount_multiplier_msat * amount_msat / 2^20`
540 /// In practice, this means for 0.1 success probability (`-log10(0.1) == 1`) each `2^20`th of
541 /// the amount will result in a penalty of the multiplier. And, as the success probability
542 /// decreases, the negative `log10` weighting will increase dramatically. For higher success
543 /// probabilities, the multiplier will have a decreasing effect as the negative `log10` will
546 /// Default value: 192 msat
547 pub liquidity_penalty_amount_multiplier_msat: u64,
549 /// A multiplier used in conjunction with the negative `log10` of the channel's success
550 /// probability for the payment, as determined based on the history of our estimates of the
551 /// channel's available liquidity, to determine a penalty.
553 /// This penalty is similar to [`liquidity_penalty_multiplier_msat`], however, instead of using
554 /// only our latest estimate for the current liquidity available in the channel, it estimates
555 /// success probability based on the estimated liquidity available in the channel through
556 /// history. Specifically, every time we update our liquidity bounds on a given channel, we
557 /// track which of several buckets those bounds fall into, exponentially decaying the
558 /// probability of each bucket as new samples are added.
560 /// Default value: 10,000 msat
562 /// [`liquidity_penalty_multiplier_msat`]: Self::liquidity_penalty_multiplier_msat
563 pub historical_liquidity_penalty_multiplier_msat: u64,
565 /// A multiplier used in conjunction with the total amount flowing over a channel and the
566 /// negative `log10` of the channel's success probability for the payment, as determined based
567 /// on the history of our estimates of the channel's available liquidity, to determine a
570 /// The purpose of the amount penalty is to avoid having fees dominate the channel cost for
571 /// large payments. The penalty is computed as the product of this multiplier and `2^20`ths
572 /// of the amount flowing over this channel, weighted by the negative `log10` of the success
575 /// This penalty is similar to [`liquidity_penalty_amount_multiplier_msat`], however, instead
576 /// of using only our latest estimate for the current liquidity available in the channel, it
577 /// estimates success probability based on the estimated liquidity available in the channel
578 /// through history. Specifically, every time we update our liquidity bounds on a given
579 /// channel, we track which of several buckets those bounds fall into, exponentially decaying
580 /// the probability of each bucket as new samples are added.
582 /// Default value: 64 msat
584 /// [`liquidity_penalty_amount_multiplier_msat`]: Self::liquidity_penalty_amount_multiplier_msat
585 pub historical_liquidity_penalty_amount_multiplier_msat: u64,
587 /// Manual penalties used for the given nodes. Allows to set a particular penalty for a given
588 /// node. Note that a manual penalty of `u64::max_value()` means the node would not ever be
589 /// considered during path finding.
591 /// This is not exported to bindings users
592 pub manual_node_penalties: HashMap<NodeId, u64>,
594 /// This penalty is applied when `htlc_maximum_msat` is equal to or larger than half of the
595 /// channel's capacity, (ie. htlc_maximum_msat >= 0.5 * channel_capacity) which makes us
596 /// prefer nodes with a smaller `htlc_maximum_msat`. We treat such nodes preferentially
597 /// as this makes balance discovery attacks harder to execute, thereby creating an incentive
598 /// to restrict `htlc_maximum_msat` and improve privacy.
600 /// Default value: 250 msat
601 pub anti_probing_penalty_msat: u64,
603 /// This penalty is applied when the total amount flowing over a channel exceeds our current
604 /// estimate of the channel's available liquidity. The total amount is the amount of the
605 /// current HTLC plus any HTLCs which we've sent over the same channel.
607 /// Note that in this case all other penalties, including the
608 /// [`liquidity_penalty_multiplier_msat`] and [`liquidity_penalty_amount_multiplier_msat`]-based
609 /// penalties, as well as the [`base_penalty_msat`] and the [`anti_probing_penalty_msat`], if
610 /// applicable, are still included in the overall penalty.
612 /// If you wish to avoid creating paths with such channels entirely, setting this to a value of
613 /// `u64::max_value()` will guarantee that.
615 /// Default value: 1_0000_0000_000 msat (1 Bitcoin)
617 /// [`liquidity_penalty_multiplier_msat`]: Self::liquidity_penalty_multiplier_msat
618 /// [`liquidity_penalty_amount_multiplier_msat`]: Self::liquidity_penalty_amount_multiplier_msat
619 /// [`base_penalty_msat`]: Self::base_penalty_msat
620 /// [`anti_probing_penalty_msat`]: Self::anti_probing_penalty_msat
621 pub considered_impossible_penalty_msat: u64,
623 /// In order to calculate most of the scores above, we must first convert a lower and upper
624 /// bound on the available liquidity in a channel into the probability that we think a payment
625 /// will succeed. That probability is derived from a Probability Density Function for where we
626 /// think the liquidity in a channel likely lies, given such bounds.
628 /// If this flag is set, that PDF is simply a constant - we assume that the actual available
629 /// liquidity in a channel is just as likely to be at any point between our lower and upper
632 /// If this flag is *not* set, that PDF is `(x - 0.5*capacity) ^ 2`. That is, we use an
633 /// exponential curve which expects the liquidity of a channel to lie "at the edges". This
634 /// matches experimental results - most routing nodes do not aggressively rebalance their
635 /// channels and flows in the network are often unbalanced, leaving liquidity usually
638 /// Thus, for the "best" routes, leave this flag `false`. However, the flag does imply a number
639 /// of floating-point multiplications in the hottest routing code, which may lead to routing
640 /// performance degradation on some machines.
642 /// Default value: false
643 pub linear_success_probability: bool,
646 impl Default for ProbabilisticScoringFeeParameters {
// Values here are the "Default value" figures quoted in each field's documentation.
647 fn default() -> Self {
649 base_penalty_msat: 500,
650 base_penalty_amount_multiplier_msat: 8192,
651 liquidity_penalty_multiplier_msat: 30_000,
652 liquidity_penalty_amount_multiplier_msat: 192,
653 manual_node_penalties: HashMap::new(),
654 anti_probing_penalty_msat: 250,
655 considered_impossible_penalty_msat: 1_0000_0000_000,
656 historical_liquidity_penalty_multiplier_msat: 10_000,
657 historical_liquidity_penalty_amount_multiplier_msat: 64,
658 linear_success_probability: false,
663 impl ProbabilisticScoringFeeParameters {
664 /// Marks the node with the given `node_id` as banned,
665 /// i.e it will be avoided during path finding.
666 pub fn add_banned(&mut self, node_id: &NodeId) {
// A `u64::max_value()` manual penalty is the documented "never route through" sentinel.
667 self.manual_node_penalties.insert(*node_id, u64::max_value());
670 /// Marks all nodes in the given list as banned, i.e.,
671 /// they will be avoided during path finding.
672 pub fn add_banned_from_list(&mut self, node_ids: Vec<NodeId>) {
674 self.manual_node_penalties.insert(id, u64::max_value());
678 /// Removes the node with the given `node_id` from the list of nodes to avoid.
679 pub fn remove_banned(&mut self, node_id: &NodeId) {
680 self.manual_node_penalties.remove(node_id);
683 /// Sets a manual penalty for the given node.
684 pub fn set_manual_penalty(&mut self, node_id: &NodeId, penalty: u64) {
685 self.manual_node_penalties.insert(*node_id, penalty);
688 /// Removes the node with the given `node_id` from the list of manual penalties.
689 pub fn remove_manual_penalty(&mut self, node_id: &NodeId) {
690 self.manual_node_penalties.remove(node_id);
693 /// Clears the list of manual penalties that are applied during path finding.
694 pub fn clear_manual_penalties(&mut self) {
695 self.manual_node_penalties = HashMap::new();
700 impl ProbabilisticScoringFeeParameters {
// Disables every penalty term. Note `linear_success_probability: true` also selects the
// cheaper constant-PDF probability model (see the field's documentation above).
701 fn zero_penalty() -> Self {
703 base_penalty_msat: 0,
704 base_penalty_amount_multiplier_msat: 0,
705 liquidity_penalty_multiplier_msat: 0,
706 liquidity_penalty_amount_multiplier_msat: 0,
707 historical_liquidity_penalty_multiplier_msat: 0,
708 historical_liquidity_penalty_amount_multiplier_msat: 0,
709 manual_node_penalties: HashMap::new(),
710 anti_probing_penalty_msat: 0,
711 considered_impossible_penalty_msat: 0,
712 linear_success_probability: true,
717 /// Parameters for configuring [`ProbabilisticScorer`].
719 /// Used to configure decay parameters that are static throughout the lifetime of the scorer.
720 /// these decay parameters affect the score of the channel penalty and are not changed on a
721 /// per-route penalty cost call.
722 #[derive(Copy, Clone)]
723 pub struct ProbabilisticScoringDecayParameters {
724 /// If we aren't learning any new datapoints for a channel, the historical liquidity bounds
725 /// tracking can simply live on with increasingly stale data. Instead, when a channel has not
726 /// seen a liquidity estimate update for this amount of time, the historical datapoints are
728 /// For an example of historical_no_updates_half_life being used see [`historical_estimated_channel_liquidity_probabilities`]
730 /// Note that after 16 or more half lives all historical data will be completely gone.
732 /// Default value: 14 days
734 /// [`historical_estimated_channel_liquidity_probabilities`]: ProbabilisticScorer::historical_estimated_channel_liquidity_probabilities
735 pub historical_no_updates_half_life: Duration,
737 /// Whenever this amount of time elapses since the last update to a channel's liquidity bounds,
738 /// the distance from the bounds to "zero" is cut in half. In other words, the lower-bound on
739 /// the available liquidity is halved and the upper-bound moves half-way to the channel's total
742 /// Because halving the liquidity bounds grows the uncertainty on the channel's liquidity,
743 /// the penalty for an amount within the new bounds may change. See the [`ProbabilisticScorer`]
744 /// struct documentation for more info on the way the liquidity bounds are used.
746 /// For example, if the channel's capacity is 1 million sats, and the current upper and lower
747 /// liquidity bounds are 200,000 sats and 600,000 sats, after this amount of time the upper
748 /// and lower liquidity bounds will be decayed to 100,000 and 800,000 sats.
750 /// Default value: 6 hours
754 /// When built with the `no-std` feature, time will never elapse. Therefore, the channel
755 /// liquidity knowledge will never decay except when the bounds cross.
756 pub liquidity_offset_half_life: Duration,
759 impl Default for ProbabilisticScoringDecayParameters {
760 fn default() -> Self {
762 liquidity_offset_half_life: Duration::from_secs(6 * 60 * 60),
763 historical_no_updates_half_life: Duration::from_secs(60 * 60 * 24 * 14),
impl ProbabilisticScoringDecayParameters {
	// Returns decay parameters matching `Default::default()`; the name mirrors the fee-parameter
	// `zero_penalty` constructor used by tests.
	// NOTE(review): presumably only used from tests — confirm whether this fn (or impl) is
	// `#[cfg(test)]`-gated in the full file.
	fn zero_penalty() -> Self {
		Self {
			liquidity_offset_half_life: Duration::from_secs(6 * 60 * 60),
			historical_no_updates_half_life: Duration::from_secs(60 * 60 * 24 * 14),
		}
	}
}
/// Accounting for channel liquidity balance uncertainty.
///
/// Direction is defined in terms of [`NodeId`] partial ordering, where the source node is the
/// first node in the ordering of the channel's counterparties. Thus, swapping the two liquidity
/// offset fields gives the opposite direction.
#[repr(C)] // Force the fields in memory to be in the order we specify
struct ChannelLiquidity {
	/// Lower channel liquidity bound in terms of an offset from zero.
	min_liquidity_offset_msat: u64,

	/// Upper channel liquidity bound in terms of an offset from the effective capacity.
	max_liquidity_offset_msat: u64,

	/// Bucketed history of where the liquidity bounds have sat over time, used for the
	/// historical-probability portion of the penalty.
	liquidity_history: HistoricalLiquidityTracker,

	/// Time when the liquidity bounds were last modified as an offset since the unix epoch.
	last_updated: Duration,

	/// Time when the historical liquidity bounds were last modified as an offset against the unix
	/// epoch.
	offset_history_last_updated: Duration,
}
// Check that the liquidity HashMap's entries sit on round cache lines.
//
// Specifically, the first cache line will have the key, the liquidity offsets, and the total
// points tracked in the historical tracker.
//
// The next two cache lines will have the historical points, which we only access last during
// scoring, followed by the last_updated `Duration`s (which we do not need during scoring).
//
// These two subtractions underflow — failing compilation of this file — unless the entry size
// is exactly 192 bytes (three 64-byte cache lines), acting as a compile-time size assertion.
const _LIQUIDITY_MAP_SIZING_CHECK: usize = 192 - ::core::mem::size_of::<(u64, ChannelLiquidity)>();
const _LIQUIDITY_MAP_SIZING_CHECK_2: usize = ::core::mem::size_of::<(u64, ChannelLiquidity)>() - 192;
/// A snapshot of [`ChannelLiquidity`] in one direction assuming a certain channel capacity and
/// decayed with a given half life.
///
/// The generic parameters let the same type serve as both a shared (`&`) and mutable (`&mut`)
/// view over the fields of the underlying [`ChannelLiquidity`].
struct DirectedChannelLiquidity<L: Deref<Target = u64>, HT: Deref<Target = HistoricalLiquidityTracker>, T: Deref<Target = Duration>> {
	min_liquidity_offset_msat: L,
	max_liquidity_offset_msat: L,
	liquidity_history: DirectedHistoricalLiquidityTracker<HT>,
	// Assumed capacity under which the offsets are interpreted (see `max_liquidity_msat`).
	capacity_msat: u64,
	last_updated: T,
	offset_history_last_updated: T,
}
impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ProbabilisticScorer<G, L> where L::Target: Logger {
	/// Creates a new scorer using the given scoring parameters for sending payments from a node
	/// through a network graph.
	pub fn new(decay_params: ProbabilisticScoringDecayParameters, network_graph: G, logger: L) -> Self {
		Self {
			decay_params,
			network_graph,
			logger,
			// No per-channel knowledge yet; entries are created lazily as payment results arrive.
			channel_liquidities: HashMap::new(),
		}
	}

	// Seeds the scorer with a pre-built liquidity estimate for `short_channel_id`, asserting no
	// estimate was already present; consumes and returns `self` for builder-style chaining.
	fn with_channel(mut self, short_channel_id: u64, liquidity: ChannelLiquidity) -> Self {
		assert!(self.channel_liquidities.insert(short_channel_id, liquidity).is_none());
		self
	}
	/// Dump the contents of this scorer into the configured logger.
	///
	/// Note that this writes roughly one line per channel for which we have a liquidity estimate,
	/// which may be a substantial amount of log output.
	pub fn debug_log_liquidity_stats(&self) {
		let graph = self.network_graph.read_only();
		for (scid, liq) in self.channel_liquidities.iter() {
			if let Some(chan_debug) = graph.channels().get(scid) {
				// Log each channel in both directions; the closure does one direction.
				let log_direction = |source, target| {
					if let Some((directed_info, _)) = chan_debug.as_directed_to(target) {
						let amt = directed_info.effective_capacity().as_msat();
						let dir_liq = liq.as_directed(source, target, amt);

						let min_buckets = &dir_liq.liquidity_history.min_liquidity_offset_history_buckets();
						let max_buckets = &dir_liq.liquidity_history.max_liquidity_offset_history_buckets();

						log_debug!(self.logger, core::concat!(
							"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
							"\tHistorical min liquidity bucket relative probabilities:\n",
							"\t\t{} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {}\n",
							"\tHistorical max liquidity bucket relative probabilities:\n",
							"\t\t{} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {}"),
							source, target, scid, dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat(),
							min_buckets[ 0], min_buckets[ 1], min_buckets[ 2], min_buckets[ 3],
							min_buckets[ 4], min_buckets[ 5], min_buckets[ 6], min_buckets[ 7],
							min_buckets[ 8], min_buckets[ 9], min_buckets[10], min_buckets[11],
							min_buckets[12], min_buckets[13], min_buckets[14], min_buckets[15],
							min_buckets[16], min_buckets[17], min_buckets[18], min_buckets[19],
							min_buckets[20], min_buckets[21], min_buckets[22], min_buckets[23],
							min_buckets[24], min_buckets[25], min_buckets[26], min_buckets[27],
							min_buckets[28], min_buckets[29], min_buckets[30], min_buckets[31],
							// Note that the liquidity buckets are an offset from the edge, so we
							// inverse the max order to get the probabilities from zero.
							max_buckets[31], max_buckets[30], max_buckets[29], max_buckets[28],
							max_buckets[27], max_buckets[26], max_buckets[25], max_buckets[24],
							max_buckets[23], max_buckets[22], max_buckets[21], max_buckets[20],
							max_buckets[19], max_buckets[18], max_buckets[17], max_buckets[16],
							max_buckets[15], max_buckets[14], max_buckets[13], max_buckets[12],
							max_buckets[11], max_buckets[10], max_buckets[ 9], max_buckets[ 8],
							max_buckets[ 7], max_buckets[ 6], max_buckets[ 5], max_buckets[ 4],
							max_buckets[ 3], max_buckets[ 2], max_buckets[ 1], max_buckets[ 0]);
					} else {
						log_debug!(self.logger, "No amount known for SCID {} from {:?} to {:?}", scid, source, target);
					}
				};

				log_direction(&chan_debug.node_one, &chan_debug.node_two);
				log_direction(&chan_debug.node_two, &chan_debug.node_one);
			} else {
				log_debug!(self.logger, "No network graph entry for SCID {}", scid);
			}
		}
	}
894 /// Query the estimated minimum and maximum liquidity available for sending a payment over the
895 /// channel with `scid` towards the given `target` node.
896 pub fn estimated_channel_liquidity_range(&self, scid: u64, target: &NodeId) -> Option<(u64, u64)> {
897 let graph = self.network_graph.read_only();
899 if let Some(chan) = graph.channels().get(&scid) {
900 if let Some(liq) = self.channel_liquidities.get(&scid) {
901 if let Some((directed_info, source)) = chan.as_directed_to(target) {
902 let amt = directed_info.effective_capacity().as_msat();
903 let dir_liq = liq.as_directed(source, target, amt);
904 return Some((dir_liq.min_liquidity_msat(), dir_liq.max_liquidity_msat()));
911 /// Query the historical estimated minimum and maximum liquidity available for sending a
912 /// payment over the channel with `scid` towards the given `target` node.
914 /// Returns two sets of 32 buckets. The first set describes the lower-bound liquidity history,
915 /// the second set describes the upper-bound liquidity history. Each bucket describes the
916 /// relative frequency at which we've seen a liquidity bound in the bucket's range relative to
917 /// the channel's total capacity, on an arbitrary scale. Because the values are slowly decayed,
918 /// more recent data points are weighted more heavily than older datapoints.
920 /// Note that the range of each bucket varies by its location to provide more granular results
921 /// at the edges of a channel's capacity, where it is more likely to sit.
923 /// When scoring, the estimated probability that an upper-/lower-bound lies in a given bucket
924 /// is calculated by dividing that bucket's value with the total value of all buckets.
926 /// For example, using a lower bucket count for illustrative purposes, a value of
927 /// `[0, 0, 0, ..., 0, 32]` indicates that we believe the probability of a bound being very
928 /// close to the channel's capacity to be 100%, and have never (recently) seen it in any other
929 /// bucket. A value of `[31, 0, 0, ..., 0, 0, 32]` indicates we've seen the bound being both
930 /// in the top and bottom bucket, and roughly with similar (recent) frequency.
932 /// Because the datapoints are decayed slowly over time, values will eventually return to
933 /// `Some(([0; 32], [0; 32]))` or `None` if no data remains for a channel.
935 /// In order to fetch a single success probability from the buckets provided here, as used in
936 /// the scoring model, see [`Self::historical_estimated_payment_success_probability`].
937 pub fn historical_estimated_channel_liquidity_probabilities(&self, scid: u64, target: &NodeId)
938 -> Option<([u16; 32], [u16; 32])> {
939 let graph = self.network_graph.read_only();
941 if let Some(chan) = graph.channels().get(&scid) {
942 if let Some(liq) = self.channel_liquidities.get(&scid) {
943 if let Some((directed_info, source)) = chan.as_directed_to(target) {
944 let amt = directed_info.effective_capacity().as_msat();
945 let dir_liq = liq.as_directed(source, target, amt);
947 let min_buckets = *dir_liq.liquidity_history.min_liquidity_offset_history_buckets();
948 let mut max_buckets = *dir_liq.liquidity_history.max_liquidity_offset_history_buckets();
950 // Note that the liquidity buckets are an offset from the edge, so we inverse
951 // the max order to get the probabilities from zero.
952 max_buckets.reverse();
953 return Some((min_buckets, max_buckets));
960 /// Query the probability of payment success sending the given `amount_msat` over the channel
961 /// with `scid` towards the given `target` node, based on the historical estimated liquidity
964 /// These are the same bounds as returned by
965 /// [`Self::historical_estimated_channel_liquidity_probabilities`] (but not those returned by
966 /// [`Self::estimated_channel_liquidity_range`]).
967 pub fn historical_estimated_payment_success_probability(
968 &self, scid: u64, target: &NodeId, amount_msat: u64, params: &ProbabilisticScoringFeeParameters)
970 let graph = self.network_graph.read_only();
972 if let Some(chan) = graph.channels().get(&scid) {
973 if let Some(liq) = self.channel_liquidities.get(&scid) {
974 if let Some((directed_info, source)) = chan.as_directed_to(target) {
975 let capacity_msat = directed_info.effective_capacity().as_msat();
976 let dir_liq = liq.as_directed(source, target, capacity_msat);
978 return dir_liq.liquidity_history.calculate_success_probability_times_billion(
979 ¶ms, amount_msat, capacity_msat
980 ).map(|p| p as f64 / (1024 * 1024 * 1024) as f64);
impl ChannelLiquidity {
	/// Builds a fresh, fully-unknown estimate: both offsets at zero (i.e. bounds of zero and the
	/// full capacity), empty historical buckets, and both timestamps set to `last_updated`.
	fn new(last_updated: Duration) -> Self {
		Self {
			min_liquidity_offset_msat: 0,
			max_liquidity_offset_msat: 0,
			liquidity_history: HistoricalLiquidityTracker::new(),
			last_updated,
			offset_history_last_updated: last_updated,
		}
	}

	/// Returns a view of the channel liquidity directed from `source` to `target` assuming
	/// `capacity_msat`.
	fn as_directed(
		&self, source: &NodeId, target: &NodeId, capacity_msat: u64,
	) -> DirectedChannelLiquidity<&u64, &HistoricalLiquidityTracker, &Duration> {
		let source_less_than_target = source < target;
		// Offsets are stored in the canonical (lower-NodeId-first) direction; swap them when the
		// requested direction is the reverse.
		let (min_liquidity_offset_msat, max_liquidity_offset_msat) =
			if source_less_than_target {
				(&self.min_liquidity_offset_msat, &self.max_liquidity_offset_msat)
			} else {
				(&self.max_liquidity_offset_msat, &self.min_liquidity_offset_msat)
			};

		DirectedChannelLiquidity {
			min_liquidity_offset_msat,
			max_liquidity_offset_msat,
			liquidity_history: self.liquidity_history.as_directed(source_less_than_target),
			capacity_msat,
			last_updated: &self.last_updated,
			offset_history_last_updated: &self.offset_history_last_updated,
		}
	}

	/// Returns a mutable view of the channel liquidity directed from `source` to `target` assuming
	/// `capacity_msat`.
	fn as_directed_mut(
		&mut self, source: &NodeId, target: &NodeId, capacity_msat: u64,
	) -> DirectedChannelLiquidity<&mut u64, &mut HistoricalLiquidityTracker, &mut Duration> {
		let source_less_than_target = source < target;
		// Same canonical-direction swap as `as_directed`, but with mutable borrows.
		let (min_liquidity_offset_msat, max_liquidity_offset_msat) =
			if source_less_than_target {
				(&mut self.min_liquidity_offset_msat, &mut self.max_liquidity_offset_msat)
			} else {
				(&mut self.max_liquidity_offset_msat, &mut self.min_liquidity_offset_msat)
			};

		DirectedChannelLiquidity {
			min_liquidity_offset_msat,
			max_liquidity_offset_msat,
			liquidity_history: self.liquidity_history.as_directed_mut(source_less_than_target),
			capacity_msat,
			last_updated: &mut self.last_updated,
			offset_history_last_updated: &mut self.offset_history_last_updated,
		}
	}

	/// Decays `offset` towards zero exponentially, halving it once per elapsed
	/// `liquidity_offset_half_life` since `self.last_updated`.
	fn decayed_offset(&self, offset: u64, duration_since_epoch: Duration,
		decay_params: ProbabilisticScoringDecayParameters
	) -> u64 {
		let half_life = decay_params.liquidity_offset_half_life.as_secs_f64();
		if half_life != 0.0 {
			let elapsed_time = duration_since_epoch.saturating_sub(self.last_updated).as_secs_f64();
			((offset as f64) * powf64(0.5, elapsed_time / half_life)) as u64
		} else {
			// A zero half-life would divide by zero above; treat it as instant, total decay.
			0
		}
	}
}
/// Bounds `-log10` to avoid excessive liquidity penalties for payments with low success
/// probabilities.
const NEGATIVE_LOG10_UPPER_BOUND: u64 = 2;

/// The rough cutoff at which our precision falls off and we should stop bothering to try to log a
/// ratio, as X in 1/X.
const PRECISION_LOWER_BOUND_DENOMINATOR: u64 = log_approx::LOWER_BITS_BOUND;

/// The divisor used when computing the amount penalty.
const AMOUNT_PENALTY_DIVISOR: u64 = 1 << 20;
/// The divisor used when computing the base-penalty amount multiplier contribution.
const BASE_AMOUNT_PENALTY_DIVISOR: u64 = 1 << 30;
/// Raises three `f64`s to the 3rd power, without `powi` because it requires `std` (dunno why).
fn three_f64_pow_3(a: f64, b: f64, c: f64) -> (f64, f64, f64) {
	// Cubing via two multiplications keeps this `no-std` friendly and exact for these inputs.
	let cube = |x: f64| x * x * x;
	(cube(a), cube(b), cube(c))
}
/// Given liquidity bounds, calculates the success probability (in the form of a numerator and
/// denominator) of an HTLC. This is a key assumption in our scoring models.
///
/// Must not return a numerator or denominator greater than 2^31 for arguments less than 2^31.
///
/// min_zero_implies_no_successes signals that a `min_liquidity_msat` of 0 means we've not
/// (recently) seen an HTLC successfully complete over this channel.
fn success_probability(
	amount_msat: u64, min_liquidity_msat: u64, max_liquidity_msat: u64, capacity_msat: u64,
	params: &ProbabilisticScoringFeeParameters, min_zero_implies_no_successes: bool,
) -> (u64, u64) {
	// Callers must pre-clamp the amount strictly between the bounds; subtractions below rely on it.
	debug_assert!(min_liquidity_msat <= amount_msat);
	debug_assert!(amount_msat < max_liquidity_msat);
	debug_assert!(max_liquidity_msat <= capacity_msat);

	let (numerator, mut denominator) =
		if params.linear_success_probability {
			// Linear model: P(success) = (max - amount) / (max - min); the +1 guards against a
			// zero denominator when the bounds coincide.
			(max_liquidity_msat - amount_msat,
				(max_liquidity_msat - min_liquidity_msat).saturating_add(1))
		} else {
			let capacity = capacity_msat as f64;
			let min = (min_liquidity_msat as f64) / capacity;
			let max = (max_liquidity_msat as f64) / capacity;
			let amount = (amount_msat as f64) / capacity;

			// Assume the channel has a probability density function of (x - 0.5)^2 for values from
			// 0 to 1 (where 1 is the channel's full capacity). The success probability given some
			// liquidity bounds is thus the integral under the curve from the amount to maximum
			// estimated liquidity, divided by the same integral from the minimum to the maximum
			// estimated liquidity bounds.
			//
			// Because the integral from x to y is simply (y - 0.5)^3 - (x - 0.5)^3, we can
			// calculate the cumulative density function between the min/max bounds trivially. Note
			// that we don't bother to normalize the CDF to total to 1, as it will come out in the
			// division of num / den.
			let (max_pow, amt_pow, min_pow) = three_f64_pow_3(max - 0.5, amount - 0.5, min - 0.5);
			let num = max_pow - amt_pow;
			let den = max_pow - min_pow;

			// Because our numerator and denominator max out at 0.5^3 we need to multiply them by
			// quite a large factor to get something useful (ideally in the 2^30 range).
			const BILLIONISH: f64 = 1024.0 * 1024.0 * 1024.0;
			let numerator = (num * BILLIONISH) as u64 + 1;
			let denominator = (den * BILLIONISH) as u64 + 1;
			debug_assert!(numerator <= 1 << 30, "Got large numerator ({}) from float {}.", numerator, num);
			debug_assert!(denominator <= 1 << 30, "Got large denominator ({}) from float {}.", denominator, den);
			(numerator, denominator)
		};

	if min_zero_implies_no_successes && min_liquidity_msat == 0 &&
		denominator < u64::max_value() / 21
	{
		// If we have no knowledge of the channel, scale probability down by ~75%
		// Note that we prefer to increase the denominator rather than decrease the numerator as
		// the denominator is more likely to be larger and thus provide greater precision. This is
		// mostly an overoptimization but makes a large difference in tests.
		denominator = denominator * 21 / 16
	}

	(numerator, denominator)
}
impl<L: Deref<Target = u64>, HT: Deref<Target = HistoricalLiquidityTracker>, T: Deref<Target = Duration>>
DirectedChannelLiquidity< L, HT, T> {
	/// Returns a liquidity penalty for routing the given HTLC `amount_msat` through the channel in
	/// this direction.
	fn penalty_msat(&self, amount_msat: u64, score_params: &ProbabilisticScoringFeeParameters) -> u64 {
		let available_capacity = self.capacity_msat;
		let max_liquidity_msat = self.max_liquidity_msat();
		// If decay pushed the bounds past each other, treat the lower bound as at most the upper.
		let min_liquidity_msat = core::cmp::min(self.min_liquidity_msat(), max_liquidity_msat);

		let mut res = if amount_msat <= min_liquidity_msat {
			// At or below the known lower bound the HTLC always fits: no liquidity penalty.
			0
		} else if amount_msat >= max_liquidity_msat {
			// Equivalent to hitting the else clause below with the amount equal to the effective
			// capacity and without any certainty on the liquidity upper bound, plus the
			// impossibility penalty.
			let negative_log10_times_2048 = NEGATIVE_LOG10_UPPER_BOUND * 2048;
			Self::combined_penalty_msat(amount_msat, negative_log10_times_2048,
					score_params.liquidity_penalty_multiplier_msat,
					score_params.liquidity_penalty_amount_multiplier_msat)
				.saturating_add(score_params.considered_impossible_penalty_msat)
		} else {
			let (numerator, denominator) = success_probability(amount_msat,
				min_liquidity_msat, max_liquidity_msat, available_capacity, score_params, false);
			if denominator - numerator < denominator / PRECISION_LOWER_BOUND_DENOMINATOR {
				// If the failure probability is < 1.5625% (as 1 - numerator/denominator < 1/64),
				// don't bother trying to use the log approximation as it gets too noisy to be
				// particularly helpful, instead just round down to 0.
				0
			} else {
				let negative_log10_times_2048 =
					log_approx::negative_log10_times_2048(numerator, denominator);
				Self::combined_penalty_msat(amount_msat, negative_log10_times_2048,
					score_params.liquidity_penalty_multiplier_msat,
					score_params.liquidity_penalty_amount_multiplier_msat)
			}
		};

		if amount_msat >= available_capacity {
			// We're trying to send more than the capacity, use a max penalty.
			res = res.saturating_add(Self::combined_penalty_msat(amount_msat,
				NEGATIVE_LOG10_UPPER_BOUND * 2048,
				score_params.historical_liquidity_penalty_multiplier_msat,
				score_params.historical_liquidity_penalty_amount_multiplier_msat));
			return res;
		}

		if score_params.historical_liquidity_penalty_multiplier_msat != 0 ||
			score_params.historical_liquidity_penalty_amount_multiplier_msat != 0 {
			if let Some(cumulative_success_prob_times_billion) = self.liquidity_history
				.calculate_success_probability_times_billion(
					score_params, amount_msat, self.capacity_msat)
			{
				// +1 avoids taking log of zero when the historical probability is zero.
				let historical_negative_log10_times_2048 =
					log_approx::negative_log10_times_2048(cumulative_success_prob_times_billion + 1, 1024 * 1024 * 1024);
				res = res.saturating_add(Self::combined_penalty_msat(amount_msat,
					historical_negative_log10_times_2048, score_params.historical_liquidity_penalty_multiplier_msat,
					score_params.historical_liquidity_penalty_amount_multiplier_msat));
			} else {
				// If we don't have any valid points (or, once decayed, we have less than a full
				// point), redo the non-historical calculation with no liquidity bounds tracked and
				// the historical penalty multipliers.
				let (numerator, denominator) = success_probability(amount_msat, 0,
					available_capacity, available_capacity, score_params, true);
				let negative_log10_times_2048 =
					log_approx::negative_log10_times_2048(numerator, denominator);
				res = res.saturating_add(Self::combined_penalty_msat(amount_msat, negative_log10_times_2048,
					score_params.historical_liquidity_penalty_multiplier_msat,
					score_params.historical_liquidity_penalty_amount_multiplier_msat));
			}
		}

		res
	}

	/// Computes the liquidity penalty from the penalty multipliers.
	fn combined_penalty_msat(amount_msat: u64, mut negative_log10_times_2048: u64,
		liquidity_penalty_multiplier_msat: u64, liquidity_penalty_amount_multiplier_msat: u64,
	) -> u64 {
		negative_log10_times_2048 =
			negative_log10_times_2048.min(NEGATIVE_LOG10_UPPER_BOUND * 2048);

		// Upper bound the liquidity penalty to ensure some channel is selected.
		let liquidity_penalty_msat = negative_log10_times_2048
			.saturating_mul(liquidity_penalty_multiplier_msat) / 2048;
		let amount_penalty_msat = negative_log10_times_2048
			.saturating_mul(liquidity_penalty_amount_multiplier_msat)
			.saturating_mul(amount_msat) / 2048 / AMOUNT_PENALTY_DIVISOR;

		liquidity_penalty_msat.saturating_add(amount_penalty_msat)
	}

	/// Returns the lower bound of the channel liquidity balance in this direction.
	fn min_liquidity_msat(&self) -> u64 {
		*self.min_liquidity_offset_msat
	}

	/// Returns the upper bound of the channel liquidity balance in this direction.
	fn max_liquidity_msat(&self) -> u64 {
		// The max offset is stored as a distance down from capacity.
		self.capacity_msat
			.saturating_sub(*self.max_liquidity_offset_msat)
	}
}
impl<L: DerefMut<Target = u64>, HT: DerefMut<Target = HistoricalLiquidityTracker>, T: DerefMut<Target = Duration>>
DirectedChannelLiquidity<L, HT, T> {
	/// Adjusts the channel liquidity balance bounds when failing to route `amount_msat`.
	///
	/// A failure at this hop implies the channel cannot carry `amount_msat`, so the upper
	/// liquidity bound can be lowered to it (if not already lower).
	fn failed_at_channel<Log: Deref>(
		&mut self, amount_msat: u64, duration_since_epoch: Duration, chan_descr: fmt::Arguments, logger: &Log
	) where Log::Target: Logger {
		let existing_max_msat = self.max_liquidity_msat();
		if amount_msat < existing_max_msat {
			log_debug!(logger, "Setting max liquidity of {} from {} to {}", chan_descr, existing_max_msat, amount_msat);
			self.set_max_liquidity_msat(amount_msat, duration_since_epoch);
		} else {
			log_trace!(logger, "Max liquidity of {} is {} (already less than or equal to {})",
				chan_descr, existing_max_msat, amount_msat);
		}
		// Even when the bounds don't move, refresh the historical buckets with current knowledge.
		self.update_history_buckets(0, duration_since_epoch);
	}
1262 /// Adjusts the channel liquidity balance bounds when failing to route `amount_msat` downstream.
1263 fn failed_downstream<Log: Deref>(
1264 &mut self, amount_msat: u64, duration_since_epoch: Duration, chan_descr: fmt::Arguments, logger: &Log
1265 ) where Log::Target: Logger {
1266 let existing_min_msat = self.min_liquidity_msat();
1267 if amount_msat > existing_min_msat {
1268 log_debug!(logger, "Setting min liquidity of {} from {} to {}", existing_min_msat, chan_descr, amount_msat);
1269 self.set_min_liquidity_msat(amount_msat, duration_since_epoch);
1271 log_trace!(logger, "Min liquidity of {} is {} (already greater than or equal to {})",
1272 chan_descr, existing_min_msat, amount_msat);
1274 self.update_history_buckets(0, duration_since_epoch);
1277 /// Adjusts the channel liquidity balance bounds when successfully routing `amount_msat`.
1278 fn successful<Log: Deref>(&mut self,
1279 amount_msat: u64, duration_since_epoch: Duration, chan_descr: fmt::Arguments, logger: &Log
1280 ) where Log::Target: Logger {
1281 let max_liquidity_msat = self.max_liquidity_msat().checked_sub(amount_msat).unwrap_or(0);
1282 log_debug!(logger, "Subtracting {} from max liquidity of {} (setting it to {})", amount_msat, chan_descr, max_liquidity_msat);
1283 self.set_max_liquidity_msat(max_liquidity_msat, duration_since_epoch);
1284 self.update_history_buckets(amount_msat, duration_since_epoch);
	/// Updates the history buckets for this channel. Because the history buckets track what we now
	/// know about the channel's state *prior to our payment* (i.e. what we assume is "steady
	/// state"), we allow the caller to set an offset applied to our liquidity bounds which
	/// represents the amount of the successful payment we just made.
	fn update_history_buckets(&mut self, bucket_offset_msat: u64, duration_since_epoch: Duration) {
		self.liquidity_history.track_datapoint(
			// Shift both offsets by the just-sent amount so the datapoint reflects the channel's
			// state before our payment moved funds.
			*self.min_liquidity_offset_msat + bucket_offset_msat,
			self.max_liquidity_offset_msat.saturating_sub(bucket_offset_msat),
			self.capacity_msat,
		);
		*self.offset_history_last_updated = duration_since_epoch;
	}
1300 /// Adjusts the lower bound of the channel liquidity balance in this direction.
1301 fn set_min_liquidity_msat(&mut self, amount_msat: u64, duration_since_epoch: Duration) {
1302 *self.min_liquidity_offset_msat = amount_msat;
1303 if amount_msat > self.max_liquidity_msat() {
1304 *self.max_liquidity_offset_msat = 0;
1306 *self.last_updated = duration_since_epoch;
1309 /// Adjusts the upper bound of the channel liquidity balance in this direction.
1310 fn set_max_liquidity_msat(&mut self, amount_msat: u64, duration_since_epoch: Duration) {
1311 *self.max_liquidity_offset_msat = self.capacity_msat.checked_sub(amount_msat).unwrap_or(0);
1312 if amount_msat < *self.min_liquidity_offset_msat {
1313 *self.min_liquidity_offset_msat = 0;
1315 *self.last_updated = duration_since_epoch;
impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreLookUp for ProbabilisticScorer<G, L> where L::Target: Logger {
	type ScoreParams = ProbabilisticScoringFeeParameters;
	/// Computes the total penalty for routing over `candidate`: base penalty + liquidity penalty
	/// + historical penalty + optional anti-probing penalty, with manual per-node penalties and
	/// known-liquidity shortcuts taking precedence.
	fn channel_penalty_msat(
		&self, candidate: &CandidateRouteHop, usage: ChannelUsage, score_params: &ProbabilisticScoringFeeParameters
	) -> u64 {
		let (scid, target) = match candidate {
			CandidateRouteHop::PublicHop { info, short_channel_id } => {
				(short_channel_id, info.target())
			},
			// Only announced public hops carry liquidity knowledge; everything else is unscored.
			_ => return 0,
		};
		let source = candidate.source();
		// Manually-configured per-node penalties override all computed scoring.
		if let Some(penalty) = score_params.manual_node_penalties.get(&target) {
			return *penalty;
		}

		let base_penalty_msat = score_params.base_penalty_msat.saturating_add(
			score_params.base_penalty_amount_multiplier_msat
				.saturating_mul(usage.amount_msat) / BASE_AMOUNT_PENALTY_DIVISOR);

		let mut anti_probing_penalty_msat = 0;
		match usage.effective_capacity {
			// When the available liquidity is known exactly there is no uncertainty to score:
			// either the amount fits (base penalty only) or it cannot possibly succeed.
			EffectiveCapacity::ExactLiquidity { liquidity_msat: amount_msat } |
			EffectiveCapacity::HintMaxHTLC { amount_msat } =>
			{
				if usage.amount_msat > amount_msat {
					return u64::max_value();
				}
				return base_penalty_msat;
			},
			EffectiveCapacity::Total { capacity_msat, htlc_maximum_msat } => {
				// A max-HTLC of at least half the capacity lets probers binary-search the
				// channel's balance; penalize such channels if configured to.
				if htlc_maximum_msat >= capacity_msat/2 {
					anti_probing_penalty_msat = score_params.anti_probing_penalty_msat;
				}
			},
			_ => {},
		}

		// Treat in-flight HTLCs as if they consume liquidity alongside the new amount.
		let amount_msat = usage.amount_msat.saturating_add(usage.inflight_htlc_msat);
		let capacity_msat = usage.effective_capacity.as_msat();
		self.channel_liquidities
			.get(&scid)
			// Unknown channels are scored with a fresh, fully-uncertain liquidity estimate.
			.unwrap_or(&ChannelLiquidity::new(Duration::ZERO))
			.as_directed(&source, &target, capacity_msat)
			.penalty_msat(amount_msat, score_params)
			.saturating_add(anti_probing_penalty_msat)
			.saturating_add(base_penalty_msat)
	}
}
impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreUpdate for ProbabilisticScorer<G, L> where L::Target: Logger {
	/// Learns from a payment failure at `short_channel_id`: hops before the failure point raised
	/// their lower liquidity bound (the amount got through them); the failing hop lowers its
	/// upper bound (the amount did not fit); later hops teach us nothing.
	fn payment_path_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
		let amount_msat = path.final_value_msat();
		log_trace!(self.logger, "Scoring path through to SCID {} as having failed at {} msat", short_channel_id, amount_msat);
		let network_graph = self.network_graph.read_only();
		for (hop_idx, hop) in path.hops.iter().enumerate() {
			let target = NodeId::from_pubkey(&hop.pubkey);
			let channel_directed_from_source = network_graph.channels()
				.get(&hop.short_channel_id)
				.and_then(|channel| channel.as_directed_to(&target));

			let at_failed_channel = hop.short_channel_id == short_channel_id;
			if at_failed_channel && hop_idx == 0 {
				log_warn!(self.logger, "Payment failed at the first hop - we do not attempt to learn channel info in such cases as we can directly observe local state.\n\tBecause we know the local state, we should generally not see failures here - this may be an indication that your channel peer on channel {} is broken and you may wish to close the channel.", hop.short_channel_id);
			}

			// Only score announced channels.
			if let Some((channel, source)) = channel_directed_from_source {
				let capacity_msat = channel.effective_capacity().as_msat();
				if at_failed_channel {
					self.channel_liquidities
						.entry(hop.short_channel_id)
						.or_insert_with(|| ChannelLiquidity::new(duration_since_epoch))
						.as_directed_mut(source, &target, capacity_msat)
						.failed_at_channel(amount_msat, duration_since_epoch,
							format_args!("SCID {}, towards {:?}", hop.short_channel_id, target), &self.logger);
				} else {
					self.channel_liquidities
						.entry(hop.short_channel_id)
						.or_insert_with(|| ChannelLiquidity::new(duration_since_epoch))
						.as_directed_mut(source, &target, capacity_msat)
						.failed_downstream(amount_msat, duration_since_epoch,
							format_args!("SCID {}, towards {:?}", hop.short_channel_id, target), &self.logger);
				}
			} else {
				log_debug!(self.logger, "Not able to penalize channel with SCID {} as we do not have graph info for it (likely a route-hint last-hop).",
					hop.short_channel_id);
			}
			// Hops after the failure point were never attempted; stop learning here.
			if at_failed_channel { break; }
		}
	}

	/// Learns from a fully-successful payment: every hop carried `amount_msat`, so each hop's
	/// upper liquidity bound is reduced by the amount that just moved through it.
	fn payment_path_successful(&mut self, path: &Path, duration_since_epoch: Duration) {
		let amount_msat = path.final_value_msat();
		log_trace!(self.logger, "Scoring path through SCID {} as having succeeded at {} msat.",
			path.hops.split_last().map(|(hop, _)| hop.short_channel_id).unwrap_or(0), amount_msat);
		let network_graph = self.network_graph.read_only();
		for hop in &path.hops {
			let target = NodeId::from_pubkey(&hop.pubkey);
			let channel_directed_from_source = network_graph.channels()
				.get(&hop.short_channel_id)
				.and_then(|channel| channel.as_directed_to(&target));

			// Only score announced channels.
			if let Some((channel, source)) = channel_directed_from_source {
				let capacity_msat = channel.effective_capacity().as_msat();
				self.channel_liquidities
					.entry(hop.short_channel_id)
					.or_insert_with(|| ChannelLiquidity::new(duration_since_epoch))
					.as_directed_mut(source, &target, capacity_msat)
					.successful(amount_msat, duration_since_epoch,
						format_args!("SCID {}, towards {:?}", hop.short_channel_id, target), &self.logger);
			} else {
				log_debug!(self.logger, "Not able to learn for channel with SCID {} as we do not have graph info for it (likely a route-hint last-hop).",
					hop.short_channel_id);
			}
		}
	}

	// A failed probe is scored identically to a failed payment.
	fn probe_failed(&mut self, path: &Path, short_channel_id: u64, duration_since_epoch: Duration) {
		self.payment_path_failed(path, short_channel_id, duration_since_epoch)
	}

	// A successful probe is scored via `payment_path_failed` with a sentinel SCID that matches no
	// hop: every hop then takes the `failed_downstream` path (the amount got through it) without
	// any hop being treated as the failure point, and no funds actually moved.
	fn probe_successful(&mut self, path: &Path, duration_since_epoch: Duration) {
		self.payment_path_failed(path, u64::max_value(), duration_since_epoch)
	}

	/// Applies time-based decay to all tracked channels and drops entries that no longer carry
	/// any information.
	fn decay_liquidity_certainty(&mut self, duration_since_epoch: Duration) {
		let decay_params = self.decay_params;
		self.channel_liquidities.retain(|_scid, liquidity| {
			// Decay the live bounds' offsets towards "no knowledge" and stamp the update time.
			liquidity.min_liquidity_offset_msat =
				liquidity.decayed_offset(liquidity.min_liquidity_offset_msat, duration_since_epoch, decay_params);
			liquidity.max_liquidity_offset_msat =
				liquidity.decayed_offset(liquidity.max_liquidity_offset_msat, duration_since_epoch, decay_params);
			liquidity.last_updated = duration_since_epoch;

			// Historical buckets decay only once at least a full half-life has elapsed.
			let elapsed_time =
				duration_since_epoch.saturating_sub(liquidity.offset_history_last_updated);
			if elapsed_time > decay_params.historical_no_updates_half_life {
				let half_life = decay_params.historical_no_updates_half_life.as_secs_f64();
				if half_life != 0.0 {
					liquidity.liquidity_history.decay_buckets(elapsed_time.as_secs_f64() / half_life);
					liquidity.offset_history_last_updated = duration_since_epoch;
				}
			}
			// Retain only entries which still carry some information.
			liquidity.min_liquidity_offset_msat != 0 || liquidity.max_liquidity_offset_msat != 0 ||
				liquidity.liquidity_history.has_datapoints()
		});
	}
}
// Marker impl: `Score` adds no methods of its own here — the behavior comes
// from the `ScoreLookUp`/`ScoreUpdate` impls for `ProbabilisticScorer`
// (presumably `Score`'s supertraits; defined elsewhere in this file).
1472 impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> Score for ProbabilisticScorer<G, L>
1473 where L::Target: Logger {}
// f64 power helper, feature-gated: with `std` we can use the native float
// intrinsics directly.
1475 #[cfg(feature = "std")]
1477 fn powf64(n: f64, exp: f64) -> f64 {
1480 #[cfg(not(feature = "std"))]
// Without `std` there is no built-in `f64::powf`, so fall back to `libm`.
// Note the round-trip through `f32`: precision is reduced and very large
// results overflow f32's range to infinity. The only visible caller uses
// this for bucket-decay divisors, where precision is non-critical.
// NOTE(review): `libm` also provides a direct f64 `pow` — confirm the f32
// path is a deliberate size/speed trade-off before changing it.
1481 fn powf64(n: f64, exp: f64) -> f64 {
1482 libm::powf(n as f32, exp as f32) as f64
1485 mod bucketed_history {
1488 // Because liquidity is often skewed heavily in one direction, we store historical state
1489 // distribution in buckets of different size. For backwards compatibility, buckets of size 1/8th
1490 // must fit evenly into the buckets here.
1492 // The smallest bucket is 2^-14th of the channel, for each of our 32 buckets here we define the
1493 // width of the bucket in 2^14'ths of the channel. This increases exponentially until we reach
1494 // a full 16th of the channel's capacity, which is repeated a few times for backwards
1495 // compatibility. The four middle buckets represent full octiles of the channel's capacity.
1497 // For a 1 BTC channel, this lets us differentiate between failures in the bottom 6k sats, or
1498 // between the 12,000th sat and 24,000th sat, while only needing to store and operate on 32
1499 // buckets in total.
1501 // By default u16s may not be cache-aligned, but we'd rather not have to read a third cache
1502 // line just to access it
// Table of the 33 bucket boundary positions, in units of 1/16384th of the
// channel capacity. Bucket `i` spans positions [table[i], table[i + 1]), so
// 32 buckets need 33 boundaries; the last entry is POSITION_TICKS (16384).
1504 struct BucketStartPos([u16; 33]);
1505 impl BucketStartPos {
1506 const fn new() -> Self {
1508 0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 3072, 4096, 6144, 8192, 10240, 12288,
1509 13312, 14336, 15360, 15872, 16128, 16256, 16320, 16352, 16368, 16376, 16380, 16382, 16383, 16384,
// Allow indexing the newtype directly (`BUCKET_START_POS[i]`) instead of
// going through the inner array field.
1513 impl core::ops::Index<usize> for BucketStartPos {
1516 fn index(&self, index: usize) -> &u16 { &self.0[index] }
1518 const BUCKET_START_POS: BucketStartPos = BucketStartPos::new();
// For each of the 8 legacy (pre-0.0.117) octile buckets, the half-open
// [start, end) range of new 32-wide buckets it maps onto. Used when
// converting serialized legacy data; validated by `check_bucket_maps`.
1520 const LEGACY_TO_BUCKET_RANGE: [(u8, u8); 8] = [
1521 (0, 12), (12, 14), (14, 15), (15, 16), (16, 17), (17, 18), (18, 20), (20, 32)
// Resolution of liquidity positions: 2^14 ticks across the channel capacity.
1524 const POSITION_TICKS: u16 = 1 << 14;
// Maps a position tick (0..POSITION_TICKS) to the index of the bucket
// containing it, by scanning the boundary table for the first upper bound
// exceeding `pos`. With only 32 buckets a linear scan is cheap.
1526 fn pos_to_bucket(pos: u16) -> usize {
1527 for bucket in 0..32 {
1528 if pos < BUCKET_START_POS[bucket + 1] {
// Unreachable for any pos < POSITION_TICKS, since the final boundary is
// POSITION_TICKS itself.
1532 debug_assert!(false);
// Consistency test: verifies that the bucket boundary table, the
// pos->bucket mapping, and the legacy octile->bucket ranges all agree with
// a single source of truth — the per-bucket widths listed here.
1538 fn check_bucket_maps() {
1539 const BUCKET_WIDTH_IN_16384S: [u16; 32] = [
1540 1, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 1024, 1024, 2048, 2048,
1541 2048, 2048, 1024, 1024, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1, 1];
1543 let mut min_size_iter = 0;
1544 let mut legacy_bucket_iter = 0;
1545 for (bucket, width) in BUCKET_WIDTH_IN_16384S.iter().enumerate() {
// Each bucket must start exactly where the previous one ended...
1546 assert_eq!(BUCKET_START_POS[bucket], min_size_iter);
// ...and every tick within its width must map back to this bucket.
1547 for i in 0..*width {
1548 assert_eq!(pos_to_bucket(min_size_iter + i) as usize, bucket);
1550 min_size_iter += *width;
// At each octile boundary (1/8th of POSITION_TICKS), the legacy range
// table must end one legacy bucket and begin the next at this new bucket.
1551 if min_size_iter % (POSITION_TICKS / 8) == 0 {
1552 assert_eq!(LEGACY_TO_BUCKET_RANGE[legacy_bucket_iter].1 as usize, bucket + 1);
1553 if legacy_bucket_iter + 1 < 8 {
1554 assert_eq!(LEGACY_TO_BUCKET_RANGE[legacy_bucket_iter + 1].0 as usize, bucket + 1);
1556 legacy_bucket_iter += 1;
// The widths must tile the full position space exactly.
1559 assert_eq!(BUCKET_START_POS[32], POSITION_TICKS);
1560 assert_eq!(min_size_iter, POSITION_TICKS);
// Converts an amount into its position tick in [0, POSITION_TICKS] relative
// to the channel capacity. `capacity + 1` as the divisor avoids a
// divide-by-zero for zero-capacity inputs and keeps amount == capacity just
// below the top tick; out-of-range results saturate to POSITION_TICKS
// (an invalid position callers must check for).
1564 fn amount_to_pos(amount_msat: u64, capacity_msat: u64) -> u16 {
1565 let pos = if amount_msat < u64::max_value() / (POSITION_TICKS as u64) {
1566 (amount_msat * (POSITION_TICKS as u64) / capacity_msat.saturating_add(1))
1567 .try_into().unwrap_or(POSITION_TICKS)
1569 // Only use 128-bit arithmetic when multiplication will overflow to avoid 128-bit
1570 // division. This branch should only be hit in fuzz testing since the amount would
1571 // need to be over 2.88 million BTC in practice.
1572 ((amount_msat as u128) * (POSITION_TICKS as u128)
1573 / (capacity_msat as u128).saturating_add(1))
1574 .try_into().unwrap_or(POSITION_TICKS)
1576 // If we are running in a client that doesn't validate gossip, it's possible for a channel's
1577 // capacity to change due to a `channel_update` message which, if received while a payment
1578 // is in-flight, could cause this to fail. Thus, we only assert in test.
1580 debug_assert!(pos < POSITION_TICKS);
1584 /// Prior to LDK 0.0.117 we used eight buckets which were split evenly across the eight
1585 /// octiles. This was changed to use 32 buckets for accuracy reasons in 0.0.117, however we
1586 /// support reading the legacy values here for backwards compatibility.
1587 pub(super) struct LegacyHistoricalBucketRangeTracker {
1591 impl LegacyHistoricalBucketRangeTracker {
// Converts legacy octile data into the 32-bucket form by spreading each
// legacy bucket's weight evenly over its mapped range of new buckets.
1592 pub(crate) fn into_current(&self) -> HistoricalBucketRangeTracker {
1593 let mut buckets = [0; 32];
1594 for (idx, legacy_bucket) in self.buckets.iter().enumerate() {
1595 let mut new_val = *legacy_bucket;
1596 let (start, end) = LEGACY_TO_BUCKET_RANGE[idx];
// Integer division: any remainder of the legacy weight is dropped, which
// slightly under-weights converted data but keeps totals bounded.
1597 new_val /= (end - start) as u16;
1598 for i in start..end {
1599 buckets[i as usize] = new_val;
1602 HistoricalBucketRangeTracker { buckets }
1606 /// Tracks the historical state of a distribution as a weighted average of how much time was spent
1607 /// in each of 32 buckets.
// `Copy` is fine: the tracker is just a fixed 32 x u16 array (64 bytes).
1608 #[derive(Clone, Copy)]
1609 pub(super) struct HistoricalBucketRangeTracker {
1613 /// Buckets are stored in fixed point numbers with a 5 bit fractional part. Thus, the value
1614 /// "one" is 32, or this constant.
1615 pub const BUCKET_FIXED_POINT_ONE: u16 = 32;
1617 impl HistoricalBucketRangeTracker {
// Fresh tracker with no recorded datapoints.
1618 pub(super) fn new() -> Self { Self { buckets: [0; 32] } }
// Records one observation of a liquidity offset: decays all buckets
// slightly, then adds one fixed-point unit to the bucket containing the
// observed position. Positions at/above POSITION_TICKS are invalid
// (see `amount_to_pos`) and are silently ignored.
1619 fn track_datapoint(&mut self, liquidity_offset_msat: u64, capacity_msat: u64) {
1620 // We have 32 leaky buckets for min and max liquidity. Each bucket tracks the amount of time
1621 // we spend in each bucket as a 16-bit fixed-point number with a 5 bit fractional part.
1623 // Each time we update our liquidity estimate, we add 32 (1.0 in our fixed-point system) to
1624 // the buckets for the current min and max liquidity offset positions.
1626 // We then decay each bucket by multiplying by 2047/2048 (avoiding dividing by a
1627 // non-power-of-two). This ensures we can't actually overflow the u16 - when we get to
1628 // 63,457 adding 32 and decaying by 2047/2048 leaves us back at 63,457.
1630 // In total, this allows us to track data for the last 8,000 or so payments across a given
1633 // These constants are a balance - we try to fit in 2 bytes per bucket to reduce overhead,
1634 // and need to balance having more bits in the decimal part (to ensure decay isn't too
1635 // non-linear) with having too few bits in the mantissa, causing us to not store very many
1638 // The constants were picked experimentally, selecting a decay amount that restricts us
1639 // from overflowing buckets without having to cap them manually.
1641 let pos: u16 = amount_to_pos(liquidity_offset_msat, capacity_msat);
1642 if pos < POSITION_TICKS {
// Widen to u32 so the multiply by 2047 cannot overflow before the divide.
1643 for e in self.buckets.iter_mut() {
1644 *e = ((*e as u32) * 2047 / 2048) as u16;
1646 let bucket = pos_to_bucket(pos);
1647 self.buckets[bucket] = self.buckets[bucket].saturating_add(BUCKET_FIXED_POINT_ONE);
// TLV serialization for both bucket trackers: the 32-element (or legacy
// 8-element) bucket array is written as required TLV type 0.
1652 impl_writeable_tlv_based!(HistoricalBucketRangeTracker, { (0, buckets, required) });
1653 impl_writeable_tlv_based!(LegacyHistoricalBucketRangeTracker, { (0, buckets, required) });
// Pairs the min- and max-liquidity-offset bucket histories for one channel
// with a cached total of their valid combinations.
1655 #[derive(Clone, Copy)]
1656 #[repr(C)] // Force the fields in memory to be in the order we specify.
1657 pub(super) struct HistoricalLiquidityTracker {
// Cached sum of min_bucket * max_bucket over all valid pairs (those with
// min_idx + max_idx < 32); kept in sync via `recalculate_valid_points`.
1658 total_valid_points_tracked: u64,
1659 min_liquidity_offset_history: HistoricalBucketRangeTracker,
1660 max_liquidity_offset_history: HistoricalBucketRangeTracker,
1663 impl HistoricalLiquidityTracker {
// Empty tracker: no datapoints, zero valid points.
1664 pub(super) fn new() -> HistoricalLiquidityTracker {
1665 HistoricalLiquidityTracker {
1666 min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
1667 max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
1668 total_valid_points_tracked: 0,
// Reconstructs a tracker from deserialized bucket histories, recomputing
// the cached point total (which is not serialized).
1672 pub(super) fn from_min_max(
1673 min_liquidity_offset_history: HistoricalBucketRangeTracker,
1674 max_liquidity_offset_history: HistoricalBucketRangeTracker,
1675 ) -> HistoricalLiquidityTracker {
1676 let mut res = HistoricalLiquidityTracker {
1677 min_liquidity_offset_history,
1678 max_liquidity_offset_history,
1679 total_valid_points_tracked: 0,
1681 res.recalculate_valid_points();
// True if either history holds any (possibly decayed-but-nonzero) weight.
1685 pub(super) fn has_datapoints(&self) -> bool {
1686 self.min_liquidity_offset_history.buckets != [0; 32] ||
1687 self.max_liquidity_offset_history.buckets != [0; 32]
// Scales every bucket by 1024 / 2048^half_lives (i.e. 2^(10 - 11*half_lives)),
// then refreshes the cached total. Integer truncation sends small buckets to
// zero; for large `half_lives` the divisor saturates and zeroes everything.
1690 pub(super) fn decay_buckets(&mut self, half_lives: f64) {
1691 let divisor = powf64(2048.0, half_lives) as u64;
1692 for bucket in self.min_liquidity_offset_history.buckets.iter_mut() {
1693 *bucket = ((*bucket as u64) * 1024 / divisor) as u16;
1695 for bucket in self.max_liquidity_offset_history.buckets.iter_mut() {
1696 *bucket = ((*bucket as u64) * 1024 / divisor) as u16;
1698 self.recalculate_valid_points();
// Rebuilds the cached sum over valid (min, max) bucket pairs; the
// `take(32 - min_idx)` restricts to pairs where min_idx + max_idx < 32,
// i.e. min bound does not exceed max bound.
1701 fn recalculate_valid_points(&mut self) {
1702 self.total_valid_points_tracked = 0;
1703 for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
1704 for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(32 - min_idx) {
1705 self.total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
// Borrow accessors used by the serialization code.
1710 pub(super) fn writeable_min_offset_history(&self) -> &HistoricalBucketRangeTracker {
1711 &self.min_liquidity_offset_history
1714 pub(super) fn writeable_max_offset_history(&self) -> &HistoricalBucketRangeTracker {
1715 &self.max_liquidity_offset_history
// Direction-aware views: `source_less_than_target` selects which of the two
// histories plays the "min" role for the queried channel direction.
1718 pub(super) fn as_directed<'a>(&'a self, source_less_than_target: bool)
1719 -> DirectedHistoricalLiquidityTracker<&'a HistoricalLiquidityTracker> {
1720 DirectedHistoricalLiquidityTracker { source_less_than_target, tracker: self }
1723 pub(super) fn as_directed_mut<'a>(&'a mut self, source_less_than_target: bool)
1724 -> DirectedHistoricalLiquidityTracker<&'a mut HistoricalLiquidityTracker> {
1725 DirectedHistoricalLiquidityTracker { source_less_than_target, tracker: self }
1729 /// A set of buckets representing the history of where we've seen the minimum- and maximum-
1730 /// liquidity bounds for a given channel.
// Generic over `Deref` so one type serves both shared (`&`) and mutable
// (`&mut`) views of the underlying tracker.
1731 pub(super) struct DirectedHistoricalLiquidityTracker<D: Deref<Target = HistoricalLiquidityTracker>> {
// When false, the channel is viewed in the reverse direction and the
// min/max histories swap roles.
1732 source_less_than_target: bool,
1736 impl<D: DerefMut<Target = HistoricalLiquidityTracker>> DirectedHistoricalLiquidityTracker<D> {
// Records the current directed min/max liquidity offsets into the
// appropriate underlying histories — swapped when viewing the channel in
// the reverse direction — then refreshes the cached valid-point total.
1737 pub(super) fn track_datapoint(
1738 &mut self, min_offset_msat: u64, max_offset_msat: u64, capacity_msat: u64,
1740 if self.source_less_than_target {
1741 self.tracker.min_liquidity_offset_history.track_datapoint(min_offset_msat, capacity_msat);
1742 self.tracker.max_liquidity_offset_history.track_datapoint(max_offset_msat, capacity_msat);
// Reverse direction: the directed "min" maps onto the stored "max"
// history and vice versa.
1744 self.tracker.max_liquidity_offset_history.track_datapoint(min_offset_msat, capacity_msat);
1745 self.tracker.min_liquidity_offset_history.track_datapoint(max_offset_msat, capacity_msat);
1747 self.tracker.recalculate_valid_points();
1751 impl<D: Deref<Target = HistoricalLiquidityTracker>> DirectedHistoricalLiquidityTracker<D> {
// Buckets playing the "min liquidity offset" role for this direction
// (the stored max-history when viewing the channel in reverse).
1752 pub(super) fn min_liquidity_offset_history_buckets(&self) -> &[u16; 32] {
1753 if self.source_less_than_target {
1754 &self.tracker.min_liquidity_offset_history.buckets
1756 &self.tracker.max_liquidity_offset_history.buckets
// Buckets playing the "max liquidity offset" role for this direction.
1760 pub(super) fn max_liquidity_offset_history_buckets(&self) -> &[u16; 32] {
1761 if self.source_less_than_target {
1762 &self.tracker.max_liquidity_offset_history.buckets
1764 &self.tracker.min_liquidity_offset_history.buckets
// Estimates the probability of `amount_msat` successfully traversing this
// channel from its historical bucket data, scaled by "a billion" (actually
// 1024^3 = 2^30). Returns None when the amount is out of the position
// range or the history has fully decayed.
1769 pub(super) fn calculate_success_probability_times_billion(
1770 &self, params: &ProbabilisticScoringFeeParameters, amount_msat: u64,
1773 // If historical penalties are enabled, we try to calculate a probability of success
1774 // given our historical distribution of min- and max-liquidity bounds in a channel.
1775 // To do so, we walk the set of historical liquidity bucket (min, max) combinations
1776 // (where min_idx < max_idx, as having a minimum above our maximum is an invalid
1777 // state). For each pair, we calculate the probability as if the bucket's corresponding
1778 // min- and max- liquidity bounds were our current liquidity bounds and then multiply
1779 // that probability by the weight of the selected buckets.
1780 let payment_pos = amount_to_pos(amount_msat, capacity_msat);
1781 if payment_pos >= POSITION_TICKS { return None; }
1783 let min_liquidity_offset_history_buckets =
1784 self.min_liquidity_offset_history_buckets();
1785 let max_liquidity_offset_history_buckets =
1786 self.max_liquidity_offset_history_buckets();
// In debug builds, verify the cached total still matches a fresh
// recomputation over the buckets.
1788 let total_valid_points_tracked = self.tracker.total_valid_points_tracked;
1789 #[cfg(debug_assertions)] {
1790 let mut actual_valid_points_tracked = 0;
1791 for (min_idx, min_bucket) in min_liquidity_offset_history_buckets.iter().enumerate() {
1792 for max_bucket in max_liquidity_offset_history_buckets.iter().take(32 - min_idx) {
1793 actual_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
1796 assert_eq!(total_valid_points_tracked, actual_valid_points_tracked);
1799 // If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme),
1800 // treat it as if we were fully decayed.
1801 const FULLY_DECAYED: u16 = BUCKET_FIXED_POINT_ONE * BUCKET_FIXED_POINT_ONE;
1802 if total_valid_points_tracked < FULLY_DECAYED.into() {
1806 let mut cumulative_success_prob_times_billion = 0;
1807 // Special-case the 0th min bucket - it generally means we failed a payment, so only
1808 // consider the highest (i.e. largest-offset-from-max-capacity) max bucket for all
1809 // points against the 0th min bucket. This avoids the case where we fail to route
1810 // increasingly lower values over a channel, but treat each failure as a separate
1811 // datapoint, many of which may have relatively high maximum-available-liquidity
1812 // values, which will result in us thinking we have some nontrivial probability of
1813 // routing up to that amount.
1814 if min_liquidity_offset_history_buckets[0] != 0 {
1815 let mut highest_max_bucket_with_points = 0; // The highest max-bucket with any data
1816 let mut total_max_points = 0; // Total points in max-buckets to consider
1817 for (max_idx, max_bucket) in max_liquidity_offset_history_buckets.iter().enumerate() {
1818 if *max_bucket >= BUCKET_FIXED_POINT_ONE {
1819 highest_max_bucket_with_points = cmp::max(highest_max_bucket_with_points, max_idx);
1821 total_max_points += *max_bucket as u64;
// Max buckets are offsets from the top of the channel, so index from 32.
1823 let max_bucket_end_pos = BUCKET_START_POS[32 - highest_max_bucket_with_points] - 1;
1824 if payment_pos < max_bucket_end_pos {
1825 let (numerator, denominator) = success_probability(payment_pos as u64, 0,
1826 max_bucket_end_pos as u64, POSITION_TICKS as u64 - 1, params, true);
1827 let bucket_prob_times_billion =
1828 (min_liquidity_offset_history_buckets[0] as u64) * total_max_points
1829 * 1024 * 1024 * 1024 / total_valid_points_tracked;
1830 cumulative_success_prob_times_billion += bucket_prob_times_billion *
1831 numerator / denominator;
// General case: weight each remaining (min, max) bucket pair's success
// probability by its share of the total tracked points.
1835 for (min_idx, min_bucket) in min_liquidity_offset_history_buckets.iter().enumerate().skip(1) {
1836 let min_bucket_start_pos = BUCKET_START_POS[min_idx];
1837 for (max_idx, max_bucket) in max_liquidity_offset_history_buckets.iter().enumerate().take(32 - min_idx) {
1838 let max_bucket_end_pos = BUCKET_START_POS[32 - max_idx] - 1;
1839 // Note that this multiply can only barely not overflow - two 16 bit ints plus
1840 // 30 bits is 62 bits.
1841 let bucket_prob_times_billion = (*min_bucket as u64) * (*max_bucket as u64)
1842 * 1024 * 1024 * 1024 / total_valid_points_tracked;
1843 if payment_pos >= max_bucket_end_pos {
1844 // Success probability 0, the payment amount may be above the max liquidity
1846 } else if payment_pos < min_bucket_start_pos {
// Amount is below this pair's minimum bound: certain success for the pair.
1847 cumulative_success_prob_times_billion += bucket_prob_times_billion;
1849 let (numerator, denominator) = success_probability(payment_pos as u64,
1850 min_bucket_start_pos as u64, max_bucket_end_pos as u64,
1851 POSITION_TICKS as u64 - 1, params, true);
1852 cumulative_success_prob_times_billion += bucket_prob_times_billion *
1853 numerator / denominator;
1858 Some(cumulative_success_prob_times_billion)
1862 use bucketed_history::{LegacyHistoricalBucketRangeTracker, HistoricalBucketRangeTracker, DirectedHistoricalLiquidityTracker, HistoricalLiquidityTracker};
// Serialization: only the learned per-channel liquidity map is persisted.
// The network graph, logger, and decay parameters are runtime references
// supplied again at read time (see `ReadableArgs` below).
1864 impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> Writeable for ProbabilisticScorer<G, L> where L::Target: Logger {
1866 fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
1867 write_tlv_fields!(w, {
1868 (0, self.channel_liquidities, required),
// Deserialization counterpart of the `Writeable` impl above: the persisted
// liquidity map is combined with freshly-supplied decay parameters, network
// graph reference, and logger to rebuild the scorer.
1874 impl<G: Deref<Target = NetworkGraph<L>>, L: Deref>
1875 ReadableArgs<(ProbabilisticScoringDecayParameters, G, L)> for ProbabilisticScorer<G, L> where L::Target: Logger {
1878 r: &mut R, args: (ProbabilisticScoringDecayParameters, G, L)
1879 ) -> Result<Self, DecodeError> {
1880 let (decay_params, network_graph, logger) = args;
1881 let mut channel_liquidities = HashMap::new();
1882 read_tlv_fields!(r, {
1883 (0, channel_liquidities, required),
1889 channel_liquidities,
// Per-channel liquidity serialization. Even TLV types are required; odd
// types are optional for forward compatibility. Types 1 and 3 are retired —
// they held the pre-0.0.117 octile-form histories (see `Readable` below,
// which still accepts them).
1894 impl Writeable for ChannelLiquidity {
1896 fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
1897 write_tlv_fields!(w, {
1898 (0, self.min_liquidity_offset_msat, required),
1899 // 1 was the min_liquidity_offset_history in octile form
1900 (2, self.max_liquidity_offset_msat, required),
1901 // 3 was the max_liquidity_offset_history in octile form
1902 (4, self.last_updated, required),
1903 (5, self.liquidity_history.writeable_min_offset_history(), required),
1904 (7, self.liquidity_history.writeable_max_offset_history(), required),
1905 (9, self.offset_history_last_updated, required),
// Reads a `ChannelLiquidity`, accepting both the current 32-bucket history
// format (types 5/7) and the legacy 8-octile format (types 1/3), converting
// the latter when the current form is absent.
1911 impl Readable for ChannelLiquidity {
1913 fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
1914 let mut min_liquidity_offset_msat = 0;
1915 let mut max_liquidity_offset_msat = 0;
1916 let mut legacy_min_liq_offset_history: Option<LegacyHistoricalBucketRangeTracker> = None;
1917 let mut legacy_max_liq_offset_history: Option<LegacyHistoricalBucketRangeTracker> = None;
1918 let mut min_liquidity_offset_history: Option<HistoricalBucketRangeTracker> = None;
1919 let mut max_liquidity_offset_history: Option<HistoricalBucketRangeTracker> = None;
1920 let mut last_updated = Duration::from_secs(0);
1921 let mut offset_history_last_updated = None;
1922 read_tlv_fields!(r, {
1923 (0, min_liquidity_offset_msat, required),
1924 (1, legacy_min_liq_offset_history, option),
1925 (2, max_liquidity_offset_msat, required),
1926 (3, legacy_max_liq_offset_history, option),
1927 (4, last_updated, required),
1928 (5, min_liquidity_offset_history, option),
1929 (7, max_liquidity_offset_history, option),
1930 (9, offset_history_last_updated, option),
// No current-format history: fall back to converting legacy octile data,
// or to an empty tracker if neither form was written.
1933 if min_liquidity_offset_history.is_none() {
1934 if let Some(legacy_buckets) = legacy_min_liq_offset_history {
1935 min_liquidity_offset_history = Some(legacy_buckets.into_current());
1937 min_liquidity_offset_history = Some(HistoricalBucketRangeTracker::new());
1940 if max_liquidity_offset_history.is_none() {
1941 if let Some(legacy_buckets) = legacy_max_liq_offset_history {
1942 max_liquidity_offset_history = Some(legacy_buckets.into_current());
1944 max_liquidity_offset_history = Some(HistoricalBucketRangeTracker::new());
1948 min_liquidity_offset_msat,
1949 max_liquidity_offset_msat,
// `unwrap()` is safe: both Options were filled in above.
1950 liquidity_history: HistoricalLiquidityTracker::from_min_max(
1951 min_liquidity_offset_history.unwrap(), max_liquidity_offset_history.unwrap()
// Older serializations lack type 9; default to the general last-updated time.
1954 offset_history_last_updated: offset_history_last_updated.unwrap_or(last_updated),
1961 use super::{ChannelLiquidity, HistoricalLiquidityTracker, ProbabilisticScoringFeeParameters, ProbabilisticScoringDecayParameters, ProbabilisticScorer};
1962 use crate::blinded_path::{BlindedHop, BlindedPath};
1963 use crate::util::config::UserConfig;
1965 use crate::ln::channelmanager;
1966 use crate::ln::msgs::{ChannelAnnouncement, ChannelUpdate, UnsignedChannelAnnouncement, UnsignedChannelUpdate};
1967 use crate::routing::gossip::{EffectiveCapacity, NetworkGraph, NodeId};
1968 use crate::routing::router::{BlindedTail, Path, RouteHop, CandidateRouteHop};
1969 use crate::routing::scoring::{ChannelUsage, ScoreLookUp, ScoreUpdate};
1970 use crate::util::ser::{ReadableArgs, Writeable};
1971 use crate::util::test_utils::{self, TestLogger};
1973 use bitcoin::blockdata::constants::ChainHash;
1974 use bitcoin::hashes::Hash;
1975 use bitcoin::hashes::sha256d::Hash as Sha256dHash;
1976 use bitcoin::network::constants::Network;
1977 use bitcoin::secp256k1::{PublicKey, Secp256k1, SecretKey};
1978 use core::time::Duration;
// Deterministic test identities: fixed byte-patterns ([42; 32] / [43; 32])
// make the derived node ids — and hence their ordering — stable across runs.
1981 fn source_privkey() -> SecretKey {
1982 SecretKey::from_slice(&[42; 32]).unwrap()
1985 fn target_privkey() -> SecretKey {
1986 SecretKey::from_slice(&[43; 32]).unwrap()
1989 fn source_pubkey() -> PublicKey {
1990 let secp_ctx = Secp256k1::new();
1991 PublicKey::from_secret_key(&secp_ctx, &source_privkey())
1994 fn target_pubkey() -> PublicKey {
1995 let secp_ctx = Secp256k1::new();
1996 PublicKey::from_secret_key(&secp_ctx, &target_privkey())
1999 fn source_node_id() -> NodeId {
2000 NodeId::from_pubkey(&source_pubkey())
2003 fn target_node_id() -> NodeId {
2004 NodeId::from_pubkey(&target_pubkey())
2007 // `ProbabilisticScorer` tests
// Additional fixed identities for the path endpoints used by the
// `ProbabilisticScorer` tests below.
2009 fn sender_privkey() -> SecretKey {
2010 SecretKey::from_slice(&[41; 32]).unwrap()
2013 fn recipient_privkey() -> SecretKey {
2014 SecretKey::from_slice(&[45; 32]).unwrap()
2017 fn sender_pubkey() -> PublicKey {
2018 let secp_ctx = Secp256k1::new();
2019 PublicKey::from_secret_key(&secp_ctx, &sender_privkey())
2022 fn recipient_pubkey() -> PublicKey {
2023 let secp_ctx = Secp256k1::new();
2024 PublicKey::from_secret_key(&secp_ctx, &recipient_privkey())
2027 fn recipient_node_id() -> NodeId {
2028 NodeId::from_pubkey(&recipient_pubkey())
// Builds a three-node line topology on testnet:
// source --(scid 42)-- target --(scid 43)-- recipient.
2031 fn network_graph(logger: &TestLogger) -> NetworkGraph<&TestLogger> {
2032 let mut network_graph = NetworkGraph::new(Network::Testnet, logger);
2033 add_channel(&mut network_graph, 42, source_privkey(), target_privkey());
2034 add_channel(&mut network_graph, 43, target_privkey(), recipient_privkey());
// Announces a channel between the two given node keys: builds and signs a
// `ChannelAnnouncement` (with separate fixed bitcoin keys), feeds it to the
// graph, then installs a `channel_update` for each direction.
2040 network_graph: &mut NetworkGraph<&TestLogger>, short_channel_id: u64, node_1_key: SecretKey,
2041 node_2_key: SecretKey
2043 let genesis_hash = ChainHash::using_genesis_block(Network::Testnet);
2044 let node_1_secret = &SecretKey::from_slice(&[39; 32]).unwrap();
2045 let node_2_secret = &SecretKey::from_slice(&[40; 32]).unwrap();
2046 let secp_ctx = Secp256k1::new();
2047 let unsigned_announcement = UnsignedChannelAnnouncement {
2048 features: channelmanager::provided_channel_features(&UserConfig::default()),
2049 chain_hash: genesis_hash,
2051 node_id_1: NodeId::from_pubkey(&PublicKey::from_secret_key(&secp_ctx, &node_1_key)),
2052 node_id_2: NodeId::from_pubkey(&PublicKey::from_secret_key(&secp_ctx, &node_2_key)),
2053 bitcoin_key_1: NodeId::from_pubkey(&PublicKey::from_secret_key(&secp_ctx, &node_1_secret)),
2054 bitcoin_key_2: NodeId::from_pubkey(&PublicKey::from_secret_key(&secp_ctx, &node_2_secret)),
2055 excess_data: Vec::new(),
// All four signatures cover the double-SHA256 of the encoded announcement.
2057 let msghash = hash_to_message!(&Sha256dHash::hash(&unsigned_announcement.encode()[..])[..]);
2058 let signed_announcement = ChannelAnnouncement {
2059 node_signature_1: secp_ctx.sign_ecdsa(&msghash, &node_1_key),
2060 node_signature_2: secp_ctx.sign_ecdsa(&msghash, &node_2_key),
2061 bitcoin_signature_1: secp_ctx.sign_ecdsa(&msghash, &node_1_secret),
2062 bitcoin_signature_2: secp_ctx.sign_ecdsa(&msghash, &node_2_secret),
2063 contents: unsigned_announcement,
// No chain source: skip on-chain UTXO validation in tests.
2065 let chain_source: Option<&crate::util::test_utils::TestChainSource> = None;
2066 network_graph.update_channel_from_announcement(
2067 &signed_announcement, &chain_source).unwrap();
// One update per direction (flags 0 and 1) so the channel is usable both ways.
2068 update_channel(network_graph, short_channel_id, node_1_key, 0, 1_000, 100);
2069 update_channel(network_graph, short_channel_id, node_2_key, 1, 0, 100);
// Builds, signs, and applies a `channel_update` for one direction of the
// given channel (`flags` selects the direction).
2073 network_graph: &mut NetworkGraph<&TestLogger>, short_channel_id: u64, node_key: SecretKey,
2074 flags: u8, htlc_maximum_msat: u64, timestamp: u32,
2076 let genesis_hash = ChainHash::using_genesis_block(Network::Testnet);
2077 let secp_ctx = Secp256k1::new();
2078 let unsigned_update = UnsignedChannelUpdate {
2079 chain_hash: genesis_hash,
2083 cltv_expiry_delta: 18,
2084 htlc_minimum_msat: 0,
2087 fee_proportional_millionths: 0,
2088 excess_data: Vec::new(),
2090 let msghash = hash_to_message!(&Sha256dHash::hash(&unsigned_update.encode()[..])[..]);
2091 let signed_update = ChannelUpdate {
2092 signature: secp_ctx.sign_ecdsa(&msghash, &node_key),
2093 contents: unsigned_update,
2095 network_graph.update_channel(&signed_update).unwrap();
// Convenience constructor for a `RouteHop` over the given SCID with the
// given fee, using default-config features.
2098 fn path_hop(pubkey: PublicKey, short_channel_id: u64, fee_msat: u64) -> RouteHop {
2099 let config = UserConfig::default();
2102 node_features: channelmanager::provided_node_features(&config),
2104 channel_features: channelmanager::provided_channel_features(&config),
2106 cltv_expiry_delta: 18,
2107 maybe_announced_channel: true,
// Three-hop test path source -> target -> recipient over SCIDs 41/42/43.
// The final hop's `fee_msat` carries the payment amount itself, per the
// `RouteHop` convention.
2111 fn payment_path_for_amount(amount_msat: u64) -> Path {
2114 path_hop(source_pubkey(), 41, 1),
2115 path_hop(target_pubkey(), 42, 2),
2116 path_hop(recipient_pubkey(), 43, amount_msat),
2117 ], blinded_tail: None,
// Verifies that `ChannelLiquidity`'s offsets are stored relative to the
// lower-ordered node id, so the same stored state yields mirrored min/max
// bounds when viewed from either direction, and that updating one direction
// adjusts the opposite direction's complementary bound.
2122 fn liquidity_bounds_directed_from_lowest_node_id() {
2123 let logger = TestLogger::new();
2124 let last_updated = Duration::ZERO;
2125 let offset_history_last_updated = Duration::ZERO;
2126 let network_graph = network_graph(&logger);
2127 let decay_params = ProbabilisticScoringDecayParameters::default();
2128 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
2131 min_liquidity_offset_msat: 700, max_liquidity_offset_msat: 100,
2132 last_updated, offset_history_last_updated,
2133 liquidity_history: HistoricalLiquidityTracker::new(),
2137 min_liquidity_offset_msat: 700, max_liquidity_offset_msat: 100,
2138 last_updated, offset_history_last_updated,
2139 liquidity_history: HistoricalLiquidityTracker::new(),
2141 let source = source_node_id();
2142 let target = target_node_id();
2143 let recipient = recipient_node_id();
// The orderings below determine which node the stored offsets are
// relative to for each channel.
2144 assert!(source > target);
2145 assert!(target < recipient);
2147 // Update minimum liquidity.
2149 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2150 .as_directed(&source, &target, 1_000);
2151 assert_eq!(liquidity.min_liquidity_msat(), 100);
2152 assert_eq!(liquidity.max_liquidity_msat(), 300);
2154 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2155 .as_directed(&target, &source, 1_000);
2156 assert_eq!(liquidity.min_liquidity_msat(), 700);
2157 assert_eq!(liquidity.max_liquidity_msat(), 900);
2159 scorer.channel_liquidities.get_mut(&42).unwrap()
2160 .as_directed_mut(&source, &target, 1_000)
2161 .set_min_liquidity_msat(200, Duration::ZERO);
2163 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2164 .as_directed(&source, &target, 1_000);
2165 assert_eq!(liquidity.min_liquidity_msat(), 200);
2166 assert_eq!(liquidity.max_liquidity_msat(), 300);
// Raising one direction's min lowers the reverse direction's max.
2168 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2169 .as_directed(&target, &source, 1_000);
2170 assert_eq!(liquidity.min_liquidity_msat(), 700);
2171 assert_eq!(liquidity.max_liquidity_msat(), 800);
2173 // Update maximum liquidity.
2175 let liquidity = scorer.channel_liquidities.get(&43).unwrap()
2176 .as_directed(&target, &recipient, 1_000);
2177 assert_eq!(liquidity.min_liquidity_msat(), 700);
2178 assert_eq!(liquidity.max_liquidity_msat(), 900);
2180 let liquidity = scorer.channel_liquidities.get(&43).unwrap()
2181 .as_directed(&recipient, &target, 1_000);
2182 assert_eq!(liquidity.min_liquidity_msat(), 100);
2183 assert_eq!(liquidity.max_liquidity_msat(), 300);
2185 scorer.channel_liquidities.get_mut(&43).unwrap()
2186 .as_directed_mut(&target, &recipient, 1_000)
2187 .set_max_liquidity_msat(200, Duration::ZERO);
2189 let liquidity = scorer.channel_liquidities.get(&43).unwrap()
2190 .as_directed(&target, &recipient, 1_000);
2191 assert_eq!(liquidity.min_liquidity_msat(), 0);
2192 assert_eq!(liquidity.max_liquidity_msat(), 200);
// Lowering one direction's max raises the reverse direction's min.
2194 let liquidity = scorer.channel_liquidities.get(&43).unwrap()
2195 .as_directed(&recipient, &target, 1_000);
2196 assert_eq!(liquidity.min_liquidity_msat(), 800);
2197 assert_eq!(liquidity.max_liquidity_msat(), 1000);
// Verifies that setting a minimum-liquidity bound above the current maximum
// resets (rather than clamps) the conflicting upper bound, in both channel
// directions.
2201 fn resets_liquidity_upper_bound_when_crossed_by_lower_bound() {
2202 let logger = TestLogger::new();
2203 let last_updated = Duration::ZERO;
2204 let offset_history_last_updated = Duration::ZERO;
2205 let network_graph = network_graph(&logger);
2206 let decay_params = ProbabilisticScoringDecayParameters::default();
2207 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
2210 min_liquidity_offset_msat: 200, max_liquidity_offset_msat: 400,
2211 last_updated, offset_history_last_updated,
2212 liquidity_history: HistoricalLiquidityTracker::new(),
2214 let source = source_node_id();
2215 let target = target_node_id();
2216 assert!(source > target);
2218 // Check initial bounds.
2219 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2220 .as_directed(&source, &target, 1_000);
2221 assert_eq!(liquidity.min_liquidity_msat(), 400);
2222 assert_eq!(liquidity.max_liquidity_msat(), 800);
2224 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2225 .as_directed(&target, &source, 1_000);
2226 assert_eq!(liquidity.min_liquidity_msat(), 200);
2227 assert_eq!(liquidity.max_liquidity_msat(), 600);
2229 // Reset from source to target.
// New min (900) exceeds the old max (800): max resets to capacity.
2230 scorer.channel_liquidities.get_mut(&42).unwrap()
2231 .as_directed_mut(&source, &target, 1_000)
2232 .set_min_liquidity_msat(900, Duration::ZERO);
2234 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2235 .as_directed(&source, &target, 1_000);
2236 assert_eq!(liquidity.min_liquidity_msat(), 900);
2237 assert_eq!(liquidity.max_liquidity_msat(), 1_000);
2239 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2240 .as_directed(&target, &source, 1_000);
2241 assert_eq!(liquidity.min_liquidity_msat(), 0);
2242 assert_eq!(liquidity.max_liquidity_msat(), 100);
2244 // Reset from target to source.
2245 scorer.channel_liquidities.get_mut(&42).unwrap()
2246 .as_directed_mut(&target, &source, 1_000)
2247 .set_min_liquidity_msat(400, Duration::ZERO);
2249 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2250 .as_directed(&source, &target, 1_000);
2251 assert_eq!(liquidity.min_liquidity_msat(), 0);
2252 assert_eq!(liquidity.max_liquidity_msat(), 600);
2254 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2255 .as_directed(&target, &source, 1_000);
2256 assert_eq!(liquidity.min_liquidity_msat(), 400);
2257 assert_eq!(liquidity.max_liquidity_msat(), 1_000);
// Mirror of the previous test: setting a maximum-liquidity bound below the
// current minimum resets the conflicting lower bound, in both channel
// directions.
2261 fn resets_liquidity_lower_bound_when_crossed_by_upper_bound() {
2262 let logger = TestLogger::new();
2263 let last_updated = Duration::ZERO;
2264 let offset_history_last_updated = Duration::ZERO;
2265 let network_graph = network_graph(&logger);
2266 let decay_params = ProbabilisticScoringDecayParameters::default();
2267 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
2270 min_liquidity_offset_msat: 200, max_liquidity_offset_msat: 400,
2271 last_updated, offset_history_last_updated,
2272 liquidity_history: HistoricalLiquidityTracker::new(),
2274 let source = source_node_id();
2275 let target = target_node_id();
2276 assert!(source > target);
2278 // Check initial bounds.
2279 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2280 .as_directed(&source, &target, 1_000);
2281 assert_eq!(liquidity.min_liquidity_msat(), 400);
2282 assert_eq!(liquidity.max_liquidity_msat(), 800);
2284 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2285 .as_directed(&target, &source, 1_000);
2286 assert_eq!(liquidity.min_liquidity_msat(), 200);
2287 assert_eq!(liquidity.max_liquidity_msat(), 600);
2289 // Reset from source to target.
// New max (300) is below the old min (400): min resets to zero.
2290 scorer.channel_liquidities.get_mut(&42).unwrap()
2291 .as_directed_mut(&source, &target, 1_000)
2292 .set_max_liquidity_msat(300, Duration::ZERO);
2294 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2295 .as_directed(&source, &target, 1_000);
2296 assert_eq!(liquidity.min_liquidity_msat(), 0);
2297 assert_eq!(liquidity.max_liquidity_msat(), 300);
2299 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2300 .as_directed(&target, &source, 1_000);
2301 assert_eq!(liquidity.min_liquidity_msat(), 700);
2302 assert_eq!(liquidity.max_liquidity_msat(), 1_000);
2304 // Reset from target to source.
2305 scorer.channel_liquidities.get_mut(&42).unwrap()
2306 .as_directed_mut(&target, &source, 1_000)
2307 .set_max_liquidity_msat(600, Duration::ZERO);
2309 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2310 .as_directed(&source, &target, 1_000);
2311 assert_eq!(liquidity.min_liquidity_msat(), 400);
2312 assert_eq!(liquidity.max_liquidity_msat(), 1_000);
2314 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
2315 .as_directed(&target, &source, 1_000);
2316 assert_eq!(liquidity.min_liquidity_msat(), 0);
2317 assert_eq!(liquidity.max_liquidity_msat(), 600);
// Test: with only the liquidity penalty enabled (multiplier 1_000 msat), the penalty
// grows monotonically as the payment amount approaches the channel's liquidity upper
// bound, for both a large (1_024_000 msat) and a small (1_024 msat) channel.
// NOTE(review): `¶ms` throughout is mojibake for `&params` (an `&para`-entity
// substitution during extraction) — restore `&params` when repairing this file.
// NOTE(review): interior lines are elided (struct closers, `amount_msat` fields), so
// this snippet does not compile as shown.
2321 fn increased_penalty_nearing_liquidity_upper_bound() {
2322 let logger = TestLogger::new();
2323 let network_graph = network_graph(&logger);
2324 let params = ProbabilisticScoringFeeParameters {
2325 liquidity_penalty_multiplier_msat: 1_000,
2326 ..ProbabilisticScoringFeeParameters::zero_penalty()
2328 let decay_params = ProbabilisticScoringDecayParameters::default();
2329 let scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
2330 let source = source_node_id();
2332 let usage = ChannelUsage {
2334 inflight_htlc_msat: 0,
2335 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024_000, htlc_maximum_msat: 1_000 },
2337 let network_graph = network_graph.read_only();
2338 let channel = network_graph.channel(42).unwrap();
2339 let (info, _) = channel.as_directed_from(&source).unwrap();
2340 let candidate = CandidateRouteHop::PublicHop {
2342 short_channel_id: 42,
// Large channel: penalty rises from 0 toward 2_000 msat as the amount nears capacity.
2344 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2345 let usage = ChannelUsage { amount_msat: 10_240, ..usage };
2346 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2347 let usage = ChannelUsage { amount_msat: 102_400, ..usage };
2348 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 47);
2349 let usage = ChannelUsage { amount_msat: 1_023_999, ..usage };
2350 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 2_000);
// Small channel (1_024 msat): same monotone growth at a finer granularity.
2352 let usage = ChannelUsage {
2354 inflight_htlc_msat: 0,
2355 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_000 },
2357 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 58);
2358 let usage = ChannelUsage { amount_msat: 256, ..usage };
2359 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 125);
2360 let usage = ChannelUsage { amount_msat: 374, ..usage };
2361 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 198);
2362 let usage = ChannelUsage { amount_msat: 512, ..usage };
2363 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
2364 let usage = ChannelUsage { amount_msat: 640, ..usage };
2365 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 425);
2366 let usage = ChannelUsage { amount_msat: 768, ..usage };
2367 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 602);
2368 let usage = ChannelUsage { amount_msat: 896, ..usage };
2369 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 902);
// Test: amounts at/below the known min-liquidity bound get a 0 penalty, amounts above
// the known max bound get the `considered_impossible_penalty_msat` (u64::MAX), and
// amounts strictly between the bounds get a finite non-zero penalty.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines (e.g. the
// `.with_channel(...)` wrapper, `amount_msat` field) are elided in this listing.
2373 fn constant_penalty_outside_liquidity_bounds() {
2374 let logger = TestLogger::new();
2375 let last_updated = Duration::ZERO;
2376 let offset_history_last_updated = Duration::ZERO;
2377 let network_graph = network_graph(&logger);
2378 let params = ProbabilisticScoringFeeParameters {
2379 liquidity_penalty_multiplier_msat: 1_000,
2380 considered_impossible_penalty_msat: u64::max_value(),
2381 ..ProbabilisticScoringFeeParameters::zero_penalty()
2383 let decay_params = ProbabilisticScoringDecayParameters {
2384 ..ProbabilisticScoringDecayParameters::zero_penalty()
// Channel seeded with symmetric 40 msat offsets: bounds become [40, 60] of 100 msat.
2386 let scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
2389 min_liquidity_offset_msat: 40, max_liquidity_offset_msat: 40,
2390 last_updated, offset_history_last_updated,
2391 liquidity_history: HistoricalLiquidityTracker::new(),
2393 let source = source_node_id();
2395 let usage = ChannelUsage {
2397 inflight_htlc_msat: 0,
2398 effective_capacity: EffectiveCapacity::Total { capacity_msat: 100, htlc_maximum_msat: 1_000 },
2400 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2401 let (info, _) = channel.as_directed_from(&source).unwrap();
2402 let candidate = CandidateRouteHop::PublicHop {
2404 short_channel_id: 42,
// Below min bound: certain success, zero penalty.
2406 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
// Between bounds: finite, non-zero penalty.
2407 let usage = ChannelUsage { amount_msat: 50, ..usage };
2408 assert_ne!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2409 assert_ne!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// Above max bound: considered impossible, maximum penalty.
2410 let usage = ChannelUsage { amount_msat: 61, ..usage };
2411 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// Test: failures and successes reported on the payer's own first-hop channel (scid 41)
// must not change its penalty — the payer already knows its own channel's liquidity,
// so the score stays at 301 msat before and after both events.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2415 fn does_not_further_penalize_own_channel() {
2416 let logger = TestLogger::new();
2417 let network_graph = network_graph(&logger);
2418 let params = ProbabilisticScoringFeeParameters {
2419 liquidity_penalty_multiplier_msat: 1_000,
2420 ..ProbabilisticScoringFeeParameters::zero_penalty()
2422 let mut scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2423 let source = source_node_id();
2424 let usage = ChannelUsage {
2426 inflight_htlc_msat: 0,
2427 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
2429 let failed_path = payment_path_for_amount(500);
2430 let successful_path = payment_path_for_amount(200);
2431 let channel = &network_graph.read_only().channel(42).unwrap().to_owned();
2432 let (info, _) = channel.as_directed_from(&source).unwrap();
// Candidate deliberately uses scid 41 — the payer's own channel.
2433 let candidate = CandidateRouteHop::PublicHop {
2435 short_channel_id: 41,
2438 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 301);
// Failing at the own channel must leave the penalty unchanged.
2440 scorer.payment_path_failed(&failed_path, 41, Duration::ZERO);
2441 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 301);
// A success through it likewise must not move the score.
2443 scorer.payment_path_successful(&successful_path, Duration::ZERO);
2444 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 301);
// Test: when a payment fails *downstream* of channel 42 (at scid 43), the payment
// amount (500) is known to have fit through channel 42, so 500 becomes its liquidity
// lower bound: amounts <= 500 then score 0 and larger amounts score lower than before.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2448 fn sets_liquidity_lower_bound_on_downstream_failure() {
2449 let logger = TestLogger::new();
2450 let network_graph = network_graph(&logger);
2451 let params = ProbabilisticScoringFeeParameters {
2452 liquidity_penalty_multiplier_msat: 1_000,
2453 ..ProbabilisticScoringFeeParameters::zero_penalty()
2455 let mut scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2456 let source = source_node_id();
2457 let path = payment_path_for_amount(500);
2459 let usage = ChannelUsage {
2461 inflight_htlc_msat: 0,
2462 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
2464 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2465 let (info, _) = channel.as_directed_from(&source).unwrap();
2466 let candidate = CandidateRouteHop::PublicHop {
2468 short_channel_id: 42,
// Baseline penalties before any learning.
2470 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
2471 let usage = ChannelUsage { amount_msat: 500, ..usage };
2472 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 301);
2473 let usage = ChannelUsage { amount_msat: 750, ..usage };
2474 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 602);
// Failure at scid 43 (downstream): channel 42 is known to support >= 500 msat.
2476 scorer.payment_path_failed(&path, 43, Duration::ZERO);
2478 let usage = ChannelUsage { amount_msat: 250, ..usage };
2479 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2480 let usage = ChannelUsage { amount_msat: 500, ..usage };
2481 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2482 let usage = ChannelUsage { amount_msat: 750, ..usage };
2483 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
// Test: when a payment fails *at* channel 42 itself, the payment amount (500) becomes
// that channel's liquidity upper bound: amounts >= 500 then score the
// considered-impossible penalty (u64::MAX) while smaller amounts stay finite.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2487 fn sets_liquidity_upper_bound_on_failure() {
2488 let logger = TestLogger::new();
2489 let network_graph = network_graph(&logger);
2490 let params = ProbabilisticScoringFeeParameters {
2491 liquidity_penalty_multiplier_msat: 1_000,
2492 considered_impossible_penalty_msat: u64::max_value(),
2493 ..ProbabilisticScoringFeeParameters::zero_penalty()
2495 let mut scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2496 let source = source_node_id();
2497 let path = payment_path_for_amount(500);
2499 let usage = ChannelUsage {
2501 inflight_htlc_msat: 0,
2502 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
2504 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2505 let (info, _) = channel.as_directed_from(&source).unwrap();
2506 let candidate = CandidateRouteHop::PublicHop {
2508 short_channel_id: 42,
// Baseline penalties before the failure.
2510 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
2511 let usage = ChannelUsage { amount_msat: 500, ..usage };
2512 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 301);
2513 let usage = ChannelUsage { amount_msat: 750, ..usage };
2514 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 602);
// Failure at scid 42 itself: 500 msat is now the upper bound for this channel.
2516 scorer.payment_path_failed(&path, 42, Duration::ZERO);
2518 let usage = ChannelUsage { amount_msat: 250, ..usage };
2519 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
2520 let usage = ChannelUsage { amount_msat: 500, ..usage };
2521 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
2522 let usage = ChannelUsage { amount_msat: 750, ..usage };
2523 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// Test: if the failing channel (scid 43) has been removed from the network graph,
// only hops strictly before it are scored on failure; hops at/after the missing
// channel keep their default penalty (128) instead of being wrongly penalized.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided (e.g. the
// `let path = vec![...]` opener around original lines 2549-2553).
2527 fn ignores_channels_after_removed_failed_channel() {
2528 // Previously, if we'd tried to send over a channel which was removed from the network
2529 // graph before we call `payment_path_failed` (which is the default if we get a "no
2530 // such channel" error in the `InvoicePayer`), we would call `failed_downstream` on all
2531 // channels in the route, even ones which the payment never reached. This tests to ensure
2532 // we do not score such channels.
2533 let secp_ctx = Secp256k1::new();
2534 let logger = TestLogger::new();
2535 let mut network_graph = NetworkGraph::new(Network::Testnet, &logger);
2536 let secret_a = SecretKey::from_slice(&[42; 32]).unwrap();
2537 let secret_b = SecretKey::from_slice(&[43; 32]).unwrap();
2538 let secret_c = SecretKey::from_slice(&[44; 32]).unwrap();
2539 let secret_d = SecretKey::from_slice(&[45; 32]).unwrap();
2540 add_channel(&mut network_graph, 42, secret_a, secret_b);
2541 // Don't add the channel from B -> C.
2542 add_channel(&mut network_graph, 44, secret_c, secret_d);
2544 let pub_a = PublicKey::from_secret_key(&secp_ctx, &secret_a);
2545 let pub_b = PublicKey::from_secret_key(&secp_ctx, &secret_b);
2546 let pub_c = PublicKey::from_secret_key(&secp_ctx, &secret_c);
2547 let pub_d = PublicKey::from_secret_key(&secp_ctx, &secret_d);
// Path A -> B -> C -> D over scids 42, 43 (missing from graph), 44.
2550 path_hop(pub_b, 42, 1),
2551 path_hop(pub_c, 43, 2),
2552 path_hop(pub_d, 44, 100),
2555 let node_a = NodeId::from_pubkey(&pub_a);
2556 let node_b = NodeId::from_pubkey(&pub_b);
2557 let node_c = NodeId::from_pubkey(&pub_c);
2559 let params = ProbabilisticScoringFeeParameters {
2560 liquidity_penalty_multiplier_msat: 1_000,
2561 ..ProbabilisticScoringFeeParameters::zero_penalty()
2563 let mut scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2565 let usage = ChannelUsage {
2567 inflight_htlc_msat: 0,
2568 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
// All three hops start at the default penalty.
2570 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2571 let (info, _) = channel.as_directed_from(&node_a).unwrap();
2572 let candidate = CandidateRouteHop::PublicHop {
2574 short_channel_id: 42,
2576 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
2577 // Note that a default liquidity bound is used for B -> C as no channel exists
2578 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2579 let (info, _) = channel.as_directed_from(&node_b).unwrap();
2580 let candidate = CandidateRouteHop::PublicHop {
2582 short_channel_id: 43,
2584 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
2585 let channel = network_graph.read_only().channel(44).unwrap().to_owned();
2586 let (info, _) = channel.as_directed_from(&node_c).unwrap();
2587 let candidate = CandidateRouteHop::PublicHop {
2589 short_channel_id: 44,
2591 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
// Report a failure at scid 43, which is absent from the graph.
2593 scorer.payment_path_failed(&Path { hops: path, blinded_tail: None }, 43, Duration::ZERO);
// Only the A -> B hop (before the missing channel) is updated (128 -> 80).
2595 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2596 let (info, _) = channel.as_directed_from(&node_a).unwrap();
2597 let candidate = CandidateRouteHop::PublicHop {
2599 short_channel_id: 42,
2601 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 80);
2602 // Note that a default liquidity bound is used for B -> C as no channel exists
2603 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2604 let (info, _) = channel.as_directed_from(&node_b).unwrap();
2605 let candidate = CandidateRouteHop::PublicHop {
2607 short_channel_id: 43,
2609 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
2610 let channel = network_graph.read_only().channel(44).unwrap().to_owned();
2611 let (info, _) = channel.as_directed_from(&node_c).unwrap();
2612 let candidate = CandidateRouteHop::PublicHop {
2614 short_channel_id: 44,
2616 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 128);
// Test: a successful payment of 500 msat tightens the liquidity *upper* bound on every
// channel it traversed (42 and 43 rise from 128 to 300 penalty for the same usage),
// while an untraversed channel (41) keeps its default score.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2620 fn reduces_liquidity_upper_bound_along_path_on_success() {
2621 let logger = TestLogger::new();
2622 let network_graph = network_graph(&logger);
2623 let params = ProbabilisticScoringFeeParameters {
2624 liquidity_penalty_multiplier_msat: 1_000,
2625 ..ProbabilisticScoringFeeParameters::zero_penalty()
2627 let mut scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2628 let source = source_node_id();
2629 let usage = ChannelUsage {
2631 inflight_htlc_msat: 0,
2632 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
2634 let network_graph = network_graph.read_only().channels().clone();
2635 let channel_42 = network_graph.get(&42).unwrap();
2636 let channel_43 = network_graph.get(&43).unwrap();
// Candidate 41 reuses channel 42's directed info but carries an scid not on the path.
2637 let (info, _) = channel_42.as_directed_from(&source).unwrap();
2638 let candidate_41 = CandidateRouteHop::PublicHop {
2640 short_channel_id: 41,
2642 let (info, target) = channel_42.as_directed_from(&source).unwrap();
2643 let candidate_42 = CandidateRouteHop::PublicHop {
2645 short_channel_id: 42,
2647 let (info, _) = channel_43.as_directed_from(&target).unwrap();
2648 let candidate_43 = CandidateRouteHop::PublicHop {
2650 short_channel_id: 43,
// All candidates start at the default penalty.
2652 assert_eq!(scorer.channel_penalty_msat(&candidate_41, usage, ¶ms), 128);
2653 assert_eq!(scorer.channel_penalty_msat(&candidate_42, usage, ¶ms), 128);
2654 assert_eq!(scorer.channel_penalty_msat(&candidate_43, usage, ¶ms), 128);
2656 scorer.payment_path_successful(&payment_path_for_amount(500), Duration::ZERO);
// Only the traversed channels (42, 43) see their penalty rise.
2658 assert_eq!(scorer.channel_penalty_msat(&candidate_41, usage, ¶ms), 128);
2659 assert_eq!(scorer.channel_penalty_msat(&candidate_42, usage, ¶ms), 300);
2660 assert_eq!(scorer.channel_penalty_msat(&candidate_43, usage, ¶ms), 300);
// Test: learned liquidity bounds decay toward the unknown state with a 10s half-life.
// Bounds are set by a failure at 42 (upper <- 768) and a downstream failure at 43
// (lower <- 128), then checked at 0s, 5s, 10s, 80s and 90s of elapsed decay.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2664 fn decays_liquidity_bounds_over_time() {
2665 let logger = TestLogger::new();
2666 let network_graph = network_graph(&logger);
2667 let params = ProbabilisticScoringFeeParameters {
2668 liquidity_penalty_multiplier_msat: 1_000,
2669 considered_impossible_penalty_msat: u64::max_value(),
2670 ..ProbabilisticScoringFeeParameters::zero_penalty()
2672 let decay_params = ProbabilisticScoringDecayParameters {
2673 liquidity_offset_half_life: Duration::from_secs(10),
2674 ..ProbabilisticScoringDecayParameters::zero_penalty()
2676 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
2677 let source = source_node_id();
2679 let usage = ChannelUsage {
2681 inflight_htlc_msat: 0,
2682 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_024 },
2684 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2685 let (info, _) = channel.as_directed_from(&source).unwrap();
2686 let candidate = CandidateRouteHop::PublicHop {
2688 short_channel_id: 42,
2690 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2691 let usage = ChannelUsage { amount_msat: 1_023, ..usage };
2692 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 2_000);
// Learn bounds: failure at 42 caps max at 768; downstream failure raises min to 128.
2694 scorer.payment_path_failed(&payment_path_for_amount(768), 42, Duration::ZERO);
2695 scorer.payment_path_failed(&payment_path_for_amount(128), 43, Duration::ZERO);
2697 // Initial penalties
2698 let usage = ChannelUsage { amount_msat: 128, ..usage };
2699 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2700 let usage = ChannelUsage { amount_msat: 256, ..usage };
2701 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 93);
2702 let usage = ChannelUsage { amount_msat: 768, ..usage };
2703 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 1_479);
2704 let usage = ChannelUsage { amount_msat: 896, ..usage };
2705 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
2707 // Half decay (i.e., three-quarter life)
2708 scorer.decay_liquidity_certainty(Duration::from_secs(5));
2709 let usage = ChannelUsage { amount_msat: 128, ..usage };
2710 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 22);
2711 let usage = ChannelUsage { amount_msat: 256, ..usage };
2712 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 106);
2713 let usage = ChannelUsage { amount_msat: 768, ..usage };
2714 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 921);
2715 let usage = ChannelUsage { amount_msat: 896, ..usage };
2716 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
2718 // One decay (i.e., half life)
2719 scorer.decay_liquidity_certainty(Duration::from_secs(10));
2720 let usage = ChannelUsage { amount_msat: 64, ..usage };
2721 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2722 let usage = ChannelUsage { amount_msat: 128, ..usage };
2723 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 34);
2724 let usage = ChannelUsage { amount_msat: 896, ..usage };
2725 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 1_970);
2726 let usage = ChannelUsage { amount_msat: 960, ..usage };
2727 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
2729 // Fully decay liquidity lower bound.
2730 scorer.decay_liquidity_certainty(Duration::from_secs(10 * 8));
2731 let usage = ChannelUsage { amount_msat: 0, ..usage };
2732 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2733 let usage = ChannelUsage { amount_msat: 1, ..usage };
2734 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2735 let usage = ChannelUsage { amount_msat: 1_023, ..usage };
2736 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 2_000);
2737 let usage = ChannelUsage { amount_msat: 1_024, ..usage };
2738 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
2740 // Fully decay liquidity upper bound.
2741 scorer.decay_liquidity_certainty(Duration::from_secs(10 * 9));
2742 let usage = ChannelUsage { amount_msat: 0, ..usage };
2743 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2744 let usage = ChannelUsage { amount_msat: 1_024, ..usage };
2745 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// Further decay past full: state remains at the fully-decayed defaults.
2747 scorer.decay_liquidity_certainty(Duration::from_secs(10 * 10));
2748 let usage = ChannelUsage { amount_msat: 0, ..usage };
2749 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
2750 let usage = ChannelUsage { amount_msat: 1_024, ..usage };
2751 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// Test: new liquidity information reported *after* decay has occurred restricts the
// decayed bounds rather than being discarded; the expected bounds at each step are
// given in the inline comments as (lower, upper) pairs.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2755 fn restricts_liquidity_bounds_after_decay() {
2756 let logger = TestLogger::new();
2757 let network_graph = network_graph(&logger);
2758 let params = ProbabilisticScoringFeeParameters {
2759 liquidity_penalty_multiplier_msat: 1_000,
2760 ..ProbabilisticScoringFeeParameters::zero_penalty()
2762 let decay_params = ProbabilisticScoringDecayParameters {
2763 liquidity_offset_half_life: Duration::from_secs(10),
2764 ..ProbabilisticScoringDecayParameters::default()
2766 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
2767 let source = source_node_id();
2768 let usage = ChannelUsage {
2770 inflight_htlc_msat: 0,
2771 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_000 },
2773 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2774 let (info, _) = channel.as_directed_from(&source).unwrap();
2775 let candidate = CandidateRouteHop::PublicHop {
2777 short_channel_id: 42,
// No knowledge yet: default penalty for this amount/capacity.
2780 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
2782 // More knowledge gives higher confidence (256, 768), meaning a lower penalty.
2783 scorer.payment_path_failed(&payment_path_for_amount(768), 42, Duration::ZERO);
2784 scorer.payment_path_failed(&payment_path_for_amount(256), 43, Duration::ZERO);
2785 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 281);
2787 // Decaying knowledge gives less confidence (128, 896), meaning a higher penalty.
2788 scorer.decay_liquidity_certainty(Duration::from_secs(10));
2789 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 291);
2791 // Reducing the upper bound gives more confidence (128, 832) that the payment amount (512)
2792 // is closer to the upper bound, meaning a higher penalty.
2793 scorer.payment_path_successful(&payment_path_for_amount(64), Duration::from_secs(10));
2794 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 331);
2796 // Increasing the lower bound gives more confidence (256, 832) that the payment amount (512)
2797 // is closer to the lower bound, meaning a lower penalty.
2798 scorer.payment_path_failed(&payment_path_for_amount(256), 43, Duration::from_secs(10));
2799 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 245);
2801 // Further decaying affects the lower bound more than the upper bound (128, 928).
2802 scorer.decay_liquidity_certainty(Duration::from_secs(20));
2803 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 280);
// Test: serializing a scorer (after failures and decay) and reading it back preserves
// the learned liquidity bounds — the deserialized scorer yields the same penalty (300)
// as the live one.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2807 fn restores_persisted_liquidity_bounds() {
2808 let logger = TestLogger::new();
2809 let network_graph = network_graph(&logger);
2810 let params = ProbabilisticScoringFeeParameters {
2811 liquidity_penalty_multiplier_msat: 1_000,
2812 considered_impossible_penalty_msat: u64::max_value(),
2813 ..ProbabilisticScoringFeeParameters::zero_penalty()
2815 let decay_params = ProbabilisticScoringDecayParameters {
2816 liquidity_offset_half_life: Duration::from_secs(10),
2817 ..ProbabilisticScoringDecayParameters::default()
2819 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
2820 let source = source_node_id();
2821 let usage = ChannelUsage {
2823 inflight_htlc_msat: 0,
2824 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
// Failure at 42 caps its liquidity at 500: amount 500 scores impossible.
2827 scorer.payment_path_failed(&payment_path_for_amount(500), 42, Duration::ZERO);
2828 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2829 let (info, _) = channel.as_directed_from(&source).unwrap();
2830 let candidate = CandidateRouteHop::PublicHop {
2832 short_channel_id: 42,
2834 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// One half-life of decay softens the bound; then a new downstream failure tightens it.
2836 scorer.decay_liquidity_certainty(Duration::from_secs(10));
2837 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 473);
2839 scorer.payment_path_failed(&payment_path_for_amount(250), 43, Duration::from_secs(10));
2840 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
// Round-trip through serialization must preserve the learned state.
2842 let mut serialized_scorer = Vec::new();
2843 scorer.write(&mut serialized_scorer).unwrap();
2845 let mut serialized_scorer = io::Cursor::new(&serialized_scorer);
2846 let deserialized_scorer =
2847 <ProbabilisticScorer<_, _>>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
2848 assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
// Helper (parameterized test body): verifies that decay applied before serialization
// and decay applied after deserialization produce the same penalties — i.e. persisted
// liquidity bounds decay identically regardless of when the decay happens relative to
// the save/load round trip.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2851 fn do_decays_persisted_liquidity_bounds(decay_before_reload: bool) {
2852 let logger = TestLogger::new();
2853 let network_graph = network_graph(&logger);
2854 let params = ProbabilisticScoringFeeParameters {
2855 liquidity_penalty_multiplier_msat: 1_000,
2856 considered_impossible_penalty_msat: u64::max_value(),
2857 ..ProbabilisticScoringFeeParameters::zero_penalty()
2859 let decay_params = ProbabilisticScoringDecayParameters {
2860 liquidity_offset_half_life: Duration::from_secs(10),
2861 ..ProbabilisticScoringDecayParameters::zero_penalty()
2863 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
2864 let source = source_node_id();
2865 let usage = ChannelUsage {
2867 inflight_htlc_msat: 0,
2868 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
// Learn an upper bound of 500 on channel 42, making amount 500 impossible.
2871 scorer.payment_path_failed(&payment_path_for_amount(500), 42, Duration::ZERO);
2872 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2873 let (info, _) = channel.as_directed_from(&source).unwrap();
2874 let candidate = CandidateRouteHop::PublicHop {
2876 short_channel_id: 42,
2878 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
// Apply one half-life of decay either before persisting or after reloading.
2880 if decay_before_reload {
2881 scorer.decay_liquidity_certainty(Duration::from_secs(10));
2884 let mut serialized_scorer = Vec::new();
2885 scorer.write(&mut serialized_scorer).unwrap();
2887 let mut serialized_scorer = io::Cursor::new(&serialized_scorer);
2888 let mut deserialized_scorer =
2889 <ProbabilisticScorer<_, _>>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
2890 if !decay_before_reload {
2891 scorer.decay_liquidity_certainty(Duration::from_secs(10));
2892 deserialized_scorer.decay_liquidity_certainty(Duration::from_secs(10));
// Either ordering must give the same decayed penalty.
2894 assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, ¶ms), 473);
// Updates to the live scorer after serialization don't affect the deserialized copy.
2896 scorer.payment_path_failed(&payment_path_for_amount(250), 43, Duration::from_secs(10));
2897 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
2899 deserialized_scorer.decay_liquidity_certainty(Duration::from_secs(20));
2900 assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, ¶ms), 370);
// Test driver: runs the persisted-decay check for both orderings (decay after reload,
// then decay before reload).
2904 fn decays_persisted_liquidity_bounds() {
2905 do_decays_persisted_liquidity_bounds(false);
2906 do_decays_persisted_liquidity_bounds(true);
// Test: documents the default-parameter penalties for a realistic 100k-sat payment over
// channels from ~1M to ~10M sats — the penalty falls as channel capacity grows.
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2910 fn scores_realistic_payments() {
2911 // Shows the scores of "realistic" sends of 100k sats over channels of 1-10m sats (with a
2912 // 50k sat reserve).
2913 let logger = TestLogger::new();
2914 let network_graph = network_graph(&logger);
2915 let params = ProbabilisticScoringFeeParameters::default();
2916 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2917 let source = source_node_id();
2919 let usage = ChannelUsage {
2920 amount_msat: 100_000_000,
2921 inflight_htlc_msat: 0,
2922 effective_capacity: EffectiveCapacity::Total { capacity_msat: 950_000_000, htlc_maximum_msat: 1_000 },
2924 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2925 let (info, _) = channel.as_directed_from(&source).unwrap();
2926 let candidate = CandidateRouteHop::PublicHop {
2928 short_channel_id: 42,
// Penalties decrease monotonically (stepwise) as capacity grows from ~1M to ~10M sats.
2930 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 11497);
2931 let usage = ChannelUsage {
2932 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2934 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 7408);
2935 let usage = ChannelUsage {
2936 effective_capacity: EffectiveCapacity::Total { capacity_msat: 2_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2938 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 6151);
2939 let usage = ChannelUsage {
2940 effective_capacity: EffectiveCapacity::Total { capacity_msat: 3_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2942 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 5427);
2943 let usage = ChannelUsage {
2944 effective_capacity: EffectiveCapacity::Total { capacity_msat: 4_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2946 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4955);
2947 let usage = ChannelUsage {
2948 effective_capacity: EffectiveCapacity::Total { capacity_msat: 5_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2950 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4736);
2951 let usage = ChannelUsage {
2952 effective_capacity: EffectiveCapacity::Total { capacity_msat: 6_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2954 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4484);
2955 let usage = ChannelUsage {
2956 effective_capacity: EffectiveCapacity::Total { capacity_msat: 7_450_000_000, htlc_maximum_msat: 1_000 }, ..usage
2958 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4484);
2959 let usage = ChannelUsage {
2960 effective_capacity: EffectiveCapacity::Total { capacity_msat: 7_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2962 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4263);
2963 let usage = ChannelUsage {
2964 effective_capacity: EffectiveCapacity::Total { capacity_msat: 8_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2966 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4263);
2967 let usage = ChannelUsage {
2968 effective_capacity: EffectiveCapacity::Total { capacity_msat: 9_950_000_000, htlc_maximum_msat: 1_000 }, ..usage
2970 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 4044);
// Test: `base_penalty_msat` is added on top of the liquidity penalty (58 -> 558), and
// `base_penalty_amount_multiplier_msat` adds an amount-proportional component on top of
// that (558 -> 558 + 128).
// NOTE(review): `¶ms` is mojibake for `&params`; interior lines are elided.
2974 fn adds_base_penalty_to_liquidity_penalty() {
2975 let logger = TestLogger::new();
2976 let network_graph = network_graph(&logger);
2977 let source = source_node_id();
2978 let usage = ChannelUsage {
2980 inflight_htlc_msat: 0,
2981 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_000 },
// Liquidity penalty alone: 58 msat for this usage.
2984 let params = ProbabilisticScoringFeeParameters {
2985 liquidity_penalty_multiplier_msat: 1_000,
2986 ..ProbabilisticScoringFeeParameters::zero_penalty()
2988 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
2989 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
2990 let (info, _) = channel.as_directed_from(&source).unwrap();
2991 let candidate = CandidateRouteHop::PublicHop {
2993 short_channel_id: 42,
2995 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 58);
// Adding a flat 500 msat base penalty: 58 + 500 = 558.
2997 let params = ProbabilisticScoringFeeParameters {
2998 base_penalty_msat: 500, liquidity_penalty_multiplier_msat: 1_000,
2999 anti_probing_penalty_msat: 0, ..ProbabilisticScoringFeeParameters::zero_penalty()
3001 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3002 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 558);
// The amount multiplier (1 << 30) adds a further amount-scaled 128 msat.
3004 let params = ProbabilisticScoringFeeParameters {
3005 base_penalty_msat: 500, liquidity_penalty_multiplier_msat: 1_000,
3006 base_penalty_amount_multiplier_msat: (1 << 30),
3007 anti_probing_penalty_msat: 0, ..ProbabilisticScoringFeeParameters::zero_penalty()
3010 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3011 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 558 + 128);
3015 fn adds_amount_penalty_to_liquidity_penalty() {
3016 let logger = TestLogger::new();
3017 let network_graph = network_graph(&logger);
3018 let source = source_node_id();
3019 let usage = ChannelUsage {
3020 amount_msat: 512_000,
3021 inflight_htlc_msat: 0,
3022 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024_000, htlc_maximum_msat: 1_000 },
3025 let params = ProbabilisticScoringFeeParameters {
3026 liquidity_penalty_multiplier_msat: 1_000,
3027 liquidity_penalty_amount_multiplier_msat: 0,
3028 ..ProbabilisticScoringFeeParameters::zero_penalty()
3030 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3031 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
3032 let (info, _) = channel.as_directed_from(&source).unwrap();
3033 let candidate = CandidateRouteHop::PublicHop {
3035 short_channel_id: 42,
3037 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
3039 let params = ProbabilisticScoringFeeParameters {
3040 liquidity_penalty_multiplier_msat: 1_000,
3041 liquidity_penalty_amount_multiplier_msat: 256,
3042 ..ProbabilisticScoringFeeParameters::zero_penalty()
3044 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3045 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 337);
3049 fn calculates_log10_without_overflowing_u64_max_value() {
3050 let logger = TestLogger::new();
3051 let network_graph = network_graph(&logger);
3052 let source = source_node_id();
3053 let usage = ChannelUsage {
3054 amount_msat: u64::max_value(),
3055 inflight_htlc_msat: 0,
3056 effective_capacity: EffectiveCapacity::Infinite,
3058 let params = ProbabilisticScoringFeeParameters {
3059 liquidity_penalty_multiplier_msat: 40_000,
3060 ..ProbabilisticScoringFeeParameters::zero_penalty()
3062 let decay_params = ProbabilisticScoringDecayParameters::zero_penalty();
3063 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
3064 let (info, _) = channel.as_directed_from(&source).unwrap();
3065 let candidate = CandidateRouteHop::PublicHop {
3067 short_channel_id: 42,
3069 let scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
3070 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 80_000);
3074 fn accounts_for_inflight_htlc_usage() {
3075 let logger = TestLogger::new();
3076 let network_graph = network_graph(&logger);
3077 let params = ProbabilisticScoringFeeParameters {
3078 considered_impossible_penalty_msat: u64::max_value(),
3079 ..ProbabilisticScoringFeeParameters::zero_penalty()
3081 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3082 let source = source_node_id();
3084 let usage = ChannelUsage {
3086 inflight_htlc_msat: 0,
3087 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_000, htlc_maximum_msat: 1_000 },
3089 let network_graph = network_graph.read_only();
3090 let channel = network_graph.channel(42).unwrap();
3091 let (info, _) = channel.as_directed_from(&source).unwrap();
3092 let candidate = CandidateRouteHop::PublicHop {
3094 short_channel_id: 42,
3096 assert_ne!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
3098 let usage = ChannelUsage { inflight_htlc_msat: 251, ..usage };
3099 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
3103 fn removes_uncertainity_when_exact_liquidity_known() {
3104 let logger = TestLogger::new();
3105 let network_graph = network_graph(&logger);
3106 let params = ProbabilisticScoringFeeParameters::default();
3107 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3108 let source = source_node_id();
3110 let base_penalty_msat = params.base_penalty_msat;
3111 let usage = ChannelUsage {
3113 inflight_htlc_msat: 0,
3114 effective_capacity: EffectiveCapacity::ExactLiquidity { liquidity_msat: 1_000 },
3116 let network_graph = network_graph.read_only();
3117 let channel = network_graph.channel(42).unwrap();
3118 let (info, _) = channel.as_directed_from(&source).unwrap();
3119 let candidate = CandidateRouteHop::PublicHop {
3121 short_channel_id: 42,
3123 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), base_penalty_msat);
3125 let usage = ChannelUsage { amount_msat: 1_000, ..usage };
3126 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), base_penalty_msat);
3128 let usage = ChannelUsage { amount_msat: 1_001, ..usage };
3129 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), u64::max_value());
3133 fn remembers_historical_failures() {
// Exercises the historical liquidity tracker: a payment failure bumps the corresponding
// min/max liquidity buckets, bucket weights decay with `historical_no_updates_half_life`,
// and fully-decayed data eventually stops influencing the penalty entirely.
// NOTE(review): in this extraction `&params` appears mangled as `¶ms` (HTML-entity
// corruption) and several interior lines (struct closers, `info,` fields, some assertion
// right-hand sides, scope braces) are missing. Code lines below are kept byte-identical;
// reconcile against the upstream file before compiling.
3134 let logger = TestLogger::new();
3135 let network_graph = network_graph(&logger);
3136 let params = ProbabilisticScoringFeeParameters {
3137 historical_liquidity_penalty_multiplier_msat: 1024,
3138 historical_liquidity_penalty_amount_multiplier_msat: 1024,
3139 ..ProbabilisticScoringFeeParameters::zero_penalty()
// A short 10s historical half-life lets the test decay history quickly below.
3141 let decay_params = ProbabilisticScoringDecayParameters {
3142 liquidity_offset_half_life: Duration::from_secs(60 * 60),
3143 historical_no_updates_half_life: Duration::from_secs(10),
3145 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
3146 let source = source_node_id();
3147 let target = target_node_id();
3149 let usage = ChannelUsage {
3151 inflight_htlc_msat: 0,
3152 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_024 },
// `usage_1` differs from `usage` only in amount (its amount line is lost in extraction);
// it represents a much smaller payment scored against the same channel.
3154 let usage_1 = ChannelUsage {
3156 inflight_htlc_msat: 0,
3157 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_024 },
3161 let network_graph = network_graph.read_only();
3162 let channel = network_graph.channel(42).unwrap();
3163 let (info, _) = channel.as_directed_from(&source).unwrap();
3164 let candidate = CandidateRouteHop::PublicHop {
3166 short_channel_id: 42,
3169 // With no historical data the normal liquidity penalty calculation is used.
3170 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 168);
3172 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3174 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 42, ¶ms),
// Record a failure of a 1 msat payment over channel 42.
3177 scorer.payment_path_failed(&payment_path_for_amount(1), 42, Duration::ZERO);
3179 let network_graph = network_graph.read_only();
3180 let channel = network_graph.channel(42).unwrap();
3181 let (info, _) = channel.as_directed_from(&source).unwrap();
3182 let candidate = CandidateRouteHop::PublicHop {
3184 short_channel_id: 42,
// After the failure the larger payment is penalized heavily while the tiny one stays cheap.
3187 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 2048);
3188 assert_eq!(scorer.channel_penalty_msat(&candidate, usage_1, ¶ms), 249);
3190 // The "it failed" increment is 32, where the probability should lie several buckets into
3191 // the first octile.
3192 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3193 Some(([32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
3194 [0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])));
3195 assert!(scorer.historical_estimated_payment_success_probability(42, &target, 1, ¶ms)
3197 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 500, ¶ms),
3200 // Even after we tell the scorer we definitely have enough available liquidity, it will
3201 // still remember that there was some failure in the past, and assign a non-0 penalty.
3202 scorer.payment_path_failed(&payment_path_for_amount(1000), 43, Duration::ZERO);
3204 let network_graph = network_graph.read_only();
3205 let channel = network_graph.channel(42).unwrap();
3206 let (info, _) = channel.as_directed_from(&source).unwrap();
3207 let candidate = CandidateRouteHop::PublicHop {
3209 short_channel_id: 42,
3212 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 105);
3214 // The first points should be decayed just slightly and the last bucket has a new point.
3215 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3216 Some(([31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0],
3217 [0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32])));
3219 // The exact success probability is a bit complicated and involves integer rounding, so we
3220 // simply check bounds here.
3221 let five_hundred_prob =
3222 scorer.historical_estimated_payment_success_probability(42, &target, 500, ¶ms).unwrap();
3223 assert!(five_hundred_prob > 0.59);
3224 assert!(five_hundred_prob < 0.60);
// (The `let one_prob =` binding line appears lost in extraction just above this call.)
3226 scorer.historical_estimated_payment_success_probability(42, &target, 1, ¶ms).unwrap();
3227 assert!(one_prob < 0.85);
3228 assert!(one_prob > 0.84);
3230 // Advance the time forward 16 half-lives (which the docs claim will ensure all data is
3231 // gone), and check that we're back to where we started.
3232 scorer.decay_liquidity_certainty(Duration::from_secs(10 * 16));
3234 let network_graph = network_graph.read_only();
3235 let channel = network_graph.channel(42).unwrap();
3236 let (info, _) = channel.as_directed_from(&source).unwrap();
3237 let candidate = CandidateRouteHop::PublicHop {
3239 short_channel_id: 42,
3242 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 168);
3244 // Once fully decayed we still have data, but its all-0s. In the future we may remove the
3245 // data entirely instead.
3246 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3247 Some(([0; 32], [0; 32])));
3248 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 1, ¶ms), None);
// With the channel fully saturated by in-flight HTLCs, check the penalty after a fresh failure.
3250 let mut usage = ChannelUsage {
3252 inflight_htlc_msat: 1024,
3253 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_024 },
3255 scorer.payment_path_failed(&payment_path_for_amount(1), 42, Duration::from_secs(10 * 16));
3257 let network_graph = network_graph.read_only();
3258 let channel = network_graph.channel(42).unwrap();
3259 let (info, _) = channel.as_directed_from(&source).unwrap();
3260 let candidate = CandidateRouteHop::PublicHop {
3262 short_channel_id: 42,
3265 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 2050);
// A zero-capacity hop (`AdvertisedMaxHTLC { amount_msat: 0 }`) exercises the degenerate case.
3267 let usage = ChannelUsage {
3269 inflight_htlc_msat: 0,
3270 effective_capacity: EffectiveCapacity::AdvertisedMaxHTLC { amount_msat: 0 },
3272 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 2048);
3275 // Advance to decay all liquidity offsets to zero.
3276 scorer.decay_liquidity_certainty(Duration::from_secs(10 * (16 + 60 * 60)));
3278 // Once even the bounds have decayed information about the channel should be removed
3280 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3283 // Use a path in the opposite direction, which have zero for htlc_maximum_msat. This will
3284 // ensure that the effective capacity is zero to test division-by-zero edge cases.
3286 path_hop(target_pubkey(), 43, 2),
3287 path_hop(source_pubkey(), 42, 1),
3288 path_hop(sender_pubkey(), 41, 0),
3290 scorer.payment_path_failed(&Path { hops: path, blinded_tail: None }, 42, Duration::from_secs(10 * (16 + 60 * 60)));
3294 fn adds_anti_probing_penalty() {
3295 let logger = TestLogger::new();
3296 let network_graph = network_graph(&logger);
3297 let source = source_node_id();
3298 let params = ProbabilisticScoringFeeParameters {
3299 anti_probing_penalty_msat: 500,
3300 ..ProbabilisticScoringFeeParameters::zero_penalty()
3302 let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &network_graph, &logger);
3304 // Check we receive no penalty for a low htlc_maximum_msat.
3305 let usage = ChannelUsage {
3306 amount_msat: 512_000,
3307 inflight_htlc_msat: 0,
3308 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024_000, htlc_maximum_msat: 1_000 },
3310 let network_graph = network_graph.read_only();
3311 let channel = network_graph.channel(42).unwrap();
3312 let (info, _) = channel.as_directed_from(&source).unwrap();
3313 let candidate = CandidateRouteHop::PublicHop {
3315 short_channel_id: 42,
3317 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
3319 // Check we receive anti-probing penalty for htlc_maximum_msat == channel_capacity.
3320 let usage = ChannelUsage {
3321 amount_msat: 512_000,
3322 inflight_htlc_msat: 0,
3323 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024_000, htlc_maximum_msat: 1_024_000 },
3325 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 500);
3327 // Check we receive anti-probing penalty for htlc_maximum_msat == channel_capacity/2.
3328 let usage = ChannelUsage {
3329 amount_msat: 512_000,
3330 inflight_htlc_msat: 0,
3331 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024_000, htlc_maximum_msat: 512_000 },
3333 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 500);
3335 // Check we receive no anti-probing penalty for htlc_maximum_msat == channel_capacity/2 - 1.
3336 let usage = ChannelUsage {
3337 amount_msat: 512_000,
3338 inflight_htlc_msat: 0,
3339 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024_000, htlc_maximum_msat: 511_999 },
3341 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 0);
3345 fn scores_with_blinded_path() {
3346 // Make sure we'll account for a blinded path's final_value_msat in scoring
// NOTE(review): in this extraction `&params` appears mangled as `¶ms` and several
// interior lines (struct closers, the `blinded_hops` vec wrapper, `info,` fields) are
// missing. Code lines below are kept byte-identical; reconcile against upstream.
3347 let logger = TestLogger::new();
3348 let network_graph = network_graph(&logger);
3349 let params = ProbabilisticScoringFeeParameters {
3350 liquidity_penalty_multiplier_msat: 1_000,
3351 ..ProbabilisticScoringFeeParameters::zero_penalty()
3353 let decay_params = ProbabilisticScoringDecayParameters::default();
3354 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
3355 let source = source_node_id();
3356 let usage = ChannelUsage {
3358 inflight_htlc_msat: 0,
3359 effective_capacity: EffectiveCapacity::Total { capacity_msat: 1_024, htlc_maximum_msat: 1_000 },
3361 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
3362 let (info, target) = channel.as_directed_from(&source).unwrap();
3363 let candidate = CandidateRouteHop::PublicHop {
3365 short_channel_id: 42,
3367 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 300);
// Convert the recipient hop of a normal 768 msat path into a blinded tail whose
// final_value_msat carries what used to be the recipient hop's fee.
3369 let mut path = payment_path_for_amount(768);
3370 let recipient_hop = path.hops.pop().unwrap();
3371 let blinded_path = BlindedPath {
3372 introduction_node_id: path.hops.last().as_ref().unwrap().pubkey,
3373 blinding_point: test_utils::pubkey(42),
3375 BlindedHop { blinded_node_id: test_utils::pubkey(44), encrypted_payload: Vec::new() }
3378 path.blinded_tail = Some(BlindedTail {
3379 hops: blinded_path.blinded_hops,
3380 blinding_point: blinded_path.blinding_point,
3381 excess_final_cltv_expiry_delta: recipient_hop.cltv_expiry_delta,
3382 final_value_msat: recipient_hop.fee_msat,
3385 // Check the liquidity before and after scoring payment failures to ensure the blinded path's
3386 // final value is taken into account.
3387 assert!(scorer.channel_liquidities.get(&42).is_none());
3389 scorer.payment_path_failed(&path, 42, Duration::ZERO);
3390 path.blinded_tail.as_mut().unwrap().final_value_msat = 256;
3391 scorer.payment_path_failed(&path, 43, Duration::ZERO);
// Failing at hop 42 with 768 msat caps the max bound; failing downstream (scid 43) with
// 256 msat raises the min bound for channel 42 — both include the blinded final value.
3393 let liquidity = scorer.channel_liquidities.get(&42).unwrap()
3394 .as_directed(&source, &target, 1_000);
3395 assert_eq!(liquidity.min_liquidity_msat(), 256);
3396 assert_eq!(liquidity.max_liquidity_msat(), 768);
3400 fn realistic_historical_failures() {
3401 // The motivation for the unequal sized buckets came largely from attempting to pay 10k
3402 // sats over a one bitcoin channel. This tests that case explicitly, ensuring that we score
// NOTE(review): in this extraction `&params` appears mangled as `¶ms` and several
// interior lines (struct closers, `info,` fields, some assertion right-hand sides, and the
// line mutating `amount_msat` for the second phase) are missing. Code lines below are kept
// byte-identical; reconcile against upstream before compiling.
3404 let logger = TestLogger::new();
3405 let mut network_graph = network_graph(&logger);
3406 let params = ProbabilisticScoringFeeParameters {
3407 historical_liquidity_penalty_multiplier_msat: 1024,
3408 historical_liquidity_penalty_amount_multiplier_msat: 1024,
3409 ..ProbabilisticScoringFeeParameters::zero_penalty()
3411 let decay_params = ProbabilisticScoringDecayParameters {
3412 liquidity_offset_half_life: Duration::from_secs(60 * 60),
3413 historical_no_updates_half_life: Duration::from_secs(10),
3414 ..ProbabilisticScoringDecayParameters::default()
// 100_000_000_000 msat == 100M sats == 1 BTC, matching the motivating scenario above.
3417 let capacity_msat = 100_000_000_000;
3418 update_channel(&mut network_graph, 42, source_privkey(), 0, capacity_msat, 200);
3419 update_channel(&mut network_graph, 42, target_privkey(), 1, capacity_msat, 200);
3421 let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger);
3422 let source = source_node_id();
// 10_000_000 msat == 10k sats out of the 1 BTC channel.
3424 let mut amount_msat = 10_000_000;
3425 let usage = ChannelUsage {
3427 inflight_htlc_msat: 0,
3428 effective_capacity: EffectiveCapacity::Total { capacity_msat, htlc_maximum_msat: capacity_msat },
3430 let channel = network_graph.read_only().channel(42).unwrap().to_owned();
3431 let (info, target) = channel.as_directed_from(&source).unwrap();
3432 let candidate = CandidateRouteHop::PublicHop {
3434 short_channel_id: 42,
3436 // With no historical data the normal liquidity penalty calculation is used, which results
3437 // in a success probability of ~75%.
3438 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms), 1269);
3439 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3441 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 42, ¶ms),
3444 // Fail to pay once, and then check the buckets and penalty.
3445 scorer.payment_path_failed(&payment_path_for_amount(amount_msat), 42, Duration::ZERO);
3446 // The penalty should be the maximum penalty, as the payment we're scoring is now in the
3447 // same bucket which is the only maximum datapoint.
3448 assert_eq!(scorer.channel_penalty_msat(&candidate, usage, ¶ms),
3449 2048 + 2048 * amount_msat / super::AMOUNT_PENALTY_DIVISOR);
3450 // The "it failed" increment is 32, which we should apply to the first upper-bound (between
3451 // 6k sats and 12k sats).
3452 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3453 Some(([32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
3454 [0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])));
3455 // The success probability estimate itself should be zero.
3456 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, amount_msat, ¶ms),
3459 // Now test again with the amount in the bottom bucket.
// (The statement reducing `amount_msat` for this phase appears lost in extraction.)
3461 // The new amount is entirely within the only minimum bucket with score, so the probability
3462 // we assign is 1/2.
3463 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, amount_msat, ¶ms),
3466 // ...but once we see a failure, we consider the payment to be substantially less likely,
3467 // even though not a probability of zero as we still look at the second max bucket which
3469 scorer.payment_path_failed(&payment_path_for_amount(amount_msat), 42, Duration::ZERO);
3470 assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3471 Some(([63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
3472 [32, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])));
3473 assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, amount_msat, ¶ms),
3481 use criterion::Criterion;
3482 use crate::routing::router::{bench_utils, RouteHop};
3483 use crate::util::test_utils::TestLogger;
3484 use crate::ln::features::{ChannelFeatures, NodeFeatures};
3486 pub fn decay_100k_channel_bounds(bench: &mut Criterion) {
3487 let logger = TestLogger::new();
3488 let network_graph = bench_utils::read_network_graph(&logger).unwrap();
3489 let mut scorer = ProbabilisticScorer::new(Default::default(), &network_graph, &logger);
3490 // Score a number of random channels
3491 let mut seed: u64 = 0xdeadbeef;
3492 for _ in 0..100_000 {
3493 seed = seed.overflowing_mul(6364136223846793005).0.overflowing_add(1).0;
3494 let (victim, victim_dst, amt) = {
3495 let rong = network_graph.read_only();
3496 let channels = rong.channels();
3497 let chan = channels.unordered_iter()
3498 .skip((seed as usize) % channels.len())
3500 seed = seed.overflowing_mul(6364136223846793005).0.overflowing_add(1).0;
3501 let amt = seed % chan.1.capacity_sats.map(|c| c * 1000)
3502 .or(chan.1.one_to_two.as_ref().map(|info| info.htlc_maximum_msat))
3503 .or(chan.1.two_to_one.as_ref().map(|info| info.htlc_maximum_msat))
3504 .unwrap_or(1_000_000_000).saturating_add(1);
3505 (*chan.0, chan.1.node_two, amt)
3508 hops: vec![RouteHop {
3509 pubkey: victim_dst.as_pubkey().unwrap(),
3510 node_features: NodeFeatures::empty(),
3511 short_channel_id: victim,
3512 channel_features: ChannelFeatures::empty(),
3514 cltv_expiry_delta: 42,
3515 maybe_announced_channel: true,
3519 seed = seed.overflowing_mul(6364136223846793005).0.overflowing_add(1).0;
3521 scorer.probe_failed(&path, victim, Duration::ZERO);
3523 scorer.probe_successful(&path, Duration::ZERO);
3526 let mut cur_time = Duration::ZERO;
3527 cur_time += Duration::from_millis(1);
3528 scorer.decay_liquidity_certainty(cur_time);
3529 bench.bench_function("decay_100k_channel_bounds", |b| b.iter(|| {
3530 cur_time += Duration::from_millis(1);
3531 scorer.decay_liquidity_certainty(cur_time);