decay_params: ProbabilisticScoringDecayParameters,
network_graph: G,
logger: L,
- // TODO: Remove entries of closed channels.
channel_liquidities: HashMap<u64, ChannelLiquidity<T>>,
}
/// Note that this writes roughly one line per channel for which we have a liquidity estimate,
/// which may be a substantial amount of log output.
pub fn debug_log_liquidity_stats(&self) {
- let now = T::now();
-
let graph = self.network_graph.read_only();
for (scid, liq) in self.channel_liquidities.iter() {
if let Some(chan_debug) = graph.channels().get(scid) {
let amt = directed_info.effective_capacity().as_msat();
let dir_liq = liq.as_directed(source, target, amt, self.decay_params);
- let (min_buckets, max_buckets) = dir_liq.liquidity_history
- .get_decayed_buckets(now, *dir_liq.offset_history_last_updated,
- self.decay_params.historical_no_updates_half_life)
- .unwrap_or(([0; 32], [0; 32]));
+ let min_buckets = &dir_liq.liquidity_history.min_liquidity_offset_history.buckets;
+ let max_buckets = &dir_liq.liquidity_history.max_liquidity_offset_history.buckets;
log_debug!(self.logger, core::concat!(
"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
/// in the top and bottom bucket, and roughly with similar (recent) frequency.
///
/// Because the datapoints are decayed slowly over time, values will eventually return to
- /// `Some(([0; 32], [0; 32]))` and then to `None` once no datapoints remain.
+ /// `Some(([0; 32], [0; 32]))` or `None` if no data remains for a channel.
///
/// In order to fetch a single success probability from the buckets provided here, as used in
/// the scoring model, see [`Self::historical_estimated_payment_success_probability`].
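///
/// For example (a sketch only, with `probs` standing for the value returned by this
/// method), a caller that only needs to know whether any usable history remains can
/// treat `None` and all-zero buckets identically:
/// `let no_data = probs.map_or(true, |(min, max)| min == [0; 32] && max == [0; 32]);`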
let amt = directed_info.effective_capacity().as_msat();
let dir_liq = liq.as_directed(source, target, amt, self.decay_params);
- let (min_buckets, mut max_buckets) =
- dir_liq.liquidity_history.get_decayed_buckets(
- dir_liq.now, *dir_liq.offset_history_last_updated,
- self.decay_params.historical_no_updates_half_life
- )?;
+ let min_buckets = dir_liq.liquidity_history.min_liquidity_offset_history.buckets;
+ let mut max_buckets = dir_liq.liquidity_history.max_liquidity_offset_history.buckets;
// Note that the liquidity buckets are an offset from the edge, so we reverse
// the max-bucket order to get the probabilities from zero.
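// (As a rough sketch of why: max_buckets[i] counts observations whose upper liquidity
// bound sat roughly i buckets below the full capacity, so the two arrays run in
// opposite directions until max_buckets is reversed; afterwards, index 0 of both
// arrays refers to the end of the range nearest zero liquidity.)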
let dir_liq = liq.as_directed(source, target, capacity_msat, self.decay_params);
return dir_liq.liquidity_history.calculate_success_probability_times_billion(
- dir_liq.now, *dir_liq.offset_history_last_updated,
- self.decay_params.historical_no_updates_half_life, &params, amount_msat,
- capacity_msat
+ &params, amount_msat, capacity_msat
).map(|p| p as f64 / (1024 * 1024 * 1024) as f64);
}
}
decay_params: decay_params,
}
}
+
+ fn decayed_offset(&self, offset: u64, decay_params: ProbabilisticScoringDecayParameters) -> u64 {
+ let half_life = decay_params.liquidity_offset_half_life.as_secs_f64();
+ if half_life != 0.0 {
+ let elapsed_time = T::now().duration_since(self.last_updated).as_secs_f64();
+ ((offset as f64) * powf64(0.5, elapsed_time / half_life)) as u64
+ } else {
+ 0
+ }
+ }
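+
+ // As a sanity check of the formula above (illustrative numbers only): with a
+ // liquidity_offset_half_life of 10 seconds and an offset of 1_000 msat, an elapsed
+ // time of 10 seconds yields 1_000 * 0.5^1 = 500 msat, and 20 seconds yields
+ // 1_000 * 0.5^2 = 250 msat.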
}
/// Bounds `-log10` to avoid excessive liquidity penalties for payments with low success
score_params.historical_liquidity_penalty_amount_multiplier_msat != 0 {
if let Some(cumulative_success_prob_times_billion) = self.liquidity_history
.calculate_success_probability_times_billion(
- self.now, *self.offset_history_last_updated,
- self.decay_params.historical_no_updates_half_life, score_params, amount_msat,
- self.capacity_msat)
+ score_params, amount_msat, self.capacity_msat)
{
let historical_negative_log10_times_2048 = approx::negative_log10_times_2048(cumulative_success_prob_times_billion + 1, 1024 * 1024 * 1024);
res = res.saturating_add(Self::combined_penalty_msat(amount_msat,
}
fn decayed_offset_msat(&self, offset_msat: u64) -> u64 {
- let half_life = self.decay_params.liquidity_offset_half_life.as_secs();
- if half_life != 0 {
- // Decay the offset by the appropriate number of half lives. If half of the next half
- // life has passed, approximate an additional three-quarter life to help smooth out the
- // decay.
- let elapsed_time = self.now.duration_since(*self.last_updated).as_secs();
- let half_decays = elapsed_time / (half_life / 2);
- let decays = half_decays / 2;
- let decayed_offset_msat = offset_msat.checked_shr(decays as u32).unwrap_or(0);
- if half_decays % 2 == 0 {
- decayed_offset_msat
- } else {
- // 11_585 / 16_384 ~= core::f64::consts::FRAC_1_SQRT_2
- // 16_384 == 2^14
- (decayed_offset_msat as u128 * 11_585 / 16_384) as u64
- }
- } else {
- 0
- }
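+ // Decay is now applied centrally in `decay_liquidity_certainty`, so the stored
+ // offset is returned undecayed here.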
+ offset_msat
}
}
self.liquidity_history.max_liquidity_offset_history.track_datapoint(
max_liquidity_offset_msat.saturating_sub(bucket_offset_msat), self.capacity_msat
);
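+ // Note: `offset_history_last_updated` advances only when a datapoint is actually
+ // tracked, not when the live liquidity bounds are merely adjusted below, so
+ // historical decay proceeds independently of bound updates.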
+ *self.offset_history_last_updated = self.now;
}
/// Adjusts the lower bound of the channel liquidity balance in this direction.
self.decayed_offset_msat(*self.max_liquidity_offset_msat)
};
*self.last_updated = self.now;
- *self.offset_history_last_updated = self.now;
}
/// Adjusts the upper bound of the channel liquidity balance in this direction.
self.decayed_offset_msat(*self.min_liquidity_offset_msat)
};
*self.last_updated = self.now;
- *self.offset_history_last_updated = self.now;
}
}
self.payment_path_failed(path, u64::max_value(), duration_since_epoch)
}
- fn decay_liquidity_certainty(&mut self, _duration_since_epoch: Duration) {}
+ fn decay_liquidity_certainty(&mut self, _duration_since_epoch: Duration) {
+ let decay_params = self.decay_params;
+ self.channel_liquidities.retain(|_scid, liquidity| {
+ liquidity.min_liquidity_offset_msat = liquidity.decayed_offset(liquidity.min_liquidity_offset_msat, decay_params);
+ liquidity.max_liquidity_offset_msat = liquidity.decayed_offset(liquidity.max_liquidity_offset_msat, decay_params);
+ liquidity.last_updated = T::now();
+ let elapsed_time =
+ T::now().duration_since(liquidity.offset_history_last_updated);
+ if elapsed_time > decay_params.historical_no_updates_half_life {
+ let half_life = decay_params.historical_no_updates_half_life.as_secs_f64();
+ if half_life != 0.0 {
+ let divisor = powf64(2048.0, elapsed_time.as_secs_f64() / half_life) as u64;
+ for bucket in liquidity.min_liquidity_offset_history.buckets.iter_mut() {
+ *bucket = ((*bucket as u64) * 1024 / divisor) as u16;
+ }
+ for bucket in liquidity.max_liquidity_offset_history.buckets.iter_mut() {
+ *bucket = ((*bucket as u64) * 1024 / divisor) as u16;
+ }
+ liquidity.offset_history_last_updated = T::now();
+ }
+ }
+ liquidity.min_liquidity_offset_msat != 0 || liquidity.max_liquidity_offset_msat != 0 ||
+ liquidity.min_liquidity_offset_history.buckets != [0; 32] ||
+ liquidity.max_liquidity_offset_history.buckets != [0; 32]
+ });
+ }
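+
+ // Illustrative check of the historical decay above: when elapsed_time is just over
+ // one half-life, divisor ~= 2048^1 = 2048, so each bucket becomes
+ // bucket * 1024 / 2048, i.e. roughly halved. Since offset_history_last_updated is
+ // reset each time the decay is applied, steady-state operation halves the buckets
+ // about once per half-life.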
}
#[cfg(c_bindings)]
/// in each of 32 buckets.
#[derive(Clone, Copy)]
pub(super) struct HistoricalBucketRangeTracker {
- buckets: [u16; 32],
+ pub(super) buckets: [u16; 32],
}
/// Buckets are stored in fixed point numbers with a 5 bit fractional part. Thus, the value
}
impl<D: Deref<Target = HistoricalBucketRangeTracker>> HistoricalMinMaxBuckets<D> {
- pub(super) fn get_decayed_buckets<T: Time>(&self, now: T, offset_history_last_updated: T, half_life: Duration)
- -> Option<([u16; 32], [u16; 32])> {
- let (_, required_decays) = self.get_total_valid_points(now, offset_history_last_updated, half_life)?;
-
- let mut min_buckets = *self.min_liquidity_offset_history;
- min_buckets.time_decay_data(required_decays);
- let mut max_buckets = *self.max_liquidity_offset_history;
- max_buckets.time_decay_data(required_decays);
- Some((min_buckets.buckets, max_buckets.buckets))
- }
#[inline]
- pub(super) fn get_total_valid_points<T: Time>(&self, now: T, offset_history_last_updated: T, half_life: Duration)
- -> Option<(u64, u32)> {
- let required_decays = now.duration_since(offset_history_last_updated).as_secs()
- .checked_div(half_life.as_secs())
- .map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
+ pub(super) fn calculate_success_probability_times_billion(
+ &self, params: &ProbabilisticScoringFeeParameters, amount_msat: u64,
+ capacity_msat: u64
+ ) -> Option<u64> {
+ // If historical penalties are enabled, we try to calculate a probability of success
+ // given our historical distribution of min- and max-liquidity bounds in a channel.
+ // To do so, we walk the set of historical liquidity bucket (min, max) combinations
+ // (where min_idx < max_idx, as having a minimum above our maximum is an invalid
+ // state). For each pair, we calculate the probability as if the bucket's corresponding
+ // min- and max- liquidity bounds were our current liquidity bounds and then multiply
+ // that probability by the weight of the selected buckets.
+ let payment_pos = amount_to_pos(amount_msat, capacity_msat);
+ if payment_pos >= POSITION_TICKS { return None; }
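+ // (Here payment_pos is, roughly, the payment amount scaled linearly onto
+ // POSITION_TICKS (1 << 14) discrete positions across the channel's capacity;
+ // amounts at or above the capacity land at or beyond POSITION_TICKS, where we have
+ // no usable data and thus return None.)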
let mut total_valid_points_tracked = 0;
for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(32 - min_idx) {
total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
}
}
// If the total valid points is smaller than 1.0 (i.e. 32 * 32 in our fixed-point
// scheme, since each point is the product of two bucket weights), treat it as if we
// were fully decayed.
const FULLY_DECAYED: u16 = BUCKET_FIXED_POINT_ONE * BUCKET_FIXED_POINT_ONE;
- if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < FULLY_DECAYED.into() {
+ if total_valid_points_tracked < FULLY_DECAYED.into() {
return None;
}
- Some((total_valid_points_tracked, required_decays))
- }
-
- #[inline]
- pub(super) fn calculate_success_probability_times_billion<T: Time>(
- &self, now: T, offset_history_last_updated: T, half_life: Duration,
- params: &ProbabilisticScoringFeeParameters, amount_msat: u64, capacity_msat: u64
- ) -> Option<u64> {
- // If historical penalties are enabled, we try to calculate a probability of success
- // given our historical distribution of min- and max-liquidity bounds in a channel.
- // To do so, we walk the set of historical liquidity bucket (min, max) combinations
- // (where min_idx < max_idx, as having a minimum above our maximum is an invalid
- // state). For each pair, we calculate the probability as if the bucket's corresponding
- // min- and max- liquidity bounds were our current liquidity bounds and then multiply
- // that probability by the weight of the selected buckets.
- let payment_pos = amount_to_pos(amount_msat, capacity_msat);
- if payment_pos >= POSITION_TICKS { return None; }
-
- // Check if all our buckets are zero, once decayed and treat it as if we had no data. We
- // don't actually use the decayed buckets, though, as that would lose precision.
- let (total_valid_points_tracked, _)
- = self.get_total_valid_points(now, offset_history_last_updated, half_life)?;
-
let mut cumulative_success_prob_times_billion = 0;
// Special-case the 0th min bucket - it generally means we failed a payment, so only
// consider the highest (i.e. largest-offset-from-max-capacity) max bucket for all
let usage = ChannelUsage { amount_msat: 896, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
- // No decay
- SinceEpoch::advance(Duration::from_secs(4));
- let usage = ChannelUsage { amount_msat: 128, ..usage };
- assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
- let usage = ChannelUsage { amount_msat: 256, ..usage };
- assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 93);
- let usage = ChannelUsage { amount_msat: 768, ..usage };
- assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 1_479);
- let usage = ChannelUsage { amount_msat: 896, ..usage };
- assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
-
// Half decay (i.e., three-quarter life)
- SinceEpoch::advance(Duration::from_secs(1));
+ SinceEpoch::advance(Duration::from_secs(5));
+ scorer.decay_liquidity_certainty(Duration::from_secs(5));
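+ // (With this change, decay is applied only when decay_liquidity_certainty is called
+ // explicitly with the cumulative duration since the epoch; channel_penalty_msat no
+ // longer decays implicitly at scoring time.)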
let usage = ChannelUsage { amount_msat: 128, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 22);
let usage = ChannelUsage { amount_msat: 256, ..usage };
// One decay (i.e., half life)
SinceEpoch::advance(Duration::from_secs(5));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10));
let usage = ChannelUsage { amount_msat: 64, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
let usage = ChannelUsage { amount_msat: 128, ..usage };
// Fully decay liquidity lower bound.
SinceEpoch::advance(Duration::from_secs(10 * 7));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * 8));
let usage = ChannelUsage { amount_msat: 0, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
let usage = ChannelUsage { amount_msat: 1, ..usage };
// Fully decay liquidity upper bound.
SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * 9));
let usage = ChannelUsage { amount_msat: 0, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
let usage = ChannelUsage { amount_msat: 1_024, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * 10));
let usage = ChannelUsage { amount_msat: 0, ..usage };
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
let usage = ChannelUsage { amount_msat: 1_024, ..usage };
// An unchecked right shift 64 bits or more in DirectedChannelLiquidity::decayed_offset_msat
// would cause an overflow.
SinceEpoch::advance(Duration::from_secs(10 * 64));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * 64));
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 125);
SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * 65));
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 125);
}
// Decaying knowledge gives less confidence (128, 896), meaning a higher penalty.
SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10));
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 291);
// Reducing the upper bound gives more confidence (128, 832) that the payment amount (512)
// Further decaying affects the lower bound more than the upper bound (128, 928).
SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(20));
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 280);
}
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10));
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 473);
scorer.payment_path_failed(&payment_path_for_amount(250), 43, Duration::from_secs(10));
assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, &params), 300);
}
- #[test]
- fn decays_persisted_liquidity_bounds() {
+ fn do_decays_persisted_liquidity_bounds(decay_before_reload: bool) {
let logger = TestLogger::new();
let network_graph = network_graph(&logger);
let params = ProbabilisticScoringFeeParameters {
};
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
+ if decay_before_reload {
+ SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10));
+ }
+
let mut serialized_scorer = Vec::new();
scorer.write(&mut serialized_scorer).unwrap();
- SinceEpoch::advance(Duration::from_secs(10));
-
let mut serialized_scorer = io::Cursor::new(&serialized_scorer);
- let deserialized_scorer =
+ let mut deserialized_scorer =
<ProbabilisticScorer>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
+ if !decay_before_reload {
+ SinceEpoch::advance(Duration::from_secs(10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10));
+ deserialized_scorer.decay_liquidity_certainty(Duration::from_secs(10));
+ }
assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, &params), 473);
scorer.payment_path_failed(&payment_path_for_amount(250), 43, Duration::from_secs(10));
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 300);
SinceEpoch::advance(Duration::from_secs(10));
+ deserialized_scorer.decay_liquidity_certainty(Duration::from_secs(20));
assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, &params), 370);
}
+ #[test]
+ fn decays_persisted_liquidity_bounds() {
+ do_decays_persisted_liquidity_bounds(false);
+ do_decays_persisted_liquidity_bounds(true);
+ }
+
#[test]
fn scores_realistic_payments() {
// Shows the scores of "realistic" sends of 100k sats over channels of 1-10m sats (with a
// Advance the time forward 16 half-lives (which the docs claim will ensure all data is
// gone), and check that we're back to where we started.
SinceEpoch::advance(Duration::from_secs(10 * 16));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * 16));
{
let network_graph = network_graph.read_only();
let channel = network_graph.channel(42).unwrap();
// Once fully decayed we still have data, but it's all-0s. In the future we may remove
// the data entirely instead.
assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
- None);
+ Some(([0; 32], [0; 32])));
assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 1, &params), None);
let mut usage = ChannelUsage {
};
assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 2050);
- usage.inflight_htlc_msat = 0;
- assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 866);
let usage = ChannelUsage {
amount_msat: 1,
// Advance to decay all liquidity offsets to zero.
SinceEpoch::advance(Duration::from_secs(60 * 60 * 10));
+ scorer.decay_liquidity_certainty(Duration::from_secs(10 * (16 + 60 * 60)));
+
+ // Once even the bounds have decayed, information about the channel should be
+ // removed entirely.
+ assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+ None);
// Use a path in the opposite direction, which has zero for htlc_maximum_msat. This will
// ensure that the effective capacity is zero to test division-by-zero edge cases.