Update history bucket last_update time immediately on update
[rust-lightning] / lightning / src / routing / scoring.rs
index 25ea012e506dc0647bd74701b89e14c2b46219cc..e37da41755566c160983619988e74477f7095501 100644 (file)
@@ -121,10 +121,10 @@ pub trait ScoreUpdate {
        /// Handles updating channel penalties after a probe over the given path succeeded.
        fn probe_successful(&mut self, path: &Path, duration_since_epoch: Duration);
 
-       /// Scorers may wich to reduce their certainty of channel liquidity information over time.
+       /// Scorers may wish to reduce their certainty of channel liquidity information over time.
        /// Thus, this method is provided to allow scorers to observe the passage of time - the holder
        /// of this object should call this method regularly (generally via the
-       /// `lightning-background-processor` trait).
+       /// `lightning-background-processor` crate).
        fn decay_liquidity_certainty(&mut self, duration_since_epoch: Duration);
 }
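Since this change removes all in-place decay from the scoring path, the holder of the scorer must now drive decay itself by calling the method above periodically. A minimal caller sketch, assuming a std environment (the helper name and timer wiring are illustrative; only the `ScoreUpdate` method comes from this file):

    use std::time::{Duration, SystemTime, UNIX_EPOCH};
    use lightning::routing::scoring::ScoreUpdate;

    // Illustrative only: feed wall-clock time into the scorer on a timer so
    // liquidity bounds and history buckets keep decaying between payments.
    fn decay_once<S: ScoreUpdate>(scorer: &mut S) {
        let since_epoch = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap_or(Duration::from_secs(0));
        scorer.decay_liquidity_certainty(since_epoch);
    }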
 
@@ -493,7 +493,6 @@ where L::Target: Logger {
        decay_params: ProbabilisticScoringDecayParameters,
        network_graph: G,
        logger: L,
-       // TODO: Remove entries of closed channels.
        channel_liquidities: HashMap<u64, ChannelLiquidity<T>>,
 }
 
@@ -805,11 +804,14 @@ struct ChannelLiquidity<T: Time> {
        /// Upper channel liquidity bound in terms of an offset from the effective capacity.
        max_liquidity_offset_msat: u64,
 
+       min_liquidity_offset_history: HistoricalBucketRangeTracker,
+       max_liquidity_offset_history: HistoricalBucketRangeTracker,
+
        /// Time when the liquidity bounds were last modified.
        last_updated: T,
 
-       min_liquidity_offset_history: HistoricalBucketRangeTracker,
-       max_liquidity_offset_history: HistoricalBucketRangeTracker,
+       /// Time when the historical liquidity bounds were last modified.
+       offset_history_last_updated: T,
 }
 
 /// A snapshot of [`ChannelLiquidity`] in one direction assuming a certain channel capacity and
@@ -820,6 +822,7 @@ struct DirectedChannelLiquidity<L: Deref<Target = u64>, BRT: Deref<Target = Hist
        liquidity_history: HistoricalMinMaxBuckets<BRT>,
        capacity_msat: u64,
        last_updated: U,
+       offset_history_last_updated: U,
        now: T,
        decay_params: ProbabilisticScoringDecayParameters,
 }
@@ -847,8 +850,6 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
        /// Note that this writes roughly one line per channel for which we have a liquidity estimate,
        /// which may be a substantial amount of log output.
        pub fn debug_log_liquidity_stats(&self) {
-               let now = T::now();
-
                let graph = self.network_graph.read_only();
                for (scid, liq) in self.channel_liquidities.iter() {
                        if let Some(chan_debug) = graph.channels().get(scid) {
@@ -857,10 +858,8 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
                                                let amt = directed_info.effective_capacity().as_msat();
                                                let dir_liq = liq.as_directed(source, target, amt, self.decay_params);
 
-                                               let (min_buckets, max_buckets) = dir_liq.liquidity_history
-                                                       .get_decayed_buckets(now, *dir_liq.last_updated,
-                                                               self.decay_params.historical_no_updates_half_life)
-                                                       .unwrap_or(([0; 32], [0; 32]));
+                                               let min_buckets = &dir_liq.liquidity_history.min_liquidity_offset_history.buckets;
+                                               let max_buckets = &dir_liq.liquidity_history.max_liquidity_offset_history.buckets;
 
                                                log_debug!(self.logger, core::concat!(
                                                        "Liquidity from {} to {} via {} is in the range ({}, {}).\n",
@@ -939,7 +938,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
        /// in the top and bottom bucket, and roughly with similar (recent) frequency.
        ///
        /// Because the datapoints are decayed slowly over time, values will eventually return to
-       /// `Some(([1; 32], [1; 32]))` and then to `None` once no datapoints remain.
+       /// `Some(([0; 32], [0; 32]))` or `None` if no data remains for a channel.
        ///
        /// In order to fetch a single success probability from the buckets provided here, as used in
        /// the scoring model, see [`Self::historical_estimated_payment_success_probability`].
@@ -953,11 +952,8 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
                                        let amt = directed_info.effective_capacity().as_msat();
                                        let dir_liq = liq.as_directed(source, target, amt, self.decay_params);
 
-                                       let (min_buckets, mut max_buckets) =
-                                               dir_liq.liquidity_history.get_decayed_buckets(
-                                                       dir_liq.now, *dir_liq.last_updated,
-                                                       self.decay_params.historical_no_updates_half_life
-                                               )?;
+                                       let min_buckets = dir_liq.liquidity_history.min_liquidity_offset_history.buckets;
+                                       let mut max_buckets = dir_liq.liquidity_history.max_liquidity_offset_history.buckets;
 
                                        // Note that the liquidity buckets are an offset from the edge, so we inverse
                                        // the max order to get the probabilities from zero.
@@ -988,9 +984,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
                                        let dir_liq = liq.as_directed(source, target, capacity_msat, self.decay_params);
 
                                        return dir_liq.liquidity_history.calculate_success_probability_times_billion(
-                                               dir_liq.now, *dir_liq.last_updated,
-                                               self.decay_params.historical_no_updates_half_life, &params, amount_msat,
-                                               capacity_msat
+                                               &params, amount_msat, capacity_msat
                                        ).map(|p| p as f64 / (1024 * 1024 * 1024) as f64);
                                }
                        }
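Both accessors above now read the tracked buckets directly instead of decaying them on the fly, so a caller sees either live data, all-zero buckets (data decayed but the channel still tracked), or `None` (entry pruned). A small hypothetical helper illustrating those states, sketched against the `Option<([u16; 32], [u16; 32])>` shape returned by `historical_estimated_channel_liquidity_probabilities`:

    // Hypothetical interpretation of the three states a caller can now observe.
    fn describe_history(buckets: Option<([u16; 32], [u16; 32])>) -> &'static str {
        match buckets {
            None => "channel not tracked (or pruned by decay_liquidity_certainty)",
            Some((min, max)) if min == [0u16; 32] && max == [0u16; 32] =>
                "tracked, but all historical datapoints have decayed",
            Some(_) => "has historical liquidity datapoints",
        }
    }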
@@ -1008,6 +1002,7 @@ impl<T: Time> ChannelLiquidity<T> {
                        min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                        max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                        last_updated: T::now(),
+                       offset_history_last_updated: T::now(),
                }
        }
 
@@ -1034,6 +1029,7 @@ impl<T: Time> ChannelLiquidity<T> {
                        },
                        capacity_msat,
                        last_updated: &self.last_updated,
+                       offset_history_last_updated: &self.offset_history_last_updated,
                        now: T::now(),
                        decay_params: decay_params,
                }
@@ -1062,10 +1058,21 @@ impl<T: Time> ChannelLiquidity<T> {
                        },
                        capacity_msat,
                        last_updated: &mut self.last_updated,
+                       offset_history_last_updated: &mut self.offset_history_last_updated,
                        now: T::now(),
                        decay_params: decay_params,
                }
        }
+
+       fn decayed_offset(&self, offset: u64, decay_params: ProbabilisticScoringDecayParameters) -> u64 {
+               let half_life = decay_params.liquidity_offset_half_life.as_secs_f64();
+               if half_life != 0.0 {
+                       let elapsed_time = T::now().duration_since(self.last_updated).as_secs_f64();
+                       ((offset as f64) * powf64(0.5, elapsed_time / half_life)) as u64
+               } else {
+                       0
+               }
+       }
 }
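A rough numeric check of the formula in `decayed_offset` (sketch only; the 6-hour half-life is just an example value, and std's `f64::powf` stands in for the crate-internal `powf64` helper used above):

    // offset * 0.5^(elapsed / half_life): continuous exponential decay of a
    // liquidity offset, replacing the shift-based approximation removed below.
    fn decayed_offset(offset_msat: u64, elapsed_secs: f64, half_life_secs: f64) -> u64 {
        if half_life_secs == 0.0 { return 0; }
        ((offset_msat as f64) * 0.5f64.powf(elapsed_secs / half_life_secs)) as u64
    }

    fn main() {
        // With a 6-hour half-life, a 1_000_000 msat offset halves every 6 hours...
        assert_eq!(decayed_offset(1_000_000, 6.0 * 3600.0, 6.0 * 3600.0), 500_000);
        // ...and is down to roughly a quarter of its value after 12 hours.
        println!("{}", decayed_offset(1_000_000, 12.0 * 3600.0, 6.0 * 3600.0)); // ~250_000
    }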
 
 /// Bounds `-log10` to avoid excessive liquidity penalties for payments with low success
@@ -1197,9 +1204,8 @@ impl<L: Deref<Target = u64>, BRT: Deref<Target = HistoricalBucketRangeTracker>,
                if score_params.historical_liquidity_penalty_multiplier_msat != 0 ||
                   score_params.historical_liquidity_penalty_amount_multiplier_msat != 0 {
                        if let Some(cumulative_success_prob_times_billion) = self.liquidity_history
-                               .calculate_success_probability_times_billion(self.now, *self.last_updated,
-                                       self.decay_params.historical_no_updates_half_life, score_params, amount_msat,
-                                       self.capacity_msat)
+                               .calculate_success_probability_times_billion(
+                                       score_params, amount_msat, self.capacity_msat)
                        {
                                let historical_negative_log10_times_2048 = approx::negative_log10_times_2048(cumulative_success_prob_times_billion + 1, 1024 * 1024 * 1024);
                                res = res.saturating_add(Self::combined_penalty_msat(amount_msat,
@@ -1254,25 +1260,7 @@ impl<L: Deref<Target = u64>, BRT: Deref<Target = HistoricalBucketRangeTracker>,
        }
 
        fn decayed_offset_msat(&self, offset_msat: u64) -> u64 {
-               let half_life = self.decay_params.liquidity_offset_half_life.as_secs();
-               if half_life != 0 {
-                       // Decay the offset by the appropriate number of half lives. If half of the next half
-                       // life has passed, approximate an additional three-quarter life to help smooth out the
-                       // decay.
-                       let elapsed_time = self.now.duration_since(*self.last_updated).as_secs();
-                       let half_decays = elapsed_time / (half_life / 2);
-                       let decays = half_decays / 2;
-                       let decayed_offset_msat = offset_msat.checked_shr(decays as u32).unwrap_or(0);
-                       if half_decays % 2 == 0 {
-                               decayed_offset_msat
-                       } else {
-                               // 11_585 / 16_384 ~= core::f64::consts::FRAC_1_SQRT_2
-                               // 16_384 == 2^14
-                               (decayed_offset_msat as u128 * 11_585 / 16_384) as u64
-                       }
-               } else {
-                       0
-               }
+               offset_msat
        }
 }
 
@@ -1316,7 +1304,7 @@ impl<L: DerefMut<Target = u64>, BRT: DerefMut<Target = HistoricalBucketRangeTrac
        /// state"), we allow the caller to set an offset applied to our liquidity bounds which
        /// represents the amount of the successful payment we just made.
        fn update_history_buckets(&mut self, bucket_offset_msat: u64) {
-               let half_lives = self.now.duration_since(*self.last_updated).as_secs()
+               let half_lives = self.now.duration_since(*self.offset_history_last_updated).as_secs()
                        .checked_div(self.decay_params.historical_no_updates_half_life.as_secs())
                        .map(|v| v.try_into().unwrap_or(u32::max_value())).unwrap_or(u32::max_value());
                self.liquidity_history.min_liquidity_offset_history.time_decay_data(half_lives);
@@ -1330,6 +1318,7 @@ impl<L: DerefMut<Target = u64>, BRT: DerefMut<Target = HistoricalBucketRangeTrac
                self.liquidity_history.max_liquidity_offset_history.track_datapoint(
                        max_liquidity_offset_msat.saturating_sub(bucket_offset_msat), self.capacity_msat
                );
+               *self.offset_history_last_updated = self.now;
        }
 
        /// Adjusts the lower bound of the channel liquidity balance in this direction.
@@ -1480,7 +1469,32 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ScoreUpdate for Prob
                self.payment_path_failed(path, u64::max_value(), duration_since_epoch)
        }
 
-       fn decay_liquidity_certainty(&mut self, _duration_since_epoch: Duration) {}
+       fn decay_liquidity_certainty(&mut self, _duration_since_epoch: Duration) {
+               let decay_params = self.decay_params;
+               self.channel_liquidities.retain(|_scid, liquidity| {
+                       liquidity.min_liquidity_offset_msat = liquidity.decayed_offset(liquidity.min_liquidity_offset_msat, decay_params);
+                       liquidity.max_liquidity_offset_msat = liquidity.decayed_offset(liquidity.max_liquidity_offset_msat, decay_params);
+                       liquidity.last_updated = T::now();
+                       let elapsed_time =
+                               T::now().duration_since(liquidity.offset_history_last_updated);
+                       if elapsed_time > decay_params.historical_no_updates_half_life {
+                               let half_life = decay_params.historical_no_updates_half_life.as_secs_f64();
+                               if half_life != 0.0 {
+                                       let divisor = powf64(2048.0, elapsed_time.as_secs_f64() / half_life) as u64;
+                                       for bucket in liquidity.min_liquidity_offset_history.buckets.iter_mut() {
+                                               *bucket = ((*bucket as u64) * 1024 / divisor) as u16;
+                                       }
+                                       for bucket in liquidity.max_liquidity_offset_history.buckets.iter_mut() {
+                                               *bucket = ((*bucket as u64) * 1024 / divisor) as u16;
+                                       }
+                                       liquidity.offset_history_last_updated = T::now();
+                               }
+                       }
+                       liquidity.min_liquidity_offset_msat != 0 || liquidity.max_liquidity_offset_msat != 0 ||
+                               liquidity.min_liquidity_offset_history.buckets != [0; 32] ||
+                               liquidity.max_liquidity_offset_history.buckets != [0; 32]
+               });
+       }
 }
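A worked example of the fixed-point bucket decay above (sketch only; the 14-day half-life is merely illustrative, `f64::powf` stands in for `powf64`, and note the code above only applies this once the elapsed time exceeds the half-life):

    // Mirrors the arithmetic above: new = old * 1024 / 2048^(elapsed / half_life).
    // One half-life gives a factor of 1024/2048 = 1/2; longer gaps shrink the
    // buckets much faster, since the divisor keeps growing while 1024 does not.
    fn decay_bucket(bucket: u16, elapsed_secs: f64, half_life_secs: f64) -> u16 {
        let divisor = 2048f64.powf(elapsed_secs / half_life_secs) as u64;
        ((bucket as u64) * 1024 / divisor) as u16
    }

    fn main() {
        const HALF_LIFE: f64 = 14.0 * 24.0 * 3600.0;
        // After one half-life each bucket is halved.
        assert_eq!(decay_bucket(32, HALF_LIFE, HALF_LIFE), 16);
        // After two, small buckets hit zero; once every bucket and both liquidity
        // offsets are zero, the retain() above drops the channel entirely.
        assert_eq!(decay_bucket(32, 2.0 * HALF_LIFE, HALF_LIFE), 0);
    }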
 
 #[cfg(c_bindings)]
@@ -1918,7 +1932,7 @@ mod bucketed_history {
        /// in each of 32 buckets.
        #[derive(Clone, Copy)]
        pub(super) struct HistoricalBucketRangeTracker {
-               buckets: [u16; 32],
+               pub(super) buckets: [u16; 32],
        }
 
        /// Buckets are stored in fixed point numbers with a 5 bit fractional part. Thus, the value
@@ -1983,22 +1997,20 @@ mod bucketed_history {
        }
 
        impl<D: Deref<Target = HistoricalBucketRangeTracker>> HistoricalMinMaxBuckets<D> {
-               pub(super) fn get_decayed_buckets<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
-               -> Option<([u16; 32], [u16; 32])> {
-                       let (_, required_decays) = self.get_total_valid_points(now, last_updated, half_life)?;
-
-                       let mut min_buckets = *self.min_liquidity_offset_history;
-                       min_buckets.time_decay_data(required_decays);
-                       let mut max_buckets = *self.max_liquidity_offset_history;
-                       max_buckets.time_decay_data(required_decays);
-                       Some((min_buckets.buckets, max_buckets.buckets))
-               }
                #[inline]
-               pub(super) fn get_total_valid_points<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
-               -> Option<(u64, u32)> {
-                       let required_decays = now.duration_since(last_updated).as_secs()
-                               .checked_div(half_life.as_secs())
-                               .map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
+               pub(super) fn calculate_success_probability_times_billion(
+                       &self, params: &ProbabilisticScoringFeeParameters, amount_msat: u64,
+                       capacity_msat: u64
+               ) -> Option<u64> {
+                       // If historical penalties are enabled, we try to calculate a probability of success
+                       // given our historical distribution of min- and max-liquidity bounds in a channel.
+                       // To do so, we walk the set of historical liquidity bucket (min, max) combinations
+                       // (where min_idx < max_idx, as having a minimum above our maximum is an invalid
+                       // state). For each pair, we calculate the probability as if the bucket's corresponding
+                       // min- and max- liquidity bounds were our current liquidity bounds and then multiply
+                       // that probability by the weight of the selected buckets.
+                       let payment_pos = amount_to_pos(amount_msat, capacity_msat);
+                       if payment_pos >= POSITION_TICKS { return None; }
 
                        let mut total_valid_points_tracked = 0;
                        for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
@@ -2010,33 +2022,10 @@ mod bucketed_history {
                        // If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme),
                        // treat it as if we were fully decayed.
                        const FULLY_DECAYED: u16 = BUCKET_FIXED_POINT_ONE * BUCKET_FIXED_POINT_ONE;
-                       if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < FULLY_DECAYED.into() {
+                       if total_valid_points_tracked < FULLY_DECAYED.into() {
                                return None;
                        }
 
-                       Some((total_valid_points_tracked, required_decays))
-               }
-
-               #[inline]
-               pub(super) fn calculate_success_probability_times_billion<T: Time>(
-                       &self, now: T, last_updated: T, half_life: Duration,
-                       params: &ProbabilisticScoringFeeParameters, amount_msat: u64, capacity_msat: u64
-               ) -> Option<u64> {
-                       // If historical penalties are enabled, we try to calculate a probability of success
-                       // given our historical distribution of min- and max-liquidity bounds in a channel.
-                       // To do so, we walk the set of historical liquidity bucket (min, max) combinations
-                       // (where min_idx < max_idx, as having a minimum above our maximum is an invalid
-                       // state). For each pair, we calculate the probability as if the bucket's corresponding
-                       // min- and max- liquidity bounds were our current liquidity bounds and then multiply
-                       // that probability by the weight of the selected buckets.
-                       let payment_pos = amount_to_pos(amount_msat, capacity_msat);
-                       if payment_pos >= POSITION_TICKS { return None; }
-
-                       // Check if all our buckets are zero, once decayed and treat it as if we had no data. We
-                       // don't actually use the decayed buckets, though, as that would lose precision.
-                       let (total_valid_points_tracked, _)
-                               = self.get_total_valid_points(now, last_updated, half_life)?;
-
                        let mut cumulative_success_prob_times_billion = 0;
                        // Special-case the 0th min bucket - it generally means we failed a payment, so only
                        // consider the highest (i.e. largest-offset-from-max-capacity) max bucket for all
@@ -2128,6 +2117,8 @@ ReadableArgs<(ProbabilisticScoringDecayParameters, G, L)> for ProbabilisticScore
 impl<T: Time> Writeable for ChannelLiquidity<T> {
        #[inline]
        fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+               let offset_history_duration_since_epoch =
+                       T::duration_since_epoch() - self.offset_history_last_updated.elapsed();
                let duration_since_epoch = T::duration_since_epoch() - self.last_updated.elapsed();
                write_tlv_fields!(w, {
                        (0, self.min_liquidity_offset_msat, required),
@@ -2137,6 +2128,7 @@ impl<T: Time> Writeable for ChannelLiquidity<T> {
                        (4, duration_since_epoch, required),
                        (5, Some(self.min_liquidity_offset_history), option),
                        (7, Some(self.max_liquidity_offset_history), option),
+                       (9, offset_history_duration_since_epoch, required),
                });
                Ok(())
        }
@@ -2152,6 +2144,7 @@ impl<T: Time> Readable for ChannelLiquidity<T> {
                let mut min_liquidity_offset_history: Option<HistoricalBucketRangeTracker> = None;
                let mut max_liquidity_offset_history: Option<HistoricalBucketRangeTracker> = None;
                let mut duration_since_epoch = Duration::from_secs(0);
+               let mut offset_history_duration_since_epoch = None;
                read_tlv_fields!(r, {
                        (0, min_liquidity_offset_msat, required),
                        (1, legacy_min_liq_offset_history, option),
@@ -2160,6 +2153,7 @@ impl<T: Time> Readable for ChannelLiquidity<T> {
                        (4, duration_since_epoch, required),
                        (5, min_liquidity_offset_history, option),
                        (7, max_liquidity_offset_history, option),
+                       (9, offset_history_duration_since_epoch, option),
                });
                // On rust prior to 1.60 `Instant::duration_since` will panic if time goes backwards.
                // We write `last_updated` as wallclock time even though it's ultimately an `Instant` (which
@@ -2173,6 +2167,13 @@ impl<T: Time> Readable for ChannelLiquidity<T> {
                let last_updated = if wall_clock_now > duration_since_epoch {
                        now - (wall_clock_now - duration_since_epoch)
                } else { now };
+
+               let offset_history_duration_since_epoch =
+                       offset_history_duration_since_epoch.unwrap_or(duration_since_epoch);
+               let offset_history_last_updated = if wall_clock_now > offset_history_duration_since_epoch {
+                       now - (wall_clock_now - offset_history_duration_since_epoch)
+               } else { now };
+
                if min_liquidity_offset_history.is_none() {
                        if let Some(legacy_buckets) = legacy_min_liq_offset_history {
                                min_liquidity_offset_history = Some(legacy_buckets.into_current());
@@ -2193,6 +2194,7 @@ impl<T: Time> Readable for ChannelLiquidity<T> {
                        min_liquidity_offset_history: min_liquidity_offset_history.unwrap(),
                        max_liquidity_offset_history: max_liquidity_offset_history.unwrap(),
                        last_updated,
+                       offset_history_last_updated,
                })
        }
 }
@@ -2368,18 +2370,21 @@ mod tests {
        fn liquidity_bounds_directed_from_lowest_node_id() {
                let logger = TestLogger::new();
                let last_updated = SinceEpoch::now();
+               let offset_history_last_updated = SinceEpoch::now();
                let network_graph = network_graph(&logger);
                let decay_params = ProbabilisticScoringDecayParameters::default();
                let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
                        .with_channel(42,
                                ChannelLiquidity {
-                                       min_liquidity_offset_msat: 700, max_liquidity_offset_msat: 100, last_updated,
+                                       min_liquidity_offset_msat: 700, max_liquidity_offset_msat: 100,
+                                       last_updated, offset_history_last_updated,
                                        min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                        max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                })
                        .with_channel(43,
                                ChannelLiquidity {
-                                       min_liquidity_offset_msat: 700, max_liquidity_offset_msat: 100, last_updated,
+                                       min_liquidity_offset_msat: 700, max_liquidity_offset_msat: 100,
+                                       last_updated, offset_history_last_updated,
                                        min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                        max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                });
@@ -2446,12 +2451,14 @@ mod tests {
        fn resets_liquidity_upper_bound_when_crossed_by_lower_bound() {
                let logger = TestLogger::new();
                let last_updated = SinceEpoch::now();
+               let offset_history_last_updated = SinceEpoch::now();
                let network_graph = network_graph(&logger);
                let decay_params = ProbabilisticScoringDecayParameters::default();
                let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
                        .with_channel(42,
                                ChannelLiquidity {
-                                       min_liquidity_offset_msat: 200, max_liquidity_offset_msat: 400, last_updated,
+                                       min_liquidity_offset_msat: 200, max_liquidity_offset_msat: 400,
+                                       last_updated, offset_history_last_updated,
                                        min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                        max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                });
@@ -2505,12 +2512,14 @@ mod tests {
        fn resets_liquidity_lower_bound_when_crossed_by_upper_bound() {
                let logger = TestLogger::new();
                let last_updated = SinceEpoch::now();
+               let offset_history_last_updated = SinceEpoch::now();
                let network_graph = network_graph(&logger);
                let decay_params = ProbabilisticScoringDecayParameters::default();
                let mut scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
                        .with_channel(42,
                                ChannelLiquidity {
-                                       min_liquidity_offset_msat: 200, max_liquidity_offset_msat: 400, last_updated,
+                                       min_liquidity_offset_msat: 200, max_liquidity_offset_msat: 400,
+                                       last_updated, offset_history_last_updated,
                                        min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                        max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                });
@@ -2616,6 +2625,7 @@ mod tests {
        fn constant_penalty_outside_liquidity_bounds() {
                let logger = TestLogger::new();
                let last_updated = SinceEpoch::now();
+               let offset_history_last_updated = SinceEpoch::now();
                let network_graph = network_graph(&logger);
                let params = ProbabilisticScoringFeeParameters {
                        liquidity_penalty_multiplier_msat: 1_000,
@@ -2628,7 +2638,8 @@ mod tests {
                let scorer = ProbabilisticScorer::new(decay_params, &network_graph, &logger)
                        .with_channel(42,
                                ChannelLiquidity {
-                                       min_liquidity_offset_msat: 40, max_liquidity_offset_msat: 40, last_updated,
+                                       min_liquidity_offset_msat: 40, max_liquidity_offset_msat: 40,
+                                       last_updated, offset_history_last_updated,
                                        min_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                        max_liquidity_offset_history: HistoricalBucketRangeTracker::new(),
                                });
@@ -2946,19 +2957,9 @@ mod tests {
                let usage = ChannelUsage { amount_msat: 896, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
 
-               // No decay
-               SinceEpoch::advance(Duration::from_secs(4));
-               let usage = ChannelUsage { amount_msat: 128, ..usage };
-               assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
-               let usage = ChannelUsage { amount_msat: 256, ..usage };
-               assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 93);
-               let usage = ChannelUsage { amount_msat: 768, ..usage };
-               assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 1_479);
-               let usage = ChannelUsage { amount_msat: 896, ..usage };
-               assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
-
                // Half decay (i.e., three-quarter life)
-               SinceEpoch::advance(Duration::from_secs(1));
+               SinceEpoch::advance(Duration::from_secs(5));
+               scorer.decay_liquidity_certainty(Duration::from_secs(5));
                let usage = ChannelUsage { amount_msat: 128, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 22);
                let usage = ChannelUsage { amount_msat: 256, ..usage };
@@ -2970,6 +2971,7 @@ mod tests {
 
                // One decay (i.e., half life)
                SinceEpoch::advance(Duration::from_secs(5));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10));
                let usage = ChannelUsage { amount_msat: 64, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
                let usage = ChannelUsage { amount_msat: 128, ..usage };
@@ -2981,6 +2983,7 @@ mod tests {
 
                // Fully decay liquidity lower bound.
                SinceEpoch::advance(Duration::from_secs(10 * 7));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * 8));
                let usage = ChannelUsage { amount_msat: 0, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
                let usage = ChannelUsage { amount_msat: 1, ..usage };
@@ -2992,12 +2995,14 @@ mod tests {
 
                // Fully decay liquidity upper bound.
                SinceEpoch::advance(Duration::from_secs(10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * 9));
                let usage = ChannelUsage { amount_msat: 0, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
                let usage = ChannelUsage { amount_msat: 1_024, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
 
                SinceEpoch::advance(Duration::from_secs(10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * 10));
                let usage = ChannelUsage { amount_msat: 0, ..usage };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 0);
                let usage = ChannelUsage { amount_msat: 1_024, ..usage };
@@ -3037,9 +3042,11 @@ mod tests {
                // An unchecked right shift 64 bits or more in DirectedChannelLiquidity::decayed_offset_msat
                // would cause an overflow.
                SinceEpoch::advance(Duration::from_secs(10 * 64));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * 64));
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 125);
 
                SinceEpoch::advance(Duration::from_secs(10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * 65));
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 125);
        }
 
@@ -3078,6 +3085,7 @@ mod tests {
 
                // Decaying knowledge gives less confidence (128, 896), meaning a higher penalty.
                SinceEpoch::advance(Duration::from_secs(10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10));
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 291);
 
                // Reducing the upper bound gives more confidence (128, 832) that the payment amount (512)
@@ -3092,6 +3100,7 @@ mod tests {
 
                // Further decaying affects the lower bound more than the upper bound (128, 928).
                SinceEpoch::advance(Duration::from_secs(10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(20));
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 280);
        }
 
@@ -3126,6 +3135,7 @@ mod tests {
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
 
                SinceEpoch::advance(Duration::from_secs(10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10));
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 473);
 
                scorer.payment_path_failed(&payment_path_for_amount(250), 43, Duration::from_secs(10));
@@ -3140,8 +3150,7 @@ mod tests {
                assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, &params), 300);
        }
 
-       #[test]
-       fn decays_persisted_liquidity_bounds() {
+       fn do_decays_persisted_liquidity_bounds(decay_before_reload: bool) {
                let logger = TestLogger::new();
                let network_graph = network_graph(&logger);
                let params = ProbabilisticScoringFeeParameters {
@@ -3170,23 +3179,38 @@ mod tests {
                };
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), u64::max_value());
 
+               if decay_before_reload {
+                       SinceEpoch::advance(Duration::from_secs(10));
+                       scorer.decay_liquidity_certainty(Duration::from_secs(10));
+               }
+
                let mut serialized_scorer = Vec::new();
                scorer.write(&mut serialized_scorer).unwrap();
 
-               SinceEpoch::advance(Duration::from_secs(10));
-
                let mut serialized_scorer = io::Cursor::new(&serialized_scorer);
-               let deserialized_scorer =
+               let mut deserialized_scorer =
                        <ProbabilisticScorer>::read(&mut serialized_scorer, (decay_params, &network_graph, &logger)).unwrap();
+               if !decay_before_reload {
+                       SinceEpoch::advance(Duration::from_secs(10));
+                       scorer.decay_liquidity_certainty(Duration::from_secs(10));
+                       deserialized_scorer.decay_liquidity_certainty(Duration::from_secs(10));
+               }
                assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, &params), 473);
 
                scorer.payment_path_failed(&payment_path_for_amount(250), 43, Duration::from_secs(10));
                assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 300);
 
                SinceEpoch::advance(Duration::from_secs(10));
+               deserialized_scorer.decay_liquidity_certainty(Duration::from_secs(20));
                assert_eq!(deserialized_scorer.channel_penalty_msat(&candidate, usage, &params), 370);
        }
 
+       #[test]
+       fn decays_persisted_liquidity_bounds() {
+               do_decays_persisted_liquidity_bounds(false);
+               do_decays_persisted_liquidity_bounds(true);
+       }
+
        #[test]
        fn scores_realistic_payments() {
                // Shows the scores of "realistic" sends of 100k sats over channels of 1-10m sats (with a
@@ -3511,6 +3535,7 @@ mod tests {
                // Advance the time forward 16 half-lives (which the docs claim will ensure all data is
                // gone), and check that we're back to where we started.
                SinceEpoch::advance(Duration::from_secs(10 * 16));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * 16));
                {
                        let network_graph = network_graph.read_only();
                        let channel = network_graph.channel(42).unwrap();
@@ -3525,7 +3550,7 @@ mod tests {
                // Once fully decayed we still have data, but it's all-0s. In the future we may remove the
                // data entirely instead.
                assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
-                       None);
+                       Some(([0; 32], [0; 32])));
                assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 1, &params), None);
 
                let mut usage = ChannelUsage {
@@ -3544,8 +3569,6 @@ mod tests {
                        };
 
                        assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 2050);
-                       usage.inflight_htlc_msat = 0;
-                       assert_eq!(scorer.channel_penalty_msat(&candidate, usage, &params), 866);
 
                        let usage = ChannelUsage {
                                amount_msat: 1,
@@ -3557,6 +3580,12 @@ mod tests {
 
                // Advance to decay all liquidity offsets to zero.
                SinceEpoch::advance(Duration::from_secs(60 * 60 * 10));
+               scorer.decay_liquidity_certainty(Duration::from_secs(10 * (16 + 60 * 60)));
+
+               // Once even the bounds have decayed, information about the channel should be
+               // removed entirely.
+               assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
+                       None);
 
                // Use a path in the opposite direction, which have zero for htlc_maximum_msat. This will
                // ensure that the effective capacity is zero to test division-by-zero edge cases.