[Java] Update auto-generated Java bindings
[ldk-java] src/main/java/org/ldk/structs/ProbabilisticScorer.java
index c2ac732b4bfdca94b1a8137585b11c8086414dd6..71658b3f3f14e3c00957d7710f6b58c717a9ceb7 100644
@@ -11,19 +11,28 @@ import javax.annotation.Nullable;
 /**
  * [`Score`] implementation using channel success probability distributions.
  * 
- * Based on *Optimally Reliable & Cheap Payment Flows on the Lightning Network* by Rene Pickhardt
- * and Stefan Richter [[1]]. Given the uncertainty of channel liquidity balances, probability
- * distributions are defined based on knowledge learned from successful and unsuccessful attempts.
- * Then the negative `log10` of the success probability is used to determine the cost of routing a
- * specific HTLC amount through a channel.
+ * Channels are tracked with upper and lower liquidity bounds - when an HTLC fails at a channel,
+ * we learn that the upper bound on the available liquidity is lower than the amount of the HTLC.
+ * When a payment is forwarded through a channel (but fails later in the route), we learn that the
+ * lower bound on the channel's available liquidity must be at least the value of the HTLC.
  * 
- * Knowledge about channel liquidity balances takes the form of upper and lower bounds on the
- * possible liquidity. Certainty of the bounds is decreased over time using a decay function. See
- * [`ProbabilisticScoringParameters`] for details.
+ * These bounds are then used to determine a success probability using the formula from
+ * *Optimally Reliable & Cheap Payment Flows on the Lightning Network* by Rene Pickhardt
+ * and Stefan Richter [[1]] (i.e. `(upper_bound - payment_amount) / (upper_bound - lower_bound)`).
  * 
- * Since the scorer aims to learn the current channel liquidity balances, it works best for nodes
- * with high payment volume or that actively probe the [`NetworkGraph`]. Nodes with low payment
- * volume are more likely to experience failed payment paths, which would need to be retried.
+ * This probability is combined with the [`liquidity_penalty_multiplier_msat`] and
+ * [`liquidity_penalty_amount_multiplier_msat`] parameters to calculate a concrete penalty in
+ * milli-satoshis. The penalties, when added across all hops, are linear in the negative `log10`
+ * of the entire path's success probability, which allows the router to directly compare penalties
+ * for different paths. See the documentation of those parameters for the exact formulas.
+ * 
+ * The liquidity bounds are decayed by halving them every [`liquidity_offset_half_life`].
+ * 
+ * Further, we track the history of our upper and lower liquidity bounds for each channel,
+ * allowing us to assign a second penalty (using [`historical_liquidity_penalty_multiplier_msat`]
+ * and [`historical_liquidity_penalty_amount_multiplier_msat`]) based on the same probability
+ * formula, but using the history of a channel rather than our latest estimates for the liquidity
+ * bounds.
  * 
  * # Note
  * 
@@ -31,6 +40,11 @@ import javax.annotation.Nullable;
  * behavior.
  * 
  * [1]: https://arxiv.org/abs/2107.05322
+ * [`liquidity_penalty_multiplier_msat`]: ProbabilisticScoringParameters::liquidity_penalty_multiplier_msat
+ * [`liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringParameters::liquidity_penalty_amount_multiplier_msat
+ * [`liquidity_offset_half_life`]: ProbabilisticScoringParameters::liquidity_offset_half_life
+ * [`historical_liquidity_penalty_multiplier_msat`]: ProbabilisticScoringParameters::historical_liquidity_penalty_multiplier_msat
+ * [`historical_liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringParameters::historical_liquidity_penalty_amount_multiplier_msat
  */
 @SuppressWarnings("unchecked") // We correctly assign various generic arrays
 public class ProbabilisticScorer extends CommonBase {
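
The formulas described in the class documentation above can be made concrete with a small, self-contained sketch. The following is illustrative only and is not part of the generated bindings: the helper names and the 2^20 scaling of the amount-based term are assumptions on our part; the authoritative formulas are those documented on ProbabilisticScoringParameters.

// Illustrative sketch only -- not generated bindings code. It mirrors the success-probability
// and penalty description in the class docs above; names and the 2^20 amount scaling are
// assumptions, so consult ProbabilisticScoringParameters for the exact formulas.
final class LiquidityPenaltySketch {
	/** Success probability per the docs: (upper_bound - payment_amount) / (upper_bound - lower_bound). */
	static double successProbability(long amountMsat, long lowerBoundMsat, long upperBoundMsat) {
		if (amountMsat >= upperBoundMsat) return 0.0; // more than the channel can possibly carry
		if (amountMsat < lowerBoundMsat) return 1.0;  // below the known lower bound, assumed to succeed
		return (double) (upperBoundMsat - amountMsat) / (double) (upperBoundMsat - lowerBoundMsat);
	}

	/** Combines the two multipliers with -log10(probability), as the docs describe. */
	static long penaltyMsat(long amountMsat, double successProbability,
			long liquidityPenaltyMultiplierMsat, long liquidityPenaltyAmountMultiplierMsat) {
		if (successProbability <= 0.0) return Long.MAX_VALUE; // effectively unroutable
		double negLog10 = -Math.log10(successProbability);
		double basePenalty = liquidityPenaltyMultiplierMsat * negLog10;
		// Amount-based term scaled down by 2^20 (an assumption here; see the parameter docs).
		double amountPenalty = liquidityPenaltyAmountMultiplierMsat * negLog10 * amountMsat / (double) (1 << 20);
		return (long) (basePenalty + amountPenalty);
	}
}

Because each hop contributes a term proportional to -log10 of its own success probability, summing the penalties over a path yields a value proportional to -log10 of the product of the per-hop probabilities, i.e. of the path's overall success probability, which is what lets the router compare candidate paths directly.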
@@ -52,10 +66,10 @@ public class ProbabilisticScorer extends CommonBase {
                Reference.reachabilityFence(logger);
                if (ret >= 0 && ret <= 4096) { return null; }
                org.ldk.structs.ProbabilisticScorer ret_hu_conv = null; if (ret < 0 || ret > 4096) { ret_hu_conv = new org.ldk.structs.ProbabilisticScorer(null, ret); }
-               ret_hu_conv.ptrs_to.add(ret_hu_conv);
-               ret_hu_conv.ptrs_to.add(params);
-               ret_hu_conv.ptrs_to.add(network_graph);
-               ret_hu_conv.ptrs_to.add(logger);
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(ret_hu_conv); };
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(params); };
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(network_graph); };
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(logger); };
                return ret_hu_conv;
        }
 
@@ -81,8 +95,8 @@ public class ProbabilisticScorer extends CommonBase {
                Reference.reachabilityFence(target);
                if (ret >= 0 && ret <= 4096) { return null; }
                org.ldk.structs.Option_C2Tuple_u64u64ZZ ret_hu_conv = org.ldk.structs.Option_C2Tuple_u64u64ZZ.constr_from_ptr(ret);
-               ret_hu_conv.ptrs_to.add(this);
-               this.ptrs_to.add(target);
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(this); };
+               if (this != null) { this.ptrs_to.add(target); };
                return ret_hu_conv;
        }
 
@@ -94,7 +108,7 @@ public class ProbabilisticScorer extends CommonBase {
                bindings.ProbabilisticScorer_add_banned(this.ptr, node_id == null ? 0 : node_id.ptr);
                Reference.reachabilityFence(this);
                Reference.reachabilityFence(node_id);
-               this.ptrs_to.add(node_id);
+               if (this != null) { this.ptrs_to.add(node_id); };
        }
 
        /**
@@ -104,7 +118,7 @@ public class ProbabilisticScorer extends CommonBase {
                bindings.ProbabilisticScorer_remove_banned(this.ptr, node_id == null ? 0 : node_id.ptr);
                Reference.reachabilityFence(this);
                Reference.reachabilityFence(node_id);
-               this.ptrs_to.add(node_id);
+               if (this != null) { this.ptrs_to.add(node_id); };
        }
 
        /**
@@ -115,7 +129,7 @@ public class ProbabilisticScorer extends CommonBase {
                Reference.reachabilityFence(this);
                Reference.reachabilityFence(node_id);
                Reference.reachabilityFence(penalty);
-               this.ptrs_to.add(node_id);
+               if (this != null) { this.ptrs_to.add(node_id); };
        }
 
        /**
@@ -125,7 +139,7 @@ public class ProbabilisticScorer extends CommonBase {
                bindings.ProbabilisticScorer_remove_manual_penalty(this.ptr, node_id == null ? 0 : node_id.ptr);
                Reference.reachabilityFence(this);
                Reference.reachabilityFence(node_id);
-               this.ptrs_to.add(node_id);
+               if (this != null) { this.ptrs_to.add(node_id); };
        }
 
        /**
@@ -145,7 +159,7 @@ public class ProbabilisticScorer extends CommonBase {
                Reference.reachabilityFence(this);
                if (ret >= 0 && ret <= 4096) { return null; }
                Score ret_hu_conv = new Score(null, ret);
-               ret_hu_conv.ptrs_to.add(this);
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(this); };
                return ret_hu_conv;
        }
 
@@ -169,9 +183,9 @@ public class ProbabilisticScorer extends CommonBase {
                Reference.reachabilityFence(arg_c);
                if (ret >= 0 && ret <= 4096) { return null; }
                Result_ProbabilisticScorerDecodeErrorZ ret_hu_conv = Result_ProbabilisticScorerDecodeErrorZ.constr_from_ptr(ret);
-               ret_hu_conv.ptrs_to.add(arg_a);
-               ret_hu_conv.ptrs_to.add(arg_b);
-               ret_hu_conv.ptrs_to.add(arg_c);
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(arg_a); };
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(arg_b); };
+               if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(arg_c); };
                return ret_hu_conv;
        }
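
For context on where these pointer-bookkeeping changes land, here is a minimal usage sketch of the bindings touched in this diff. The method names (`of`, `estimated_channel_liquidity_range`, `as_Score`) are the ones whose bodies appear in the hunks above; the surrounding setup (`params`, `network_graph`, `logger`, the `scid`/`target` pair) is assumed to be built elsewhere and is not prescribed here.

// Usage sketch, assuming the ldk-java classes referenced in this diff are on the classpath.
import org.ldk.structs.Logger;
import org.ldk.structs.NetworkGraph;
import org.ldk.structs.NodeId;
import org.ldk.structs.Option_C2Tuple_u64u64ZZ;
import org.ldk.structs.ProbabilisticScorer;
import org.ldk.structs.ProbabilisticScoringParameters;
import org.ldk.structs.Score;

class ScorerUsageSketch {
	static Score buildScorer(ProbabilisticScoringParameters params, NetworkGraph network_graph,
			Logger logger, long scid, NodeId target) {
		// Static constructor whose ptrs_to bookkeeping is null-guarded in the hunk above.
		ProbabilisticScorer scorer = ProbabilisticScorer.of(params, network_graph, logger);

		// Tracked liquidity bounds for channel `scid` toward `target`. The result may be
		// null, matching the `ret >= 0 && ret <= 4096` guard in the generated method.
		Option_C2Tuple_u64u64ZZ bounds = scorer.estimated_channel_liquidity_range(scid, target);

		// Hand the scorer to the router as a `Score` implementation.
		return scorer.as_Score();
	}
}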