package org.ldk.structs;

import org.ldk.impl.bindings;
import org.ldk.enums.*;
import org.ldk.util.*;
import java.util.Arrays;
import java.lang.ref.Reference;
import javax.annotation.Nullable;


/**
 * [`Score`] implementation using channel success probability distributions.
 * 
 * Channels are tracked with upper and lower liquidity bounds - when an HTLC fails at a channel,
 * we learn that the upper-bound on the available liquidity is lower than the amount of the HTLC.
 * When a payment is forwarded through a channel (but fails later in the route), we learn the
 * lower-bound on the channel's available liquidity must be at least the value of the HTLC.
 * 
 * These bounds are then used to determine a success probability using the formula from
 * *Optimally Reliable & Cheap Payment Flows on the Lightning Network* by Rene Pickhardt
 * and Stefan Richter [[1]] (i.e. `(upper_bound - payment_amount) / (upper_bound - lower_bound)`).
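 * 
 * As a purely illustrative sketch of that formula (the numbers below are made up, not taken
 * from any real channel):
 * <pre>{@code
 * long lower_bound_msat = 0;
 * long upper_bound_msat = 1_000_000;
 * long payment_amount_msat = 250_000;
 * // (upper_bound - payment_amount) / (upper_bound - lower_bound) = 0.75
 * double success_probability =
 *     (double) (upper_bound_msat - payment_amount_msat) / (double) (upper_bound_msat - lower_bound_msat);
 * }</pre>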
 * 
 * This probability is combined with the [`liquidity_penalty_multiplier_msat`] and
 * [`liquidity_penalty_amount_multiplier_msat`] parameters to calculate a concrete penalty in
 * milli-satoshis. The penalties, when added across all hops, have the property of being linear in
 * terms of the entire path's success probability. This allows the router to directly compare
 * penalties for different paths. See the documentation of those parameters for the exact formulas.
 * 
 * The liquidity bounds are decayed by halving them every [`liquidity_offset_half_life`].
 * 
 * Further, we track the history of our upper and lower liquidity bounds for each channel,
 * allowing us to assign a second penalty (using [`historical_liquidity_penalty_multiplier_msat`]
 * and [`historical_liquidity_penalty_amount_multiplier_msat`]) based on the same probability
 * formula, but using the history of a channel rather than our latest estimates for the liquidity
 * bounds.
 * 
 * # Note
 * 
 * Mixing the `no-std` feature between serialization and deserialization results in undefined
 * behavior.
 * 
 * [1]: https://arxiv.org/abs/2107.05322
 * [`liquidity_penalty_multiplier_msat`]: ProbabilisticScoringParameters::liquidity_penalty_multiplier_msat
 * [`liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringParameters::liquidity_penalty_amount_multiplier_msat
 * [`liquidity_offset_half_life`]: ProbabilisticScoringParameters::liquidity_offset_half_life
 * [`historical_liquidity_penalty_multiplier_msat`]: ProbabilisticScoringParameters::historical_liquidity_penalty_multiplier_msat
 * [`historical_liquidity_penalty_amount_multiplier_msat`]: ProbabilisticScoringParameters::historical_liquidity_penalty_amount_multiplier_msat
 */
@SuppressWarnings("unchecked") // We correctly assign various generic arrays
public class ProbabilisticScorer extends CommonBase {
	ProbabilisticScorer(Object _dummy, long ptr) { super(ptr); }
	@Override @SuppressWarnings("deprecation")
	protected void finalize() throws Throwable {
		super.finalize();
		if (ptr != 0) { bindings.ProbabilisticScorer_free(ptr); }
	}

	/**
	 * Creates a new scorer using the given scoring parameters for sending payments from a node
	 * through a network graph.
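	 * 
	 * A minimal usage sketch (assuming `params`, `network_graph` and `logger` have already been
	 * constructed elsewhere; the variable names are illustrative only):
	 * <pre>{@code
	 * ProbabilisticScorer scorer = ProbabilisticScorer.of(params, network_graph, logger);
	 * // The scorer can then be exposed to routing code via scorer.as_Score().
	 * }</pre>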
	 */
	public static ProbabilisticScorer of(org.ldk.structs.ProbabilisticScoringParameters params, org.ldk.structs.NetworkGraph network_graph, org.ldk.structs.Logger logger) {
		long ret = bindings.ProbabilisticScorer_new(params == null ? 0 : params.ptr, network_graph == null ? 0 : network_graph.ptr, logger == null ? 0 : logger.ptr);
		Reference.reachabilityFence(params);
		Reference.reachabilityFence(network_graph);
		Reference.reachabilityFence(logger);
		if (ret >= 0 && ret <= 4096) { return null; }
		org.ldk.structs.ProbabilisticScorer ret_hu_conv = null; if (ret < 0 || ret > 4096) { ret_hu_conv = new org.ldk.structs.ProbabilisticScorer(null, ret); }
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(ret_hu_conv); };
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(params); };
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(network_graph); };
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(logger); };
		return ret_hu_conv;
	}

	/**
	 * Dump the contents of this scorer into the configured logger.
	 * 
	 * Note that this writes roughly one line per channel for which we have a liquidity estimate,
	 * which may be a substantial amount of log output.
	 */
	public void debug_log_liquidity_stats() {
		bindings.ProbabilisticScorer_debug_log_liquidity_stats(this.ptr);
		Reference.reachabilityFence(this);
	}

	/**
	 * Query the estimated minimum and maximum liquidity available for sending a payment over the
	 * channel with `scid` towards the given `target` node.
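	 * 
	 * A minimal sketch of querying the current estimate (assuming `scorer` is a
	 * previously-constructed ProbabilisticScorer and `scid`/`target` identify a known channel
	 * direction; names are illustrative only):
	 * <pre>{@code
	 * Option_C2Tuple_u64u64ZZ liquidity_bounds =
	 *     scorer.estimated_channel_liquidity_range(scid, target);
	 * // A non-None value contains the estimated (minimum, maximum) liquidity bounds.
	 * }</pre>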
	 */
	public Option_C2Tuple_u64u64ZZ estimated_channel_liquidity_range(long scid, org.ldk.structs.NodeId target) {
		long ret = bindings.ProbabilisticScorer_estimated_channel_liquidity_range(this.ptr, scid, target == null ? 0 : target.ptr);
		Reference.reachabilityFence(this);
		Reference.reachabilityFence(scid);
		Reference.reachabilityFence(target);
		if (ret >= 0 && ret <= 4096) { return null; }
		org.ldk.structs.Option_C2Tuple_u64u64ZZ ret_hu_conv = org.ldk.structs.Option_C2Tuple_u64u64ZZ.constr_from_ptr(ret);
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(this); };
		if (this != null) { this.ptrs_to.add(target); };
		return ret_hu_conv;
	}

	/**
	 * Query the historical estimated minimum and maximum liquidity available for sending a
	 * payment over the channel with `scid` towards the given `target` node.
	 * 
	 * Returns two sets of 8 buckets. The first set describes the octiles for lower-bound
	 * liquidity estimates, the second set describes the octiles for upper-bound liquidity
	 * estimates. Each bucket describes the relative frequency at which we've seen a liquidity
	 * bound in the octile relative to the channel's total capacity, on an arbitrary scale.
	 * Because the values are slowly decayed, more recent data points are weighted more heavily
	 * than older data points.
	 * 
	 * When scoring, the estimated probability that an upper-/lower-bound lies in a given octile
	 * relative to the channel's total capacity is calculated by dividing that bucket's value by
	 * the total of all buckets for the given bound.
	 * 
	 * For example, a value of `[0, 0, 0, 0, 0, 0, 0, 32]` indicates that we believe the probability
	 * of a bound being in the top octile to be 100%, and have never (recently) seen it in any
	 * other octiles. A value of `[31, 0, 0, 0, 0, 0, 0, 32]` indicates we've seen the bound being
	 * both in the top and bottom octile, and roughly with similar (recent) frequency.
	 * 
	 * Because the data points are decayed slowly over time, values will eventually return to
	 * `Some(([0; 8], [0; 8]))`.
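	 * 
	 * A minimal sketch of turning one of the returned 8-bucket histograms into per-octile
	 * probabilities, as described above (the `buckets` array is assumed to be one of the two
	 * 8-element arrays returned by this method; names are illustrative only):
	 * <pre>{@code
	 * // `buckets` holds the relative frequencies for one bound, e.g. the lower-bound histogram.
	 * long total = 0;
	 * for (short bucket : buckets) { total += bucket; }
	 * double[] octile_probabilities = new double[buckets.length];
	 * if (total != 0) {
	 *     for (int i = 0; i < buckets.length; i++) {
	 *         // Each octile's probability is its bucket value divided by the sum of all buckets.
	 *         octile_probabilities[i] = (double) buckets[i] / (double) total;
	 *     }
	 * }
	 * }</pre>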
	 */
	public Option_C2Tuple_EightU16sEightU16sZZ historical_estimated_channel_liquidity_probabilities(long scid, org.ldk.structs.NodeId target) {
		long ret = bindings.ProbabilisticScorer_historical_estimated_channel_liquidity_probabilities(this.ptr, scid, target == null ? 0 : target.ptr);
		Reference.reachabilityFence(this);
		Reference.reachabilityFence(scid);
		Reference.reachabilityFence(target);
		if (ret >= 0 && ret <= 4096) { return null; }
		org.ldk.structs.Option_C2Tuple_EightU16sEightU16sZZ ret_hu_conv = org.ldk.structs.Option_C2Tuple_EightU16sEightU16sZZ.constr_from_ptr(ret);
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(this); };
		if (this != null) { this.ptrs_to.add(target); };
		return ret_hu_conv;
	}

	/**
	 * Marks the node with the given `node_id` as banned, i.e.,
	 * it will be avoided during path finding.
	 */
	public void add_banned(org.ldk.structs.NodeId node_id) {
		bindings.ProbabilisticScorer_add_banned(this.ptr, node_id == null ? 0 : node_id.ptr);
		Reference.reachabilityFence(this);
		Reference.reachabilityFence(node_id);
		if (this != null) { this.ptrs_to.add(node_id); };
	}

	/**
	 * Removes the node with the given `node_id` from the list of nodes to avoid.
	 */
	public void remove_banned(org.ldk.structs.NodeId node_id) {
		bindings.ProbabilisticScorer_remove_banned(this.ptr, node_id == null ? 0 : node_id.ptr);
		Reference.reachabilityFence(this);
		Reference.reachabilityFence(node_id);
		if (this != null) { this.ptrs_to.add(node_id); };
	}

	/**
	 * Sets a manual penalty for the given node.
	 */
	public void set_manual_penalty(org.ldk.structs.NodeId node_id, long penalty) {
		bindings.ProbabilisticScorer_set_manual_penalty(this.ptr, node_id == null ? 0 : node_id.ptr, penalty);
		Reference.reachabilityFence(this);
		Reference.reachabilityFence(node_id);
		Reference.reachabilityFence(penalty);
		if (this != null) { this.ptrs_to.add(node_id); };
	}

	/**
	 * Removes the node with the given `node_id` from the list of manual penalties.
	 */
	public void remove_manual_penalty(org.ldk.structs.NodeId node_id) {
		bindings.ProbabilisticScorer_remove_manual_penalty(this.ptr, node_id == null ? 0 : node_id.ptr);
		Reference.reachabilityFence(this);
		Reference.reachabilityFence(node_id);
		if (this != null) { this.ptrs_to.add(node_id); };
	}

	/**
	 * Clears the list of manual penalties that are applied during path finding.
	 */
	public void clear_manual_penalties() {
		bindings.ProbabilisticScorer_clear_manual_penalties(this.ptr);
		Reference.reachabilityFence(this);
	}

	/**
	 * Constructs a new Score which calls the relevant methods on this_arg.
	 * This copies the `inner` pointer in this_arg and thus the returned Score must be freed before this_arg is freed.
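	 * 
	 * A minimal sketch (assuming `scorer` is a previously-constructed ProbabilisticScorer; the
	 * variable names are illustrative only):
	 * <pre>{@code
	 * Score score = scorer.as_Score();
	 * // Hand `score` to routing code that expects a Score implementation, keeping `scorer`
	 * // alive for as long as `score` is in use, per the note above.
	 * }</pre>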
	 */
	public Score as_Score() {
		long ret = bindings.ProbabilisticScorer_as_Score(this.ptr);
		Reference.reachabilityFence(this);
		if (ret >= 0 && ret <= 4096) { return null; }
		Score ret_hu_conv = new Score(null, ret);
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(this); };
		return ret_hu_conv;
	}

	/**
	 * Serialize the ProbabilisticScorer object into a byte array which can be read by ProbabilisticScorer_read
	 */
	public byte[] write() {
		byte[] ret = bindings.ProbabilisticScorer_write(this.ptr);
		Reference.reachabilityFence(this);
		return ret;
	}

	/**
	 * Read a ProbabilisticScorer from a byte array, created by ProbabilisticScorer_write
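	 * 
	 * A minimal round-trip sketch (assuming `scorer`, `params`, `network_graph` and `logger`
	 * already exist; names are illustrative only):
	 * <pre>{@code
	 * byte[] ser = scorer.write();
	 * Result_ProbabilisticScorerDecodeErrorZ res =
	 *     ProbabilisticScorer.read(ser, params, network_graph, logger);
	 * // Inspect `res` to determine whether decoding succeeded before using the restored scorer.
	 * }</pre>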
	 */
	public static Result_ProbabilisticScorerDecodeErrorZ read(byte[] ser, org.ldk.structs.ProbabilisticScoringParameters arg_a, org.ldk.structs.NetworkGraph arg_b, org.ldk.structs.Logger arg_c) {
		long ret = bindings.ProbabilisticScorer_read(ser, arg_a == null ? 0 : arg_a.ptr, arg_b == null ? 0 : arg_b.ptr, arg_c == null ? 0 : arg_c.ptr);
		Reference.reachabilityFence(ser);
		Reference.reachabilityFence(arg_a);
		Reference.reachabilityFence(arg_b);
		Reference.reachabilityFence(arg_c);
		if (ret >= 0 && ret <= 4096) { return null; }
		Result_ProbabilisticScorerDecodeErrorZ ret_hu_conv = Result_ProbabilisticScorerDecodeErrorZ.constr_from_ptr(ret);
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(arg_a); };
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(arg_b); };
		if (ret_hu_conv != null) { ret_hu_conv.ptrs_to.add(arg_c); };
		return ret_hu_conv;
	}

}