- * Marks the node with the given `node_id` as banned, i.e.,
- * it will be avoided during path finding.
- */
- public void add_banned(org.ldk.structs.NodeId node_id) {
- bindings.ProbabilisticScorer_add_banned(this.ptr, node_id == null ? 0 : node_id.ptr);
- GC.KeepAlive(this);
- GC.KeepAlive(node_id);
- if (this != null) { this.ptrs_to.AddLast(node_id); };
- }
-
- /**
- * Removes the node with the given `node_id` from the list of nodes to avoid.
- */
- public void remove_banned(org.ldk.structs.NodeId node_id) {
- bindings.ProbabilisticScorer_remove_banned(this.ptr, node_id == null ? 0 : node_id.ptr);
- GC.KeepAlive(this);
- GC.KeepAlive(node_id);
- if (this != null) { this.ptrs_to.AddLast(node_id); };
- }
-
- /**
- * Sets a manual penalty for the given node.
- */
- public void set_manual_penalty(org.ldk.structs.NodeId node_id, long penalty) {
- bindings.ProbabilisticScorer_set_manual_penalty(this.ptr, node_id == null ? 0 : node_id.ptr, penalty);
- GC.KeepAlive(this);
- GC.KeepAlive(node_id);
- GC.KeepAlive(penalty);
- if (this != null) { this.ptrs_to.AddLast(node_id); };
- }
-
- /**
- * Removes the node with the given `node_id` from the list of manual penalties.
- */
- public void remove_manual_penalty(org.ldk.structs.NodeId node_id) {
- bindings.ProbabilisticScorer_remove_manual_penalty(this.ptr, node_id == null ? 0 : node_id.ptr);
- GC.KeepAlive(this);
- GC.KeepAlive(node_id);
- if (this != null) { this.ptrs_to.AddLast(node_id); };
- }
-
- /**
- * Clears the list of manual penalties that are applied during path finding.
+ * Query the historical estimated minimum and maximum liquidity available for sending a
+ * payment over the channel with `scid` towards the given `target` node.
+ *
+ * Returns two sets of 8 buckets. The first set describes the octiles for lower-bound
+ * liquidity estimates, the second set describes the octiles for upper-bound liquidity
+ * estimates. Each bucket describes the relative frequency at which we've seen a liquidity
+ * bound in the octile relative to the channel's total capacity, on an arbitrary scale.
+ * Because the values are slowly decayed, more recent data points are weighted more heavily
+ * than older data points.
+ *
+ * When scoring, the estimated probability that an upper-/lower-bound lies in a given octile
+ * relative to the channel's total capacity is calculated by dividing that bucket's value by
+ * the total of all buckets for the given bound.
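+ *
+ * As a purely illustrative calculation (not taken from real data), lower-bound buckets of
+ * `[4, 0, 0, 0, 0, 0, 0, 12]` sum to 16, giving an estimated probability of 4 / 16 = 25%
+ * that the lower bound lies in the bottom octile and 12 / 16 = 75% that it lies in the top
+ * octile.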
+ *
+ * For example, a value of `[0, 0, 0, 0, 0, 0, 0, 32]` indicates that we believe the probability
+ * of a bound being in the top octile to be 100%, and have never (recently) seen it in any
+ * other octiles. A value of `[31, 0, 0, 0, 0, 0, 0, 32]` indicates we've seen the bound in
+ * both the top and bottom octiles, with roughly similar (recent) frequency.
+ *
+ * Because the data points are decayed slowly over time, values will eventually return to
+ * `Some(([0; 8], [0; 8]))`.