Rename keys for OnchainTxHandler::claimable_outpoints map
diff --git a/lightning/src/chain/onchaintx.rs b/lightning/src/chain/onchaintx.rs
index 039fb5ff13a04eb1fd843d061bbd992a0f394935..faf3fe12f60b1787494d803b77ce6abbe31b6017 100644
@@ -36,7 +36,7 @@ use crate::chain::keysinterface::WriteableEcdsaChannelSigner;
 use crate::chain::package::PackageSolvingData;
 use crate::chain::package::PackageTemplate;
 use crate::util::logger::Logger;
-use crate::util::ser::{Readable, ReadableArgs, MaybeReadable, Writer, Writeable, VecWriter};
+use crate::util::ser::{Readable, ReadableArgs, MaybeReadable, UpgradableRequired, Writer, Writeable, VecWriter};
 
 use crate::io;
 use crate::prelude::*;
@@ -72,18 +72,23 @@ impl OnchainEventEntry {
        }
 }
 
-/// Upon discovering of some classes of onchain tx by ChannelMonitor, we may have to take actions on it
-/// once they mature to enough confirmations (ANTI_REORG_DELAY)
+/// Events for claims the [`OnchainTxHandler`] has generated. Once the events are considered safe
+/// from a chain reorg, the [`OnchainTxHandler`] will act accordingly.
 #[derive(PartialEq, Eq)]
 enum OnchainEvent {
-       /// Outpoint under claim process by our own tx, once this one get enough confirmations, we remove it from
-       /// bump-txn candidate buffer.
+       /// A pending request has been claimed by a transaction spending the exact same set of outpoints
+       /// as the request. This claim can either be ours or from the counterparty. Once the claiming
+       /// transaction has met [`ANTI_REORG_DELAY`] confirmations, we consider it final and remove the
+       /// pending request.
        Claim {
                package_id: PackageID,
        },
-       /// Claim tx aggregate multiple claimable outpoints. One of the outpoint may be claimed by a counterparty party tx.
-       /// In this case, we need to drop the outpoint and regenerate a new claim tx. By safety, we keep tracking
-       /// the outpoint to be sure to resurect it back to the claim tx if reorgs happen.
+       /// The counterparty has claimed an outpoint from one of our pending requests through a
+       /// different transaction than ours. If our transaction was attempting to claim multiple
+       /// outputs, we need to drop the outpoint claimed by the counterparty and regenerate a new claim
+       /// transaction for ourselves. We keep tracking, separately, the outpoint claimed by the
+       /// counterparty up to [`ANTI_REORG_DELAY`] confirmations to ensure we attempt to re-claim it
+       /// if the counterparty's claim is reorged from the chain.
        ContentiousOutpoint {
                package: PackageTemplate,
        }
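
Both variants sit in `onchain_events_awaiting_threshold_conf` until they are buried deeply enough to be considered reorg-safe. A minimal sketch of that maturity rule, assuming LDK's `ANTI_REORG_DELAY` of 6 blocks (simplified from the helper on `OnchainEventEntry`):

```rust
/// LDK treats an onchain event as reorg-safe after ANTI_REORG_DELAY confirmations.
const ANTI_REORG_DELAY: u32 = 6;

/// Sketch of the maturity check: an event confirmed at `conf_height` has one
/// confirmation at that height, so it matures once the best chain tip reaches
/// `conf_height + ANTI_REORG_DELAY - 1`.
fn has_reached_confirmation_threshold(conf_height: u32, best_height: u32) -> bool {
    best_height >= conf_height + ANTI_REORG_DELAY - 1
}
```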
@@ -106,18 +111,14 @@ impl MaybeReadable for OnchainEventEntry {
                let mut txid = Txid::all_zeros();
                let mut height = 0;
                let mut block_hash = None;
-               let mut event = None;
+               let mut event = UpgradableRequired(None);
                read_tlv_fields!(reader, {
                        (0, txid, required),
                        (1, block_hash, option),
                        (2, height, required),
-                       (4, event, ignorable),
+                       (4, event, upgradable_required),
                });
-               if let Some(ev) = event {
-                       Ok(Some(Self { txid, height, block_hash, event: ev }))
-               } else {
-                       Ok(None)
-               }
+               Ok(Some(Self { txid, height, block_hash, event: _init_tlv_based_struct_field!(event, upgradable_required) }))
        }
 }
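
The move from `ignorable` to `upgradable_required` tightens the failure mode: a missing `event` TLV is now a hard decode error, while an `event` written by a newer, unknown version still drops just this entry. A conceptual sketch of the three outcomes (not the macro's literal expansion):

```rust
use lightning::ln::msgs::DecodeError;

/// Sketch: `read` is None if the TLV was absent entirely, Some(None) if it
/// was present but not understood (e.g. an odd enum variant written by a
/// newer version), and Some(Some(event)) if fully readable.
fn resolve_event<T>(read: Option<Option<T>>) -> Result<Option<T>, DecodeError> {
    match read {
        Some(Some(event)) => Ok(Some(event)),   // keep the whole entry
        Some(None) => Ok(None),                  // skip just this entry
        None => Err(DecodeError::InvalidValue),  // required field missing
    }
}
```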
 
@@ -219,7 +220,6 @@ type PackageID = [u8; 32];
 
 /// OnchainTxHandler receives claiming requests, aggregates them if it's sound, broadcast and
 /// do RBF bumping if possible.
-#[derive(PartialEq)]
 pub struct OnchainTxHandler<ChannelSigner: WriteableEcdsaChannelSigner> {
        destination_script: Script,
        holder_commitment: HolderCommitmentTransaction,
@@ -251,12 +251,12 @@ pub struct OnchainTxHandler<ChannelSigner: WriteableEcdsaChannelSigner> {
        #[cfg(anchors)]
        pending_claim_events: HashMap<PackageID, ClaimEvent>,
 
-       // Used to link outpoints claimed in a connected block to a pending claim request.
-       // Key is outpoint than monitor parsing has detected we have keys/scripts to claim
-       // Value is (pending claim request identifier, confirmation_block), identifier
-       // is txid of the initial claiming transaction and is immutable until outpoint is
-       // post-anti-reorg-delay solved, confirmaiton_block is used to erase entry if
-       // block with output gets disconnected.
+       // Used to link outpoints claimed in a connected block to a pending claim request. The keys
+       // represent the outpoints that our `ChannelMonitor` has detected we have keys/scripts to
+       // claim. The values track the pending claim request identifier and the initial confirmation
+       // block height, and are immutable until the outpoint has enough confirmations to meet our
+       // [`ANTI_REORG_DELAY`]. The initial confirmation block height is used to remove the entry if
+       // the block gets disconnected.
        #[cfg(test)] // Used in functional_test to verify sanitization
        pub claimable_outpoints: HashMap<BitcoinOutPoint, (PackageID, u32)>,
        #[cfg(not(test))]
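
The confirmation height in the value tuple exists purely for reorg handling. A simplified sketch of that cleanup as a hypothetical free function (the real handler also re-queues the affected requests):

```rust
use std::collections::HashMap;
use bitcoin::blockdata::transaction::OutPoint as BitcoinOutPoint;

type PackageID = [u8; 32];

/// Sketch: when the block at `height` is disconnected, any entry whose
/// initial confirmation happened at or above that height is no longer
/// confirmed on the best chain and is dropped.
fn prune_disconnected(
    claimable_outpoints: &mut HashMap<BitcoinOutPoint, (PackageID, u32)>,
    height: u32,
) {
    claimable_outpoints.retain(|_, (_package_id, conf_height)| *conf_height < height);
}
```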
@@ -269,6 +269,22 @@ pub struct OnchainTxHandler<ChannelSigner: WriteableEcdsaChannelSigner> {
        pub(super) secp_ctx: Secp256k1<secp256k1::All>,
 }
 
+impl<ChannelSigner: WriteableEcdsaChannelSigner> PartialEq for OnchainTxHandler<ChannelSigner> {
+       fn eq(&self, other: &Self) -> bool {
+               // `signer`, `secp_ctx`, and `pending_claim_events` are excluded on purpose.
+               self.destination_script == other.destination_script &&
+                       self.holder_commitment == other.holder_commitment &&
+                       self.holder_htlc_sigs == other.holder_htlc_sigs &&
+                       self.prev_holder_commitment == other.prev_holder_commitment &&
+                       self.prev_holder_htlc_sigs == other.prev_holder_htlc_sigs &&
+                       self.channel_transaction_parameters == other.channel_transaction_parameters &&
+                       self.pending_claim_requests == other.pending_claim_requests &&
+                       self.claimable_outpoints == other.claimable_outpoints &&
+                       self.locktimed_packages == other.locktimed_packages &&
+                       self.onchain_events_awaiting_threshold_conf == other.onchain_events_awaiting_threshold_conf
+       }
+}
+
 const SERIALIZATION_VERSION: u8 = 1;
 const MIN_SERIALIZATION_VERSION: u8 = 1;
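
Dropping `#[derive(PartialEq)]` is forced by the fields the manual impl deliberately skips: the signer and secp context carry no meaningful equality, and `pending_claim_events` only exists under `cfg(anchors)`. The pattern in miniature, with hypothetical types:

```rust
/// Miniature version of the manual impl above: one field without PartialEq
/// (the signer) rules out the derive, so equality is written by hand over
/// the comparable fields and the rest are skipped on purpose.
struct Handler<Signer> {
    destination_script: Vec<u8>,
    signer: Signer, // no PartialEq; deliberately excluded from eq()
}

impl<Signer> PartialEq for Handler<Signer> {
    fn eq(&self, other: &Self) -> bool {
        self.destination_script == other.destination_script
    }
}
```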
 
@@ -478,12 +494,12 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                // transaction is reorged out.
                let mut all_inputs_have_confirmed_spend = true;
                for outpoint in request_outpoints.iter() {
-                       if let Some(first_claim_txid_height) = self.claimable_outpoints.get(*outpoint) {
+                       if let Some((request_package_id, _)) = self.claimable_outpoints.get(*outpoint) {
                                // We check for outpoint spends within claims individually rather than as a set
                                // since requests can have outpoints split off.
                                if !self.onchain_events_awaiting_threshold_conf.iter()
                                        .any(|event_entry| if let OnchainEvent::Claim { package_id } = event_entry.event {
-                                               first_claim_txid_height.0 == package_id
+                                               *request_package_id == package_id
                                        } else {
                                                // The onchain event is not a claim, keep seeking until we find one.
                                                false
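
Since outpoints can be split off a request into packages of their own, each outpoint may map to a different `PackageID`, which is why the scan above runs per outpoint rather than once per request. The same check restated in isolation (hypothetical free function, simplified from the loop above):

```rust
use std::collections::HashMap;
use bitcoin::blockdata::transaction::OutPoint as BitcoinOutPoint;

type PackageID = [u8; 32];

/// Sketch: a request only counts as fully spent once every one of its
/// outpoints resolves, via `claimable_outpoints`, to a package that already
/// has a `Claim` event awaiting ANTI_REORG_DELAY.
fn all_inputs_have_confirmed_spend(
    request_outpoints: &[&BitcoinOutPoint],
    claimable_outpoints: &HashMap<BitcoinOutPoint, (PackageID, u32)>,
    packages_with_claim_events: &[PackageID],
) -> bool {
    request_outpoints.iter().all(|outpoint| {
        claimable_outpoints.get(*outpoint).map_or(false, |(package_id, _)| {
            packages_with_claim_events.contains(package_id)
        })
    })
}
```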
@@ -728,9 +744,9 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                        // Scan all input to verify is one of the outpoint spent is of interest for us
                        let mut claimed_outputs_material = Vec::new();
                        for inp in &tx.input {
-                               if let Some(first_claim_txid_height) = self.claimable_outpoints.get(&inp.previous_output) {
+                               if let Some((package_id, _)) = self.claimable_outpoints.get(&inp.previous_output) {
                                        // If outpoint has claim request pending on it...
-                                       if let Some(request) = self.pending_claim_requests.get_mut(&first_claim_txid_height.0) {
+                                       if let Some(request) = self.pending_claim_requests.get_mut(package_id) {
                                                //... we need to verify equality between transaction outpoints and claim request
                                                // outpoints to know if transaction is the original claim or a bumped one issued
                                                // by us.
@@ -750,7 +766,7 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                                                                        txid: tx.txid(),
                                                                        height: conf_height,
                                                                        block_hash: Some(conf_hash),
-                                                                       event: OnchainEvent::Claim { package_id: first_claim_txid_height.0 }
+                                                                       event: OnchainEvent::Claim { package_id: *package_id }
                                                                };
                                                                if !self.onchain_events_awaiting_threshold_conf.contains(&entry) {
                                                                        self.onchain_events_awaiting_threshold_conf.push(entry);
@@ -777,7 +793,7 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                                                        }
                                                        //TODO: recompute soonest_timelock to avoid wasting a bit on fees
                                                        if at_least_one_drop {
-                                                               bump_candidates.insert(first_claim_txid_height.0.clone(), request.clone());
+                                                               bump_candidates.insert(*package_id, request.clone());
                                                        }
                                                }
                                                break; //No need to iterate further, either tx is our or their
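
The equality test referenced in the comments above decides whether the confirmed transaction is our own claim or a counterparty's. A simplified, order-insensitive sketch of that comparison:

```rust
use bitcoin::blockdata::transaction::OutPoint as BitcoinOutPoint;

/// Sketch: the confirmed tx counts as our own (possibly RBF-bumped) claim
/// only if it spends exactly the outpoints the pending request tracks; any
/// partial overlap means a counterparty transaction took some of them, and
/// those are split off as `ContentiousOutpoint` events.
fn is_our_claim(tx_inputs: &[BitcoinOutPoint], request_outpoints: &[&BitcoinOutPoint]) -> bool {
    tx_inputs.len() == request_outpoints.len()
        && tx_inputs.iter().all(|input| request_outpoints.iter().any(|o| *o == input))
}
```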
@@ -830,17 +846,17 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                }
 
                // Check if any pending claim request must be rescheduled
-               for (first_claim_txid, ref request) in self.pending_claim_requests.iter() {
+               for (package_id, request) in self.pending_claim_requests.iter() {
                        if let Some(h) = request.timer() {
                                if cur_height >= h {
-                                       bump_candidates.insert(*first_claim_txid, (*request).clone());
+                                       bump_candidates.insert(*package_id, request.clone());
                                }
                        }
                }
 
                // Build, bump and rebroadcast tx accordingly
                log_trace!(logger, "Bumping {} candidates", bump_candidates.len());
-               for (first_claim_txid, request) in bump_candidates.iter() {
+               for (package_id, request) in bump_candidates.iter() {
                        if let Some((new_timer, new_feerate, bump_claim)) = self.generate_claim(cur_height, &request, &*fee_estimator, &*logger) {
                                match bump_claim {
                                        OnchainClaim::Tx(bump_tx) => {
@@ -850,10 +866,10 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                                        #[cfg(anchors)]
                                        OnchainClaim::Event(claim_event) => {
                                                log_info!(logger, "Yielding RBF-bumped onchain event to spend inputs {:?}", request.outpoints());
-                                               self.pending_claim_events.insert(*first_claim_txid, claim_event);
+                                               self.pending_claim_events.insert(*package_id, claim_event);
                                        },
                                }
-                               if let Some(request) = self.pending_claim_requests.get_mut(first_claim_txid) {
+                               if let Some(request) = self.pending_claim_requests.get_mut(package_id) {
                                        request.set_timer(new_timer);
                                        request.set_feerate(new_feerate);
                                }
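
A note on the renamed key: as the comment removed earlier records, the identifier is still the txid of the initial claiming transaction, immutable until the outpoint is solved past the anti-reorg delay; the rename to `PackageID` merely stops the type from advertising that detail. Roughly, per that txid-derived scheme (hypothetical helper):

```rust
use bitcoin::hashes::Hash;
use bitcoin::Txid;

type PackageID = [u8; 32];

/// Sketch: at this point in the series a PackageID is still the initial
/// claiming transaction's txid, copied into a plain byte array.
fn package_id_from_initial_claim(txid: Txid) -> PackageID {
    txid.into_inner()
}
```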
@@ -899,12 +915,12 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                                //- resurect outpoint back in its claimable set and regenerate tx
                                match entry.event {
                                        OnchainEvent::ContentiousOutpoint { package } => {
-                                               if let Some(ancestor_claimable_txid) = self.claimable_outpoints.get(package.outpoints()[0]) {
-                                                       if let Some(request) = self.pending_claim_requests.get_mut(&ancestor_claimable_txid.0) {
+                                               if let Some(pending_claim) = self.claimable_outpoints.get(package.outpoints()[0]) {
+                                                       if let Some(request) = self.pending_claim_requests.get_mut(&pending_claim.0) {
                                                                request.merge_package(package);
                                                                // Using a HashMap guarantee us than if we have multiple outpoints getting
                                                                // resurrected only one bump claim tx is going to be broadcast
-                                                               bump_candidates.insert(ancestor_claimable_txid.clone(), request.clone());
+                                                               bump_candidates.insert(pending_claim.clone(), request.clone());
                                                        }
                                                }
                                        },
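
The `HashMap` comment above is why the disconnect path keys `bump_candidates` by the whole `(PackageID, u32)` value: several resurrected packages merging into the same pending request collapse into a single entry, so only one bump transaction is rebroadcast. In miniature, with a hypothetical request type `T`:

```rust
use std::collections::HashMap;

type PackageID = [u8; 32];

/// Sketch: repeated resurrections for the same pending claim overwrite one
/// map entry rather than queueing several bump transactions.
fn queue_bump<T>(
    bump_candidates: &mut HashMap<(PackageID, u32), T>,
    pending_claim: (PackageID, u32),
    merged_request: T,
) {
    bump_candidates.insert(pending_claim, merged_request);
}
```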
@@ -914,7 +930,7 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                                self.onchain_events_awaiting_threshold_conf.push(entry);
                        }
                }
-               for (_first_claim_txid_height, request) in bump_candidates.iter_mut() {
+               for ((_package_id, _), request) in bump_candidates.iter_mut() {
                        if let Some((new_timer, new_feerate, bump_claim)) = self.generate_claim(height, &request, fee_estimator, &&*logger) {
                                request.set_timer(new_timer);
                                request.set_feerate(new_feerate);
@@ -926,7 +942,7 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
                                        #[cfg(anchors)]
                                        OnchainClaim::Event(claim_event) => {
                                                log_info!(logger, "Yielding onchain event after reorg to spend inputs {:?}", request.outpoints());
-                                               self.pending_claim_events.insert(_first_claim_txid_height.0, claim_event);
+                                               self.pending_claim_events.insert(*_package_id, claim_event);
                                        },
                                }
                        }
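
One subtlety in this last hunk: on the disconnect path `bump_candidates` is keyed by the full `claimable_outpoints` value, `(PackageID, u32)`, rather than by `PackageID` alone as on the connect path, hence the tuple pattern in the loop and the remaining leading underscore. A toy illustration of that key shape (hypothetical values):

```rust
use std::collections::HashMap;

type PackageID = [u8; 32];

fn main() {
    // Hypothetical toy entry mirroring the disconnect-path key shape.
    let mut bump_candidates: HashMap<(PackageID, u32), &'static str> = HashMap::new();
    bump_candidates.insert(([0u8; 32], 100), "claim request");
    for ((package_id, _conf_height), request) in bump_candidates.iter() {
        // Only the PackageID half is needed to key `pending_claim_events`.
        println!("bumping {} under id {:02x?}", request, &package_id[..4]);
    }
}
```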