From: Matt Corallo <649246+TheBlueMatt@users.noreply.github.com>
Date: Tue, 11 Jun 2024 18:55:11 +0000 (-0700)
Subject: Merge pull request #3107 from mhrheaume/mhr/closure_reason_abandoned
X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=commitdiff_plain;h=b8cdde8af64189fd1567cbc85cfae26c610af969;hp=a6664010fd2603abc474cf80cf86a95d08578f65;p=rust-lightning

Merge pull request #3107 from mhrheaume/mhr/closure_reason_abandoned

Updated `ClosureReason::HolderForceClosed` with whether txn was broadcasted.
---

diff --git a/lightning-persister/src/fs_store.rs b/lightning-persister/src/fs_store.rs
index 093ab1ec..d1e19421 100644
--- a/lightning-persister/src/fs_store.rs
+++ b/lightning-persister/src/fs_store.rs
@@ -447,7 +447,7 @@ mod tests {
 		let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 		let error_message = "Channel force-closed";
 		nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
-		check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+		check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 		let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
 
 		// Set the store's directory to read-only, which should result in
@@ -485,7 +485,7 @@ mod tests {
 		let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 		let error_message = "Channel force-closed";
 		nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
-		check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+		check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 		let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
 		let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
 		let update_id = update_map.get(&added_monitors[0].1.channel_id()).unwrap();
diff --git a/lightning-persister/src/test_utils.rs b/lightning-persister/src/test_utils.rs
index 42569c7a..4a2c6589 100644
--- a/lightning-persister/src/test_utils.rs
+++ b/lightning-persister/src/test_utils.rs
@@ -106,7 +106,7 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
 	// updates.
let error_message = "Channel force-closed"; nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap(); - check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000); + check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000); check_closed_broadcast!(nodes[0], true); check_added_monitors!(nodes[0], 1); diff --git a/lightning/src/chain/channelmonitor.rs b/lightning/src/chain/channelmonitor.rs index 6291006f..cd6061a2 100644 --- a/lightning/src/chain/channelmonitor.rs +++ b/lightning/src/chain/channelmonitor.rs @@ -2873,7 +2873,7 @@ impl ChannelMonitorImpl { F::Target: FeeEstimator, L::Target: Logger, { - let (claimable_outpoints, _) = self.generate_claimable_outpoints_and_watch_outputs(ClosureReason::HolderForceClosed); + let (claimable_outpoints, _) = self.generate_claimable_outpoints_and_watch_outputs(ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }); self.onchain_tx_handler.update_claims_view_from_requests( claimable_outpoints, self.best_block.height, self.best_block.height, broadcaster, fee_estimator, logger diff --git a/lightning/src/events/mod.rs b/lightning/src/events/mod.rs index f054a50c..527aabb0 100644 --- a/lightning/src/events/mod.rs +++ b/lightning/src/events/mod.rs @@ -278,7 +278,21 @@ pub enum ClosureReason { /// Closure generated from [`ChannelManager::force_close_channel`], called by the user. /// /// [`ChannelManager::force_close_channel`]: crate::ln::channelmanager::ChannelManager::force_close_channel. - HolderForceClosed, + HolderForceClosed { + /// Whether or not the latest transaction was broadcasted when the channel was force + /// closed. + /// + /// Channels closed using [`ChannelManager::force_close_broadcasting_latest_txn`] will have + /// this field set to true, whereas channels closed using [`ChannelManager::force_close_without_broadcasting_txn`] + /// or force-closed prior to being funded will have this field set to false. + /// + /// This will be `None` for objects generated or written by LDK 0.0.123 and + /// earlier. + /// + /// [`ChannelManager::force_close_broadcasting_latest_txn`]: crate::ln::channelmanager::ChannelManager::force_close_broadcasting_latest_txn. + /// [`ChannelManager::force_close_without_broadcasting_txn`]: crate::ln::channelmanager::ChannelManager::force_close_without_broadcasting_txn. + broadcasted_latest_txn: Option + }, /// The channel was closed after negotiating a cooperative close and we've now broadcasted /// the cooperative close transaction. Note the shutdown may have been initiated by us. 
@@ -355,7 +369,14 @@ impl core::fmt::Display for ClosureReason {
 			ClosureReason::CounterpartyForceClosed { peer_msg } => {
 				f.write_fmt(format_args!("counterparty force-closed with message: {}", peer_msg))
 			},
-			ClosureReason::HolderForceClosed => f.write_str("user force-closed the channel"),
+			ClosureReason::HolderForceClosed { broadcasted_latest_txn } => {
+				f.write_str("user force-closed the channel")?;
+				if let Some(broadcasted) = broadcasted_latest_txn {
+					write!(f, " and {} the latest transaction", if *broadcasted { "broadcasted" } else { "did not broadcast" })
+				} else {
+					Ok(())
+				}
+			},
 			ClosureReason::LegacyCooperativeClosure => f.write_str("the channel was cooperatively closed"),
 			ClosureReason::CounterpartyInitiatedCooperativeClosure => f.write_str("the channel was cooperatively closed by our peer"),
 			ClosureReason::LocallyInitiatedCooperativeClosure => f.write_str("the channel was cooperatively closed by us"),
@@ -382,7 +403,7 @@ impl core::fmt::Display for ClosureReason {
 impl_writeable_tlv_based_enum_upgradable!(ClosureReason,
 	(0, CounterpartyForceClosed) => { (1, peer_msg, required) },
 	(1, FundingTimedOut) => {},
-	(2, HolderForceClosed) => {},
+	(2, HolderForceClosed) => { (1, broadcasted_latest_txn, option) },
 	(6, CommitmentTxConfirmed) => {},
 	(4, LegacyCooperativeClosure) => {},
 	(8, ProcessingError) => { (1, err, required) },
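
The `impl_writeable_tlv_based_enum_upgradable!` change above is what keeps this backwards compatible: the new field is an optional TLV record (odd type 1) inside the variant, so a `ClosureReason` serialized by LDK 0.0.123 or earlier simply lacks the record and reads back as `None`. A self-contained toy sketch of that rule; the helper names and the one-byte TLV layout are inventions for illustration, not LDK's actual wire format:

    // Toy TLV stream: (type, length, value) records with u8 type/length.
    // Odd-numbered types are optional and may be skipped by readers that
    // do not know them, which is the upgradability rule relied on above.
    fn write_holder_force_closed(broadcasted_latest_txn: Option<bool>) -> Vec<u8> {
        let mut buf = Vec::new();
        if let Some(b) = broadcasted_latest_txn {
            buf.extend_from_slice(&[1, 1, b as u8]); // type 1, length 1, value
        }
        buf
    }

    fn read_holder_force_closed(mut bytes: &[u8]) -> Option<bool> {
        let mut field = None;
        while let [t, l, rest @ ..] = bytes {
            let (val, tail) = rest.split_at(*l as usize);
            if *t == 1 {
                field = Some(val[0] != 0);
            }
            // Other (unknown) types would simply be skipped here.
            bytes = tail;
        }
        field
    }

    fn main() {
        // New writer, new reader: the field round-trips.
        assert_eq!(read_holder_force_closed(&write_holder_force_closed(Some(true))), Some(true));
        // Old writer (LDK 0.0.123 and earlier) emitted no record: reads as None.
        assert_eq!(read_holder_force_closed(&[]), None);
    }
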
diff --git a/lightning/src/ln/async_signer_tests.rs b/lightning/src/ln/async_signer_tests.rs
index 4342886f..8e929fe8 100644
--- a/lightning/src/ln/async_signer_tests.rs
+++ b/lightning/src/ln/async_signer_tests.rs
@@ -373,7 +373,7 @@ fn do_test_async_holder_signatures(anchors: bool, remote_commitment: bool) {
 		nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 		check_added_monitors(&nodes[1], 1);
 		check_closed_broadcast(&nodes[1], 1, true);
-		check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100_000);
+		check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100_000);
 	} else {
 		// We'll connect blocks until the sender has to go onchain to time out the HTLC.
 		connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
diff --git a/lightning/src/ln/chanmon_update_fail_tests.rs b/lightning/src/ln/chanmon_update_fail_tests.rs
index b33e1962..29a6c161 100644
--- a/lightning/src/ln/chanmon_update_fail_tests.rs
+++ b/lightning/src/ln/chanmon_update_fail_tests.rs
@@ -216,7 +216,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
 	// PaymentPathFailed event
 
 	assert_eq!(nodes[0].node.list_channels().len(), 0);
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 }
 
 #[test]
@@ -3232,13 +3232,13 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
 		chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
 		nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
 		check_closed_broadcast(&nodes[1], 1, true);
-		check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[2].node.get_our_node_id()], 100000);
+		check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[2].node.get_our_node_id()], 100000);
 	}
 
 	chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
 	nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast(&nodes[1], 1, true);
-	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100000);
 	}
 
 	// Now reload node B
@@ -3260,7 +3260,7 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
 
 	let error_message = "Channel force-closed";
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
-	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 
 	let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 	assert_eq!(as_closing_tx.len(), 1);
@@ -3403,7 +3403,7 @@ fn do_test_reload_mon_update_completion_actions(close_during_reload: bool) {
 		nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 		check_added_monitors!(nodes[0], 1);
 		check_closed_broadcast!(nodes[0], true);
-		check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100_000);
+		check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100_000);
 		let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 		mine_transaction_without_consistency_checks(&nodes[1], &as_closing_tx[0]);
 	}
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 80b98b2b..f5e83dcc 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -3188,7 +3188,7 @@ where
 					}
 				} else {
 					let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
-					shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed));
+					shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
 				}
 			},
 			hash_map::Entry::Vacant(_) => {
@@ -3357,7 +3357,7 @@ where
 		let closure_reason = if let Some(peer_msg) = peer_msg {
 			ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString(peer_msg.to_string()) }
 		} else {
-			ClosureReason::HolderForceClosed
+			ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(broadcast) }
 		};
 		let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id), None);
 		if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
@@ -5531,7 +5531,7 @@ where
 						log_error!(logger, "Force-closing pending channel with ID {} for not establishing in a timely manner", chan_id);
 						update_maps_on_chan_removal!(self, &context);
-						shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed));
+						shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
 						pending_msg_events.push(MessageSendEvent::HandleError {
 							node_id: counterparty_node_id,
 							action: msgs::ErrorAction::SendErrorMessage {
@@ -8018,7 +8018,7 @@ where
 						let reason = if let MonitorEvent::HolderForceClosedWithInfo { reason, .. } = monitor_event {
 							reason
 						} else {
-							ClosureReason::HolderForceClosed
+							ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }
 						};
 						failed_channels.push(chan.context.force_shutdown(false, reason.clone()));
 						if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
@@ -12584,7 +12584,7 @@ mod tests {
 		nodes[0].node.force_close_channel_with_peer(&chan.2, &nodes[1].node.get_our_node_id(), None, true).unwrap();
 		check_added_monitors!(nodes[0], 1);
-		check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+		check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
 		// Confirm that the channel_update was not sent immediately to node[1] but was cached.
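
Summarizing the channelmanager.rs hunks above: the explicit force-close APIs carry their broadcast flag through as `Some(broadcast)`, channels closed before funding or for failing to establish in time record `Some(false)`, and monitor-initiated holder force-closes without richer information record `Some(true)`. A toy model of that mapping; the `ClosePath` enum and function are illustrative, not LDK API:

    // Illustrative mapping from close path to the new field, mirroring the
    // channelmanager.rs hunks above. `ClosePath` is not an LDK type.
    #[derive(Clone, Copy, Debug)]
    enum ClosePath {
        BroadcastingLatestTxn,   // ChannelManager::force_close_broadcasting_latest_txn
        WithoutBroadcastingTxn,  // ChannelManager::force_close_without_broadcasting_txn
        UnfundedOrTimedOut,      // closed before funding / not established in time
        MonitorHolderForceClose, // MonitorEvent without HolderForceClosedWithInfo details
    }

    fn broadcasted_latest_txn(path: ClosePath) -> Option<bool> {
        match path {
            ClosePath::BroadcastingLatestTxn => Some(true),
            ClosePath::WithoutBroadcastingTxn => Some(false),
            ClosePath::UnfundedOrTimedOut => Some(false),
            ClosePath::MonitorHolderForceClose => Some(true),
        }
    }

    fn main() {
        assert_eq!(broadcasted_latest_txn(ClosePath::BroadcastingLatestTxn), Some(true));
        assert_eq!(broadcasted_latest_txn(ClosePath::WithoutBroadcastingTxn), Some(false));
    }

The test updates below exercise exactly these expectations, asserting `Some(true)` after `force_close_broadcasting_latest_txn` and `Some(false)` for unfunded or non-broadcast closes.
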
 		let node_1_events = nodes[1].node.get_and_clear_pending_msg_events();
@@ -12643,7 +12643,7 @@ mod tests {
 		nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 		check_closed_broadcast!(nodes[0], true);
 		check_added_monitors!(nodes[0], 1);
-		check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+		check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
 		{
 			// Assert that nodes[1] is awaiting removal for nodes[0] once nodes[1] has been
@@ -13380,7 +13380,7 @@ mod tests {
 		nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 		check_closed_broadcast(&nodes[0], 1, true);
 		check_added_monitors(&nodes[0], 1);
-		check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+		check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 		{
 			let txn = nodes[0].tx_broadcaster.txn_broadcast();
 			assert_eq!(txn.len(), 1);
diff --git a/lightning/src/ln/functional_tests.rs b/lightning/src/ln/functional_tests.rs
index 1e31e52c..38be3d8b 100644
--- a/lightning/src/ln/functional_tests.rs
+++ b/lightning/src/ln/functional_tests.rs
@@ -2307,7 +2307,7 @@ fn channel_monitor_network_test() {
 	nodes[1].node.force_close_broadcasting_latest_txn(&chan_1.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_added_monitors!(nodes[1], 1);
 	check_closed_broadcast!(nodes[1], true);
-	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 	{
 		let mut node_txn = test_txn_broadcast(&nodes[1], &chan_1, None, HTLCType::NONE);
 		assert_eq!(node_txn.len(), 1);
@@ -2345,7 +2345,7 @@ fn channel_monitor_network_test() {
 	check_closed_broadcast!(nodes[2], true);
 	assert_eq!(nodes[1].node.list_channels().len(), 0);
 	assert_eq!(nodes[2].node.list_channels().len(), 1);
-	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[2].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[2].node.get_our_node_id()], 100000);
 	check_closed_event!(nodes[2], 1, ClosureReason::CommitmentTxConfirmed, [nodes[1].node.get_our_node_id()], 100000);
 
 	macro_rules! claim_funds {
@@ -2391,7 +2391,7 @@ fn channel_monitor_network_test() {
 	check_closed_broadcast!(nodes[3], true);
 	assert_eq!(nodes[2].node.list_channels().len(), 0);
 	assert_eq!(nodes[3].node.list_channels().len(), 1);
-	check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed, [nodes[3].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[3].node.get_our_node_id()], 100000);
 	check_closed_event!(nodes[3], 1, ClosureReason::CommitmentTxConfirmed, [nodes[2].node.get_our_node_id()], 100000);
 
 	// Drop the ChannelMonitor for the previous channel to avoid it broadcasting transactions and
@@ -3605,7 +3605,7 @@ fn test_htlc_ignore_latest_remote_commitment() {
 	connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
 	check_closed_broadcast!(nodes[0], true);
 	check_added_monitors!(nodes[0], 1);
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
 	let node_txn = nodes[0].tx_broadcaster.unique_txn_broadcast();
 	assert_eq!(node_txn.len(), 2);
@@ -3668,7 +3668,7 @@ fn test_force_close_fail_back() {
 	nodes[2].node.force_close_broadcasting_latest_txn(&payment_event.commitment_msg.channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast!(nodes[2], true);
 	check_added_monitors!(nodes[2], 1);
-	check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 	let commitment_tx = {
 		let mut node_txn = nodes[2].tx_broadcaster.txn_broadcasted.lock().unwrap();
 		// Note that we don't bother broadcasting the HTLC-Success transaction here as we don't
@@ -4550,7 +4550,7 @@ fn test_claim_sizeable_push_msat() {
 	nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast!(nodes[1], true);
 	check_added_monitors!(nodes[1], 1);
-	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 	let node_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().clone();
 	assert_eq!(node_txn.len(), 1);
 	check_spends!(node_txn[0], chan.3);
@@ -4580,7 +4580,7 @@ fn test_claim_on_remote_sizeable_push_msat() {
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast!(nodes[0], true);
 	check_added_monitors!(nodes[0], 1);
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
 	let node_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 	assert_eq!(node_txn.len(), 1);
@@ -8862,7 +8862,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
 	nodes[force_closing_node].node.force_close_broadcasting_latest_txn(&chan_ab.2,
 		&nodes[counterparty_node].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast!(nodes[force_closing_node], true);
 	check_added_monitors!(nodes[force_closing_node], 1);
-	check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed, [nodes[counterparty_node].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[counterparty_node].node.get_our_node_id()], 100000);
 	if go_onchain_before_fulfill {
 		let txn_to_broadcast = match broadcast_alice {
 			true => alice_txn.clone(),
@@ -9595,7 +9595,7 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
 	let error_message = "Channel force-closed";
 	nodes[1].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast!(nodes[1], true);
-	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[2].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[2].node.get_our_node_id()], 100000);
 	check_added_monitors!(nodes[1], 1);
 	let node_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 	assert_eq!(node_txn.len(), 1);
@@ -10653,7 +10653,7 @@ fn test_remove_expired_outbound_unfunded_channels() {
 		},
 		_ => panic!("Unexpected event"),
 	}
-	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 }
 
 #[test]
@@ -10704,7 +10704,7 @@ fn test_remove_expired_inbound_unfunded_channels() {
 		},
 		_ => panic!("Unexpected event"),
 	}
-	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, false, &[nodes[0].node.get_our_node_id()], 100000);
 }
 
 #[test]
@@ -10738,7 +10738,7 @@ fn test_channel_close_when_not_timely_accepted() {
 
 	// Since we disconnected from peer and did not connect back within time,
 	// we should have forced-closed the channel by now.
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, [nodes[1].node.get_our_node_id()], 100000);
 	assert_eq!(nodes[0].node.list_channels().len(), 0);
 
 	{
@@ -11146,7 +11146,7 @@ fn do_test_funding_and_commitment_tx_confirm_same_block(confirm_remote_commitmen
 		_ => panic!("Unexpected event"),
 	}
 	check_added_monitors(closing_node, 1);
-	check_closed_event(closing_node, 1, ClosureReason::HolderForceClosed, false, &[other_node.node.get_our_node_id()], 1_000_000);
+	check_closed_event(closing_node, 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[other_node.node.get_our_node_id()], 1_000_000);
 
 	let commitment_tx = {
 		let mut txn = closing_node.tx_broadcaster.txn_broadcast();
diff --git a/lightning/src/ln/monitor_tests.rs b/lightning/src/ln/monitor_tests.rs
index 50858bb4..af65d0cc 100644
--- a/lightning/src/ln/monitor_tests.rs
+++ b/lightning/src/ln/monitor_tests.rs
@@ -198,7 +198,7 @@ fn archive_fully_resolved_monitors() {
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
 
 	// First archive should set balances_empty_height to current block height
-	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors(); 
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
 	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
 	connect_blocks(&nodes[0], 4032);
 	// Second call after 4032 blocks, should archive the monitor
@@ -784,7 +784,7 @@ fn do_test_balances_on_local_commitment_htlcs(anchors: bool) {
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_added_monitors!(nodes[0], 1);
 	check_closed_broadcast!(nodes[0], true);
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 1000000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 1000000);
 	let commitment_tx = {
 		let mut txn = nodes[0].tx_broadcaster.unique_txn_broadcast();
 		assert_eq!(txn.len(), 1);
@@ -2042,7 +2042,7 @@ fn do_test_restored_packages_retry(check_old_monitor_retries_after_upgrade: bool
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_added_monitors(&nodes[0], 1);
 	check_closed_broadcast(&nodes[0], 1, true);
-	check_closed_event!(&nodes[0], 1, ClosureReason::HolderForceClosed, false,
+	check_closed_event!(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false,
 		[nodes[1].node.get_our_node_id()], 100000);
 
 	let commitment_tx = {
@@ -2715,7 +2715,7 @@ fn do_test_anchors_monitor_fixes_counterparty_payment_script_on_reload(confirm_c
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_added_monitors(&nodes[0], 1);
 	check_closed_broadcast(&nodes[0], 1, true);
-	check_closed_event!(&nodes[0], 1, ClosureReason::HolderForceClosed, false,
+	check_closed_event!(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false,
 		[nodes[1].node.get_our_node_id()], 100000);
 
 	let commitment_tx = {
diff --git a/lightning/src/ln/payment_tests.rs b/lightning/src/ln/payment_tests.rs
index 487fb83c..dcfb6ed8 100644
--- a/lightning/src/ln/payment_tests.rs
+++ b/lightning/src/ln/payment_tests.rs
@@ -1064,7 +1064,7 @@ fn do_test_dup_htlc_onchain_doesnt_fail_on_reload(persist_manager_post_event: bo
 	nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast!(nodes[0], true);
 	check_added_monitors!(nodes[0], 1);
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
 	nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
 	nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
@@ -3611,7 +3611,7 @@ fn do_claim_from_closed_chan(fail_payment: bool) {
 		pass_failed_payment_back(&nodes[0], &[&[&nodes[1], &nodes[3]], &[&nodes[2], &nodes[3]]], false, payment_hash, PaymentFailureReason::RecipientRejected);
 	} else {
 		nodes[1].node.force_close_broadcasting_latest_txn(&chan_bd, &nodes[3].node.get_our_node_id(), error_message.to_string()).unwrap();
-		check_closed_event!(&nodes[1], 1, ClosureReason::HolderForceClosed, false,
+		check_closed_event!(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false,
			[nodes[3].node.get_our_node_id()], 1000000);
 		check_closed_broadcast(&nodes[1], 1, true);
 		let bs_tx = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
diff --git a/lightning/src/ln/priv_short_conf_tests.rs b/lightning/src/ln/priv_short_conf_tests.rs
index f677b292..7aef8959 100644
--- a/lightning/src/ln/priv_short_conf_tests.rs
+++ b/lightning/src/ln/priv_short_conf_tests.rs
@@ -765,7 +765,7 @@ fn test_0conf_close_no_early_chan_update() {
 
 	nodes[0].node.force_close_all_channels_broadcasting_latest_txn(error_message.to_string());
 	check_added_monitors!(nodes[0], 1);
-	check_closed_event!(&nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
 	let _ = get_err_msg(&nodes[0], &nodes[1].node.get_our_node_id());
 }
diff --git a/lightning/src/ln/reload_tests.rs b/lightning/src/ln/reload_tests.rs
index 4c32a0bd..5f375ec8 100644
--- a/lightning/src/ln/reload_tests.rs
+++ b/lightning/src/ln/reload_tests.rs
@@ -633,7 +633,7 @@ fn do_test_data_loss_protect(reconnect_panicing: bool, substantially_old: bool,
 
 	nodes[0].node.force_close_without_broadcasting_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_added_monitors!(nodes[0], 1);
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 1000000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, [nodes[1].node.get_our_node_id()], 1000000);
 	{
 		let node_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap();
 		assert_eq!(node_txn.len(), 0);
@@ -1033,7 +1033,7 @@ fn do_forwarded_payment_no_manager_persistence(use_cs_commitment: bool, claim_ht
 	assert_eq!(cs_commitment_tx.len(), if claim_htlc { 2 } else { 1 });
 
 	check_added_monitors!(nodes[2], 1);
-	check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 	check_closed_broadcast!(nodes[2], true);
 
 	let chan_0_monitor_serialized = get_monitor!(nodes[1], chan_id_1).encode();
diff --git a/lightning/src/ln/reorg_tests.rs b/lightning/src/ln/reorg_tests.rs
index efc07588..e5457784 100644
--- a/lightning/src/ln/reorg_tests.rs
+++ b/lightning/src/ln/reorg_tests.rs
@@ -644,12 +644,12 @@ fn test_htlc_preimage_claim_holder_commitment_after_counterparty_commitment_reor
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast(&nodes[0], 1, true);
 	check_added_monitors(&nodes[0], 1);
-	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 
 	nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast(&nodes[1], 1, true);
 	check_added_monitors(&nodes[1], 1);
-	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100000);
 
 	let mut txn = nodes[0].tx_broadcaster.txn_broadcast();
 	assert_eq!(txn.len(), 1);
@@ -727,7 +727,7 @@ fn test_htlc_preimage_claim_prev_counterparty_commitment_after_current_counterpa
 	nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast(&nodes[0], 1, true);
 	check_added_monitors(&nodes[0], 1);
-	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+	check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 
 	let mut txn = nodes[0].tx_broadcaster.txn_broadcast();
 	assert_eq!(txn.len(), 1);
@@ -833,7 +833,7 @@ fn do_test_retries_own_commitment_broadcast_after_reorg(anchors: bool, revoked_c
 	nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 	check_closed_broadcast(&nodes[1], 1, true);
 	check_added_monitors(&nodes[1], 1);
-	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100_000);
+	check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100_000);
 
 	let commitment_b = {
 		let mut txn = nodes[1].tx_broadcaster.txn_broadcast();
diff --git a/lightning/src/ln/shutdown_tests.rs b/lightning/src/ln/shutdown_tests.rs
index fe79da41..802ee1e5 100644
--- a/lightning/src/ln/shutdown_tests.rs
+++ b/lightning/src/ln/shutdown_tests.rs
@@ -291,7 +291,7 @@ fn close_on_unfunded_channel() {
 	let _open_chan = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
 
 	nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
-	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 1_000_000);
+	check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, [nodes[1].node.get_our_node_id()], 1_000_000);
 }
 
 #[test]
@@ -324,7 +324,7 @@ fn expect_channel_shutdown_state_with_force_closure() {
 	assert!(nodes[1].node.list_channels().is_empty());
 	check_closed_broadcast!(nodes[0], true);
 	check_closed_event!(nodes[0], 1, ClosureReason::CommitmentTxConfirmed, [nodes[1].node.get_our_node_id()], 100000);
-	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+	check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 }
 
 #[test]
diff --git a/lightning/src/util/persist.rs b/lightning/src/util/persist.rs
index 95c68b58..00b1c246 100644
--- a/lightning/src/util/persist.rs
+++ b/lightning/src/util/persist.rs
@@ -1076,7 +1076,7 @@ mod tests {
 		let error_message = "Channel force-closed";
 		nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
-		check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+		check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 		check_closed_broadcast!(nodes[0], true);
 		check_added_monitors!(nodes[0], 1);
 
@@ -1114,7 +1114,7 @@ mod tests {
 		let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 		let error_message = "Channel force-closed";
 		nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
-		check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+		check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100000);
 		{
 			let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
 			let cmu_map = nodes[1].chain_monitor.monitor_updates.lock().unwrap();
@@ -1231,7 +1231,7 @@ mod tests {
 		// Force close.
 		let error_message = "Channel force-closed";
 		nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
-		check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+		check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 		check_closed_broadcast!(nodes[0], true);
 		check_added_monitors!(nodes[0], 1);
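
One practical note for downstream users: code that previously matched the unit variant `ClosureReason::HolderForceClosed` will no longer compile against this change. Matching with `..` keeps such code working without committing to the new field; a minimal before/after sketch (the helper name is illustrative):

    use lightning::events::ClosureReason;

    // Before: `matches!(reason, ClosureReason::HolderForceClosed)`.
    // After: the struct variant must be matched with `..` (or the field bound).
    fn is_holder_force_close(reason: &ClosureReason) -> bool {
        matches!(reason, ClosureReason::HolderForceClosed { .. })
    }

    fn main() {
        let reason = ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) };
        assert!(is_holder_force_close(&reason));
    }

Binding the field instead, as in `ClosureReason::HolderForceClosed { broadcasted_latest_txn }`, is the other option when the broadcast state matters to the caller.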