Merge pull request #3125 from valentinewallace/2024-06-async-payments-prefactor
[rust-lightning] / lightning / src / ln / channelmanager.rs
index 2fd170ebfc9d605c0a421b61e75510bc2003c48a..6ba7396ebfe04262904640c6b041d6969e95195f 100644
@@ -58,7 +58,7 @@ use crate::ln::onion_utils::{HTLCFailReason, INVALID_ONION_BLINDING};
 use crate::ln::msgs::{ChannelMessageHandler, DecodeError, LightningError};
 #[cfg(test)]
 use crate::ln::outbound_payment;
-use crate::ln::outbound_payment::{Bolt12PaymentError, OutboundPayments, PaymentAttempts, PendingOutboundPayment, SendAlongPathArgs, StaleExpiration};
+use crate::ln::outbound_payment::{OutboundPayments, PaymentAttempts, PendingOutboundPayment, SendAlongPathArgs, StaleExpiration};
 use crate::ln::wire::Encode;
 use crate::offers::invoice::{BlindedPayInfo, Bolt12Invoice, DEFAULT_RELATIVE_EXPIRY, DerivedSigningPubkey, ExplicitSigningPubkey, InvoiceBuilder, UnsignedBolt12Invoice};
 use crate::offers::invoice_error::InvoiceError;
@@ -105,7 +105,7 @@ use core::time::Duration;
 use core::ops::Deref;
 
 // Re-export this for use in the public API.
-pub use crate::ln::outbound_payment::{PaymentSendFailure, ProbeSendFailure, Retry, RetryableSendFailure, RecipientOnionFields};
+pub use crate::ln::outbound_payment::{Bolt12PaymentError, PaymentSendFailure, ProbeSendFailure, Retry, RetryableSendFailure, RecipientOnionFields};
 use crate::ln::script::ShutdownScript;
 
 // We hold various information about HTLC relay in the HTLC objects in Channel itself:
@@ -636,7 +636,7 @@ impl MsgHandleErrInternal {
                                        err: msg,
                                        action: msgs::ErrorAction::IgnoreError,
                                },
-                               ChannelError::Close(msg) => LightningError {
+                               ChannelError::Close((msg, _reason)) => LightningError {
                                        err: msg.clone(),
                                        action: msgs::ErrorAction::SendErrorMessage {
                                                msg: msgs::ErrorMessage {
@@ -962,6 +962,11 @@ pub(super) struct InboundChannelRequest {
 /// accepted. An unaccepted channel that exceeds this limit will be abandoned.
 const UNACCEPTED_INBOUND_CHANNEL_AGE_LIMIT_TICKS: i32 = 2;
 
+/// The number of blocks of historical feerate estimates we keep around and consider when deciding
+/// to force-close a channel for having too-low fees. Also the number of blocks we have to see
+/// after startup before we consider force-closing channels for having too-low fees.
+pub(super) const FEERATE_TRACKING_BLOCKS: usize = 144;
+
 /// Stores a PaymentSecret and any other data we may need to validate an inbound payment is
 /// actually ours and not some duplicate HTLC sent to us by a node along the route.
 ///
@@ -2098,6 +2103,21 @@ where
        /// Tracks the message events that are to be broadcasted when we are connected to some peer.
        pending_broadcast_messages: Mutex<Vec<MessageSendEvent>>,
 
+       /// We only want to force-close our channels on peers based on stale feerates when we're
+       /// confident the feerate on the channel is *really* stale, not one that only recently became stale.
+       /// Thus, we store the fee estimates we had as of the last [`FEERATE_TRACKING_BLOCKS`] blocks
+       /// (after startup completed) here, and only force-close when channels have a lower feerate
+       /// than we predicted any time in the last [`FEERATE_TRACKING_BLOCKS`] blocks.
+       ///
+       /// We only keep this in memory as we assume any feerates we receive immediately after startup
+       /// may be bunk (as they often are if Bitcoin Core crashes) and want to delay taking any
+       /// actions for a day anyway.
+       ///
+       /// The first element in the pair is the
+       /// [`ConfirmationTarget::MinAllowedAnchorChannelRemoteFee`] estimate, the second the
+       /// [`ConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee`] estimate.
+       last_days_feerates: Mutex<VecDeque<(u32, u32)>>,
+
        entropy_source: ES,
        node_signer: NS,
        signer_provider: SP,
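
The two additions above (`FEERATE_TRACKING_BLOCKS` and `last_days_feerates`) work together: once per block the minimum-acceptable feerate estimates are recorded, and only a channel whose feerate is below every estimate in the full window is treated as "really" stale. The following is an illustrative sketch of that policy only, not code from this PR; `record_block_feerates` and `feerate_is_really_stale` are hypothetical helper names.

    use std::collections::VecDeque;

    const FEERATE_TRACKING_BLOCKS: usize = 144;

    // Record the per-block minimum feerate estimates (anchor, non-anchor), keeping only
    // the most recent FEERATE_TRACKING_BLOCKS entries.
    fn record_block_feerates(history: &mut VecDeque<(u32, u32)>, anchor_min: u32, non_anchor_min: u32) {
        history.push_back((anchor_min, non_anchor_min));
        while history.len() > FEERATE_TRACKING_BLOCKS {
            history.pop_front();
        }
    }

    // A channel feerate is only "really" stale if we have a full window of history (i.e. we
    // are well past startup) and the feerate is below every estimate seen in that window.
    fn feerate_is_really_stale(history: &VecDeque<(u32, u32)>, channel_feerate: u32, is_anchor_channel: bool) -> bool {
        history.len() >= FEERATE_TRACKING_BLOCKS && history.iter().all(|&(anchor_min, non_anchor_min)| {
            channel_feerate < if is_anchor_channel { anchor_min } else { non_anchor_min }
        })
    }
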
@@ -2446,11 +2466,10 @@ macro_rules! convert_chan_phase_err {
                        ChannelError::Ignore(msg) => {
                                (false, MsgHandleErrInternal::from_chan_no_close(ChannelError::Ignore(msg), *$channel_id))
                        },
-                       ChannelError::Close(msg) => {
+                       ChannelError::Close((msg, reason)) => {
                                let logger = WithChannelContext::from(&$self.logger, &$channel.context, None);
                                log_error!(logger, "Closing channel {} due to close-required error: {}", $channel_id, msg);
                                update_maps_on_chan_removal!($self, $channel.context);
-                               let reason = ClosureReason::ProcessingError { err: msg.clone() };
                                let shutdown_res = $channel.context.force_shutdown(true, reason);
                                let err =
                                        MsgHandleErrInternal::from_finish_shutdown(msg, *$channel_id, shutdown_res, $channel_update);
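
The destructuring above reflects a change made outside this file: `ChannelError::Close` now carries the `ClosureReason` alongside the error string, with a `ChannelError::close()` constructor for the common `ProcessingError` case, which is why the manual `ClosureReason::ProcessingError { err: msg.clone() }` lines are dropped in this and later hunks. A hedged sketch of that shape, with a stubbed `ClosureReason`; the real definitions live in `channel.rs` and `events.rs` and may differ:

    // Stub standing in for lightning::events::ClosureReason.
    #[derive(Clone, Debug)]
    enum ClosureReason {
        ProcessingError { err: String },
    }

    // Sketch of the variant shape implied by this diff: the close reason travels with the message.
    #[derive(Debug)]
    enum ChannelError {
        Ignore(String),
        Close((String, ClosureReason)),
    }

    impl ChannelError {
        // Convenience constructor used throughout this file as `ChannelError::close(..)`.
        fn close(msg: String) -> Self {
            let reason = ClosureReason::ProcessingError { err: msg.clone() };
            ChannelError::Close((msg, reason))
        }
    }
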
@@ -2876,6 +2895,8 @@ where
                        pending_offers_messages: Mutex::new(Vec::new()),
                        pending_broadcast_messages: Mutex::new(Vec::new()),
 
+                       last_days_feerates: Mutex::new(VecDeque::new()),
+
                        entropy_source,
                        node_signer,
                        signer_provider,
@@ -3167,7 +3188,7 @@ where
                                                }
                                        } else {
                                                let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
-                                               shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed));
+                                               shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
                                        }
                                },
                                hash_map::Entry::Vacant(_) => {
@@ -3336,7 +3357,7 @@ where
                        let closure_reason = if let Some(peer_msg) = peer_msg {
                                ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString(peer_msg.to_string()) }
                        } else {
-                               ClosureReason::HolderForceClosed
+                               ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(broadcast) }
                        };
                        let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id), None);
                        if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
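
`ClosureReason::HolderForceClosed` now records whether our latest commitment transaction was broadcast as part of the force-close. A hedged consumer-side sketch (variant and field names as they appear in this diff; the surrounding event handling is illustrative):

    use lightning::events::{ClosureReason, Event};

    // Illustrative: inspect the new field when handling a ChannelClosed event.
    fn on_event(event: Event) {
        if let Event::ChannelClosed { reason, .. } = event {
            if let ClosureReason::HolderForceClosed { broadcasted_latest_txn } = reason {
                match broadcasted_latest_txn {
                    Some(true) => println!("We force-closed and broadcast our latest commitment tx"),
                    Some(false) => println!("We force-closed without broadcasting a commitment tx"),
                    None => println!("We force-closed; broadcast status unknown (older data)"),
                }
            }
        }
    }
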
@@ -3975,14 +3996,43 @@ where
                self.pending_outbound_payments.test_set_payment_metadata(payment_id, new_payment_metadata);
        }
 
-       pub(super) fn send_payment_for_bolt12_invoice(&self, invoice: &Bolt12Invoice, payment_id: PaymentId) -> Result<(), Bolt12PaymentError> {
+       /// Pays the [`Bolt12Invoice`] associated with the `payment_id` encoded in its `payer_metadata`.
+       ///
+       /// The invoice's `payer_metadata` is used to authenticate that the invoice was indeed requested
+       /// before attempting a payment. [`Bolt12PaymentError::UnexpectedInvoice`] is returned if this
+       /// fails or if the encoded `payment_id` is not recognized. The latter may happen if the
+       /// payment is no longer tracked, i.e., if the payment was attempted after:
+       /// - an invoice for the `payment_id` was already paid,
+       /// - one full [timer tick] has elapsed since initially requesting the invoice when paying an
+       ///   offer, or
+       /// - the refund corresponding to the invoice has already expired.
+       ///
+       /// To retry the payment, request another invoice using a new `payment_id`.
+       ///
+       /// Attempting to pay the same invoice twice while the first payment is still pending will
+       /// result in a [`Bolt12PaymentError::DuplicateInvoice`].
+       ///
+       /// Otherwise, either [`Event::PaymentSent`] or [`Event::PaymentFailed`] is used to indicate
+       /// whether or not the payment was successful.
+       ///
+       /// [timer tick]: Self::timer_tick_occurred
+       pub fn send_payment_for_bolt12_invoice(&self, invoice: &Bolt12Invoice) -> Result<(), Bolt12PaymentError> {
+               let secp_ctx = &self.secp_ctx;
+               let expanded_key = &self.inbound_payment_key;
+               match invoice.verify(expanded_key, secp_ctx) {
+                       Ok(payment_id) => self.send_payment_for_verified_bolt12_invoice(invoice, payment_id),
+                       Err(()) => Err(Bolt12PaymentError::UnexpectedInvoice),
+               }
+       }
+
+       fn send_payment_for_verified_bolt12_invoice(&self, invoice: &Bolt12Invoice, payment_id: PaymentId) -> Result<(), Bolt12PaymentError> {
                let best_block_height = self.best_block.read().unwrap().height;
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
                self.pending_outbound_payments
                        .send_payment_for_bolt12_invoice(
                                invoice, payment_id, &self.router, self.list_usable_channels(),
-                               || self.compute_inflight_htlcs(), &self.entropy_source, &self.node_signer,
-                               best_block_height, &self.logger, &self.pending_events,
+                               || self.compute_inflight_htlcs(), &self.entropy_source, &self.node_signer, &self,
+                               &self.secp_ctx, best_block_height, &self.logger, &self.pending_events,
                                |args| self.send_payment_along_path(args)
                        )
        }
@@ -4201,10 +4251,9 @@ where
                        Some(ChannelPhase::UnfundedOutboundV1(mut chan)) => {
                                macro_rules! close_chan { ($err: expr, $api_err: expr, $chan: expr) => { {
                                        let counterparty;
-                                       let err = if let ChannelError::Close(msg) = $err {
+                                       let err = if let ChannelError::Close((msg, reason)) = $err {
                                                let channel_id = $chan.context.channel_id();
                                                counterparty = chan.context.get_counterparty_node_id();
-                                               let reason = ClosureReason::ProcessingError { err: msg.clone() };
                                                let shutdown_res = $chan.context.force_shutdown(false, reason);
                                                MsgHandleErrInternal::from_finish_shutdown(msg, channel_id, shutdown_res, None)
                                        } else { unreachable!(); };
@@ -4217,7 +4266,7 @@ where
                                match find_funding_output(&chan, &funding_transaction) {
                                        Ok(found_funding_txo) => funding_txo = found_funding_txo,
                                        Err(err) => {
-                                               let chan_err = ChannelError::Close(err.to_owned());
+                                               let chan_err = ChannelError::close(err.to_owned());
                                                let api_err = APIError::APIMisuseError { err: err.to_owned() };
                                                return close_chan!(chan_err, api_err, chan);
                                        },
@@ -4846,8 +4895,8 @@ where
                                if short_chan_id != 0 {
                                        let mut forwarding_counterparty = None;
                                        macro_rules! forwarding_channel_not_found {
-                                               () => {
-                                                       for forward_info in pending_forwards.drain(..) {
+                                               ($forward_infos: expr) => {
+                                                       for forward_info in $forward_infos {
                                                                match forward_info {
                                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
                                                                                prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint,
@@ -4955,7 +5004,7 @@ where
                                        let (counterparty_node_id, forward_chan_id) = match chan_info_opt {
                                                Some((cp_id, chan_id)) => (cp_id, chan_id),
                                                None => {
-                                                       forwarding_channel_not_found!();
+                                                       forwarding_channel_not_found!(pending_forwards.drain(..));
                                                        continue;
                                                }
                                        };
@@ -4963,96 +5012,148 @@ where
                                        let per_peer_state = self.per_peer_state.read().unwrap();
                                        let peer_state_mutex_opt = per_peer_state.get(&counterparty_node_id);
                                        if peer_state_mutex_opt.is_none() {
-                                               forwarding_channel_not_found!();
+                                               forwarding_channel_not_found!(pending_forwards.drain(..));
                                                continue;
                                        }
                                        let mut peer_state_lock = peer_state_mutex_opt.unwrap().lock().unwrap();
                                        let peer_state = &mut *peer_state_lock;
-                                       if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
-                                               let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-                                               for forward_info in pending_forwards.drain(..) {
-                                                       let queue_fail_htlc_res = match forward_info {
-                                                               HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                                       prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint,
-                                                                       prev_user_channel_id, forward_info: PendingHTLCInfo {
-                                                                               incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value,
-                                                                               routing: PendingHTLCRouting::Forward {
-                                                                                       onion_packet, blinded, ..
-                                                                               }, skimmed_fee_msat, ..
+                                       let mut draining_pending_forwards = pending_forwards.drain(..);
+                                       while let Some(forward_info) = draining_pending_forwards.next() {
+                                               let queue_fail_htlc_res = match forward_info {
+                                                       HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
+                                                               prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint,
+                                                               prev_user_channel_id, forward_info: PendingHTLCInfo {
+                                                                       incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value,
+                                                                       routing: PendingHTLCRouting::Forward {
+                                                                               ref onion_packet, blinded, ..
+                                                                       }, skimmed_fee_msat, ..
+                                                               },
+                                                       }) => {
+                                                               let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+                                                                       short_channel_id: prev_short_channel_id,
+                                                                       user_channel_id: Some(prev_user_channel_id),
+                                                                       channel_id: prev_channel_id,
+                                                                       outpoint: prev_funding_outpoint,
+                                                                       htlc_id: prev_htlc_id,
+                                                                       incoming_packet_shared_secret: incoming_shared_secret,
+                                                                       // Phantom payments are only PendingHTLCRouting::Receive.
+                                                                       phantom_shared_secret: None,
+                                                                       blinded_failure: blinded.map(|b| b.failure),
+                                                               });
+                                                               let next_blinding_point = blinded.and_then(|b| {
+                                                                       let encrypted_tlvs_ss = self.node_signer.ecdh(
+                                                                               Recipient::Node, &b.inbound_blinding_point, None
+                                                                       ).unwrap().secret_bytes();
+                                                                       onion_utils::next_hop_pubkey(
+                                                                               &self.secp_ctx, b.inbound_blinding_point, &encrypted_tlvs_ss
+                                                                       ).ok()
+                                                               });
+
+                                                               // Forward the HTLC over the most appropriate channel with the corresponding peer,
+                                                               // applying non-strict forwarding.
+                                                               // The channel with the least amount of outbound liquidity will be used to maximize the
+                                                               // probability of being able to successfully forward a subsequent HTLC.
+                                                               let maybe_optimal_channel = peer_state.channel_by_id.values_mut().filter_map(|phase| match phase {
+                                                                       ChannelPhase::Funded(chan) => {
+                                                                               let balances = chan.context.get_available_balances(&self.fee_estimator);
+                                                                               if outgoing_amt_msat <= balances.next_outbound_htlc_limit_msat &&
+                                                                                       outgoing_amt_msat >= balances.next_outbound_htlc_minimum_msat &&
+                                                                                       chan.context.is_usable() {
+                                                                                       Some((chan, balances))
+                                                                               } else {
+                                                                                       None
+                                                                               }
                                                                        },
-                                                               }) => {
-                                                                       let logger = WithChannelContext::from(&self.logger, &chan.context, Some(payment_hash));
-                                                                       log_trace!(logger, "Adding HTLC from short id {} with payment_hash {} to channel with short id {} after delay", prev_short_channel_id, &payment_hash, short_chan_id);
-                                                                       let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
-                                                                               short_channel_id: prev_short_channel_id,
-                                                                               user_channel_id: Some(prev_user_channel_id),
-                                                                               channel_id: prev_channel_id,
-                                                                               outpoint: prev_funding_outpoint,
-                                                                               htlc_id: prev_htlc_id,
-                                                                               incoming_packet_shared_secret: incoming_shared_secret,
-                                                                               // Phantom payments are only PendingHTLCRouting::Receive.
-                                                                               phantom_shared_secret: None,
-                                                                               blinded_failure: blinded.map(|b| b.failure),
-                                                                       });
-                                                                       let next_blinding_point = blinded.and_then(|b| {
-                                                                               let encrypted_tlvs_ss = self.node_signer.ecdh(
-                                                                                       Recipient::Node, &b.inbound_blinding_point, None
-                                                                               ).unwrap().secret_bytes();
-                                                                               onion_utils::next_hop_pubkey(
-                                                                                       &self.secp_ctx, b.inbound_blinding_point, &encrypted_tlvs_ss
-                                                                               ).ok()
-                                                                       });
-                                                                       if let Err(e) = chan.queue_add_htlc(outgoing_amt_msat,
-                                                                               payment_hash, outgoing_cltv_value, htlc_source.clone(),
-                                                                               onion_packet, skimmed_fee_msat, next_blinding_point, &self.fee_estimator,
-                                                                               &&logger)
-                                                                       {
-                                                                               if let ChannelError::Ignore(msg) = e {
-                                                                                       log_trace!(logger, "Failed to forward HTLC with payment_hash {}: {}", &payment_hash, msg);
+                                                                       _ => None,
+                                                               }).min_by_key(|(_, balances)| balances.next_outbound_htlc_limit_msat).map(|(c, _)| c);
+                                                               let optimal_channel = match maybe_optimal_channel {
+                                                                       Some(chan) => chan,
+                                                                       None => {
+                                                                               // Fall back to the specified channel to return an appropriate error.
+                                                                               if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+                                                                                       chan
                                                                                } else {
-                                                                                       panic!("Stated return value requirements in send_htlc() were not met");
+                                                                                       forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+                                                                                       break;
                                                                                }
+                                                                       }
+                                                               };
+
+                                                               let logger = WithChannelContext::from(&self.logger, &optimal_channel.context, Some(payment_hash));
+                                                               let channel_description = if optimal_channel.context.get_short_channel_id() == Some(short_chan_id) {
+                                                                       "specified"
+                                                               } else {
+                                                                       "alternate"
+                                                               };
+                                                               log_trace!(logger, "Forwarding HTLC from SCID {} with payment_hash {} and next hop SCID {} over {} channel {} with corresponding peer {}",
+                                                                       prev_short_channel_id, &payment_hash, short_chan_id, channel_description, optimal_channel.context.channel_id(), &counterparty_node_id);
+                                                               if let Err(e) = optimal_channel.queue_add_htlc(outgoing_amt_msat,
+                                                                               payment_hash, outgoing_cltv_value, htlc_source.clone(),
+                                                                               onion_packet.clone(), skimmed_fee_msat, next_blinding_point, &self.fee_estimator,
+                                                                               &&logger)
+                                                               {
+                                                                       if let ChannelError::Ignore(msg) = e {
+                                                                               log_trace!(logger, "Failed to forward HTLC with payment_hash {} to peer {}: {}", &payment_hash, &counterparty_node_id, msg);
+                                                                       } else {
+                                                                               panic!("Stated return value requirements in send_htlc() were not met");
+                                                                       }
+
+                                                                       if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
                                                                                let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan);
                                                                                failed_forwards.push((htlc_source, payment_hash,
                                                                                        HTLCFailReason::reason(failure_code, data),
                                                                                        HTLCDestination::NextHopChannel { node_id: Some(chan.context.get_counterparty_node_id()), channel_id: forward_chan_id }
                                                                                ));
-                                                                               continue;
+                                                                       } else {
+                                                                               forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+                                                                               break;
                                                                        }
-                                                                       None
-                                                               },
-                                                               HTLCForwardInfo::AddHTLC { .. } => {
-                                                                       panic!("short_channel_id != 0 should imply any pending_forward entries are of type Forward");
-                                                               },
-                                                               HTLCForwardInfo::FailHTLC { htlc_id, err_packet } => {
+                                                               }
+                                                               None
+                                                       },
+                                                       HTLCForwardInfo::AddHTLC { .. } => {
+                                                               panic!("short_channel_id != 0 should imply any pending_forward entries are of type Forward");
+                                                       },
+                                                       HTLCForwardInfo::FailHTLC { htlc_id, ref err_packet } => {
+                                                               if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+                                                                       let logger = WithChannelContext::from(&self.logger, &chan.context, None);
                                                                        log_trace!(logger, "Failing HTLC back to channel with short id {} (backward HTLC ID {}) after delay", short_chan_id, htlc_id);
-                                                                       Some((chan.queue_fail_htlc(htlc_id, err_packet, &&logger), htlc_id))
-                                                               },
-                                                               HTLCForwardInfo::FailMalformedHTLC { htlc_id, failure_code, sha256_of_onion } => {
+                                                                       Some((chan.queue_fail_htlc(htlc_id, err_packet.clone(), &&logger), htlc_id))
+                                                               } else {
+                                                                       forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+                                                                       break;
+                                                               }
+                                                       },
+                                                       HTLCForwardInfo::FailMalformedHTLC { htlc_id, failure_code, sha256_of_onion } => {
+                                                               if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+                                                                       let logger = WithChannelContext::from(&self.logger, &chan.context, None);
                                                                        log_trace!(logger, "Failing malformed HTLC back to channel with short id {} (backward HTLC ID {}) after delay", short_chan_id, htlc_id);
                                                                        let res = chan.queue_fail_malformed_htlc(
                                                                                htlc_id, failure_code, sha256_of_onion, &&logger
                                                                        );
                                                                        Some((res, htlc_id))
-                                                               },
-                                                       };
-                                                       if let Some((queue_fail_htlc_res, htlc_id)) = queue_fail_htlc_res {
-                                                               if let Err(e) = queue_fail_htlc_res {
-                                                                       if let ChannelError::Ignore(msg) = e {
+                                                               } else {
+                                                                       forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+                                                                       break;
+                                                               }
+                                                       },
+                                               };
+                                               if let Some((queue_fail_htlc_res, htlc_id)) = queue_fail_htlc_res {
+                                                       if let Err(e) = queue_fail_htlc_res {
+                                                               if let ChannelError::Ignore(msg) = e {
+                                                                       if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+                                                                               let logger = WithChannelContext::from(&self.logger, &chan.context, None);
                                                                                log_trace!(logger, "Failed to fail HTLC with ID {} backwards to short_id {}: {}", htlc_id, short_chan_id, msg);
-                                                                       } else {
-                                                                               panic!("Stated return value requirements in queue_fail_{{malformed_}}htlc() were not met");
                                                                        }
-                                                                       // fail-backs are best-effort, we probably already have one
-                                                                       // pending, and if not that's OK, if not, the channel is on
-                                                                       // the chain and sending the HTLC-Timeout is their problem.
-                                                                       continue;
+                                                               } else {
+                                                                       panic!("Stated return value requirements in queue_fail_{{malformed_}}htlc() were not met");
                                                                }
+                                                               // fail-backs are best-effort, we probably already have one
+                                                               // pending, and if not that's OK, if not, the channel is on
+                                                               // the chain and sending the HTLC-Timeout is their problem.
+                                                               continue;
                                                        }
                                                }
-                                       } else {
-                                               forwarding_channel_not_found!();
-                                               continue;
                                        }
                                } else {
                                        'next_forwardable_htlc: for forward_info in pending_forwards.drain(..) {
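
The reworked loop above implements "non-strict" forwarding: rather than forwarding only over the channel named by the onion's SCID, it may use any usable channel with the same peer, preferring the candidate with the least outbound-liquidity headroom so larger channels stay available for later HTLCs. A simplified, hedged sketch of just that selection rule (field names mirror `get_available_balances`; the surrounding types are invented for illustration):

    // Simplified stand-in for a funded channel's spendable-balance view.
    struct Candidate {
        is_usable: bool,
        next_outbound_htlc_limit_msat: u64,
        next_outbound_htlc_minimum_msat: u64,
    }

    // Pick the usable channel that can carry `amt_msat` and has the smallest outbound limit,
    // mirroring the `filter_map(..).min_by_key(..)` selection in the diff above.
    fn pick_forwarding_channel(candidates: &mut [Candidate], amt_msat: u64) -> Option<&mut Candidate> {
        candidates.iter_mut()
            .filter(|c| c.is_usable
                && amt_msat <= c.next_outbound_htlc_limit_msat
                && amt_msat >= c.next_outbound_htlc_minimum_msat)
            .min_by_key(|c| c.next_outbound_htlc_limit_msat)
    }
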
@@ -5511,7 +5612,7 @@ where
                                        log_error!(logger,
                                                "Force-closing pending channel with ID {} for not establishing in a timely manner", chan_id);
                                        update_maps_on_chan_removal!(self, &context);
-                                       shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed));
+                                       shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
                                        pending_msg_events.push(MessageSendEvent::HandleError {
                                                node_id: counterparty_node_id,
                                                action: msgs::ErrorAction::SendErrorMessage {
@@ -6110,21 +6211,13 @@ where
                }
                if valid_mpp {
                        for htlc in sources.drain(..) {
-                               let prev_hop_chan_id = htlc.prev_hop.channel_id;
-                               if let Err((pk, err)) = self.claim_funds_from_hop(
+                               self.claim_funds_from_hop(
                                        htlc.prev_hop, payment_preimage,
                                        |_, definitely_duplicate| {
                                                debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
                                                Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash })
                                        }
-                               ) {
-                                       if let msgs::ErrorAction::IgnoreError = err.err.action {
-                                               // We got a temporary failure updating monitor, but will claim the
-                                               // HTLC when the monitor updating is restored (or on chain).
-                                               let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id), Some(payment_hash));
-                                               log_error!(logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
-                                       } else { errs.push((pk, err)); }
-                               }
+                               );
                        }
                }
                if !valid_mpp {
@@ -6146,9 +6239,10 @@ where
                }
        }
 
-       fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>, bool) -> Option<MonitorUpdateCompletionAction>>(&self,
-               prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage, completion_action: ComplFunc)
-       -> Result<(), (PublicKey, MsgHandleErrInternal)> {
+       fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>, bool) -> Option<MonitorUpdateCompletionAction>>(
+               &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
+               completion_action: ComplFunc,
+       ) {
                //TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
                // If we haven't yet run background events assume we're still deserializing and shouldn't
@@ -6210,7 +6304,7 @@ where
                                                                let action = if let Some(action) = completion_action(None, true) {
                                                                        action
                                                                } else {
-                                                                       return Ok(());
+                                                                       return;
                                                                };
                                                                mem::drop(peer_state_lock);
 
@@ -6226,7 +6320,7 @@ where
                                                                } else {
                                                                        debug_assert!(false,
                                                                                "Duplicate claims should always free another channel immediately");
-                                                                       return Ok(());
+                                                                       return;
                                                                };
                                                                if let Some(peer_state_mtx) = per_peer_state.get(&node_id) {
                                                                        let mut peer_state = peer_state_mtx.lock().unwrap();
@@ -6251,7 +6345,7 @@ where
                                                        }
                                                }
                                        }
-                                       return Ok(());
+                                       return;
                                }
                        }
                }
@@ -6299,7 +6393,6 @@ where
                // generally always allowed to be duplicative (and it's specifically noted in
                // `PaymentForwarded`).
                self.handle_monitor_update_completion_actions(completion_action(None, false));
-               Ok(())
        }
 
        fn finalize_claims(&self, sources: Vec<HTLCSource>) {
@@ -6332,7 +6425,7 @@ where
                                let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
                                #[cfg(debug_assertions)]
                                let claiming_chan_funding_outpoint = hop_data.outpoint;
-                               let res = self.claim_funds_from_hop(hop_data, payment_preimage,
+                               self.claim_funds_from_hop(hop_data, payment_preimage,
                                        |htlc_claim_value_msat, definitely_duplicate| {
                                                let chan_to_release =
                                                        if let Some(node_id) = next_channel_counterparty_node_id {
@@ -6426,10 +6519,6 @@ where
                                                        })
                                                }
                                        });
-                               if let Err((pk, err)) = res {
-                                       let result: Result<(), _> = Err(err);
-                                       let _ = handle_error!(self, result, pk);
-                               }
                        },
                }
        }
@@ -6613,7 +6702,7 @@ where
                log_trace!(logger, "ChannelMonitor updated to {}. Current highest is {}. {} pending in-flight updates.",
                        highest_applied_update_id, channel.context.get_latest_monitor_update_id(),
                        remaining_in_flight);
-               if !channel.is_awaiting_monitor_update() || channel.context.get_latest_monitor_update_id() != highest_applied_update_id {
+               if !channel.is_awaiting_monitor_update() || remaining_in_flight != 0 {
                        return;
                }
                handle_monitor_update_completion!(self, peer_state_lock, peer_state, per_peer_state, channel);
@@ -7016,7 +7105,7 @@ where
                                },
                                Some(mut phase) => {
                                        let err_msg = format!("Got an unexpected funding_created message from peer with counterparty_node_id {}", counterparty_node_id);
-                                       let err = ChannelError::Close(err_msg);
+                                       let err = ChannelError::close(err_msg);
                                        return Err(convert_chan_phase_err!(self, err, &mut phase, &msg.temporary_channel_id).1);
                                },
                                None => return Err(MsgHandleErrInternal::send_err_msg_no_close(format!("Got a message for a channel from the wrong node! No such channel for the passed counterparty_node_id {}", counterparty_node_id), msg.temporary_channel_id))
@@ -7031,7 +7120,7 @@ where
                        // `update_maps_on_chan_removal`), we'll remove the existing channel
                        // from `outpoint_to_peer`. Thus, we must first unset the funding outpoint
                        // on the channel.
-                       let err = ChannelError::Close($err.to_owned());
+                       let err = ChannelError::close($err.to_owned());
                        chan.unset_funding_info(msg.temporary_channel_id);
                        return Err(convert_chan_phase_err!(self, err, chan, &funded_channel_id, UNFUNDED_CHANNEL).1);
                } } }
@@ -7116,7 +7205,7 @@ where
                                                                } else { unreachable!(); }
                                                                Ok(())
                                                        } else {
-                                                               let e = ChannelError::Close("Channel funding outpoint was a duplicate".to_owned());
+                                                               let e = ChannelError::close("Channel funding outpoint was a duplicate".to_owned());
                                                                // We weren't able to watch the channel to begin with, so no
                                                                // updates should be made on it. Previously, full_stack_target
                                                                // found an (unreachable) panic when the monitor update contained
@@ -7187,7 +7276,7 @@ where
 
                                        Ok(())
                                } else {
-                                       try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got a channel_ready message for an unfunded channel!".into())), chan_phase_entry)
                                }
                        },
@@ -7302,7 +7391,7 @@ where
                                                        (tx, Some(remove_channel_phase!(self, chan_phase_entry)), shutdown_result)
                                                } else { (tx, None, shutdown_result) }
                                        } else {
-                                               return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                               return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                        "Got a closing_signed message for an unfunded channel!".into())), chan_phase_entry);
                                        }
                                },
@@ -7402,7 +7491,7 @@ where
                                        }
                                        try_chan_phase_entry!(self, chan.update_add_htlc(&msg, pending_forward_info, &self.fee_estimator), chan_phase_entry);
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got an update_add_htlc message for an unfunded channel!".into())), chan_phase_entry);
                                }
                        },
@@ -7446,7 +7535,7 @@ where
                                                next_user_channel_id = chan.context.get_user_id();
                                                res
                                        } else {
-                                               return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                               return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                        "Got an update_fulfill_htlc message for an unfunded channel!".into())), chan_phase_entry);
                                        }
                                },
@@ -7477,7 +7566,7 @@ where
                                if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
                                        try_chan_phase_entry!(self, chan.update_fail_htlc(&msg, HTLCFailReason::from_msg(msg)), chan_phase_entry);
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got an update_fail_htlc message for an unfunded channel!".into())), chan_phase_entry);
                                }
                        },
@@ -7500,13 +7589,13 @@ where
                match peer_state.channel_by_id.entry(msg.channel_id) {
                        hash_map::Entry::Occupied(mut chan_phase_entry) => {
                                if (msg.failure_code & 0x8000) == 0 {
-                                       let chan_err: ChannelError = ChannelError::Close("Got update_fail_malformed_htlc with BADONION not set".to_owned());
+                                       let chan_err = ChannelError::close("Got update_fail_malformed_htlc with BADONION not set".to_owned());
                                        try_chan_phase_entry!(self, Err(chan_err), chan_phase_entry);
                                }
                                if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
                                        try_chan_phase_entry!(self, chan.update_fail_malformed_htlc(&msg, HTLCFailReason::reason(msg.failure_code, msg.sha256_of_onion.to_vec())), chan_phase_entry);
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got an update_fail_malformed_htlc message for an unfunded channel!".into())), chan_phase_entry);
                                }
                                Ok(())
@@ -7536,7 +7625,7 @@ where
                                        }
                                        Ok(())
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got a commitment_signed message for an unfunded channel!".into())), chan_phase_entry);
                                }
                        },
@@ -7732,7 +7821,7 @@ where
                                                }
                                                htlcs_to_fail
                                        } else {
-                                               return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                               return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                        "Got a revoke_and_ack message for an unfunded channel!".into())), chan_phase_entry);
                                        }
                                },
@@ -7758,7 +7847,7 @@ where
                                        let logger = WithChannelContext::from(&self.logger, &chan.context, None);
                                        try_chan_phase_entry!(self, chan.update_fee(&self.fee_estimator, &msg, &&logger), chan_phase_entry);
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got an update_fee message for an unfunded channel!".into())), chan_phase_entry);
                                }
                        },
@@ -7793,7 +7882,7 @@ where
                                                update_msg: Some(self.get_channel_update_for_broadcast(chan).unwrap()),
                                        });
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got an announcement_signatures message for an unfunded channel!".into())), chan_phase_entry);
                                }
                        },
@@ -7845,7 +7934,7 @@ where
                                                }
                                        }
                                } else {
-                                       return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                       return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                "Got a channel_update for an unfunded channel!".into())), chan_phase_entry);
                                }
                        },
@@ -7907,7 +7996,7 @@ where
                                                }
                                                need_lnd_workaround
                                        } else {
-                                               return try_chan_phase_entry!(self, Err(ChannelError::Close(
+                                               return try_chan_phase_entry!(self, Err(ChannelError::close(
                                                        "Got a channel_reestablish message for an unfunded channel!".into())), chan_phase_entry);
                                        }
                                },
@@ -7998,7 +8087,7 @@ where
                                                                                let reason = if let MonitorEvent::HolderForceClosedWithInfo { reason, .. } = monitor_event {
                                                                                        reason
                                                                                } else {
-                                                                                       ClosureReason::HolderForceClosed
+                                                                                       ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }
                                                                                };
                                                                                failed_channels.push(chan.context.force_shutdown(false, reason.clone()));
                                                                                if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
@@ -8261,10 +8350,8 @@ macro_rules! create_offer_builder { ($self: ident, $builder: ty) => {
        ///
        /// Uses [`MessageRouter`] to construct a [`BlindedPath`] for the offer based on the given
        /// `absolute_expiry` according to [`MAX_SHORT_LIVED_RELATIVE_EXPIRY`]. See those docs for
-       /// privacy implications. However, if one is not found, uses a one-hop [`BlindedPath`] with
-       /// [`ChannelManager::get_our_node_id`] as the introduction node instead. In the latter case,
-       /// the node must be announced, otherwise, there is no way to find a path to the introduction
-       /// node in order to send the [`InvoiceRequest`].
+       /// privacy implications as well as those of the parameterized [`Router`], which implements
+       /// [`MessageRouter`].
        ///
        /// Also, uses a derived signing pubkey in the offer for recipient privacy.
        ///
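
The reworded docs defer blinded-path construction, and its privacy trade-offs, to the `MessageRouter` implementation. As a rough illustration of the expiry-based distinction the docs point to, here is a hypothetical helper with an assumed constant value; the real constant and the actual path-selection policy live in LDK's router code, and the refund-builder hunk below carries the same doc change:

```rust
use core::time::Duration;

// Hypothetical value for illustration only.
const MAX_SHORT_LIVED_RELATIVE_EXPIRY: Duration = Duration::from_secs(60 * 60 * 24);

/// Returns true when an offer expires soon enough (relative to `now`) that a
/// short-lived blinded-path construction would be considered.
fn is_short_lived(now: Duration, absolute_expiry: Option<Duration>) -> bool {
    match absolute_expiry {
        Some(expiry) => expiry.saturating_sub(now) <= MAX_SHORT_LIVED_RELATIVE_EXPIRY,
        // Offers with no expiry are treated as long-lived.
        None => false,
    }
}

fn main() {
    let now = Duration::from_secs(1_700_000_000);
    assert!(is_short_lived(now, Some(now + Duration::from_secs(3_600))));
    assert!(!is_short_lived(now, None));
}
```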
@@ -8329,10 +8416,8 @@ macro_rules! create_refund_builder { ($self: ident, $builder: ty) => {
        ///
        /// Uses [`MessageRouter`] to construct a [`BlindedPath`] for the refund based on the given
        /// `absolute_expiry` according to [`MAX_SHORT_LIVED_RELATIVE_EXPIRY`]. See those docs for
-       /// privacy implications. However, if one is not found, uses a one-hop [`BlindedPath`] with
-       /// [`ChannelManager::get_our_node_id`] as the introduction node instead. In the latter case,
-       /// the node must be announced, otherwise, there is no way to find a path to the introduction
-       /// node in order to send the [`Bolt12Invoice`].
+       /// privacy implications as well as those of the parameterized [`Router`], which implements
+       /// [`MessageRouter`].
        ///
        /// Also, uses a derived payer id in the refund for payer privacy.
        ///
@@ -8431,10 +8516,9 @@ where
        ///
        /// # Privacy
        ///
-       /// Uses a one-hop [`BlindedPath`] for the reply path with [`ChannelManager::get_our_node_id`]
-       /// as the introduction node and a derived payer id for payer privacy. As such, currently, the
-       /// node must be announced. Otherwise, there is no way to find a path to the introduction node
-       /// in order to send the [`Bolt12Invoice`].
+       /// For payer privacy, uses a derived payer id and [`MessageRouter::create_blinded_paths`]
+       /// to construct a [`BlindedPath`] for the reply path. For further privacy implications, see the
+       /// docs of the parameterized [`Router`], which implements [`MessageRouter`].
        ///
        /// # Limitations
        ///
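
The sketch below is purely conceptual: a std hasher stands in for LDK's actual key derivation to show the property the docs rely on, namely that the payer id is derived per-payment from a node-level secret, so invoices cannot be correlated with each other or with the node's public identity. All names here are illustrative.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for a real key-derivation function; illustrative only.
fn derive_payer_id(node_secret: u64, payment_id: u64) -> u64 {
    let mut hasher = DefaultHasher::new();
    (node_secret, payment_id).hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let node_secret = 0x5eed;
    let id_a = derive_payer_id(node_secret, 1);
    let id_b = derive_payer_id(node_secret, 2);
    // A fresh payer id per payment keeps payments unlinkable across invoices.
    assert_ne!(id_a, id_b);
}
```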
@@ -8752,7 +8836,9 @@ where
 
                let peers = self.per_peer_state.read().unwrap()
                        .iter()
-                       .filter(|(_, peer)| peer.lock().unwrap().latest_features.supports_onion_messages())
+                       .map(|(node_id, peer_state)| (node_id, peer_state.lock().unwrap()))
+                       .filter(|(_, peer)| peer.is_connected)
+                       .filter(|(_, peer)| peer.latest_features.supports_onion_messages())
                        .map(|(node_id, _)| *node_id)
                        .collect::<Vec<_>>();
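
The peer selection now locks each peer state once and additionally requires the peer to be connected, not merely to advertise onion-message support; the next hunk applies the same `is_connected` filter when building forward nodes. A self-contained sketch of the same iterator shape, using simplified stand-in types for the per-peer map:

```rust
use std::collections::HashMap;
use std::sync::Mutex;

// Simplified stand-in for the real per-peer state; illustrative only.
struct PeerState {
    is_connected: bool,
    supports_onion_messages: bool,
}

fn onion_message_peers(per_peer_state: &HashMap<u64, Mutex<PeerState>>) -> Vec<u64> {
    per_peer_state
        .iter()
        .map(|(node_id, peer_state)| (node_id, peer_state.lock().unwrap()))
        .filter(|(_, peer)| peer.is_connected)
        .filter(|(_, peer)| peer.supports_onion_messages)
        .map(|(node_id, _)| *node_id)
        .collect()
}

fn main() {
    let mut peers = HashMap::new();
    peers.insert(1u64, Mutex::new(PeerState { is_connected: true, supports_onion_messages: true }));
    peers.insert(2u64, Mutex::new(PeerState { is_connected: false, supports_onion_messages: true }));
    assert_eq!(onion_message_peers(&peers), vec![1]);
}
```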
 
@@ -8771,6 +8857,7 @@ where
                let peers = self.per_peer_state.read().unwrap()
                        .iter()
                        .map(|(node_id, peer_state)| (node_id, peer_state.lock().unwrap()))
+                       .filter(|(_, peer)| peer.is_connected)
                        .filter(|(_, peer)| peer.latest_features.supports_onion_messages())
                        .map(|(node_id, peer)| ForwardNode {
                                node_id: *node_id,
@@ -9177,7 +9264,38 @@ where
                                self, || -> NotifyOption { NotifyOption::DoPersist });
                *self.best_block.write().unwrap() = BestBlock::new(block_hash, height);
 
-               self.do_chain_event(Some(height), |channel| channel.best_block_updated(height, header.time, self.chain_hash, &self.node_signer, &self.default_configuration, &&WithChannelContext::from(&self.logger, &channel.context, None)));
+               let mut min_anchor_feerate = None;
+               let mut min_non_anchor_feerate = None;
+               if self.background_events_processed_since_startup.load(Ordering::Relaxed) {
+                       // If we're past the startup phase, update our feerate cache
+                       let mut last_days_feerates = self.last_days_feerates.lock().unwrap();
+                       if last_days_feerates.len() >= FEERATE_TRACKING_BLOCKS {
+                               last_days_feerates.pop_front();
+                       }
+                       let anchor_feerate = self.fee_estimator
+                               .bounded_sat_per_1000_weight(ConfirmationTarget::MinAllowedAnchorChannelRemoteFee);
+                       let non_anchor_feerate = self.fee_estimator
+                               .bounded_sat_per_1000_weight(ConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee);
+                       last_days_feerates.push_back((anchor_feerate, non_anchor_feerate));
+                       if last_days_feerates.len() >= FEERATE_TRACKING_BLOCKS {
+                               min_anchor_feerate = last_days_feerates.iter().map(|(f, _)| f).min().copied();
+                               min_non_anchor_feerate = last_days_feerates.iter().map(|(_, f)| f).min().copied();
+                       }
+               }
+
+               self.do_chain_event(Some(height), |channel| {
+                       let logger = WithChannelContext::from(&self.logger, &channel.context, None);
+                       if channel.context.get_channel_type().supports_anchors_zero_fee_htlc_tx() {
+                               if let Some(feerate) = min_anchor_feerate {
+                                       channel.check_for_stale_feerate(&logger, feerate)?;
+                               }
+                       } else {
+                               if let Some(feerate) = min_non_anchor_feerate {
+                                       channel.check_for_stale_feerate(&logger, feerate)?;
+                               }
+                       }
+                       channel.best_block_updated(height, header.time, self.chain_hash, &self.node_signer, &self.default_configuration, &&WithChannelContext::from(&self.logger, &channel.context, None))
+               });
 
                macro_rules! max_time {
                        ($timestamp: expr) => {
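
The feerate cache above only yields minimums once a full window of estimates has been collected, so a channel is never judged to have a stale feerate based on only a few recent blocks. A standalone sketch of that sliding-window logic with simplified types and the same pop-before-push ordering:

```rust
use std::collections::VecDeque;

const FEERATE_TRACKING_BLOCKS: usize = 144;

// Simplified stand-in for the in-memory feerate cache; illustrative only.
#[derive(Default)]
struct FeerateWindow {
    last_days_feerates: VecDeque<(u32, u32)>,
}

impl FeerateWindow {
    /// Records the (anchor, non-anchor) estimates seen at a new block and returns
    /// the per-kind minimums once the window is full.
    fn record(&mut self, anchor: u32, non_anchor: u32) -> Option<(u32, u32)> {
        if self.last_days_feerates.len() >= FEERATE_TRACKING_BLOCKS {
            self.last_days_feerates.pop_front();
        }
        self.last_days_feerates.push_back((anchor, non_anchor));
        if self.last_days_feerates.len() >= FEERATE_TRACKING_BLOCKS {
            let min_anchor = self.last_days_feerates.iter().map(|(a, _)| *a).min()?;
            let min_non_anchor = self.last_days_feerates.iter().map(|(_, n)| *n).min()?;
            Some((min_anchor, min_non_anchor))
        } else {
            None
        }
    }
}

fn main() {
    let mut window = FeerateWindow::default();
    let mut mins = None;
    for _ in 0..FEERATE_TRACKING_BLOCKS {
        mins = window.record(253, 1_000);
    }
    // Only after a full window are there minimums to compare channel feerates against.
    assert_eq!(mins, Some((253, 1_000)));
}
```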
@@ -10223,42 +10341,56 @@ where
                                };
 
                                match response {
-                                       Ok(invoice) => return responder.respond(OffersMessage::Invoice(invoice)),
-                                       Err(error) => return responder.respond(OffersMessage::InvoiceError(error.into())),
+                                       Ok(invoice) => responder.respond(OffersMessage::Invoice(invoice)),
+                                       Err(error) => responder.respond(OffersMessage::InvoiceError(error.into())),
                                }
                        },
                        OffersMessage::Invoice(invoice) => {
-                               let response = invoice
-                                       .verify(expanded_key, secp_ctx)
-                                       .map_err(|()| InvoiceError::from_string("Unrecognized invoice".to_owned()))
-                                       .and_then(|payment_id| {
+                               let result = match invoice.verify(expanded_key, secp_ctx) {
+                                       Ok(payment_id) => {
                                                let features = self.bolt12_invoice_features();
                                                if invoice.invoice_features().requires_unknown_bits_from(&features) {
                                                        Err(InvoiceError::from(Bolt12SemanticError::UnknownRequiredFeatures))
+                                               } else if self.default_configuration.manually_handle_bolt12_invoices {
+                                                       let event = Event::InvoiceReceived { payment_id, invoice, responder };
+                                                       self.pending_events.lock().unwrap().push_back((event, None));
+                                                       return ResponseInstruction::NoResponse;
                                                } else {
-                                                       self.send_payment_for_bolt12_invoice(&invoice, payment_id)
+                                                       self.send_payment_for_verified_bolt12_invoice(&invoice, payment_id)
                                                                .map_err(|e| {
                                                                        log_trace!(self.logger, "Failed paying invoice: {:?}", e);
                                                                        InvoiceError::from_string(format!("{:?}", e))
                                                                })
                                                }
-                                       });
+                                       },
+                                       Err(()) => Err(InvoiceError::from_string("Unrecognized invoice".to_owned())),
+                               };
 
-                               match (responder, response) {
-                                       (Some(responder), Err(e)) => responder.respond(OffersMessage::InvoiceError(e)),
-                                       (None, Err(_)) => {
-                                               log_trace!(
-                                                       self.logger,
-                                                       "A response was generated, but there is no reply_path specified for sending the response."
-                                               );
-                                               return ResponseInstruction::NoResponse;
-                                       }
-                                       _ => return ResponseInstruction::NoResponse,
+                               match result {
+                                       Ok(()) => ResponseInstruction::NoResponse,
+                                       Err(e) => match responder {
+                                               Some(responder) => responder.respond(OffersMessage::InvoiceError(e)),
+                                               None => {
+                                                       log_trace!(self.logger, "No reply path for sending invoice error: {:?}", e);
+                                                       ResponseInstruction::NoResponse
+                                               },
+                                       },
+                               }
+                       },
+                       #[cfg(async_payments)]
+                       OffersMessage::StaticInvoice(_invoice) => {
+                               match responder {
+                                       Some(responder) => {
+                                               responder.respond(OffersMessage::InvoiceError(
+                                                               InvoiceError::from_string("Static invoices not yet supported".to_string())
+                                               ))
+                                       },
+                                       None => return ResponseInstruction::NoResponse,
                                }
                        },
                        OffersMessage::InvoiceError(invoice_error) => {
                                log_trace!(self.logger, "Received invoice_error: {}", invoice_error);
-                               return ResponseInstruction::NoResponse;
+                               ResponseInstruction::NoResponse
                        },
                }
        }
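
The invoice handling is restructured so that, when `manually_handle_bolt12_invoices` is set, a verified invoice is queued as an `Event::InvoiceReceived` instead of being paid inline. A minimal stand-in sketch (assumed names, not LDK's API) of that branching:

```rust
// Assumed, simplified outcome type for illustration only.
#[derive(Debug)]
enum Outcome {
    PaidImmediately,
    QueuedInvoiceReceivedEvent,
    RespondWithError(String),
}

fn handle_verified_invoice(
    manually_handle_bolt12_invoices: bool,
    pay: impl Fn() -> Result<(), String>,
) -> Outcome {
    if manually_handle_bolt12_invoices {
        // Defer to the user, who will see the queued event and pay (or abandon) later.
        Outcome::QueuedInvoiceReceivedEvent
    } else {
        match pay() {
            Ok(()) => Outcome::PaidImmediately,
            Err(e) => Outcome::RespondWithError(e),
        }
    }
}

fn main() {
    println!("{:?}", handle_verified_invoice(true, || Ok(())));
    println!("{:?}", handle_verified_invoice(false, || Err("no route found".to_string())));
}
```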
@@ -11996,6 +12128,8 @@ where
                        node_signer: args.node_signer,
                        signer_provider: args.signer_provider,
 
+                       last_days_feerates: Mutex::new(VecDeque::new()),
+
                        logger: args.logger,
                        default_configuration: args.default_config,
                };
@@ -12533,7 +12667,7 @@ mod tests {
 
                nodes[0].node.force_close_channel_with_peer(&chan.2, &nodes[1].node.get_our_node_id(), None, true).unwrap();
                check_added_monitors!(nodes[0], 1);
-               check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+               check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
                // Confirm that the channel_update was not sent immediately to node[1] but was cached.
                let node_1_events = nodes[1].node.get_and_clear_pending_msg_events();
@@ -12592,7 +12726,7 @@ mod tests {
                nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
                check_closed_broadcast!(nodes[0], true);
                check_added_monitors!(nodes[0], 1);
-               check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+               check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 
                {
                        // Assert that nodes[1] is awaiting removal for nodes[0] once nodes[1] has been
@@ -13329,7 +13463,7 @@ mod tests {
                nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
                check_closed_broadcast(&nodes[0], 1, true);
                check_added_monitors(&nodes[0], 1);
-               check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+               check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
                {
                        let txn = nodes[0].tx_broadcaster.txn_broadcast();
                        assert_eq!(txn.len(), 1);