Merge pull request #2101 from TheBlueMatt/2023-03-one-less-sig
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 89cb317dc6c6705effe175a54e98d437727b9c92..b5e5a43fee828515cd6c3f5570113615354758c3 100644
@@ -35,6 +35,8 @@ use crate::chain::{Confirm, ChannelMonitorUpdateStatus, Watch, BestBlock};
 use crate::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator, LowerBoundedFeeEstimator};
 use crate::chain::channelmonitor::{ChannelMonitor, ChannelMonitorUpdate, ChannelMonitorUpdateStep, HTLC_FAIL_BACK_BUFFER, CLTV_CLAIM_BUFFER, LATENCY_GRACE_PERIOD_BLOCKS, ANTI_REORG_DELAY, MonitorEvent, CLOSED_CHANNEL_UPDATE_ID};
 use crate::chain::transaction::{OutPoint, TransactionData};
+use crate::events;
+use crate::events::{Event, EventHandler, EventsProvider, MessageSendEvent, MessageSendEventsProvider, ClosureReason, HTLCDestination};
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
 use crate::ln::{inbound_payment, PaymentHash, PaymentPreimage, PaymentSecret};
@@ -55,10 +57,9 @@ use crate::ln::outbound_payment::{OutboundPayments, PaymentAttempts, PendingOutb
 use crate::ln::wire::Encode;
 use crate::chain::keysinterface::{EntropySource, KeysManager, NodeSigner, Recipient, SignerProvider, ChannelSigner, WriteableEcdsaChannelSigner};
 use crate::util::config::{UserConfig, ChannelConfig};
-use crate::util::events::{Event, EventHandler, EventsProvider, MessageSendEvent, MessageSendEventsProvider, ClosureReason, HTLCDestination};
-use crate::util::events;
 use crate::util::wakers::{Future, Notifier};
 use crate::util::scid_utils::fake_scid;
+use crate::util::string::UntrustedString;
 use crate::util::ser::{BigSize, FixedLengthReader, Readable, ReadableArgs, MaybeReadable, Writeable, Writer, VecWriter};
 use crate::util::logger::{Level, Logger};
 use crate::util::errors::APIError;
@@ -119,7 +120,10 @@ pub(super) struct PendingHTLCInfo {
        pub(super) routing: PendingHTLCRouting,
        pub(super) incoming_shared_secret: [u8; 32],
        payment_hash: PaymentHash,
+       /// Amount received
        pub(super) incoming_amt_msat: Option<u64>, // Added in 0.0.113
+       /// The sender-intended amount to forward or receive (the actual amount received
+       /// may overshoot this in either case)
        pub(super) outgoing_amt_msat: u64,
        pub(super) outgoing_cltv_value: u32,
 }
@@ -191,14 +195,21 @@ struct ClaimableHTLC {
        cltv_expiry: u32,
        /// The amount (in msats) of this MPP part
        value: u64,
+       /// The amount (in msats) that the sender intended to be sent in this MPP
+       /// part (used for validating total MPP amount)
+       sender_intended_value: u64,
        onion_payload: OnionPayload,
        timer_ticks: u8,
-       /// The sum total of all MPP parts
+       /// The total value received for a payment (sum of all MPP parts if the payment is an MPP).
+       /// Gets set to the amount reported when pushing [`Event::PaymentClaimable`].
+       total_value_received: Option<u64>,
+       /// The sender-intended sum total of all MPP parts specified in the onion
        total_msat: u64,
 }
 
 /// A payment identifier used to uniquely identify a payment to LDK.
-/// (C-not exported) as we just use [u8; 32] directly
+///
+/// This is not exported to bindings users as we just use [u8; 32] directly
 #[derive(Hash, Copy, Clone, PartialEq, Eq, Debug)]
 pub struct PaymentId(pub [u8; 32]);
 
@@ -216,7 +227,8 @@ impl Readable for PaymentId {
 }
 
 /// An identifier used to uniquely identify an intercepted HTLC to LDK.
-/// (C-not exported) as we just use [u8; 32] directly
+///
+/// This is not exported to bindings users as we just use [u8; 32] directly
 #[derive(Hash, Copy, Clone, PartialEq, Eq, Debug)]
 pub struct InterceptId(pub [u8; 32]);
 
@@ -296,9 +308,9 @@ impl core::hash::Hash for HTLCSource {
                }
        }
 }
-#[cfg(not(feature = "grind_signatures"))]
-#[cfg(test)]
 impl HTLCSource {
+       #[cfg(not(feature = "grind_signatures"))]
+       #[cfg(test)]
        pub fn dummy() -> Self {
                HTLCSource::OutboundRoute {
                        path: Vec::new(),
@@ -308,6 +320,18 @@ impl HTLCSource {
                        payment_secret: None,
                }
        }
+
+       #[cfg(debug_assertions)]
+       /// Checks whether this HTLCSource could possibly match the given HTLC output in a commitment
+       /// transaction. Useful to ensure different data structures match up.
+       pub(crate) fn possibly_matches_output(&self, htlc: &super::chan_utils::HTLCOutputInCommitment) -> bool {
+               if let HTLCSource::OutboundRoute { first_hop_htlc_msat, .. } = self {
+                       *first_hop_htlc_msat == htlc.amount_msat
+               } else {
+                       // There's nothing we can check for forwarded HTLCs
+                       true
+               }
+       }
 }
 
 struct ReceiveError {
@@ -568,7 +592,7 @@ struct PendingInboundPayment {
 /// or, respectively, [`Router`] for its router, but this type alias chooses the concrete types
 /// of [`KeysManager`] and [`DefaultRouter`].
 ///
-/// (C-not exported) as Arcs don't make sense in bindings
+/// This is not exported to bindings users as Arcs don't make sense in bindings
 pub type SimpleArcChannelManager<M, T, F, L> = ChannelManager<
        Arc<M>,
        Arc<T>,
@@ -594,7 +618,7 @@ pub type SimpleArcChannelManager<M, T, F, L> = ChannelManager<
 /// or, respectively, [`Router`]  for its router, but this type alias chooses the concrete types
 /// of [`KeysManager`] and [`DefaultRouter`].
 ///
-/// (C-not exported) as Arcs don't make sense in bindings
+/// This is not exported to bindings users as Arcs don't make sense in bindings
 pub type SimpleRefChannelManager<'a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, M, T, F, L> = ChannelManager<&'a M, &'b T, &'c KeysManager, &'c KeysManager, &'c KeysManager, &'d F, &'e DefaultRouter<&'f NetworkGraph<&'g L>, &'g L, &'h Mutex<ProbabilisticScorer<&'f NetworkGraph<&'g L>, &'g L>>>, &'g L>;
 
 /// Manager which keeps track of a number of channels and sends messages to the appropriate
@@ -1120,6 +1144,11 @@ pub struct ChannelDetails {
        /// inbound. This may be zero for inbound channels serialized with LDK versions prior to
        /// 0.0.113.
        pub user_channel_id: u128,
+       /// The currently negotiated fee rate denominated in satoshi per 1000 weight units,
+       /// which is applied to commitment and HTLC transactions.
+       ///
+       /// This value will be `None` for objects serialized with LDK versions prior to 0.0.115.
+       pub feerate_sat_per_1000_weight: Option<u32>,
        /// Our total balance.  This is the amount we would get if we close the channel.
        /// This value is not exact. Due to various in-flight changes and feerate changes, exactly this
        /// amount is not likely to be recoverable on close.
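
A rough caller-side sketch of the new field (illustrative only; the `channel_manager` handle and the printing are assumptions, not part of this diff):

// Inspect the negotiated feerate of each channel. `None` is expected for
// ChannelDetails objects serialized with LDK versions prior to 0.0.115.
for details in channel_manager.list_channels() {
	match details.feerate_sat_per_1000_weight {
		Some(feerate) => println!("channel {:02x?}: {} sat per 1000 weight", details.channel_id, feerate),
		None => println!("channel {:02x?}: feerate unknown", details.channel_id),
	}
}
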
@@ -1262,6 +1291,7 @@ impl ChannelDetails {
                        outbound_scid_alias: if channel.is_usable() { Some(channel.outbound_scid_alias()) } else { None },
                        inbound_scid_alias: channel.latest_inbound_scid_alias(),
                        channel_value_satoshis: channel.get_value_satoshis(),
+                       feerate_sat_per_1000_weight: Some(channel.get_feerate_sat_per_1000_weight()),
                        unspendable_punishment_reserve: to_self_reserve_satoshis,
                        balance_msat: balance.balance_msat,
                        inbound_capacity_msat: balance.inbound_capacity_msat,
@@ -1478,18 +1508,31 @@ macro_rules! send_channel_ready {
        }}
 }
 
+macro_rules! emit_channel_pending_event {
+       ($locked_events: expr, $channel: expr) => {
+               if $channel.should_emit_channel_pending_event() {
+                       $locked_events.push(events::Event::ChannelPending {
+                               channel_id: $channel.channel_id(),
+                               former_temporary_channel_id: $channel.temporary_channel_id(),
+                               counterparty_node_id: $channel.get_counterparty_node_id(),
+                               user_channel_id: $channel.get_user_id(),
+                               funding_txo: $channel.get_funding_txo().unwrap().into_bitcoin_outpoint(),
+                       });
+                       $channel.set_channel_pending_event_emitted();
+               }
+       }
+}
+
 macro_rules! emit_channel_ready_event {
-       ($self: expr, $channel: expr) => {
+       ($locked_events: expr, $channel: expr) => {
                if $channel.should_emit_channel_ready_event() {
-                       {
-                               let mut pending_events = $self.pending_events.lock().unwrap();
-                               pending_events.push(events::Event::ChannelReady {
-                                       channel_id: $channel.channel_id(),
-                                       user_channel_id: $channel.get_user_id(),
-                                       counterparty_node_id: $channel.get_counterparty_node_id(),
-                                       channel_type: $channel.get_channel_type().clone(),
-                               });
-                       }
+                       debug_assert!($channel.channel_pending_event_emitted());
+                       $locked_events.push(events::Event::ChannelReady {
+                               channel_id: $channel.channel_id(),
+                               user_channel_id: $channel.get_user_id(),
+                               counterparty_node_id: $channel.get_counterparty_node_id(),
+                               channel_type: $channel.get_channel_type().clone(),
+                       });
                        $channel.set_channel_ready_event_emitted();
                }
        }
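
A hedged sketch of how an event consumer might handle the new `ChannelPending` event alongside `ChannelReady` (the handler function and the logging are illustrative assumptions, not part of this change):

use lightning::events::Event;

// ChannelPending fires once the funding transaction has been exchanged with the
// counterparty; ChannelReady follows once the channel can actually be used.
fn handle_event(event: Event) {
	match event {
		Event::ChannelPending { channel_id, counterparty_node_id, funding_txo, .. } => {
			println!("channel {:02x?} with {} awaiting confirmation of {:?}",
				channel_id, counterparty_node_id, funding_txo);
		},
		Event::ChannelReady { channel_id, counterparty_node_id, .. } => {
			println!("channel {:02x?} with {} is ready for payments", channel_id, counterparty_node_id);
		},
		_ => {},
	}
}
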
@@ -1940,7 +1983,7 @@ where
        /// [`ChannelConfig::force_close_avoidance_max_fee_satoshis`]: crate::util::config::ChannelConfig::force_close_avoidance_max_fee_satoshis
        /// [`Background`]: crate::chain::chaininterface::ConfirmationTarget::Background
        /// [`Normal`]: crate::chain::chaininterface::ConfirmationTarget::Normal
-       /// [`SendShutdown`]: crate::util::events::MessageSendEvent::SendShutdown
+       /// [`SendShutdown`]: crate::events::MessageSendEvent::SendShutdown
        pub fn close_channel(&self, channel_id: &[u8; 32], counterparty_node_id: &PublicKey) -> Result<(), APIError> {
                self.close_channel_internal(channel_id, counterparty_node_id, None)
        }
@@ -1964,7 +2007,7 @@ where
        /// [`ChannelConfig::force_close_avoidance_max_fee_satoshis`]: crate::util::config::ChannelConfig::force_close_avoidance_max_fee_satoshis
        /// [`Background`]: crate::chain::chaininterface::ConfirmationTarget::Background
        /// [`Normal`]: crate::chain::chaininterface::ConfirmationTarget::Normal
-       /// [`SendShutdown`]: crate::util::events::MessageSendEvent::SendShutdown
+       /// [`SendShutdown`]: crate::events::MessageSendEvent::SendShutdown
        pub fn close_channel_with_target_feerate(&self, channel_id: &[u8; 32], counterparty_node_id: &PublicKey, target_feerate_sats_per_1000_weight: u32) -> Result<(), APIError> {
                self.close_channel_internal(channel_id, counterparty_node_id, Some(target_feerate_sats_per_1000_weight))
        }
@@ -2000,7 +2043,7 @@ where
                        let peer_state = &mut *peer_state_lock;
                        if let hash_map::Entry::Occupied(chan) = peer_state.channel_by_id.entry(channel_id.clone()) {
                                if let Some(peer_msg) = peer_msg {
-                                       self.issue_channel_close_events(chan.get(),ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() });
+                                       self.issue_channel_close_events(chan.get(),ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString(peer_msg.to_string()) });
                                } else {
                                        self.issue_channel_close_events(chan.get(),ClosureReason::HolderForceClosed);
                                }
@@ -2083,9 +2126,9 @@ where
                payment_hash: PaymentHash, amt_msat: u64, cltv_expiry: u32, phantom_shared_secret: Option<[u8; 32]>) -> Result<PendingHTLCInfo, ReceiveError>
        {
                // final_incorrect_cltv_expiry
-               if hop_data.outgoing_cltv_value != cltv_expiry {
+               if hop_data.outgoing_cltv_value > cltv_expiry {
                        return Err(ReceiveError {
-                               msg: "Upstream node set CLTV to the wrong value",
+                               msg: "Upstream node set CLTV to less than the CLTV set by the sender",
                                err_code: 18,
                                err_data: cltv_expiry.to_be_bytes().to_vec()
                        })
@@ -2169,7 +2212,7 @@ where
                        payment_hash,
                        incoming_shared_secret: shared_secret,
                        incoming_amt_msat: Some(amt_msat),
-                       outgoing_amt_msat: amt_msat,
+                       outgoing_amt_msat: hop_data.amt_to_forward,
                        outgoing_cltv_value: hop_data.outgoing_cltv_value,
                })
        }
@@ -2651,7 +2694,7 @@ where
        }
 
        #[cfg(test)]
-       fn test_send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
+       pub(super) fn test_send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
                self.pending_outbound_payments.test_send_payment_internal(route, payment_hash, payment_secret, keysend_preimage, payment_id, recv_value_msat, onion_session_privs, &self.node_signer, best_block_height,
@@ -2838,8 +2881,8 @@ where
        /// implemented by Bitcoin Core wallet. See <https://bitcoinops.org/en/topics/fee-sniping/>
        /// for more details.
        ///
-       /// [`Event::FundingGenerationReady`]: crate::util::events::Event::FundingGenerationReady
-       /// [`Event::ChannelClosed`]: crate::util::events::Event::ChannelClosed
+       /// [`Event::FundingGenerationReady`]: crate::events::Event::FundingGenerationReady
+       /// [`Event::ChannelClosed`]: crate::events::Event::ChannelClosed
        pub fn funding_transaction_generated(&self, temporary_channel_id: &[u8; 32], counterparty_node_id: &PublicKey, funding_transaction: Transaction) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
@@ -3249,7 +3292,7 @@ where
                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
                                                                prev_short_channel_id, prev_htlc_id, prev_funding_outpoint, prev_user_channel_id,
                                                                forward_info: PendingHTLCInfo {
-                                                                       routing, incoming_shared_secret, payment_hash, outgoing_amt_msat, ..
+                                                                       routing, incoming_shared_secret, payment_hash, incoming_amt_msat, outgoing_amt_msat, ..
                                                                }
                                                        }) => {
                                                                let (cltv_expiry, onion_payload, payment_data, phantom_shared_secret) = match routing {
@@ -3263,7 +3306,7 @@ where
                                                                                panic!("short_channel_id == 0 should imply any pending_forward entries are of type Receive");
                                                                        }
                                                                };
-                                                               let claimable_htlc = ClaimableHTLC {
+                                                               let mut claimable_htlc = ClaimableHTLC {
                                                                        prev_hop: HTLCPreviousHopData {
                                                                                short_channel_id: prev_short_channel_id,
                                                                                outpoint: prev_funding_outpoint,
@@ -3271,8 +3314,13 @@ where
                                                                                incoming_packet_shared_secret: incoming_shared_secret,
                                                                                phantom_shared_secret,
                                                                        },
-                                                                       value: outgoing_amt_msat,
+                                                                       // We differentiate the received value from the sender intended value
+                                                                       // if possible so that we don't prematurely mark MPP payments complete
+                                                                       // if routing nodes overpay
+                                                                       value: incoming_amt_msat.unwrap_or(outgoing_amt_msat),
+                                                                       sender_intended_value: outgoing_amt_msat,
                                                                        timer_ticks: 0,
+                                                                       total_value_received: None,
                                                                        total_msat: if let Some(data) = &payment_data { data.total_msat } else { outgoing_amt_msat },
                                                                        cltv_expiry,
                                                                        onion_payload,
@@ -3317,7 +3365,7 @@ where
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                        continue
                                                                                }
-                                                                               let (_, htlcs) = claimable_payments.claimable_htlcs.entry(payment_hash)
+                                                                               let (_, ref mut htlcs) = claimable_payments.claimable_htlcs.entry(payment_hash)
                                                                                        .or_insert_with(|| (purpose(), Vec::new()));
                                                                                if htlcs.len() == 1 {
                                                                                        if let OnionPayload::Spontaneous(_) = htlcs[0].onion_payload {
@@ -3326,9 +3374,9 @@ where
                                                                                                continue
                                                                                        }
                                                                                }
-                                                                               let mut total_value = claimable_htlc.value;
+                                                                               let mut total_value = claimable_htlc.sender_intended_value;
                                                                                for htlc in htlcs.iter() {
-                                                                                       total_value += htlc.value;
+                                                                                       total_value += htlc.sender_intended_value;
                                                                                        match &htlc.onion_payload {
                                                                                                OnionPayload::Invoice { .. } => {
                                                                                                        if htlc.total_msat != $payment_data.total_msat {
@@ -3341,18 +3389,24 @@ where
                                                                                                _ => unreachable!(),
                                                                                        }
                                                                                }
-                                                                               if total_value >= msgs::MAX_VALUE_MSAT || total_value > $payment_data.total_msat {
-                                                                                       log_trace!(self.logger, "Failing HTLCs with payment_hash {} as the total value {} ran over expected value {} (or HTLCs were inconsistent)",
-                                                                                               log_bytes!(payment_hash.0), total_value, $payment_data.total_msat);
+                                                                               // The condition determining whether an MPP is complete must
+                                                                               // match exactly the condition used in `timer_tick_occurred`
+                                                                               if total_value >= msgs::MAX_VALUE_MSAT {
+                                                                                       fail_htlc!(claimable_htlc, payment_hash);
+                                                                               } else if total_value - claimable_htlc.sender_intended_value >= $payment_data.total_msat {
+                                                                                       log_trace!(self.logger, "Failing HTLC with payment_hash {} as payment is already claimable",
+                                                                                               log_bytes!(payment_hash.0));
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
-                                                                               } else if total_value == $payment_data.total_msat {
+                                                                               } else if total_value >= $payment_data.total_msat {
                                                                                        let prev_channel_id = prev_funding_outpoint.to_channel_id();
                                                                                        htlcs.push(claimable_htlc);
+                                                                                       let amount_msat = htlcs.iter().map(|htlc| htlc.value).sum();
+                                                                                       htlcs.iter_mut().for_each(|htlc| htlc.total_value_received = Some(amount_msat));
                                                                                        new_events.push(events::Event::PaymentClaimable {
                                                                                                receiver_node_id: Some(receiver_node_id),
                                                                                                payment_hash,
                                                                                                purpose: purpose(),
-                                                                                               amount_msat: total_value,
+                                                                                               amount_msat,
                                                                                                via_channel_id: Some(prev_channel_id),
                                                                                                via_user_channel_id: Some(prev_user_channel_id),
                                                                                        });
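
A worked numeric sketch of the completeness condition above (illustrative values only, not LDK code): with a sender-intended total of 100_000 msat split into parts intended at 60_000 and 40_000 msat, the payment completes on the second part even if a routing node delivered slightly more than intended, and the amount reported in `PaymentClaimable` is the sum actually received:

fn main() {
	let total_msat = 100_000u64;            // sender-intended total for the payment
	let intended = [60_000u64, 40_000];     // sender_intended_value of each MPP part
	let received = [60_000u64, 41_500];     // value actually delivered (overshoot allowed)

	// Completeness is judged on the sender-intended values...
	let intended_sum: u64 = intended.iter().sum();
	assert!(intended_sum >= total_msat);

	// ...while the amount surfaced in PaymentClaimable is the received sum.
	let amount_msat: u64 = received.iter().sum();
	assert_eq!(amount_msat, 101_500);

	// A further part arriving now would hit the "already claimable" branch, since
	// the running total minus that part's intended value already covers total_msat.
	let late_part_intended = 10_000u64;
	let total_value = intended_sum + late_part_intended;
	assert!(total_value - late_part_intended >= total_msat);
}
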
@@ -3406,13 +3460,15 @@ where
                                                                                                }
                                                                                                match claimable_payments.claimable_htlcs.entry(payment_hash) {
                                                                                                        hash_map::Entry::Vacant(e) => {
+                                                                                                               let amount_msat = claimable_htlc.value;
+                                                                                                               claimable_htlc.total_value_received = Some(amount_msat);
                                                                                                                let purpose = events::PaymentPurpose::SpontaneousPayment(preimage);
                                                                                                                e.insert((purpose.clone(), vec![claimable_htlc]));
                                                                                                                let prev_channel_id = prev_funding_outpoint.to_channel_id();
                                                                                                                new_events.push(events::Event::PaymentClaimable {
                                                                                                                        receiver_node_id: Some(receiver_node_id),
                                                                                                                        payment_hash,
-                                                                                                                       amount_msat: outgoing_amt_msat,
+                                                                                                                       amount_msat,
                                                                                                                        purpose,
                                                                                                                        via_channel_id: Some(prev_channel_id),
                                                                                                                        via_user_channel_id: Some(prev_user_channel_id),
@@ -3515,18 +3571,18 @@ where
        fn update_channel_fee(&self, chan_id: &[u8; 32], chan: &mut Channel<<SP::Target as SignerProvider>::Signer>, new_feerate: u32) -> NotifyOption {
                if !chan.is_outbound() { return NotifyOption::SkipPersist; }
                // If the feerate has decreased by less than half, don't bother
-               if new_feerate <= chan.get_feerate() && new_feerate * 2 > chan.get_feerate() {
+               if new_feerate <= chan.get_feerate_sat_per_1000_weight() && new_feerate * 2 > chan.get_feerate_sat_per_1000_weight() {
                        log_trace!(self.logger, "Channel {} does not qualify for a feerate change from {} to {}.",
-                               log_bytes!(chan_id[..]), chan.get_feerate(), new_feerate);
+                               log_bytes!(chan_id[..]), chan.get_feerate_sat_per_1000_weight(), new_feerate);
                        return NotifyOption::SkipPersist;
                }
                if !chan.is_live() {
                        log_trace!(self.logger, "Channel {} does not qualify for a feerate change from {} to {} as it cannot currently be updated (probably the peer is disconnected).",
-                               log_bytes!(chan_id[..]), chan.get_feerate(), new_feerate);
+                               log_bytes!(chan_id[..]), chan.get_feerate_sat_per_1000_weight(), new_feerate);
                        return NotifyOption::SkipPersist;
                }
                log_trace!(self.logger, "Channel {} qualifies for a feerate change from {} to {}.",
-                       log_bytes!(chan_id[..]), chan.get_feerate(), new_feerate);
+                       log_bytes!(chan_id[..]), chan.get_feerate_sat_per_1000_weight(), new_feerate);
 
                chan.queue_update_fee(new_feerate, &self.logger);
                NotifyOption::DoPersist
@@ -3672,7 +3728,9 @@ where
                                if let OnionPayload::Invoice { .. } = htlcs[0].onion_payload {
                                        // Check if we've received all the parts we need for an MPP (the value of the parts adds to total_msat).
                                        // In this case we're not going to handle any timeouts of the parts here.
-                                       if htlcs[0].total_msat == htlcs.iter().fold(0, |total, htlc| total + htlc.value) {
+                                       // This condition determining whether the MPP is complete here must match
+                                       // exactly the condition used in `process_pending_htlc_forwards`.
+                                       if htlcs[0].total_msat <= htlcs.iter().fold(0, |total, htlc| total + htlc.sender_intended_value) {
                                                return true;
                                        } else if htlcs.into_iter().any(|htlc| {
                                                htlc.timer_ticks += 1;
@@ -3898,8 +3956,8 @@ where
        /// event matches your expectation. If you fail to do so and call this method, you may provide
        /// the sender "proof-of-payment" when they did not fulfill the full expected payment.
        ///
-       /// [`Event::PaymentClaimable`]: crate::util::events::Event::PaymentClaimable
-       /// [`Event::PaymentClaimed`]: crate::util::events::Event::PaymentClaimed
+       /// [`Event::PaymentClaimable`]: crate::events::Event::PaymentClaimable
+       /// [`Event::PaymentClaimed`]: crate::events::Event::PaymentClaimed
        /// [`process_pending_events`]: EventsProvider::process_pending_events
        /// [`create_inbound_payment`]: Self::create_inbound_payment
        /// [`create_inbound_payment_for_hash`]: Self::create_inbound_payment_for_hash
@@ -3951,6 +4009,7 @@ where
                // provide the preimage, so worrying too much about the optimal handling isn't worth
                // it.
                let mut claimable_amt_msat = 0;
+               let mut prev_total_msat = None;
                let mut expected_amt_msat = None;
                let mut valid_mpp = true;
                let mut errs = Vec::new();
@@ -3978,14 +4037,22 @@ where
                                break;
                        }
 
-                       if expected_amt_msat.is_some() && expected_amt_msat != Some(htlc.total_msat) {
-                               log_error!(self.logger, "Somehow ended up with an MPP payment with different total amounts - this should not be reachable!");
+                       if prev_total_msat.is_some() && prev_total_msat != Some(htlc.total_msat) {
+                               log_error!(self.logger, "Somehow ended up with an MPP payment with different expected total amounts - this should not be reachable!");
+                               debug_assert!(false);
+                               valid_mpp = false;
+                               break;
+                       }
+                       prev_total_msat = Some(htlc.total_msat);
+
+                       if expected_amt_msat.is_some() && expected_amt_msat != htlc.total_value_received {
+                               log_error!(self.logger, "Somehow ended up with an MPP payment with different received total amounts - this should not be reachable!");
                                debug_assert!(false);
                                valid_mpp = false;
                                break;
                        }
+                       expected_amt_msat = htlc.total_value_received;
 
-                       expected_amt_msat = Some(htlc.total_msat);
                        if let OnionPayload::Spontaneous(_) = &htlc.onion_payload {
                                // We don't currently support MPP for spontaneous payments, so just check
                                // that there's one payment here and move on.
@@ -4142,6 +4209,7 @@ where
                                                                claim_from_onchain_tx: from_onchain,
                                                                prev_channel_id,
                                                                next_channel_id,
+                                                               outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
                                                        }})
                                                } else { None }
                                        });
@@ -4210,8 +4278,6 @@ where
                        });
                }
 
-               emit_channel_ready_event!(self, channel);
-
                macro_rules! handle_cs { () => {
                        if let Some(update) = commitment_update {
                                pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
@@ -4244,6 +4310,12 @@ where
                        self.tx_broadcaster.broadcast_transaction(&tx);
                }
 
+               {
+                       let mut pending_events = self.pending_events.lock().unwrap();
+                       emit_channel_pending_event!(pending_events, channel);
+                       emit_channel_ready_event!(pending_events, channel);
+               }
+
                htlc_forwards
        }
 
@@ -4668,7 +4740,10 @@ where
                                        }
                                }
 
-                               emit_channel_ready_event!(self, chan.get_mut());
+                               {
+                                       let mut pending_events = self.pending_events.lock().unwrap();
+                                       emit_channel_ready_event!(pending_events, chan.get_mut());
+                               }
 
                                Ok(())
                        },
@@ -5993,7 +6068,10 @@ where
                                                        }
                                                }
 
-                                               emit_channel_ready_event!(self, channel);
+                                               {
+                                                       let mut pending_events = self.pending_events.lock().unwrap();
+                                                       emit_channel_ready_event!(pending_events, channel);
+                                               }
 
                                                if let Some(announcement_sigs) = announcement_sigs {
                                                        log_trace!(self.logger, "Sending announcement_signatures for channel {}", log_bytes!(channel.channel_id()));
@@ -6104,34 +6182,11 @@ where
                }
        }
 
-       /// Blocks until ChannelManager needs to be persisted or a timeout is reached. It returns a bool
-       /// indicating whether persistence is necessary. Only one listener on
-       /// [`await_persistable_update`], [`await_persistable_update_timeout`], or a future returned by
-       /// [`get_persistable_update_future`] is guaranteed to be woken up.
-       ///
-       /// Note that this method is not available with the `no-std` feature.
+       /// Gets a [`Future`] that completes when this [`ChannelManager`] needs to be persisted.
        ///
-       /// [`await_persistable_update`]: Self::await_persistable_update
-       /// [`await_persistable_update_timeout`]: Self::await_persistable_update_timeout
-       /// [`get_persistable_update_future`]: Self::get_persistable_update_future
-       #[cfg(any(test, feature = "std"))]
-       pub fn await_persistable_update_timeout(&self, max_wait: Duration) -> bool {
-               self.persistence_notifier.wait_timeout(max_wait)
-       }
-
-       /// Blocks until ChannelManager needs to be persisted. Only one listener on
-       /// [`await_persistable_update`], `await_persistable_update_timeout`, or a future returned by
-       /// [`get_persistable_update_future`] is guaranteed to be woken up.
+       /// Note that callbacks registered on the [`Future`] MUST NOT call back into this
+       /// [`ChannelManager`] and should instead register actions to be taken later.
        ///
-       /// [`await_persistable_update`]: Self::await_persistable_update
-       /// [`get_persistable_update_future`]: Self::get_persistable_update_future
-       pub fn await_persistable_update(&self) {
-               self.persistence_notifier.wait()
-       }
-
-       /// Gets a [`Future`] that completes when a persistable update is available. Note that
-       /// callbacks registered on the [`Future`] MUST NOT call back into this [`ChannelManager`] and
-       /// should instead register actions to be taken later.
        pub fn get_persistable_update_future(&self) -> Future {
                self.persistence_notifier.get_future()
        }
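
A minimal non-blocking sketch of the replacement API, mirroring the test usage later in this diff (the `channel_manager` handle and the `persist` helper are assumptions; whether `poll_is_complete` is exposed outside test builds is not established here):

// If the manager has changed since it was last written out, persist it now and
// pick up a fresh Future on the next pass through the caller's loop.
if channel_manager.get_persistable_update_future().poll_is_complete() {
	persist(&channel_manager); // caller-provided persistence routine
}
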
@@ -6588,6 +6643,7 @@ impl Writeable for ChannelDetails {
                        (33, self.inbound_htlc_minimum_msat, option),
                        (35, self.inbound_htlc_maximum_msat, option),
                        (37, user_channel_id_high_opt, option),
+                       (39, self.feerate_sat_per_1000_weight, option),
                });
                Ok(())
        }
@@ -6623,6 +6679,7 @@ impl Readable for ChannelDetails {
                        (33, inbound_htlc_minimum_msat, option),
                        (35, inbound_htlc_maximum_msat, option),
                        (37, user_channel_id_high_opt, option),
+                       (39, feerate_sat_per_1000_weight, option),
                });
 
                // `user_channel_id` used to be a single u64 value. In order to remain backwards compatible with
@@ -6656,6 +6713,7 @@ impl Readable for ChannelDetails {
                        is_public: is_public.0.unwrap(),
                        inbound_htlc_minimum_msat,
                        inbound_htlc_maximum_msat,
+                       feerate_sat_per_1000_weight,
                })
        }
 }
@@ -6782,7 +6840,9 @@ impl Writeable for ClaimableHTLC {
                        (0, self.prev_hop, required),
                        (1, self.total_msat, required),
                        (2, self.value, required),
+                       (3, self.sender_intended_value, required),
                        (4, payment_data, option),
+                       (5, self.total_value_received, option),
                        (6, self.cltv_expiry, required),
                        (8, keysend_preimage, option),
                });
@@ -6794,15 +6854,19 @@ impl Readable for ClaimableHTLC {
        fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
                let mut prev_hop = crate::util::ser::RequiredWrapper(None);
                let mut value = 0;
+               let mut sender_intended_value = None;
                let mut payment_data: Option<msgs::FinalOnionHopData> = None;
                let mut cltv_expiry = 0;
+               let mut total_value_received = None;
                let mut total_msat = None;
                let mut keysend_preimage: Option<PaymentPreimage> = None;
                read_tlv_fields!(reader, {
                        (0, prev_hop, required),
                        (1, total_msat, option),
                        (2, value, required),
+                       (3, sender_intended_value, option),
                        (4, payment_data, option),
+                       (5, total_value_received, option),
                        (6, cltv_expiry, required),
                        (8, keysend_preimage, option)
                });
@@ -6830,6 +6894,8 @@ impl Readable for ClaimableHTLC {
                        prev_hop: prev_hop.0.unwrap(),
                        timer_ticks: 0,
                        value,
+                       sender_intended_value: sender_intended_value.unwrap_or(value),
+                       total_value_received,
                        total_msat: total_msat.unwrap(),
                        onion_payload,
                        cltv_expiry,
@@ -7226,7 +7292,7 @@ where
        /// In such cases the latest local transactions will be sent to the tx_broadcaster included in
        /// this struct.
        ///
-       /// (C-not exported) because we have no HashMap bindings
+       /// This is not exported to bindings users because we have no HashMap bindings
        pub channel_monitors: HashMap<OutPoint, &'a mut ChannelMonitor<<SP::Target as SignerProvider>::Signer>>,
 }
 
@@ -7301,6 +7367,7 @@ where
                let mut id_to_peer = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut short_to_chan_info = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut channel_closures = Vec::new();
+               let mut pending_background_events = Vec::new();
                for _ in 0..channel_count {
                        let mut channel: Channel<<SP::Target as SignerProvider>::Signer> = Channel::read(reader, (
                                &args.entropy_source, &args.signer_provider, best_block_height, &provided_channel_type_features(&args.default_config)
@@ -7330,9 +7397,11 @@ where
                                        log_error!(args.logger, " The channel will be force-closed and the latest commitment transaction from the ChannelMonitor broadcast.");
                                        log_error!(args.logger, " The ChannelMonitor for channel {} is at update_id {} but the ChannelManager is at update_id {}.",
                                                log_bytes!(channel.channel_id()), monitor.get_latest_update_id(), channel.get_latest_monitor_update_id());
-                                       let (_, mut new_failed_htlcs) = channel.force_shutdown(true);
+                                       let (monitor_update, mut new_failed_htlcs) = channel.force_shutdown(true);
+                                       if let Some(monitor_update) = monitor_update {
+                                               pending_background_events.push(BackgroundEvent::ClosingMonitorUpdate(monitor_update));
+                                       }
                                        failed_htlcs.append(&mut new_failed_htlcs);
-                                       monitor.broadcast_latest_holder_commitment_txn(&args.tx_broadcaster, &args.logger);
                                        channel_closures.push(events::Event::ChannelClosed {
                                                channel_id: channel.channel_id(),
                                                user_channel_id: channel.get_user_id(),
@@ -7397,10 +7466,13 @@ where
                        }
                }
 
-               for (funding_txo, monitor) in args.channel_monitors.iter_mut() {
+               for (funding_txo, _) in args.channel_monitors.iter() {
                        if !funding_txo_set.contains(funding_txo) {
-                               log_info!(args.logger, "Broadcasting latest holder commitment transaction for closed channel {}", log_bytes!(funding_txo.to_channel_id()));
-                               monitor.broadcast_latest_holder_commitment_txn(&args.tx_broadcaster, &args.logger);
+                               let monitor_update = ChannelMonitorUpdate {
+                                       update_id: CLOSED_CHANNEL_UPDATE_ID,
+                                       updates: vec![ChannelMonitorUpdateStep::ChannelForceClosed { should_broadcast: true }],
+                               };
+                               pending_background_events.push(BackgroundEvent::ClosingMonitorUpdate((*funding_txo, monitor_update)));
                        }
                }
 
@@ -7453,10 +7525,17 @@ where
                }
 
                let background_event_count: u64 = Readable::read(reader)?;
-               let mut pending_background_events_read: Vec<BackgroundEvent> = Vec::with_capacity(cmp::min(background_event_count as usize, MAX_ALLOC_SIZE/mem::size_of::<BackgroundEvent>()));
                for _ in 0..background_event_count {
                        match <u8 as Readable>::read(reader)? {
-                               0 => pending_background_events_read.push(BackgroundEvent::ClosingMonitorUpdate((Readable::read(reader)?, Readable::read(reader)?))),
+                               0 => {
+                                       let (funding_txo, monitor_update): (OutPoint, ChannelMonitorUpdate) = (Readable::read(reader)?, Readable::read(reader)?);
+                                       if pending_background_events.iter().find(|e| {
+                                               let BackgroundEvent::ClosingMonitorUpdate((pending_funding_txo, pending_monitor_update)) = e;
+                                               *pending_funding_txo == funding_txo && *pending_monitor_update == monitor_update
+                                       }).is_none() {
+                                               pending_background_events.push(BackgroundEvent::ClosingMonitorUpdate((funding_txo, monitor_update)));
+                                       }
+                               }
                                _ => return Err(DecodeError::InvalidValue),
                        }
                }
@@ -7831,7 +7910,7 @@ where
                        per_peer_state: FairRwLock::new(per_peer_state),
 
                        pending_events: Mutex::new(pending_events_read),
-                       pending_background_events: Mutex::new(pending_background_events_read),
+                       pending_background_events: Mutex::new(pending_background_events),
                        total_consistency_lock: RwLock::new(()),
                        persistence_notifier: Notifier::new(),
 
@@ -7862,8 +7941,10 @@ mod tests {
        use bitcoin::hashes::Hash;
        use bitcoin::hashes::sha256::Hash as Sha256;
        use bitcoin::secp256k1::{PublicKey, Secp256k1, SecretKey};
+       #[cfg(feature = "std")]
        use core::time::Duration;
        use core::sync::atomic::Ordering;
+       use crate::events::{Event, HTLCDestination, MessageSendEvent, MessageSendEventsProvider, ClosureReason};
        use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
        use crate::ln::channelmanager::{inbound_payment, PaymentId, PaymentSendFailure, InterceptId};
        use crate::ln::functional_test_utils::*;
@@ -7871,7 +7952,6 @@ mod tests {
        use crate::ln::msgs::ChannelMessageHandler;
        use crate::routing::router::{PaymentParameters, RouteParameters, find_route};
        use crate::util::errors::APIError;
-       use crate::util::events::{Event, HTLCDestination, MessageSendEvent, MessageSendEventsProvider, ClosureReason};
        use crate::util::test_utils;
        use crate::util::config::ChannelConfig;
        use crate::chain::keysinterface::EntropySource;
@@ -7887,9 +7967,9 @@ mod tests {
 
                // All nodes start with a persistable update pending as `create_network` connects each node
                // with all other nodes to make most tests simpler.
-               assert!(nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[2].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[1].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[2].node.get_persistable_update_future().poll_is_complete());
 
                let mut chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 
@@ -7903,19 +7983,19 @@ mod tests {
                        &nodes[0].node.get_our_node_id()).pop().unwrap();
 
                // The first two nodes (which opened a channel) should now require fresh persistence
-               assert!(nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[1].node.get_persistable_update_future().poll_is_complete());
                // ... but the last node should not.
-               assert!(!nodes[2].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[2].node.get_persistable_update_future().poll_is_complete());
                // After persisting the first two nodes they should no longer need fresh persistence.
-               assert!(!nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(!nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(!nodes[1].node.get_persistable_update_future().poll_is_complete());
 
                // Node 3, unrelated to the only channel, shouldn't care if it receives a channel_update
                // about the channel.
                nodes[2].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &chan.0);
                nodes[2].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &chan.1);
-               assert!(!nodes[2].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[2].node.get_persistable_update_future().poll_is_complete());
 
                // The nodes which are a party to the channel should also ignore messages from unrelated
                // parties.
@@ -7923,8 +8003,8 @@ mod tests {
                nodes[0].node.handle_channel_update(&nodes[2].node.get_our_node_id(), &chan.1);
                nodes[1].node.handle_channel_update(&nodes[2].node.get_our_node_id(), &chan.0);
                nodes[1].node.handle_channel_update(&nodes[2].node.get_our_node_id(), &chan.1);
-               assert!(!nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(!nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(!nodes[1].node.get_persistable_update_future().poll_is_complete());
 
                // At this point the channel info given by peers should still be the same.
                assert_eq!(nodes[0].node.list_channels()[0], node_a_chan_info);
@@ -7941,8 +8021,8 @@ mod tests {
                // persisted and that its channel info remains the same.
                nodes[0].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &as_update);
                nodes[1].node.handle_channel_update(&nodes[0].node.get_our_node_id(), &bs_update);
-               assert!(!nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(!nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(!nodes[1].node.get_persistable_update_future().poll_is_complete());
                assert_eq!(nodes[0].node.list_channels()[0], node_a_chan_info);
                assert_eq!(nodes[1].node.list_channels()[0], node_b_chan_info);
 
@@ -7950,8 +8030,8 @@ mod tests {
                // the channel info has updated.
                nodes[0].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &bs_update);
                nodes[1].node.handle_channel_update(&nodes[0].node.get_our_node_id(), &as_update);
-               assert!(nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[1].node.get_persistable_update_future().poll_is_complete());
                assert_ne!(nodes[0].node.list_channels()[0], node_a_chan_info);
                assert_ne!(nodes[1].node.list_channels()[0], node_b_chan_info);
        }
@@ -8212,7 +8292,7 @@ mod tests {
                assert!(updates.update_fee.is_none());
                nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &updates.update_add_htlcs[0]);
 
-               nodes[1].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Payment preimage didn't match payment hash".to_string(), 1);
+               nodes[1].logger.assert_log_contains("lightning::ln::channelmanager", "Payment preimage didn't match payment hash", 1);
        }
 
        #[test]
@@ -8255,7 +8335,7 @@ mod tests {
                assert!(updates.update_fee.is_none());
                nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &updates.update_add_htlcs[0]);
 
-               nodes[1].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "We don't support MPP keysend payments".to_string(), 1);
+               nodes[1].logger.assert_log_contains("lightning::ln::channelmanager", "We don't support MPP keysend payments", 1);
        }
 
        #[test]
@@ -8283,7 +8363,8 @@ mod tests {
 
                match nodes[0].node.send_payment(&route, payment_hash, &None, PaymentId(payment_hash.0)).unwrap_err() {
                        PaymentSendFailure::ParameterError(APIError::APIMisuseError { ref err }) => {
-                               assert!(regex::Regex::new(r"Payment secret is required for multi-path payments").unwrap().is_match(err))                        },
+                               assert!(regex::Regex::new(r"Payment secret is required for multi-path payments").unwrap().is_match(err))
+                       },
                        _ => panic!("unexpected error")
                }
        }
@@ -8343,7 +8424,7 @@ mod tests {
                match inbound_payment::verify(bad_payment_hash, &payment_data, nodes[0].node.highest_seen_timestamp.load(Ordering::Acquire) as u64, &nodes[0].node.inbound_payment_key, &nodes[0].logger) {
                        Ok(_) => panic!("Unexpected ok"),
                        Err(()) => {
-                               nodes[0].logger.assert_log_contains("lightning::ln::inbound_payment".to_string(), "Failing HTLC with user-generated payment_hash".to_string(), 1);
+                               nodes[0].logger.assert_log_contains("lightning::ln::inbound_payment", "Failing HTLC with user-generated payment_hash", 1);
                        }
                }
 
@@ -8395,6 +8476,7 @@ mod tests {
                        assert_eq!(nodes_0_lock.len(), 1);
                        assert!(nodes_0_lock.contains_key(channel_id));
                }
+               expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
 
                {
                        // Assert that `nodes[1]`'s `id_to_peer` map is populated with the channel as soon as
@@ -8407,6 +8489,7 @@ mod tests {
                let funding_signed = get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id());
                nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &funding_signed);
                check_added_monitors!(nodes[0], 1);
+               expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
                let (channel_ready, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
                let (announcement, nodes_0_update, nodes_1_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
                update_nodes_with_chan_announce(&nodes, 0, 1, &announcement, &nodes_0_update, &nodes_1_update);
@@ -8549,10 +8632,13 @@ mod tests {
 
                                nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &funding_created_msg);
                                check_added_monitors!(nodes[1], 1);
+                               expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
+
                                let funding_signed = get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id());
 
                                nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &funding_signed);
                                check_added_monitors!(nodes[0], 1);
+                               expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
                        }
                        open_channel_msg.temporary_channel_id = nodes[0].keys_manager.get_secure_random_bytes();
                }
@@ -8771,14 +8857,14 @@ pub mod bench {
        use crate::chain::Listen;
        use crate::chain::chainmonitor::{ChainMonitor, Persist};
        use crate::chain::keysinterface::{EntropySource, KeysManager, InMemorySigner};
-       use crate::ln::channelmanager::{self, BestBlock, ChainParameters, ChannelManager, PaymentHash, PaymentPreimage, PaymentId};
+       use crate::events::{Event, MessageSendEvent, MessageSendEventsProvider};
+       use crate::ln::channelmanager::{BestBlock, ChainParameters, ChannelManager, PaymentHash, PaymentPreimage, PaymentId};
        use crate::ln::functional_test_utils::*;
        use crate::ln::msgs::{ChannelMessageHandler, Init};
        use crate::routing::gossip::NetworkGraph;
        use crate::routing::router::{PaymentParameters, get_route};
        use crate::util::test_utils;
        use crate::util::config::UserConfig;
-       use crate::util::events::{Event, MessageSendEvent, MessageSendEventsProvider};
 
        use bitcoin::hashes::Hash;
        use bitcoin::hashes::sha256::Hash as Sha256;
@@ -8853,7 +8939,24 @@ pub mod bench {
                } else { panic!(); }
 
                node_b.handle_funding_created(&node_a.get_our_node_id(), &get_event_msg!(node_a_holder, MessageSendEvent::SendFundingCreated, node_b.get_our_node_id()));
+               let events_b = node_b.get_and_clear_pending_events();
+               assert_eq!(events_b.len(), 1);
+               match events_b[0] {
+                       Event::ChannelPending{ ref counterparty_node_id, .. } => {
+                               assert_eq!(*counterparty_node_id, node_a.get_our_node_id());
+                       },
+                       _ => panic!("Unexpected event"),
+               }
+
                node_a.handle_funding_signed(&node_b.get_our_node_id(), &get_event_msg!(node_b_holder, MessageSendEvent::SendFundingSigned, node_a.get_our_node_id()));
+               let events_a = node_a.get_and_clear_pending_events();
+               assert_eq!(events_a.len(), 1);
+               match events_a[0] {
+                       Event::ChannelPending{ ref counterparty_node_id, .. } => {
+                               assert_eq!(*counterparty_node_id, node_b.get_our_node_id());
+                       },
+                       _ => panic!("Unexpected event"),
+               }
 
                assert_eq!(&tx_broadcaster.txn_broadcasted.lock().unwrap()[..], &[tx.clone()]);