Add a `payment_metadata` field to `RecipientOnionFields`
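This change threads a new `RecipientOnionFields` struct through the outbound payment paths in place of the bare `&Option<PaymentSecret>` argument, so that the recipient-destined onion TLVs (the existing `payment_secret` plus the new `payment_metadata`) travel together. As part of the change, `send_payment` is renamed to `send_payment_with_route` and `send_payment_with_retry` becomes `send_payment`. A minimal standalone sketch of what callers now construct, using stand-in types (the real struct is defined in `outbound_payment.rs`, which is not part of this diff, and its exact field names and visibility are an assumption here):

    // Stand-in types for illustration only; not the real LDK definitions.
    #[derive(Clone, Debug)]
    struct PaymentSecret([u8; 32]);

    #[derive(Clone, Debug, Default)]
    struct RecipientOnionFields {
        payment_secret: Option<PaymentSecret>,
        payment_metadata: Option<Vec<u8>>,
    }

    fn main() {
        // Previously: send_payment(&route, payment_hash, &Some(payment_secret), payment_id)
        // Now:        send_payment_with_route(&route, payment_hash, recipient_onion, payment_id)
        let recipient_onion = RecipientOnionFields {
            payment_secret: Some(PaymentSecret([42; 32])),
            payment_metadata: Some(vec![0x01, 0x02]),
        };
        println!("{:?}", recipient_onion);
    }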
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index c8adc721235470dc37bce5a9a2651fc8e74dc8ed..4ca93418582e1da2d94fbeeff63959c5c3b06e5e 100644
@@ -77,7 +77,7 @@ use core::time::Duration;
 use core::ops::Deref;
 
 // Re-export this for use in the public API.
-pub use crate::ln::outbound_payment::{PaymentSendFailure, Retry, RetryableSendFailure};
+pub use crate::ln::outbound_payment::{PaymentSendFailure, Retry, RetryableSendFailure, RecipientOnionFields};
 
 // We hold various information about HTLC relay in the HTLC objects in Channel itself:
 //
@@ -286,7 +286,6 @@ pub(crate) enum HTLCSource {
                /// doing a double-pass on route when we get a failure back
                first_hop_htlc_msat: u64,
                payment_id: PaymentId,
-               payment_secret: Option<PaymentSecret>,
        },
 }
 #[allow(clippy::derive_hash_xor_eq)] // Our Hash is faithful to the data, we just don't have SecretKey::hash
@@ -297,27 +296,37 @@ impl core::hash::Hash for HTLCSource {
                                0u8.hash(hasher);
                                prev_hop_data.hash(hasher);
                        },
-                       HTLCSource::OutboundRoute { path, session_priv, payment_id, payment_secret, first_hop_htlc_msat } => {
+                       HTLCSource::OutboundRoute { path, session_priv, payment_id, first_hop_htlc_msat } => {
                                1u8.hash(hasher);
                                path.hash(hasher);
                                session_priv[..].hash(hasher);
                                payment_id.hash(hasher);
-                               payment_secret.hash(hasher);
                                first_hop_htlc_msat.hash(hasher);
                        },
                }
        }
 }
-#[cfg(not(feature = "grind_signatures"))]
-#[cfg(test)]
 impl HTLCSource {
+       #[cfg(not(feature = "grind_signatures"))]
+       #[cfg(test)]
        pub fn dummy() -> Self {
                HTLCSource::OutboundRoute {
                        path: Vec::new(),
                        session_priv: SecretKey::from_slice(&[1; 32]).unwrap(),
                        first_hop_htlc_msat: 0,
                        payment_id: PaymentId([2; 32]),
-                       payment_secret: None,
+               }
+       }
+
+       #[cfg(debug_assertions)]
+       /// Checks whether this HTLCSource could possibly match the given HTLC output in a commitment
+       /// transaction. Useful to ensure different data structures match up.
+       pub(crate) fn possibly_matches_output(&self, htlc: &super::chan_utils::HTLCOutputInCommitment) -> bool {
+               if let HTLCSource::OutboundRoute { first_hop_htlc_msat, .. } = self {
+                       *first_hop_htlc_msat == htlc.amount_msat
+               } else {
+                       // There's nothing we can check for forwarded HTLCs
+                       true
                }
        }
 }
@@ -1496,18 +1505,31 @@ macro_rules! send_channel_ready {
        }}
 }
 
+macro_rules! emit_channel_pending_event {
+       ($locked_events: expr, $channel: expr) => {
+               if $channel.should_emit_channel_pending_event() {
+                       $locked_events.push(events::Event::ChannelPending {
+                               channel_id: $channel.channel_id(),
+                               former_temporary_channel_id: $channel.temporary_channel_id(),
+                               counterparty_node_id: $channel.get_counterparty_node_id(),
+                               user_channel_id: $channel.get_user_id(),
+                               funding_txo: $channel.get_funding_txo().unwrap().into_bitcoin_outpoint(),
+                       });
+                       $channel.set_channel_pending_event_emitted();
+               }
+       }
+}
+
 macro_rules! emit_channel_ready_event {
-       ($self: expr, $channel: expr) => {
+       ($locked_events: expr, $channel: expr) => {
                if $channel.should_emit_channel_ready_event() {
-                       {
-                               let mut pending_events = $self.pending_events.lock().unwrap();
-                               pending_events.push(events::Event::ChannelReady {
-                                       channel_id: $channel.channel_id(),
-                                       user_channel_id: $channel.get_user_id(),
-                                       counterparty_node_id: $channel.get_counterparty_node_id(),
-                                       channel_type: $channel.get_channel_type().clone(),
-                               });
-                       }
+                       debug_assert!($channel.channel_pending_event_emitted());
+                       $locked_events.push(events::Event::ChannelReady {
+                               channel_id: $channel.channel_id(),
+                               user_channel_id: $channel.get_user_id(),
+                               counterparty_node_id: $channel.get_counterparty_node_id(),
+                               channel_type: $channel.get_channel_type().clone(),
+                       });
                        $channel.set_channel_ready_event_emitted();
                }
        }
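
The `ChannelPending` event above is always emitted before `ChannelReady` (the `debug_assert!` enforces this), giving users an explicit signal between funding exchange and on-chain confirmation. A standalone sketch of how an event handler might react to the two stages, with abbreviated, assumed event shapes (the full variants live in `events.rs`, not in this diff):

    // Abbreviated stand-ins for the real events::Event variants.
    enum Event {
        ChannelPending { channel_id: [u8; 32], user_channel_id: u128 },
        ChannelReady { channel_id: [u8; 32], user_channel_id: u128 },
    }

    fn handle_event(event: Event) {
        match event {
            // Funding exchanged with the counterparty; the funding transaction is
            // expected on-chain but the channel cannot be used yet.
            Event::ChannelPending { channel_id, .. } =>
                println!("channel {:02x?} pending confirmation", channel_id),
            // Funding confirmed and `channel_ready` exchanged; payments may flow.
            Event::ChannelReady { channel_id, .. } =>
                println!("channel {:02x?} is ready", channel_id),
        }
    }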
@@ -2141,7 +2163,7 @@ where
                                        msg: "Got non final data with an HMAC of 0",
                                });
                        },
-                       msgs::OnionHopDataFormat::FinalNode { payment_data, keysend_preimage } => {
+                       msgs::OnionHopDataFormat::FinalNode { payment_data, keysend_preimage, .. } => { // TODO: expose the payment_metadata to the user
                                if payment_data.is_some() && keysend_preimage.is_some() {
                                        return Err(ReceiveError {
                                                err_code: 0x4000|22,
@@ -2500,12 +2522,12 @@ where
        }
 
        #[cfg(test)]
-       pub(crate) fn test_send_payment_along_path(&self, path: &Vec<RouteHop>, payment_hash: &PaymentHash, payment_secret: &Option<PaymentSecret>, total_value: u64, cur_height: u32, payment_id: PaymentId, keysend_preimage: &Option<PaymentPreimage>, session_priv_bytes: [u8; 32]) -> Result<(), APIError> {
+       pub(crate) fn test_send_payment_along_path(&self, path: &Vec<RouteHop>, payment_hash: &PaymentHash, recipient_onion: RecipientOnionFields, total_value: u64, cur_height: u32, payment_id: PaymentId, keysend_preimage: &Option<PaymentPreimage>, session_priv_bytes: [u8; 32]) -> Result<(), APIError> {
                let _lck = self.total_consistency_lock.read().unwrap();
-               self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv_bytes)
+               self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv_bytes)
        }
 
-       fn send_payment_along_path(&self, path: &Vec<RouteHop>, payment_hash: &PaymentHash, payment_secret: &Option<PaymentSecret>, total_value: u64, cur_height: u32, payment_id: PaymentId, keysend_preimage: &Option<PaymentPreimage>, session_priv_bytes: [u8; 32]) -> Result<(), APIError> {
+       fn send_payment_along_path(&self, path: &Vec<RouteHop>, payment_hash: &PaymentHash, recipient_onion: RecipientOnionFields, total_value: u64, cur_height: u32, payment_id: PaymentId, keysend_preimage: &Option<PaymentPreimage>, session_priv_bytes: [u8; 32]) -> Result<(), APIError> {
                // The top-level caller should hold the total_consistency_lock read lock.
                debug_assert!(self.total_consistency_lock.try_write().is_err());
 
@@ -2515,7 +2537,7 @@ where
 
                let onion_keys = onion_utils::construct_onion_keys(&self.secp_ctx, &path, &session_priv)
                        .map_err(|_| APIError::InvalidRoute{err: "Pubkey along hop was maliciously selected".to_owned()})?;
-               let (onion_payloads, htlc_msat, htlc_cltv) = onion_utils::build_onion_payloads(path, total_value, payment_secret, cur_height, keysend_preimage)?;
+               let (onion_payloads, htlc_msat, htlc_cltv) = onion_utils::build_onion_payloads(path, total_value, recipient_onion, cur_height, keysend_preimage)?;
                if onion_utils::route_size_insane(&onion_payloads) {
                        return Err(APIError::InvalidRoute{err: "Route size too large considering onion data".to_owned()});
                }
@@ -2543,7 +2565,6 @@ where
                                                session_priv: session_priv.clone(),
                                                first_hop_htlc_msat: htlc_msat,
                                                payment_id,
-                                               payment_secret: payment_secret.clone(),
                                        }, onion_packet, &self.logger);
                                match break_chan_entry!(self, send_res, chan) {
                                        Some(monitor_update) => {
@@ -2628,59 +2649,47 @@ where
        /// irrevocably committed to on our end. In such a case, do NOT retry the payment with a
        /// different route unless you intend to pay twice!
        ///
-       /// # A caution on `payment_secret`
-       ///
-       /// `payment_secret` is unrelated to `payment_hash` (or [`PaymentPreimage`]) and exists to
-       /// authenticate the sender to the recipient and prevent payment-probing (deanonymization)
-       /// attacks. For newer nodes, it will be provided to you in the invoice. If you do not have one,
-       /// the [`Route`] must not contain multiple paths as multi-path payments require a
-       /// recipient-provided `payment_secret`.
-       ///
-       /// If a `payment_secret` *is* provided, we assume that the invoice had the payment_secret
-       /// feature bit set (either as required or as available). If multiple paths are present in the
-       /// [`Route`], we assume the invoice had the basic_mpp feature set.
-       ///
        /// [`Event::PaymentSent`]: events::Event::PaymentSent
        /// [`Event::PaymentFailed`]: events::Event::PaymentFailed
        /// [`UpdateHTLCs`]: events::MessageSendEvent::UpdateHTLCs
        /// [`PeerManager::process_events`]: crate::ln::peer_handler::PeerManager::process_events
        /// [`ChannelMonitorUpdateStatus::InProgress`]: crate::chain::ChannelMonitorUpdateStatus::InProgress
-       pub fn send_payment(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, payment_id: PaymentId) -> Result<(), PaymentSendFailure> {
+       pub fn send_payment_with_route(&self, route: &Route, payment_hash: PaymentHash, recipient_onion: RecipientOnionFields, payment_id: PaymentId) -> Result<(), PaymentSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
                self.pending_outbound_payments
-                       .send_payment_with_route(route, payment_hash, payment_secret, payment_id, &self.entropy_source, &self.node_signer, best_block_height,
-                               |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                               self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
+                       .send_payment_with_route(route, payment_hash, recipient_onion, payment_id, &self.entropy_source, &self.node_signer, best_block_height,
+                               |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                               self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        /// Similar to [`ChannelManager::send_payment`], but will automatically find a route based on
        /// `route_params` and retry failed payment paths based on `retry_strategy`.
-       pub fn send_payment_with_retry(&self, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, payment_id: PaymentId, route_params: RouteParameters, retry_strategy: Retry) -> Result<(), RetryableSendFailure> {
+       pub fn send_payment(&self, payment_hash: PaymentHash, recipient_onion: RecipientOnionFields, payment_id: PaymentId, route_params: RouteParameters, retry_strategy: Retry) -> Result<(), RetryableSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
                self.pending_outbound_payments
-                       .send_payment(payment_hash, payment_secret, payment_id, retry_strategy, route_params,
+                       .send_payment(payment_hash, recipient_onion, payment_id, retry_strategy, route_params,
                                &self.router, self.list_usable_channels(), || self.compute_inflight_htlcs(),
                                &self.entropy_source, &self.node_signer, best_block_height, &self.logger,
                                &self.pending_events,
-                               |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                               self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
+                               |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                               self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        #[cfg(test)]
-       pub(super) fn test_send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
+       pub(super) fn test_send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, recipient_onion: RecipientOnionFields, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
-               self.pending_outbound_payments.test_send_payment_internal(route, payment_hash, payment_secret, keysend_preimage, payment_id, recv_value_msat, onion_session_privs, &self.node_signer, best_block_height,
-                       |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                       self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
+               self.pending_outbound_payments.test_send_payment_internal(route, payment_hash, recipient_onion, keysend_preimage, payment_id, recv_value_msat, onion_session_privs, &self.node_signer, best_block_height,
+                       |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        #[cfg(test)]
-       pub(crate) fn test_add_new_pending_payment(&self, payment_hash: PaymentHash, payment_secret: Option<PaymentSecret>, payment_id: PaymentId, route: &Route) -> Result<Vec<[u8; 32]>, PaymentSendFailure> {
+       pub(crate) fn test_add_new_pending_payment(&self, payment_hash: PaymentHash, recipient_onion: RecipientOnionFields, payment_id: PaymentId, route: &Route) -> Result<Vec<[u8; 32]>, PaymentSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
-               self.pending_outbound_payments.test_add_new_pending_payment(payment_hash, payment_secret, payment_id, route, None, &self.entropy_source, best_block_height)
+               self.pending_outbound_payments.test_add_new_pending_payment(payment_hash, recipient_onion, payment_id, route, None, &self.entropy_source, best_block_height)
        }
 
 
@@ -2720,14 +2729,14 @@ where
        /// Note that `route` must have exactly one path.
        ///
        /// [`send_payment`]: Self::send_payment
-       pub fn send_spontaneous_payment(&self, route: &Route, payment_preimage: Option<PaymentPreimage>, payment_id: PaymentId) -> Result<PaymentHash, PaymentSendFailure> {
+       pub fn send_spontaneous_payment(&self, route: &Route, payment_preimage: Option<PaymentPreimage>, recipient_onion: RecipientOnionFields, payment_id: PaymentId) -> Result<PaymentHash, PaymentSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
                self.pending_outbound_payments.send_spontaneous_payment_with_route(
-                       route, payment_preimage, payment_id, &self.entropy_source, &self.node_signer,
-                       best_block_height,
-                       |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                       self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
+                       route, payment_preimage, recipient_onion, payment_id, &self.entropy_source,
+                       &self.node_signer, best_block_height,
+                       |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        /// Similar to [`ChannelManager::send_spontaneous_payment`], but will automatically find a route
@@ -2737,15 +2746,15 @@ where
        /// payments.
        ///
        /// [`PaymentParameters::for_keysend`]: crate::routing::router::PaymentParameters::for_keysend
-       pub fn send_spontaneous_payment_with_retry(&self, payment_preimage: Option<PaymentPreimage>, payment_id: PaymentId, route_params: RouteParameters, retry_strategy: Retry) -> Result<PaymentHash, RetryableSendFailure> {
+       pub fn send_spontaneous_payment_with_retry(&self, payment_preimage: Option<PaymentPreimage>, recipient_onion: RecipientOnionFields, payment_id: PaymentId, route_params: RouteParameters, retry_strategy: Retry) -> Result<PaymentHash, RetryableSendFailure> {
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
-               self.pending_outbound_payments.send_spontaneous_payment(payment_preimage, payment_id,
-                       retry_strategy, route_params, &self.router, self.list_usable_channels(),
+               self.pending_outbound_payments.send_spontaneous_payment(payment_preimage, recipient_onion,
+                       payment_id, retry_strategy, route_params, &self.router, self.list_usable_channels(),
                        || self.compute_inflight_htlcs(),  &self.entropy_source, &self.node_signer, best_block_height,
                        &self.logger, &self.pending_events,
-                       |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                       self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
+                       |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        /// Send a payment that is probing the given route for liquidity. We calculate the
@@ -2755,8 +2764,8 @@ where
                let best_block_height = self.best_block.read().unwrap().height();
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
                self.pending_outbound_payments.send_probe(hops, self.probing_cookie_secret, &self.entropy_source, &self.node_signer, best_block_height,
-                       |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                       self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
+                       |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        /// Returns whether a payment with the given [`PaymentHash`] and [`PaymentId`] is, in fact, a
@@ -3350,8 +3359,10 @@ where
                                                                                        }
                                                                                }
                                                                                let mut total_value = claimable_htlc.sender_intended_value;
+                                                                               let mut earliest_expiry = claimable_htlc.cltv_expiry;
                                                                                for htlc in htlcs.iter() {
                                                                                        total_value += htlc.sender_intended_value;
+                                                                                       earliest_expiry = cmp::min(earliest_expiry, htlc.cltv_expiry);
                                                                                        match &htlc.onion_payload {
                                                                                                OnionPayload::Invoice { .. } => {
                                                                                                        if htlc.total_msat != $payment_data.total_msat {
@@ -3384,6 +3395,7 @@ where
                                                                                                amount_msat,
                                                                                                via_channel_id: Some(prev_channel_id),
                                                                                                via_user_channel_id: Some(prev_user_channel_id),
+                                                                                               claim_deadline: Some(earliest_expiry - HTLC_FAIL_BACK_BUFFER),
                                                                                        });
                                                                                        payment_claimable_generated = true;
                                                                                } else {
@@ -3437,6 +3449,7 @@ where
                                                                                                        hash_map::Entry::Vacant(e) => {
                                                                                                                let amount_msat = claimable_htlc.value;
                                                                                                                claimable_htlc.total_value_received = Some(amount_msat);
+                                                                                                               let claim_deadline = Some(claimable_htlc.cltv_expiry - HTLC_FAIL_BACK_BUFFER);
                                                                                                                let purpose = events::PaymentPurpose::SpontaneousPayment(preimage);
                                                                                                                e.insert((purpose.clone(), vec![claimable_htlc]));
                                                                                                                let prev_channel_id = prev_funding_outpoint.to_channel_id();
@@ -3447,6 +3460,7 @@ where
                                                                                                                        purpose,
                                                                                                                        via_channel_id: Some(prev_channel_id),
                                                                                                                        via_user_channel_id: Some(prev_user_channel_id),
+                                                                                                                       claim_deadline,
                                                                                                                });
                                                                                                        },
                                                                                                        hash_map::Entry::Occupied(_) => {
@@ -3493,8 +3507,8 @@ where
                self.pending_outbound_payments.check_retry_payments(&self.router, || self.list_usable_channels(),
                        || self.compute_inflight_htlcs(), &self.entropy_source, &self.node_signer, best_block_height,
                        &self.pending_events, &self.logger,
-                       |path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
-                       self.send_payment_along_path(path, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv));
+                       |path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_hash, recipient_onion, total_value, cur_height, payment_id, keysend_preimage, session_priv));
 
                for (htlc_source, payment_hash, failure_reason, destination) in failed_forwards.drain(..) {
                        self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
@@ -3922,9 +3936,10 @@ where
        /// Provides a payment preimage in response to [`Event::PaymentClaimable`], generating any
        /// [`MessageSendEvent`]s needed to claim the payment.
        ///
-       /// Note that calling this method does *not* guarantee that the payment has been claimed. You
-       /// *must* wait for an [`Event::PaymentClaimed`] event which upon a successful claim will be
-       /// provided to your [`EventHandler`] when [`process_pending_events`] is next called.
+       /// This method is guaranteed to claim the payment as long as the current block height is
+       /// strictly below [`Event::PaymentClaimable::claim_deadline`]. To avoid race conditions, you
+       /// should still wait for an [`Event::PaymentClaimed`] before considering the payment
+       /// successful; it will generally be available in the next [`process_pending_events`] call.
        ///
        /// Note that if you did not set an `amount_msat` when calling [`create_inbound_payment`] or
        /// [`create_inbound_payment_for_hash`] you must check that the amount in the `PaymentClaimable`
@@ -3932,6 +3947,7 @@ where
        /// the sender "proof-of-payment" when they did not fulfill the full expected payment.
        ///
        /// [`Event::PaymentClaimable`]: crate::events::Event::PaymentClaimable
+       /// [`Event::PaymentClaimable::claim_deadline`]: crate::events::Event::PaymentClaimable::claim_deadline
        /// [`Event::PaymentClaimed`]: crate::events::Event::PaymentClaimed
        /// [`process_pending_events`]: EventsProvider::process_pending_events
        /// [`create_inbound_payment`]: Self::create_inbound_payment
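
For consumers, the new `claim_deadline` bounds how long a `PaymentClaimable` may be left unhandled: it is set to `HTLC_FAIL_BACK_BUFFER` blocks before the earliest HTLC `cltv_expiry` (see the hunks above), after which the HTLCs may be failed back rather than risking a force-close. A standalone sketch of the consumer-side check, with an abbreviated, assumed event shape:

    // Abbreviated stand-in for events::Event::PaymentClaimable.
    struct PaymentClaimable {
        claim_deadline: Option<u32>,
    }

    /// Returns whether claiming is still guaranteed to succeed at `current_height`.
    fn claim_still_safe(event: &PaymentClaimable, current_height: u32) -> bool {
        // The claim guarantee only holds strictly below the deadline; afterwards the
        // HTLCs may already have been failed back.
        event.claim_deadline.map_or(true, |deadline| current_height < deadline)
    }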
@@ -3968,21 +3984,10 @@ where
                };
                debug_assert!(!sources.is_empty());
 
-               // If we are claiming an MPP payment, we check that all channels which contain a claimable
-               // HTLC still exist. While this isn't guaranteed to remain true if a channel closes while
-               // we're claiming (or even after we claim, before the commitment update dance completes),
-               // it should be a relatively rare race, and we'd rather not claim HTLCs that require us to
-               // go on-chain (and lose the on-chain fee to do so) than just reject the payment.
-               //
-               // Note that we'll still always get our funds - as long as the generated
-               // `ChannelMonitorUpdate` makes it out to the relevant monitor we can claim on-chain.
-               //
-               // If we find an HTLC which we would need to claim but for which we do not have a
-               // channel, we will fail all parts of the MPP payment. While we could wait and see if
-               // the sender retries the already-failed path(s), it should be a pretty rare case where
-               // we got all the HTLCs and then a channel closed while we were waiting for the user to
-               // provide the preimage, so worrying too much about the optimal handling isn't worth
-               // it.
+               // Just in case one HTLC has been failed between when we generated the `PaymentClaimable`
+               // and when we got here, we need to check that the amount we're about to claim matches the
+               // amount we told the user in the last `PaymentClaimable`. We also do a sanity-check that
+               // the MPP parts all have the same `total_msat`.
                let mut claimable_amt_msat = 0;
                let mut prev_total_msat = None;
                let mut expected_amt_msat = None;
@@ -3990,28 +3995,6 @@ where
                let mut errs = Vec::new();
                let per_peer_state = self.per_peer_state.read().unwrap();
                for htlc in sources.iter() {
-                       let (counterparty_node_id, chan_id) = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
-                               Some((cp_id, chan_id)) => (cp_id.clone(), chan_id.clone()),
-                               None => {
-                                       valid_mpp = false;
-                                       break;
-                               }
-                       };
-
-                       let peer_state_mutex_opt = per_peer_state.get(&counterparty_node_id);
-                       if peer_state_mutex_opt.is_none() {
-                               valid_mpp = false;
-                               break;
-                       }
-
-                       let mut peer_state_lock = peer_state_mutex_opt.unwrap().lock().unwrap();
-                       let peer_state = &mut *peer_state_lock;
-
-                       if peer_state.channel_by_id.get(&chan_id).is_none() {
-                               valid_mpp = false;
-                               break;
-                       }
-
                        if prev_total_msat.is_some() && prev_total_msat != Some(htlc.total_msat) {
                                log_error!(self.logger, "Somehow ended up with an MPP payment with different expected total amounts - this should not be reachable!");
                                debug_assert!(false);
@@ -4091,45 +4074,46 @@ where
        -> Result<(), (PublicKey, MsgHandleErrInternal)> {
                //TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
-               let per_peer_state = self.per_peer_state.read().unwrap();
-               let chan_id = prev_hop.outpoint.to_channel_id();
-               let counterparty_node_id_opt = match self.short_to_chan_info.read().unwrap().get(&prev_hop.short_channel_id) {
-                       Some((cp_id, _dup_chan_id)) => Some(cp_id.clone()),
-                       None => None
-               };
+               {
+                       let per_peer_state = self.per_peer_state.read().unwrap();
+                       let chan_id = prev_hop.outpoint.to_channel_id();
+                       let counterparty_node_id_opt = match self.short_to_chan_info.read().unwrap().get(&prev_hop.short_channel_id) {
+                               Some((cp_id, _dup_chan_id)) => Some(cp_id.clone()),
+                               None => None
+                       };
 
-               let peer_state_opt = counterparty_node_id_opt.as_ref().map(
-                       |counterparty_node_id| per_peer_state.get(counterparty_node_id).map(
-                               |peer_mutex| peer_mutex.lock().unwrap()
-                       )
-               ).unwrap_or(None);
+                       let peer_state_opt = counterparty_node_id_opt.as_ref().map(
+                               |counterparty_node_id| per_peer_state.get(counterparty_node_id)
+                                       .map(|peer_mutex| peer_mutex.lock().unwrap())
+                       ).unwrap_or(None);
 
-               if peer_state_opt.is_some() {
-                       let mut peer_state_lock = peer_state_opt.unwrap();
-                       let peer_state = &mut *peer_state_lock;
-                       if let hash_map::Entry::Occupied(mut chan) = peer_state.channel_by_id.entry(chan_id) {
-                               let counterparty_node_id = chan.get().get_counterparty_node_id();
-                               let fulfill_res = chan.get_mut().get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &self.logger);
-
-                               if let UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } = fulfill_res {
-                                       if let Some(action) = completion_action(Some(htlc_value_msat)) {
-                                               log_trace!(self.logger, "Tracking monitor update completion action for channel {}: {:?}",
-                                                       log_bytes!(chan_id), action);
-                                               peer_state.monitor_update_blocked_actions.entry(chan_id).or_insert(Vec::new()).push(action);
-                                       }
-                                       let update_id = monitor_update.update_id;
-                                       let update_res = self.chain_monitor.update_channel(prev_hop.outpoint, monitor_update);
-                                       let res = handle_new_monitor_update!(self, update_res, update_id, peer_state_lock,
-                                               peer_state, per_peer_state, chan);
-                                       if let Err(e) = res {
-                                               // TODO: This is a *critical* error - we probably updated the outbound edge
-                                               // of the HTLC's monitor with a preimage. We should retry this monitor
-                                               // update over and over again until morale improves.
-                                               log_error!(self.logger, "Failed to update channel monitor with preimage {:?}", payment_preimage);
-                                               return Err((counterparty_node_id, e));
+                       if peer_state_opt.is_some() {
+                               let mut peer_state_lock = peer_state_opt.unwrap();
+                               let peer_state = &mut *peer_state_lock;
+                               if let hash_map::Entry::Occupied(mut chan) = peer_state.channel_by_id.entry(chan_id) {
+                                       let counterparty_node_id = chan.get().get_counterparty_node_id();
+                                       let fulfill_res = chan.get_mut().get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &self.logger);
+
+                                       if let UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } = fulfill_res {
+                                               if let Some(action) = completion_action(Some(htlc_value_msat)) {
+                                                       log_trace!(self.logger, "Tracking monitor update completion action for channel {}: {:?}",
+                                                               log_bytes!(chan_id), action);
+                                                       peer_state.monitor_update_blocked_actions.entry(chan_id).or_insert(Vec::new()).push(action);
+                                               }
+                                               let update_id = monitor_update.update_id;
+                                               let update_res = self.chain_monitor.update_channel(prev_hop.outpoint, monitor_update);
+                                               let res = handle_new_monitor_update!(self, update_res, update_id, peer_state_lock,
+                                                       peer_state, per_peer_state, chan);
+                                               if let Err(e) = res {
+                                                       // TODO: This is a *critical* error - we probably updated the outbound edge
+                                                       // of the HTLC's monitor with a preimage. We should retry this monitor
+                                                       // update over and over again until morale improves.
+                                                       log_error!(self.logger, "Failed to update channel monitor with preimage {:?}", payment_preimage);
+                                                       return Err((counterparty_node_id, e));
+                                               }
                                        }
+                                       return Ok(());
                                }
-                               return Ok(());
                        }
                }
                let preimage_update = ChannelMonitorUpdate {
@@ -4184,6 +4168,7 @@ where
                                                                claim_from_onchain_tx: from_onchain,
                                                                prev_channel_id,
                                                                next_channel_id,
+                                                               outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
                                                        }})
                                                } else { None }
                                        });
@@ -4252,8 +4237,6 @@ where
                        });
                }
 
-               emit_channel_ready_event!(self, channel);
-
                macro_rules! handle_cs { () => {
                        if let Some(update) = commitment_update {
                                pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
@@ -4286,6 +4269,12 @@ where
                        self.tx_broadcaster.broadcast_transaction(&tx);
                }
 
+               {
+                       let mut pending_events = self.pending_events.lock().unwrap();
+                       emit_channel_pending_event!(pending_events, channel);
+                       emit_channel_ready_event!(pending_events, channel);
+               }
+
                htlc_forwards
        }
 
@@ -4710,7 +4699,10 @@ where
                                        }
                                }
 
-                               emit_channel_ready_event!(self, chan.get_mut());
+                               {
+                                       let mut pending_events = self.pending_events.lock().unwrap();
+                                       emit_channel_ready_event!(pending_events, chan.get_mut());
+                               }
 
                                Ok(())
                        },
@@ -6035,7 +6027,10 @@ where
                                                        }
                                                }
 
-                                               emit_channel_ready_event!(self, channel);
+                                               {
+                                                       let mut pending_events = self.pending_events.lock().unwrap();
+                                                       emit_channel_ready_event!(pending_events, channel);
+                                               }
 
                                                if let Some(announcement_sigs) = announcement_sigs {
                                                        log_trace!(self.logger, "Sending announcement_signatures for channel {}", log_bytes!(channel.channel_id()));
@@ -6146,34 +6141,11 @@ where
                }
        }
 
-       /// Blocks until ChannelManager needs to be persisted or a timeout is reached. It returns a bool
-       /// indicating whether persistence is necessary. Only one listener on
-       /// [`await_persistable_update`], [`await_persistable_update_timeout`], or a future returned by
-       /// [`get_persistable_update_future`] is guaranteed to be woken up.
+       /// Gets a [`Future`] that completes when this [`ChannelManager`] needs to be persisted.
        ///
-       /// Note that this method is not available with the `no-std` feature.
-       ///
-       /// [`await_persistable_update`]: Self::await_persistable_update
-       /// [`await_persistable_update_timeout`]: Self::await_persistable_update_timeout
-       /// [`get_persistable_update_future`]: Self::get_persistable_update_future
-       #[cfg(any(test, feature = "std"))]
-       pub fn await_persistable_update_timeout(&self, max_wait: Duration) -> bool {
-               self.persistence_notifier.wait_timeout(max_wait)
-       }
-
-       /// Blocks until ChannelManager needs to be persisted. Only one listener on
-       /// [`await_persistable_update`], `await_persistable_update_timeout`, or a future returned by
-       /// [`get_persistable_update_future`] is guaranteed to be woken up.
+       /// Note that callbacks registered on the [`Future`] MUST NOT call back into this
+       /// [`ChannelManager`] and should instead register actions to be taken later.
        ///
-       /// [`await_persistable_update`]: Self::await_persistable_update
-       /// [`get_persistable_update_future`]: Self::get_persistable_update_future
-       pub fn await_persistable_update(&self) {
-               self.persistence_notifier.wait()
-       }
-
-       /// Gets a [`Future`] that completes when a persistable update is available. Note that
-       /// callbacks registered on the [`Future`] MUST NOT call back into this [`ChannelManager`] and
-       /// should instead register actions to be taken later.
        pub fn get_persistable_update_future(&self) -> Future {
                self.persistence_notifier.get_future()
        }
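
With the blocking `await_persistable_update*` helpers removed, consumers obtain a `Future` and either poll it (as the updated tests below now do via `poll_is_complete`) or drive it from their own wait loop. A minimal, generic sketch of such a loop, assuming an awaitable future and using stand-in closures rather than the real `ChannelManager` and persister types:

    // `next_update()` stands in for `channel_manager.get_persistable_update_future()`;
    // `persist()` stands in for writing the ChannelManager to durable storage.
    async fn persist_loop<F, Fut, P>(mut next_update: F, mut persist: P)
    where
        F: FnMut() -> Fut,
        Fut: core::future::Future<Output = ()>,
        P: FnMut(),
    {
        loop {
            // Wait until the ChannelManager signals that it needs persisting.
            next_update().await;
            // Persist here, after the wait, rather than inside a callback registered on
            // the future, per the doc note above about not re-entering the ChannelManager.
            persist();
        }
    }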
@@ -6899,13 +6871,11 @@ impl Readable for HTLCSource {
                                let mut first_hop_htlc_msat: u64 = 0;
                                let mut path: Option<Vec<RouteHop>> = Some(Vec::new());
                                let mut payment_id = None;
-                               let mut payment_secret = None;
                                let mut payment_params: Option<PaymentParameters> = None;
                                read_tlv_fields!(reader, {
                                        (0, session_priv, required),
                                        (1, payment_id, option),
                                        (2, first_hop_htlc_msat, required),
-                                       (3, payment_secret, option),
                                        (4, path, vec_type),
                                        (5, payment_params, (option: ReadableArgs, 0)),
                                });
@@ -6928,7 +6898,6 @@ impl Readable for HTLCSource {
                                        first_hop_htlc_msat,
                                        path,
                                        payment_id: payment_id.unwrap(),
-                                       payment_secret,
                                })
                        }
                        1 => Ok(HTLCSource::PreviousHopData(Readable::read(reader)?)),
@@ -6940,14 +6909,14 @@ impl Readable for HTLCSource {
 impl Writeable for HTLCSource {
        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), crate::io::Error> {
                match self {
-                       HTLCSource::OutboundRoute { ref session_priv, ref first_hop_htlc_msat, ref path, payment_id, payment_secret } => {
+                       HTLCSource::OutboundRoute { ref session_priv, ref first_hop_htlc_msat, ref path, payment_id } => {
                                0u8.write(writer)?;
                                let payment_id_opt = Some(payment_id);
                                write_tlv_fields!(writer, {
                                        (0, session_priv, required),
                                        (1, payment_id_opt, option),
                                        (2, first_hop_htlc_msat, required),
-                                       (3, payment_secret, option),
+                                       // 3 was previously used to write a PaymentSecret for the payment.
                                        (4, *path, vec_type),
                                        (5, None::<PaymentParameters>, option), // payment_params in LDK versions prior to 0.0.115
                                 });
@@ -7354,6 +7323,7 @@ where
                let mut id_to_peer = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut short_to_chan_info = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut channel_closures = Vec::new();
+               let mut pending_background_events = Vec::new();
                for _ in 0..channel_count {
                        let mut channel: Channel<<SP::Target as SignerProvider>::Signer> = Channel::read(reader, (
                                &args.entropy_source, &args.signer_provider, best_block_height, &provided_channel_type_features(&args.default_config)
@@ -7383,9 +7353,11 @@ where
                                        log_error!(args.logger, " The channel will be force-closed and the latest commitment transaction from the ChannelMonitor broadcast.");
                                        log_error!(args.logger, " The ChannelMonitor for channel {} is at update_id {} but the ChannelManager is at update_id {}.",
                                                log_bytes!(channel.channel_id()), monitor.get_latest_update_id(), channel.get_latest_monitor_update_id());
-                                       let (_, mut new_failed_htlcs) = channel.force_shutdown(true);
+                                       let (monitor_update, mut new_failed_htlcs) = channel.force_shutdown(true);
+                                       if let Some(monitor_update) = monitor_update {
+                                               pending_background_events.push(BackgroundEvent::ClosingMonitorUpdate(monitor_update));
+                                       }
                                        failed_htlcs.append(&mut new_failed_htlcs);
-                                       monitor.broadcast_latest_holder_commitment_txn(&args.tx_broadcaster, &args.logger);
                                        channel_closures.push(events::Event::ChannelClosed {
                                                channel_id: channel.channel_id(),
                                                user_channel_id: channel.get_user_id(),
@@ -7450,10 +7422,13 @@ where
                        }
                }
 
-               for (funding_txo, monitor) in args.channel_monitors.iter_mut() {
+               for (funding_txo, _) in args.channel_monitors.iter() {
                        if !funding_txo_set.contains(funding_txo) {
-                               log_info!(args.logger, "Broadcasting latest holder commitment transaction for closed channel {}", log_bytes!(funding_txo.to_channel_id()));
-                               monitor.broadcast_latest_holder_commitment_txn(&args.tx_broadcaster, &args.logger);
+                               let monitor_update = ChannelMonitorUpdate {
+                                       update_id: CLOSED_CHANNEL_UPDATE_ID,
+                                       updates: vec![ChannelMonitorUpdateStep::ChannelForceClosed { should_broadcast: true }],
+                               };
+                               pending_background_events.push(BackgroundEvent::ClosingMonitorUpdate((*funding_txo, monitor_update)));
                        }
                }
 
@@ -7506,10 +7481,17 @@ where
                }
 
                let background_event_count: u64 = Readable::read(reader)?;
-               let mut pending_background_events_read: Vec<BackgroundEvent> = Vec::with_capacity(cmp::min(background_event_count as usize, MAX_ALLOC_SIZE/mem::size_of::<BackgroundEvent>()));
                for _ in 0..background_event_count {
                        match <u8 as Readable>::read(reader)? {
-                               0 => pending_background_events_read.push(BackgroundEvent::ClosingMonitorUpdate((Readable::read(reader)?, Readable::read(reader)?))),
+                               0 => {
+                                       let (funding_txo, monitor_update): (OutPoint, ChannelMonitorUpdate) = (Readable::read(reader)?, Readable::read(reader)?);
+                                       if pending_background_events.iter().find(|e| {
+                                               let BackgroundEvent::ClosingMonitorUpdate((pending_funding_txo, pending_monitor_update)) = e;
+                                               *pending_funding_txo == funding_txo && *pending_monitor_update == monitor_update
+                                       }).is_none() {
+                                               pending_background_events.push(BackgroundEvent::ClosingMonitorUpdate((funding_txo, monitor_update)));
+                                       }
+                               }
                                _ => return Err(DecodeError::InvalidValue),
                        }
                }
@@ -7597,7 +7579,7 @@ where
                        for (_, monitor) in args.channel_monitors.iter() {
                                if id_to_peer.get(&monitor.get_funding_txo().0.to_channel_id()).is_none() {
                                        for (htlc_source, (htlc, _)) in monitor.get_pending_or_resolved_outbound_htlcs() {
-                                               if let HTLCSource::OutboundRoute { payment_id, session_priv, path, payment_secret, .. } = htlc_source {
+                                               if let HTLCSource::OutboundRoute { payment_id, session_priv, path, .. } = htlc_source {
                                                        if path.is_empty() {
                                                                log_error!(args.logger, "Got an empty path for a pending payment");
                                                                return Err(DecodeError::InvalidValue);
@@ -7620,7 +7602,8 @@ where
                                                                                payment_params: None,
                                                                                session_privs: [session_priv_bytes].iter().map(|a| *a).collect(),
                                                                                payment_hash: htlc.payment_hash,
-                                                                               payment_secret,
+                                                                               payment_secret: None, // only used for retries, and we'll never retry on startup
+                                                                               payment_metadata: None, // only used for retries, and we'll never retry on startup
                                                                                keysend_preimage: None, // only used for retries, and we'll never retry on startup
                                                                                pending_amt_msat: path_amt,
                                                                                pending_fee_msat: Some(path_fee),
@@ -7884,7 +7867,7 @@ where
                        per_peer_state: FairRwLock::new(per_peer_state),
 
                        pending_events: Mutex::new(pending_events_read),
-                       pending_background_events: Mutex::new(pending_background_events_read),
+                       pending_background_events: Mutex::new(pending_background_events),
                        total_consistency_lock: RwLock::new(()),
                        persistence_notifier: Notifier::new(),
 
@@ -7915,11 +7898,12 @@ mod tests {
        use bitcoin::hashes::Hash;
        use bitcoin::hashes::sha256::Hash as Sha256;
        use bitcoin::secp256k1::{PublicKey, Secp256k1, SecretKey};
+       #[cfg(feature = "std")]
        use core::time::Duration;
        use core::sync::atomic::Ordering;
        use crate::events::{Event, HTLCDestination, MessageSendEvent, MessageSendEventsProvider, ClosureReason};
        use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
-       use crate::ln::channelmanager::{inbound_payment, PaymentId, PaymentSendFailure, InterceptId};
+       use crate::ln::channelmanager::{inbound_payment, PaymentId, PaymentSendFailure, RecipientOnionFields, InterceptId};
        use crate::ln::functional_test_utils::*;
        use crate::ln::msgs;
        use crate::ln::msgs::ChannelMessageHandler;
@@ -7940,9 +7924,9 @@ mod tests {
 
                // All nodes start with a persistable update pending as `create_network` connects each node
                // with all other nodes to make most tests simpler.
-               assert!(nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[2].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[1].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[2].node.get_persistable_update_future().poll_is_complete());
 
                let mut chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 
@@ -7956,19 +7940,19 @@ mod tests {
                        &nodes[0].node.get_our_node_id()).pop().unwrap();
 
                // The first two nodes (which opened a channel) should now require fresh persistence
-               assert!(nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[1].node.get_persistable_update_future().poll_is_complete());
                // ... but the last node should not.
-               assert!(!nodes[2].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[2].node.get_persistable_update_future().poll_is_complete());
                // After persisting the first two nodes they should no longer need fresh persistence.
-               assert!(!nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(!nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(!nodes[1].node.get_persistable_update_future().poll_is_complete());
 
                // Node 3, unrelated to the only channel, shouldn't care if it receives a channel_update
                // about the channel.
                nodes[2].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &chan.0);
                nodes[2].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &chan.1);
-               assert!(!nodes[2].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[2].node.get_persistable_update_future().poll_is_complete());
 
                // The nodes which are a party to the channel should also ignore messages from unrelated
                // parties.
@@ -7976,8 +7960,8 @@ mod tests {
                nodes[0].node.handle_channel_update(&nodes[2].node.get_our_node_id(), &chan.1);
                nodes[1].node.handle_channel_update(&nodes[2].node.get_our_node_id(), &chan.0);
                nodes[1].node.handle_channel_update(&nodes[2].node.get_our_node_id(), &chan.1);
-               assert!(!nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(!nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(!nodes[1].node.get_persistable_update_future().poll_is_complete());
 
                // At this point the channel info given by peers should still be the same.
                assert_eq!(nodes[0].node.list_channels()[0], node_a_chan_info);
@@ -7994,8 +7978,8 @@ mod tests {
                // persisted and that its channel info remains the same.
                nodes[0].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &as_update);
                nodes[1].node.handle_channel_update(&nodes[0].node.get_our_node_id(), &bs_update);
-               assert!(!nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(!nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(!nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(!nodes[1].node.get_persistable_update_future().poll_is_complete());
                assert_eq!(nodes[0].node.list_channels()[0], node_a_chan_info);
                assert_eq!(nodes[1].node.list_channels()[0], node_b_chan_info);
 
@@ -8003,8 +7987,8 @@ mod tests {
                // the channel info has updated.
                nodes[0].node.handle_channel_update(&nodes[1].node.get_our_node_id(), &bs_update);
                nodes[1].node.handle_channel_update(&nodes[0].node.get_our_node_id(), &as_update);
-               assert!(nodes[0].node.await_persistable_update_timeout(Duration::from_millis(1)));
-               assert!(nodes[1].node.await_persistable_update_timeout(Duration::from_millis(1)));
+               assert!(nodes[0].node.get_persistable_update_future().poll_is_complete());
+               assert!(nodes[1].node.get_persistable_update_future().poll_is_complete());
                assert_ne!(nodes[0].node.list_channels()[0], node_a_chan_info);
                assert_ne!(nodes[1].node.list_channels()[0], node_b_chan_info);
        }
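// Editor's sketch (not part of the patch): the assertions above are the replacement for the
// removed `await_persistable_update_timeout(Duration::from_millis(1))` calls. The same
// pattern gives a synchronous "does this ChannelManager need persisting?" check outside of
// tests, assuming `node` is a `ChannelManager` and `do_persist` is a hypothetical
// application callback:
//
//     if node.get_persistable_update_future().poll_is_complete() {
//         do_persist(&node);
//     }
//
// A completed poll also clears the pending-update flag, which is why the test expects
// `false` when it immediately polls the same node a second time.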
@@ -8028,15 +8012,18 @@ mod tests {
                // Use the utility function send_payment_along_path to send the payment with MPP data which
                // indicates there are more HTLCs coming.
                let cur_height = CHAN_CONFIRM_DEPTH + 1; // route_payment calls send_payment, which adds 1 to the current height. So we do the same here to match.
-               let session_privs = nodes[0].node.test_add_new_pending_payment(our_payment_hash, Some(payment_secret), payment_id, &mpp_route).unwrap();
-               nodes[0].node.test_send_payment_along_path(&mpp_route.paths[0], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
+               let session_privs = nodes[0].node.test_add_new_pending_payment(our_payment_hash,
+                       RecipientOnionFields::secret_only(payment_secret), payment_id, &mpp_route).unwrap();
+               nodes[0].node.test_send_payment_along_path(&mpp_route.paths[0], &our_payment_hash,
+                       RecipientOnionFields::secret_only(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
                pass_along_path(&nodes[0], &[&nodes[1]], 200_000, our_payment_hash, Some(payment_secret), events.drain(..).next().unwrap(), false, None);
 
                // Next, send a keysend payment with the same payment_hash and make sure it fails.
-               nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage), PaymentId(payment_preimage.0)).unwrap();
+               nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage),
+                       RecipientOnionFields::spontaneous_empty(), PaymentId(payment_preimage.0)).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
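// Editor's sketch (not part of the patch): the two constructors used above are the only ways
// the tests in this section build a `RecipientOnionFields`:
//
//     // invoice-style payment: the payment secret travels in the final onion hop
//     let onion = RecipientOnionFields::secret_only(payment_secret);
//
//     // spontaneous (keysend) payment: no payment secret at all
//     let onion = RecipientOnionFields::spontaneous_empty();
//
// Unlike the old `&Option<PaymentSecret>` parameter, the struct is passed by value in the
// send helpers above.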
@@ -8059,7 +8046,8 @@ mod tests {
                expect_payment_failed!(nodes[0], our_payment_hash, true);
 
                // Send the second half of the original MPP payment.
-               nodes[0].node.test_send_payment_along_path(&mpp_route.paths[1], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[1]).unwrap();
+               nodes[0].node.test_send_payment_along_path(&mpp_route.paths[1], &our_payment_hash,
+                       RecipientOnionFields::secret_only(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[1]).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
@@ -8156,7 +8144,8 @@ mod tests {
                        &nodes[0].node.get_our_node_id(), &route_params, &nodes[0].network_graph,
                        None, nodes[0].logger, &scorer, &random_seed_bytes
                ).unwrap();
-               nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage), PaymentId(payment_preimage.0)).unwrap();
+               nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage),
+                       RecipientOnionFields::spontaneous_empty(), PaymentId(payment_preimage.0)).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
@@ -8189,7 +8178,8 @@ mod tests {
                        &nodes[0].node.get_our_node_id(), &route_params, &nodes[0].network_graph,
                        None, nodes[0].logger, &scorer, &random_seed_bytes
                ).unwrap();
-               let payment_hash = nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage), PaymentId(payment_preimage.0)).unwrap();
+               let payment_hash = nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage),
+                       RecipientOnionFields::spontaneous_empty(), PaymentId(payment_preimage.0)).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
@@ -8199,7 +8189,8 @@ mod tests {
 
                // Next, attempt a regular payment and make sure it fails.
                let payment_secret = PaymentSecret([43; 32]);
-               nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret), PaymentId(payment_hash.0)).unwrap();
+               nodes[0].node.send_payment_with_route(&route, payment_hash,
+                       RecipientOnionFields::secret_only(payment_secret), PaymentId(payment_hash.0)).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
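// Editor's sketch (not part of the patch): for callers that already hold a `Route`, the old
//
//     node.send_payment(&route, payment_hash, &Some(payment_secret), PaymentId(payment_hash.0))
//
// call maps onto the renamed, onion-fields-based helper exercised above:
//
//     node.send_payment_with_route(&route, payment_hash,
//         RecipientOnionFields::secret_only(payment_secret), PaymentId(payment_hash.0))
//         .unwrap();
//
// The route-less, auto-retrying `send_payment` now takes a different set of arguments; see
// the bench changes later in this diff.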
@@ -8253,8 +8244,10 @@ mod tests {
 
                let test_preimage = PaymentPreimage([42; 32]);
                let mismatch_payment_hash = PaymentHash([43; 32]);
-               let session_privs = nodes[0].node.test_add_new_pending_payment(mismatch_payment_hash, None, PaymentId(mismatch_payment_hash.0), &route).unwrap();
-               nodes[0].node.test_send_payment_internal(&route, mismatch_payment_hash, &None, Some(test_preimage), PaymentId(mismatch_payment_hash.0), None, session_privs).unwrap();
+               let session_privs = nodes[0].node.test_add_new_pending_payment(mismatch_payment_hash,
+                       RecipientOnionFields::spontaneous_empty(), PaymentId(mismatch_payment_hash.0), &route).unwrap();
+               nodes[0].node.test_send_payment_internal(&route, mismatch_payment_hash,
+                       RecipientOnionFields::spontaneous_empty(), Some(test_preimage), PaymentId(mismatch_payment_hash.0), None, session_privs).unwrap();
                check_added_monitors!(nodes[0], 1);
 
                let updates = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
@@ -8296,8 +8289,11 @@ mod tests {
                let test_preimage = PaymentPreimage([42; 32]);
                let test_secret = PaymentSecret([43; 32]);
                let payment_hash = PaymentHash(Sha256::hash(&test_preimage.0).into_inner());
-               let session_privs = nodes[0].node.test_add_new_pending_payment(payment_hash, Some(test_secret), PaymentId(payment_hash.0), &route).unwrap();
-               nodes[0].node.test_send_payment_internal(&route, payment_hash, &Some(test_secret), Some(test_preimage), PaymentId(payment_hash.0), None, session_privs).unwrap();
+               let session_privs = nodes[0].node.test_add_new_pending_payment(payment_hash,
+                       RecipientOnionFields::secret_only(test_secret), PaymentId(payment_hash.0), &route).unwrap();
+               nodes[0].node.test_send_payment_internal(&route, payment_hash,
+                       RecipientOnionFields::secret_only(test_secret), Some(test_preimage),
+                       PaymentId(payment_hash.0), None, session_privs).unwrap();
                check_added_monitors!(nodes[0], 1);
 
                let updates = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
@@ -8334,7 +8330,9 @@ mod tests {
                route.paths[1][0].short_channel_id = chan_2_id;
                route.paths[1][1].short_channel_id = chan_4_id;
 
-               match nodes[0].node.send_payment(&route, payment_hash, &None, PaymentId(payment_hash.0)).unwrap_err() {
+               match nodes[0].node.send_payment_with_route(&route, payment_hash,
+                       RecipientOnionFields::spontaneous_empty(), PaymentId(payment_hash.0))
+               .unwrap_err() {
                        PaymentSendFailure::ParameterError(APIError::APIMisuseError { ref err }) => {
                                assert!(regex::Regex::new(r"Payment secret is required for multi-path payments").unwrap().is_match(err))
                        },
@@ -8449,6 +8447,7 @@ mod tests {
                        assert_eq!(nodes_0_lock.len(), 1);
                        assert!(nodes_0_lock.contains_key(channel_id));
                }
+               expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
 
                {
                        // Assert that `nodes[1]`'s `id_to_peer` map is populated with the channel as soon as
@@ -8461,6 +8460,7 @@ mod tests {
                let funding_signed = get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id());
                nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &funding_signed);
                check_added_monitors!(nodes[0], 1);
+               expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
                let (channel_ready, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
                let (announcement, nodes_0_update, nodes_1_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
                update_nodes_with_chan_announce(&nodes, 0, 1, &announcement, &nodes_0_update, &nodes_1_update);
@@ -8603,10 +8603,13 @@ mod tests {
 
                                nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &funding_created_msg);
                                check_added_monitors!(nodes[1], 1);
+                               expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
+
                                let funding_signed = get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id());
 
                                nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &funding_signed);
                                check_added_monitors!(nodes[0], 1);
+                               expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
                        }
                        open_channel_msg.temporary_channel_id = nodes[0].keys_manager.get_secure_random_bytes();
                }
@@ -8824,13 +8827,13 @@ mod tests {
 pub mod bench {
        use crate::chain::Listen;
        use crate::chain::chainmonitor::{ChainMonitor, Persist};
-       use crate::chain::keysinterface::{EntropySource, KeysManager, InMemorySigner};
+       use crate::chain::keysinterface::{KeysManager, InMemorySigner};
        use crate::events::{Event, MessageSendEvent, MessageSendEventsProvider};
-       use crate::ln::channelmanager::{self, BestBlock, ChainParameters, ChannelManager, PaymentHash, PaymentPreimage, PaymentId};
+       use crate::ln::channelmanager::{BestBlock, ChainParameters, ChannelManager, PaymentHash, PaymentPreimage, PaymentId, RecipientOnionFields, Retry};
        use crate::ln::functional_test_utils::*;
        use crate::ln::msgs::{ChannelMessageHandler, Init};
        use crate::routing::gossip::NetworkGraph;
-       use crate::routing::router::{PaymentParameters, get_route};
+       use crate::routing::router::{PaymentParameters, RouteParameters};
        use crate::util::test_utils;
        use crate::util::config::UserConfig;
 
@@ -8907,7 +8910,24 @@ pub mod bench {
                } else { panic!(); }
 
                node_b.handle_funding_created(&node_a.get_our_node_id(), &get_event_msg!(node_a_holder, MessageSendEvent::SendFundingCreated, node_b.get_our_node_id()));
+               let events_b = node_b.get_and_clear_pending_events();
+               assert_eq!(events_b.len(), 1);
+               match events_b[0] {
+                       Event::ChannelPending{ ref counterparty_node_id, .. } => {
+                               assert_eq!(*counterparty_node_id, node_a.get_our_node_id());
+                       },
+                       _ => panic!("Unexpected event"),
+               }
+
                node_a.handle_funding_signed(&node_b.get_our_node_id(), &get_event_msg!(node_b_holder, MessageSendEvent::SendFundingSigned, node_a.get_our_node_id()));
+               let events_a = node_a.get_and_clear_pending_events();
+               assert_eq!(events_a.len(), 1);
+               match events_a[0] {
+                       Event::ChannelPending{ ref counterparty_node_id, .. } => {
+                               assert_eq!(*counterparty_node_id, node_b.get_our_node_id());
+                       },
+                       _ => panic!("Unexpected event"),
+               }
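// Editor's sketch (not part of the patch): outside of the bench, an application's event
// handler would observe the same `ChannelPending` events once `funding_created` /
// `funding_signed` have been handled. A minimal match, using only the field the bench
// asserts on:
//
//     match event {
//         Event::ChannelPending { counterparty_node_id, .. } => {
//             // funding negotiation is complete; the channel now awaits confirmations
//             let _peer = counterparty_node_id;
//         },
//         _ => {},
//     }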
 
                assert_eq!(&tx_broadcaster.txn_broadcasted.lock().unwrap()[..], &[tx.clone()]);
 
@@ -8951,28 +8971,21 @@ pub mod bench {
                        _ => panic!("Unexpected event"),
                }
 
-               let dummy_graph = NetworkGraph::new(network, &logger_a);
-
                let mut payment_count: u64 = 0;
                macro_rules! send_payment {
                        ($node_a: expr, $node_b: expr) => {
-                               let usable_channels = $node_a.list_usable_channels();
                                let payment_params = PaymentParameters::from_node_id($node_b.get_our_node_id(), TEST_FINAL_CLTV)
                                        .with_features($node_b.invoice_features());
-                               let scorer = test_utils::TestScorer::new();
-                               let seed = [3u8; 32];
-                               let keys_manager = KeysManager::new(&seed, 42, 42);
-                               let random_seed_bytes = keys_manager.get_secure_random_bytes();
-                               let route = get_route(&$node_a.get_our_node_id(), &payment_params, &dummy_graph.read_only(),
-                                       Some(&usable_channels.iter().map(|r| r).collect::<Vec<_>>()), 10_000, TEST_FINAL_CLTV, &logger_a, &scorer, &random_seed_bytes).unwrap();
-
                                let mut payment_preimage = PaymentPreimage([0; 32]);
                                payment_preimage.0[0..8].copy_from_slice(&payment_count.to_le_bytes());
                                payment_count += 1;
                                let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0[..]).into_inner());
                                let payment_secret = $node_b.create_inbound_payment_for_hash(payment_hash, None, 7200, None).unwrap();
 
-                               $node_a.send_payment(&route, payment_hash, &Some(payment_secret), PaymentId(payment_hash.0)).unwrap();
+                               $node_a.send_payment(payment_hash, RecipientOnionFields::secret_only(payment_secret),
+                                       PaymentId(payment_hash.0), RouteParameters {
+                                               payment_params, final_value_msat: 10_000,
+                                       }, Retry::Attempts(0)).unwrap();
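// Editor's note (not part of the patch): this is the new route-less `send_payment`
// signature used by the bench: payment hash, `RecipientOnionFields`, `PaymentId`,
// `RouteParameters` (payment params plus amount), and a `Retry` strategy. Route finding,
// previously driven by hand through `get_route` and the dummy `NetworkGraph` removed
// above, now happens inside the `ChannelManager` via its configured router.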
                                let payment_event = SendEvent::from_event($node_a.get_and_clear_pending_msg_events().pop().unwrap());
                                $node_b.handle_update_add_htlc(&$node_a.get_our_node_id(), &payment_event.msgs[0]);
                                $node_b.handle_commitment_signed(&$node_a.get_our_node_id(), &payment_event.commitment_msg);