Swap pending_outbound_payments for OutboundPayments struct
[rust-lightning] / lightning / src / ln / channelmanager.rs
index e0a54841e5d432d8bce0cc0f16ecdb909eb18ca0..61cc2af81c7d8f50447f8ee02471df230dabe4a6 100644
@@ -46,16 +46,19 @@ use crate::ln::channel::{Channel, ChannelError, ChannelUpdateStatus, UpdateFulfi
 use crate::ln::features::{ChannelFeatures, ChannelTypeFeatures, InitFeatures, NodeFeatures};
 #[cfg(any(feature = "_test_utils", test))]
 use crate::ln::features::InvoiceFeatures;
-use crate::routing::router::{InFlightHtlcs, PaymentParameters, Route, RouteHop, RoutePath, RouteParameters};
+use crate::routing::router::{InFlightHtlcs, PaymentParameters, Route, RouteHop, RoutePath};
 use crate::ln::msgs;
 use crate::ln::onion_utils;
 use crate::ln::onion_utils::HTLCFailReason;
 use crate::ln::msgs::{ChannelMessageHandler, DecodeError, LightningError, MAX_VALUE_MSAT};
+#[cfg(test)]
+use crate::ln::outbound_payment;
+use crate::ln::outbound_payment::{OutboundPayments, PendingOutboundPayment};
 use crate::ln::wire::Encode;
 use crate::chain::keysinterface::{Sign, KeysInterface, KeysManager, Recipient};
 use crate::util::config::{UserConfig, ChannelConfig};
 use crate::util::events::{Event, EventHandler, EventsProvider, MessageSendEvent, MessageSendEventsProvider, ClosureReason, HTLCDestination};
-use crate::util::{byte_utils, events};
+use crate::util::events;
 use crate::util::wakers::{Future, Notifier};
 use crate::util::scid_utils::fake_scid;
 use crate::util::ser::{BigSize, FixedLengthReader, Readable, ReadableArgs, MaybeReadable, Writeable, Writer, VecWriter};
@@ -72,6 +75,9 @@ use core::sync::atomic::{AtomicUsize, Ordering};
 use core::time::Duration;
 use core::ops::Deref;
 
+// Re-export this for use in the public API.
+pub use crate::ln::outbound_payment::PaymentSendFailure;
+
 // We hold various information about HTLC relay in the HTLC objects in Channel itself:
 //
 // Upon receipt of an HTLC from a peer, we'll give it a PendingHTLCStatus indicating if it should
@@ -283,14 +289,6 @@ struct ReceiveError {
        msg: &'static str,
 }
 
-/// Return value for claim_funds_from_hop
-enum ClaimFundsFromHop {
-       PrevHopForceClosed,
-       MonitorUpdateFail(PublicKey, MsgHandleErrInternal, Option<u64>),
-       Success(u64),
-       DuplicateClaim,
-}
-
 type ShutdownResult = (Option<(OutPoint, ChannelMonitorUpdate)>, Vec<(HTLCSource, PaymentHash, PublicKey, [u8; 32])>);
 
 /// Error type returned across the channel_state mutex boundary. When an Err is generated for a
@@ -404,6 +402,36 @@ pub(super) enum RAACommitmentOrder {
        RevokeAndACKFirst,
 }
 
+/// Information about a payment which is currently being claimed.
+struct ClaimingPayment {
+       amount_msat: u64,
+       payment_purpose: events::PaymentPurpose,
+       receiver_node_id: PublicKey,
+}
+impl_writeable_tlv_based!(ClaimingPayment, {
+       (0, amount_msat, required),
+       (2, payment_purpose, required),
+       (4, receiver_node_id, required),
+});
+
+/// Information about claimable or being-claimed payments
+struct ClaimablePayments {
+       /// Map from payment hash to the payment data and any HTLCs which are to us and can be
+       /// failed/claimed by the user.
+       ///
+	/// Note that no consistency guarantees are made about the channels given here actually
+       /// existing anymore by the time you go to read them!
+       ///
+       /// When adding to the map, [`Self::pending_claiming_payments`] must also be checked to ensure
+       /// we don't get a duplicate payment.
+       claimable_htlcs: HashMap<PaymentHash, (events::PaymentPurpose, Vec<ClaimableHTLC>)>,
+
+       /// Map from payment hash to the payment data for HTLCs which we have begun claiming, but which
+       /// are waiting on a [`ChannelMonitorUpdate`] to complete in order to be surfaced to the user
+       /// as an [`events::Event::PaymentClaimed`].
+       pending_claiming_payments: HashMap<PaymentHash, ClaimingPayment>,
+}
+
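
To illustrate the two-map design above: `claimable_htlcs` holds HTLCs waiting for the user to claim or fail them, while `pending_claiming_payments` holds payments whose claim is already in flight behind a `ChannelMonitorUpdate`. A minimal, self-contained sketch (simplified types, not the real `ChannelManager` code) of the duplicate check the field docs require:

use std::collections::HashMap;

type PaymentHash = [u8; 32];

struct ClaimablePaymentsSketch {
	// HTLCs the user may still claim or fail, keyed by payment hash.
	claimable_htlcs: HashMap<PaymentHash, Vec<u64 /* amt_msat */>>,
	// Payments we have begun claiming but not yet surfaced as PaymentClaimed.
	pending_claiming_payments: HashMap<PaymentHash, u64 /* total amt_msat */>,
}

impl ClaimablePaymentsSketch {
	/// Refuses to add an HTLC for a payment that is already being claimed, so the
	/// same payment hash is never surfaced to the user twice.
	fn try_add_claimable_htlc(&mut self, hash: PaymentHash, amt_msat: u64) -> bool {
		if self.pending_claiming_payments.contains_key(&hash) {
			return false;
		}
		self.claimable_htlcs.entry(hash).or_insert_with(Vec::new).push(amt_msat);
		true
	}
}
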
 // Note this is only exposed in cfg(test):
 pub(super) struct ChannelHolder<Signer: Sign> {
        pub(super) by_id: HashMap<[u8; 32], Channel<Signer>>,
@@ -421,6 +449,16 @@ enum BackgroundEvent {
        ClosingMonitorUpdate((OutPoint, ChannelMonitorUpdate)),
 }
 
+pub(crate) enum MonitorUpdateCompletionAction {
+       /// Indicates that a payment ultimately destined for us was claimed and we should emit an
+       /// [`events::Event::PaymentClaimed`] to the user if we haven't yet generated such an event for
+       /// this payment. Note that this is only best-effort. On restart it's possible such a duplicate
+       /// event can be generated.
+       PaymentClaimed { payment_hash: PaymentHash },
+       /// Indicates an [`events::Event`] should be surfaced to the user.
+       EmitEvent { event: events::Event },
+}
+
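
The intent of these actions: while a `ChannelMonitorUpdate` is still in flight we must not yet tell the user about the result, so the work is queued and run once the update completes. A simplified, self-contained sketch of draining such a queue (stand-in types, not the real event plumbing):

enum SketchEvent {
	PaymentClaimed { payment_hash: [u8; 32] },
}

enum SketchCompletionAction {
	PaymentClaimed { payment_hash: [u8; 32] },
	EmitEvent { event: SketchEvent },
}

fn drain_completion_actions(actions: Vec<SketchCompletionAction>, pending_events: &mut Vec<SketchEvent>) {
	for action in actions {
		match action {
			// Best-effort only: as the enum docs above note, a duplicate
			// PaymentClaimed may still be generated after a restart. The real
			// code additionally consults pending_claiming_payments here.
			SketchCompletionAction::PaymentClaimed { payment_hash } =>
				pending_events.push(SketchEvent::PaymentClaimed { payment_hash }),
			SketchCompletionAction::EmitEvent { event } => pending_events.push(event),
		}
	}
}
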
 /// State we hold per-peer. In the future we should put channels in here, but for now we only hold
 /// the latest Init features we heard from the peer.
 struct PeerState {
@@ -448,160 +486,6 @@ struct PendingInboundPayment {
        min_value_msat: Option<u64>,
 }
 
-/// Stores the session_priv for each part of a payment that is still pending. For versions 0.0.102
-/// and later, also stores information for retrying the payment.
-pub(crate) enum PendingOutboundPayment {
-       Legacy {
-               session_privs: HashSet<[u8; 32]>,
-       },
-       Retryable {
-               session_privs: HashSet<[u8; 32]>,
-               payment_hash: PaymentHash,
-               payment_secret: Option<PaymentSecret>,
-               pending_amt_msat: u64,
-               /// Used to track the fee paid. Only present if the payment was serialized on 0.0.103+.
-               pending_fee_msat: Option<u64>,
-               /// The total payment amount across all paths, used to verify that a retry is not overpaying.
-               total_msat: u64,
-               /// Our best known block height at the time this payment was initiated.
-               starting_block_height: u32,
-       },
-       /// When a pending payment is fulfilled, we continue tracking it until all pending HTLCs have
-       /// been resolved. This ensures we don't look up pending payments in ChannelMonitors on restart
-       /// and add a pending payment that was already fulfilled.
-       Fulfilled {
-               session_privs: HashSet<[u8; 32]>,
-               payment_hash: Option<PaymentHash>,
-               timer_ticks_without_htlcs: u8,
-       },
-       /// When a payer gives up trying to retry a payment, they inform us, letting us generate a
-       /// `PaymentFailed` event when all HTLCs have irrevocably failed. This avoids a number of race
-       /// conditions in MPP-aware payment retriers (1), where the possibility of multiple
-       /// `PaymentPathFailed` events with `all_paths_failed` can be pending at once, confusing a
-       /// downstream event handler as to when a payment has actually failed.
-       ///
-       /// (1) https://github.com/lightningdevkit/rust-lightning/issues/1164
-       Abandoned {
-               session_privs: HashSet<[u8; 32]>,
-               payment_hash: PaymentHash,
-       },
-}
-
-impl PendingOutboundPayment {
-       fn is_fulfilled(&self) -> bool {
-               match self {
-                       PendingOutboundPayment::Fulfilled { .. } => true,
-                       _ => false,
-               }
-       }
-       fn abandoned(&self) -> bool {
-               match self {
-                       PendingOutboundPayment::Abandoned { .. } => true,
-                       _ => false,
-               }
-       }
-       fn get_pending_fee_msat(&self) -> Option<u64> {
-               match self {
-                       PendingOutboundPayment::Retryable { pending_fee_msat, .. } => pending_fee_msat.clone(),
-                       _ => None,
-               }
-       }
-
-       fn payment_hash(&self) -> Option<PaymentHash> {
-               match self {
-                       PendingOutboundPayment::Legacy { .. } => None,
-                       PendingOutboundPayment::Retryable { payment_hash, .. } => Some(*payment_hash),
-                       PendingOutboundPayment::Fulfilled { payment_hash, .. } => *payment_hash,
-                       PendingOutboundPayment::Abandoned { payment_hash, .. } => Some(*payment_hash),
-               }
-       }
-
-       fn mark_fulfilled(&mut self) {
-               let mut session_privs = HashSet::new();
-               core::mem::swap(&mut session_privs, match self {
-                       PendingOutboundPayment::Legacy { session_privs } |
-                       PendingOutboundPayment::Retryable { session_privs, .. } |
-                       PendingOutboundPayment::Fulfilled { session_privs, .. } |
-                       PendingOutboundPayment::Abandoned { session_privs, .. }
-                               => session_privs,
-               });
-               let payment_hash = self.payment_hash();
-               *self = PendingOutboundPayment::Fulfilled { session_privs, payment_hash, timer_ticks_without_htlcs: 0 };
-       }
-
-       fn mark_abandoned(&mut self) -> Result<(), ()> {
-               let mut session_privs = HashSet::new();
-               let our_payment_hash;
-               core::mem::swap(&mut session_privs, match self {
-                       PendingOutboundPayment::Legacy { .. } |
-                       PendingOutboundPayment::Fulfilled { .. } =>
-                               return Err(()),
-                       PendingOutboundPayment::Retryable { session_privs, payment_hash, .. } |
-                       PendingOutboundPayment::Abandoned { session_privs, payment_hash, .. } => {
-                               our_payment_hash = *payment_hash;
-                               session_privs
-                       },
-               });
-               *self = PendingOutboundPayment::Abandoned { session_privs, payment_hash: our_payment_hash };
-               Ok(())
-       }
-
-       /// panics if path is None and !self.is_fulfilled
-       fn remove(&mut self, session_priv: &[u8; 32], path: Option<&Vec<RouteHop>>) -> bool {
-               let remove_res = match self {
-                       PendingOutboundPayment::Legacy { session_privs } |
-                       PendingOutboundPayment::Retryable { session_privs, .. } |
-                       PendingOutboundPayment::Fulfilled { session_privs, .. } |
-                       PendingOutboundPayment::Abandoned { session_privs, .. } => {
-                               session_privs.remove(session_priv)
-                       }
-               };
-               if remove_res {
-                       if let PendingOutboundPayment::Retryable { ref mut pending_amt_msat, ref mut pending_fee_msat, .. } = self {
-                               let path = path.expect("Fulfilling a payment should always come with a path");
-                               let path_last_hop = path.last().expect("Outbound payments must have had a valid path");
-                               *pending_amt_msat -= path_last_hop.fee_msat;
-                               if let Some(fee_msat) = pending_fee_msat.as_mut() {
-                                       *fee_msat -= path.get_path_fees();
-                               }
-                       }
-               }
-               remove_res
-       }
-
-       fn insert(&mut self, session_priv: [u8; 32], path: &Vec<RouteHop>) -> bool {
-               let insert_res = match self {
-                       PendingOutboundPayment::Legacy { session_privs } |
-                       PendingOutboundPayment::Retryable { session_privs, .. } => {
-                               session_privs.insert(session_priv)
-                       }
-                       PendingOutboundPayment::Fulfilled { .. } => false,
-                       PendingOutboundPayment::Abandoned { .. } => false,
-               };
-               if insert_res {
-                       if let PendingOutboundPayment::Retryable { ref mut pending_amt_msat, ref mut pending_fee_msat, .. } = self {
-                               let path_last_hop = path.last().expect("Outbound payments must have had a valid path");
-                               *pending_amt_msat += path_last_hop.fee_msat;
-                               if let Some(fee_msat) = pending_fee_msat.as_mut() {
-                                       *fee_msat += path.get_path_fees();
-                               }
-                       }
-               }
-               insert_res
-       }
-
-       fn remaining_parts(&self) -> usize {
-               match self {
-                       PendingOutboundPayment::Legacy { session_privs } |
-                       PendingOutboundPayment::Retryable { session_privs, .. } |
-                       PendingOutboundPayment::Fulfilled { session_privs, .. } |
-                       PendingOutboundPayment::Abandoned { session_privs, .. } => {
-                               session_privs.len()
-                       }
-               }
-       }
-}
-
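
The enum this hunk removes is moved, not dropped: the same four states now live in outbound_payment.rs. As a reminder of the lifecycle they encode, a condensed sketch of the `mark_abandoned` transition (illustrative names, field details omitted):

#[derive(Clone, Copy, PartialEq, Debug)]
enum OutboundStateSketch { Legacy, Retryable, Fulfilled, Abandoned }

// Mirrors mark_fulfilled()/mark_abandoned() above: any state may become
// Fulfilled once the preimage is learned, but only Retryable/Abandoned
// payments can be marked abandoned.
fn mark_abandoned(state: OutboundStateSketch) -> Result<OutboundStateSketch, ()> {
	match state {
		OutboundStateSketch::Legacy | OutboundStateSketch::Fulfilled => Err(()),
		OutboundStateSketch::Retryable | OutboundStateSketch::Abandoned =>
			Ok(OutboundStateSketch::Abandoned),
	}
}
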
 /// SimpleArcChannelManager is useful when you need a ChannelManager with a static lifetime, e.g.
 /// when you're using lightning-net-tokio (since tokio::spawn requires parameters with static
 /// lifetimes). Other times you can afford a reference, which is more efficient, in which case
@@ -679,9 +563,9 @@ pub type SimpleRefChannelManager<'a, 'b, 'c, 'd, 'e, M, T, F, L> = ChannelManage
 //  |
 //  |__`pending_inbound_payments`
 //  |   |
-//  |   |__`claimable_htlcs`
+//  |   |__`claimable_payments`
 //  |   |
-//  |   |__`pending_outbound_payments`
+//  |   |__`pending_outbound_payments` // This field's struct contains a map of pending outbounds
 //  |       |
 //  |       |__`channel_state`
 //  |           |
@@ -726,9 +610,9 @@ pub struct ChannelManager<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
        channel_state: Mutex<ChannelHolder<<K::Target as KeysInterface>::Signer>>,
 
        /// Storage for PaymentSecrets and any requirements on future inbound payments before we will
-       /// expose them to users via a PaymentReceived event. HTLCs which do not meet the requirements
+       /// expose them to users via a PaymentClaimable event. HTLCs which do not meet the requirements
        /// here are failed when we process them as pending-forwardable-HTLCs, and entries are removed
-       /// after we generate a PaymentReceived upon receipt of all MPP parts or when they time out.
+       /// after we generate a PaymentClaimable upon receipt of all MPP parts or when they time out.
        ///
        /// See `ChannelManager` struct-level documentation for lock order requirements.
        pending_inbound_payments: Mutex<HashMap<PaymentHash, PendingInboundPayment>>,
@@ -745,7 +629,7 @@ pub struct ChannelManager<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
        /// See `PendingOutboundPayment` documentation for more info.
        ///
        /// See `ChannelManager` struct-level documentation for lock order requirements.
-       pending_outbound_payments: Mutex<HashMap<PaymentId, PendingOutboundPayment>>,
+       pending_outbound_payments: OutboundPayments,
 
        /// SCID/SCID Alias -> forward infos. Key of 0 means payments received.
        ///
@@ -767,14 +651,11 @@ pub struct ChannelManager<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
        /// See `ChannelManager` struct-level documentation for lock order requirements.
        pending_intercepted_htlcs: Mutex<HashMap<InterceptId, PendingAddHTLCInfo>>,
 
-       /// Map from payment hash to the payment data and any HTLCs which are to us and can be
-       /// failed/claimed by the user.
-       ///
-       /// Note that, no consistency guarantees are made about the channels given here actually
-       /// existing anymore by the time you go to read them!
+       /// The sets of payments which are claimable or currently being claimed. See
+       /// [`ClaimablePayments`]' individual field docs for more info.
        ///
        /// See `ChannelManager` struct-level documentation for lock order requirements.
-       claimable_htlcs: Mutex<HashMap<PaymentHash, (events::PaymentPurpose, Vec<ClaimableHTLC>)>>,
+       claimable_payments: Mutex<ClaimablePayments>,
 
        /// The set of outbound SCID aliases across all our channels, including unconfirmed channels
        /// and some closed channels which reached a usable state prior to being closed. This is used
@@ -994,7 +875,7 @@ pub(crate) const MPP_TIMEOUT_TICKS: u8 = 3;
 
 /// The number of ticks of [`ChannelManager::timer_tick_occurred`] until we time-out the
 /// idempotency of payments by [`PaymentId`]. See
-/// [`ChannelManager::remove_stale_resolved_payments`].
+/// [`OutboundPayments::remove_stale_resolved_payments`].
 pub(crate) const IDEMPOTENCY_TIMEOUT_TICKS: u8 = 7;
 
 /// Information needed for constructing an invoice route hint for this channel.
@@ -1223,70 +1104,6 @@ impl ChannelDetails {
        }
 }
 
-/// If a payment fails to send, it can be in one of several states. This enum is returned as the
-/// Err() type describing which state the payment is in, see the description of individual enum
-/// states for more.
-#[derive(Clone, Debug)]
-pub enum PaymentSendFailure {
-       /// A parameter which was passed to send_payment was invalid, preventing us from attempting to
-       /// send the payment at all.
-       ///
-       /// You can freely resend the payment in full (with the parameter error fixed).
-       ///
-       /// Because the payment failed outright, no payment tracking is done, you do not need to call
-       /// [`ChannelManager::abandon_payment`] and [`ChannelManager::retry_payment`] will *not* work
-       /// for this payment.
-       ParameterError(APIError),
-       /// A parameter in a single path which was passed to send_payment was invalid, preventing us
-       /// from attempting to send the payment at all.
-       ///
-       /// You can freely resend the payment in full (with the parameter error fixed).
-       ///
-       /// The results here are ordered the same as the paths in the route object which was passed to
-       /// send_payment.
-       ///
-       /// Because the payment failed outright, no payment tracking is done, you do not need to call
-       /// [`ChannelManager::abandon_payment`] and [`ChannelManager::retry_payment`] will *not* work
-       /// for this payment.
-       PathParameterError(Vec<Result<(), APIError>>),
-       /// All paths which were attempted failed to send, with no channel state change taking place.
-       /// You can freely resend the payment in full (though you probably want to do so over different
-       /// paths than the ones selected).
-       ///
-       /// Because the payment failed outright, no payment tracking is done, you do not need to call
-       /// [`ChannelManager::abandon_payment`] and [`ChannelManager::retry_payment`] will *not* work
-       /// for this payment.
-       AllFailedResendSafe(Vec<APIError>),
-       /// Indicates that a payment for the provided [`PaymentId`] is already in-flight and has not
-       /// yet completed (i.e. generated an [`Event::PaymentSent`]) or been abandoned (via
-       /// [`ChannelManager::abandon_payment`]).
-       ///
-       /// [`Event::PaymentSent`]: events::Event::PaymentSent
-       DuplicatePayment,
-       /// Some paths which were attempted failed to send, though possibly not all. At least some
-       /// paths have irrevocably committed to the HTLC and retrying the payment in full would result
-       /// in over-/re-payment.
-       ///
-       /// The results here are ordered the same as the paths in the route object which was passed to
-       /// send_payment, and any `Err`s which are not [`APIError::MonitorUpdateInProgress`] can be
-       /// safely retried via [`ChannelManager::retry_payment`].
-       ///
-       /// Any entries which contain `Err(APIError::MonitorUpdateInprogress)` or `Ok(())` MUST NOT be
-       /// retried as they will result in over-/re-payment. These HTLCs all either successfully sent
-       /// (in the case of `Ok(())`) or will send once a [`MonitorEvent::Completed`] is provided for
-       /// the next-hop channel with the latest update_id.
-       PartialFailure {
-               /// The errors themselves, in the same order as the route hops.
-               results: Vec<Result<(), APIError>>,
-               /// If some paths failed without irrevocably committing to the new HTLC(s), this will
-               /// contain a [`RouteParameters`] object which can be used to calculate a new route that
-               /// will pay all remaining unpaid balance.
-               failed_paths_retry: Option<RouteParameters>,
-               /// The payment id for the payment, which is now at least partially pending.
-               payment_id: PaymentId,
-       },
-}
-
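
`PaymentSendFailure` itself is unchanged by this move; it now lives in outbound_payment.rs and is re-exported from this module (see the `pub use` near the top of the diff). A hedged usage sketch of how a caller might classify the variants, assuming the `lightning` crate paths shown:

use lightning::ln::channelmanager::PaymentSendFailure;

/// Returns true only when no HTLC was irrevocably committed, i.e. the whole
/// payment may be re-sent without risking over-payment.
fn safe_to_resend_in_full(err: &PaymentSendFailure) -> bool {
	match err {
		PaymentSendFailure::ParameterError(_)
			| PaymentSendFailure::PathParameterError(_)
			| PaymentSendFailure::AllFailedResendSafe(_) => true,
		// A payment with this PaymentId is already pending; do not re-send.
		PaymentSendFailure::DuplicatePayment => false,
		// Some paths committed; only the failed remainder may be retried, e.g.
		// by routing over `failed_paths_retry` and calling retry_payment.
		PaymentSendFailure::PartialFailure { .. } => false,
	}
}
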
 /// Route hints used in constructing invoices for [phantom node payments].
 ///
 /// [phantom node payments]: crate::chain::keysinterface::PhantomKeysManager
@@ -1578,9 +1395,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        }),
                        outbound_scid_aliases: Mutex::new(HashSet::new()),
                        pending_inbound_payments: Mutex::new(HashMap::new()),
-                       pending_outbound_payments: Mutex::new(HashMap::new()),
+                       pending_outbound_payments: OutboundPayments::new(),
                        forward_htlcs: Mutex::new(HashMap::new()),
-                       claimable_htlcs: Mutex::new(HashMap::new()),
+                       claimable_payments: Mutex::new(ClaimablePayments { claimable_htlcs: HashMap::new(), pending_claiming_payments: HashMap::new() }),
                        pending_intercepted_htlcs: Mutex::new(HashMap::new()),
                        id_to_peer: Mutex::new(HashMap::new()),
                        short_to_chan_info: FairRwLock::new(HashMap::new()),
@@ -2033,7 +1850,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        return Err(ReceiveError {
                                msg: "Upstream node set CLTV to the wrong value",
                                err_code: 18,
-                               err_data: byte_utils::be32_to_array(cltv_expiry).to_vec()
+                               err_data: cltv_expiry.to_be_bytes().to_vec()
                        })
                }
                // final_expiry_too_soon
@@ -2055,7 +1872,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                if hop_data.amt_to_forward > amt_msat {
                        return Err(ReceiveError {
                                err_code: 19,
-                               err_data: byte_utils::be64_to_array(amt_msat).to_vec(),
+                               err_data: amt_msat.to_be_bytes().to_vec(),
                                msg: "Upstream node sent less than we were supposed to receive in payment",
                        });
                }
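
The `byte_utils` helpers removed in these hunks were thin wrappers around the now-stable `to_be_bytes`, so the replacement produces byte-for-byte identical error data:

fn main() {
	let amt_msat: u64 = 1_234_567;
	// Same big-endian encoding byte_utils::be64_to_array() used to produce.
	assert_eq!(amt_msat.to_be_bytes().to_vec(), vec![0, 0, 0, 0, 0, 0x12, 0xd6, 0x87]);
}
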
@@ -2222,7 +2039,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        // with a short_channel_id of 0. This is important as various things later assume
                        // short_channel_id is non-0 in any ::Forward.
                        if let &PendingHTLCRouting::Forward { ref short_channel_id, .. } = routing {
-                               if let Some((err, code, chan_update)) = loop {
+                               if let Some((err, mut code, chan_update)) = loop {
                                        let id_option = self.short_to_chan_info.read().unwrap().get(&short_channel_id).cloned();
                                        let mut channel_state = self.channel_state.lock().unwrap();
                                        let forwarding_id_opt = match id_option {
@@ -2331,6 +2148,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                (chan_update.serialized_length() as u16 + 2).write(&mut res).expect("Writes cannot fail");
                                                msgs::ChannelUpdate::TYPE.write(&mut res).expect("Writes cannot fail");
                                                chan_update.write(&mut res).expect("Writes cannot fail");
+                                       } else if code & 0x1000 == 0x1000 {
+                                               // If we're trying to return an error that requires a `channel_update` but
+                                               // we're forwarding to a phantom or intercept "channel" (i.e. cannot
+                                               // generate an update), just use the generic "temporary_node_failure"
+                                               // instead.
+                                               code = 0x2000 | 2;
                                        }
                                        return_err!(err, code, &res.0[..]);
                                }
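
For context on the new fallback: in BOLT 4 failure codes the 0x1000 (`UPDATE`) flag marks errors that must carry a `channel_update`, and 0x2000|2 is `temporary_node_failure`. A minimal sketch of the downgrade performed above (illustrative constants, not code from this file):

const UPDATE_FLAG: u16 = 0x1000; // failure must include a channel_update
const NODE_FLAG: u16 = 0x2000;   // failure is attributed to a node, not a channel
const TEMPORARY_NODE_FAILURE: u16 = NODE_FLAG | 2;

fn downgrade_if_no_update(code: u16, have_channel_update: bool) -> u16 {
	// Phantom/intercept forwards have no real channel to describe, so an
	// UPDATE-flagged error is replaced with temporary_node_failure.
	if code & UPDATE_FLAG == UPDATE_FLAG && !have_channel_update {
		TEMPORARY_NODE_FAILURE
	} else {
		code
	}
}
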
@@ -2406,10 +2229,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let session_priv = SecretKey::from_slice(&session_priv_bytes[..]).expect("RNG is busted");
 
                let onion_keys = onion_utils::construct_onion_keys(&self.secp_ctx, &path, &session_priv)
-                       .map_err(|_| APIError::RouteError{err: "Pubkey along hop was maliciously selected"})?;
+                       .map_err(|_| APIError::InvalidRoute{err: "Pubkey along hop was maliciously selected"})?;
                let (onion_payloads, htlc_msat, htlc_cltv) = onion_utils::build_onion_payloads(path, total_value, payment_secret, cur_height, keysend_preimage)?;
                if onion_utils::route_size_insane(&onion_payloads) {
-                       return Err(APIError::RouteError{err: "Route size too large considering onion data"});
+                       return Err(APIError::InvalidRoute{err: "Route size too large considering onion data"});
                }
                let onion_packet = onion_utils::construct_onion_packet(onion_payloads, onion_keys, prng_seed, payment_hash);
 
@@ -2426,7 +2249,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(id) {
                                match {
                                        if chan.get().get_counterparty_node_id() != path.first().unwrap().pubkey {
-                                               return Err(APIError::RouteError{err: "Node ID mismatch on first hop!"});
+                                               return Err(APIError::InvalidRoute{err: "Node ID mismatch on first hop!"});
                                        }
                                        if !chan.get().is_live() {
                                                return Err(APIError::ChannelUnavailable{err: "Peer for first hop currently disconnected/pending monitor update!".to_owned()});
@@ -2501,7 +2324,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// fields for more info.
        ///
        /// If a pending payment is currently in-flight with the same [`PaymentId`] provided, this
-       /// method will error with an [`APIError::RouteError`]. Note, however, that once a payment
+       /// method will error with an [`APIError::InvalidRoute`]. Note, however, that once a payment
        /// is no longer pending (either via [`ChannelManager::abandon_payment`], or handling of an
        /// [`Event::PaymentSent`]) LDK will not stop you from sending a second payment with the same
        /// [`PaymentId`].
@@ -2520,7 +2343,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// PaymentSendFailure for more info.
        ///
        /// In general, a path may raise:
-       ///  * [`APIError::RouteError`] when an invalid route or forwarding parameter (cltv_delta, fee,
+       ///  * [`APIError::InvalidRoute`] when an invalid route or forwarding parameter (cltv_delta, fee,
        ///    node public key) is specified.
        ///  * [`APIError::ChannelUnavailable`] if the next-hop channel is not available for updates
        ///    (including due to previous monitor update failure or new permanent monitor update
@@ -2545,142 +2368,27 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// [`Event::PaymentSent`]: events::Event::PaymentSent
        /// [`PeerManager::process_events`]: crate::ln::peer_handler::PeerManager::process_events
        pub fn send_payment(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, payment_id: PaymentId) -> Result<(), PaymentSendFailure> {
-               let onion_session_privs = self.add_new_pending_payment(payment_hash, *payment_secret, payment_id, route)?;
-               self.send_payment_internal(route, payment_hash, payment_secret, None, payment_id, None, onion_session_privs)
+               let best_block_height = self.best_block.read().unwrap().height();
+               self.pending_outbound_payments
+                       .send_payment(route, payment_hash, payment_secret, payment_id, &self.keys_manager, best_block_height,
+                               |path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                               self.send_payment_along_path(path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
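This is the pattern the rest of the diff repeats: `ChannelManager` hands `OutboundPayments` a closure that performs the actual per-path send, so the payment-tracking logic can live in outbound_payment.rs while the channel machinery stays here. A simplified, self-contained sketch of that shape (illustrative types, not the real signatures):

use std::collections::HashMap;
use std::sync::Mutex;

struct OutboundsSketch {
	pending: Mutex<HashMap<u64 /* PaymentId */, u64 /* total msat */>>,
}

impl OutboundsSketch {
	fn send_payment<F>(&self, payment_id: u64, amt_msat: u64, send_along_path: F) -> Result<(), &'static str>
	where F: Fn(u64) -> Result<(), &'static str> {
		// Record the payment before sending so it is tracked even if a path
		// errors part-way through.
		if self.pending.lock().unwrap().insert(payment_id, amt_msat).is_some() {
			return Err("duplicate PaymentId");
		}
		send_along_path(amt_msat)
	}
}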
 
        #[cfg(test)]
-       pub(crate) fn test_add_new_pending_payment(&self, payment_hash: PaymentHash, payment_secret: Option<PaymentSecret>, payment_id: PaymentId, route: &Route) -> Result<Vec<[u8; 32]>, PaymentSendFailure> {
-               self.add_new_pending_payment(payment_hash, payment_secret, payment_id, route)
+       fn test_send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
+               let best_block_height = self.best_block.read().unwrap().height();
+               self.pending_outbound_payments.test_send_payment_internal(route, payment_hash, payment_secret, keysend_preimage, payment_id, recv_value_msat, onion_session_privs, &self.keys_manager, best_block_height,
+                       |path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
-       fn add_new_pending_payment(&self, payment_hash: PaymentHash, payment_secret: Option<PaymentSecret>, payment_id: PaymentId, route: &Route) -> Result<Vec<[u8; 32]>, PaymentSendFailure> {
-               let mut onion_session_privs = Vec::with_capacity(route.paths.len());
-               for _ in 0..route.paths.len() {
-                       onion_session_privs.push(self.keys_manager.get_secure_random_bytes());
-               }
-
-               let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap();
-               match pending_outbounds.entry(payment_id) {
-                       hash_map::Entry::Occupied(_) => Err(PaymentSendFailure::DuplicatePayment),
-                       hash_map::Entry::Vacant(entry) => {
-                               let payment = entry.insert(PendingOutboundPayment::Retryable {
-                                       session_privs: HashSet::new(),
-                                       pending_amt_msat: 0,
-                                       pending_fee_msat: Some(0),
-                                       payment_hash,
-                                       payment_secret,
-                                       starting_block_height: self.best_block.read().unwrap().height(),
-                                       total_msat: route.get_total_amount(),
-                               });
-
-                               for (path, session_priv_bytes) in route.paths.iter().zip(onion_session_privs.iter()) {
-                                       assert!(payment.insert(*session_priv_bytes, path));
-                               }
-
-                               Ok(onion_session_privs)
-                       },
-               }
+       #[cfg(test)]
+       pub(crate) fn test_add_new_pending_payment(&self, payment_hash: PaymentHash, payment_secret: Option<PaymentSecret>, payment_id: PaymentId, route: &Route) -> Result<Vec<[u8; 32]>, PaymentSendFailure> {
+               let best_block_height = self.best_block.read().unwrap().height();
+               self.pending_outbound_payments.add_new_pending_payment(payment_hash, payment_secret, payment_id, route, &self.keys_manager, best_block_height)
        }
 
-       fn send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
-               if route.paths.len() < 1 {
-                       return Err(PaymentSendFailure::ParameterError(APIError::RouteError{err: "There must be at least one path to send over"}));
-               }
-               if payment_secret.is_none() && route.paths.len() > 1 {
-                       return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError{err: "Payment secret is required for multi-path payments".to_string()}));
-               }
-               let mut total_value = 0;
-               let our_node_id = self.get_our_node_id();
-               let mut path_errs = Vec::with_capacity(route.paths.len());
-               'path_check: for path in route.paths.iter() {
-                       if path.len() < 1 || path.len() > 20 {
-                               path_errs.push(Err(APIError::RouteError{err: "Path didn't go anywhere/had bogus size"}));
-                               continue 'path_check;
-                       }
-                       for (idx, hop) in path.iter().enumerate() {
-                               if idx != path.len() - 1 && hop.pubkey == our_node_id {
-                                       path_errs.push(Err(APIError::RouteError{err: "Path went through us but wasn't a simple rebalance loop to us"}));
-                                       continue 'path_check;
-                               }
-                       }
-                       total_value += path.last().unwrap().fee_msat;
-                       path_errs.push(Ok(()));
-               }
-               if path_errs.iter().any(|e| e.is_err()) {
-                       return Err(PaymentSendFailure::PathParameterError(path_errs));
-               }
-               if let Some(amt_msat) = recv_value_msat {
-                       debug_assert!(amt_msat >= total_value);
-                       total_value = amt_msat;
-               }
-
-               let cur_height = self.best_block.read().unwrap().height() + 1;
-               let mut results = Vec::new();
-               debug_assert_eq!(route.paths.len(), onion_session_privs.len());
-               for (path, session_priv) in route.paths.iter().zip(onion_session_privs.into_iter()) {
-                       let mut path_res = self.send_payment_along_path(&path, &route.payment_params, &payment_hash, payment_secret, total_value, cur_height, payment_id, &keysend_preimage, session_priv);
-                       match path_res {
-                               Ok(_) => {},
-                               Err(APIError::MonitorUpdateInProgress) => {
-                                       // While a MonitorUpdateInProgress is an Err(_), the payment is still
-                                       // considered "in flight" and we shouldn't remove it from the
-                                       // PendingOutboundPayment set.
-                               },
-                               Err(_) => {
-                                       let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap();
-                                       if let Some(payment) = pending_outbounds.get_mut(&payment_id) {
-                                               let removed = payment.remove(&session_priv, Some(path));
-                                               debug_assert!(removed, "This can't happen as the payment has an entry for this path added by callers");
-                                       } else {
-                                               debug_assert!(false, "This can't happen as the payment was added by callers");
-                                               path_res = Err(APIError::APIMisuseError { err: "Internal error: payment disappeared during processing. Please report this bug!".to_owned() });
-                                       }
-                               }
-                       }
-                       results.push(path_res);
-               }
-               let mut has_ok = false;
-               let mut has_err = false;
-               let mut pending_amt_unsent = 0;
-               let mut max_unsent_cltv_delta = 0;
-               for (res, path) in results.iter().zip(route.paths.iter()) {
-                       if res.is_ok() { has_ok = true; }
-                       if res.is_err() { has_err = true; }
-                       if let &Err(APIError::MonitorUpdateInProgress) = res {
-                               // MonitorUpdateInProgress is inherently unsafe to retry, so we call it a
-                               // PartialFailure.
-                               has_err = true;
-                               has_ok = true;
-                       } else if res.is_err() {
-                               pending_amt_unsent += path.last().unwrap().fee_msat;
-                               max_unsent_cltv_delta = cmp::max(max_unsent_cltv_delta, path.last().unwrap().cltv_expiry_delta);
-                       }
-               }
-               if has_err && has_ok {
-                       Err(PaymentSendFailure::PartialFailure {
-                               results,
-                               payment_id,
-                               failed_paths_retry: if pending_amt_unsent != 0 {
-                                       if let Some(payment_params) = &route.payment_params {
-                                               Some(RouteParameters {
-                                                       payment_params: payment_params.clone(),
-                                                       final_value_msat: pending_amt_unsent,
-                                                       final_cltv_expiry_delta: max_unsent_cltv_delta,
-                                               })
-                                       } else { None }
-                               } else { None },
-                       })
-               } else if has_err {
-                       // If we failed to send any paths, we should remove the new PaymentId from the
-                       // `pending_outbound_payments` map, as the user isn't expected to `abandon_payment`.
-                       let removed = self.pending_outbound_payments.lock().unwrap().remove(&payment_id).is_some();
-                       debug_assert!(removed, "We should always have a pending payment to remove here");
-                       Err(PaymentSendFailure::AllFailedResendSafe(results.drain(..).map(|r| r.unwrap_err()).collect()))
-               } else {
-                       Ok(())
-               }
-       }
 
        /// Retries a payment along the given [`Route`].
        ///
@@ -2693,94 +2401,36 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// [`send_payment`]: [`ChannelManager::send_payment`]
        /// [`abandon_payment`]: [`ChannelManager::abandon_payment`]
        pub fn retry_payment(&self, route: &Route, payment_id: PaymentId) -> Result<(), PaymentSendFailure> {
-               const RETRY_OVERFLOW_PERCENTAGE: u64 = 10;
-               for path in route.paths.iter() {
-                       if path.len() == 0 {
-                               return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                       err: "length-0 path in route".to_string()
-                               }))
-                       }
-               }
-
-               let mut onion_session_privs = Vec::with_capacity(route.paths.len());
-               for _ in 0..route.paths.len() {
-                       onion_session_privs.push(self.keys_manager.get_secure_random_bytes());
-               }
-
-               let (total_msat, payment_hash, payment_secret) = {
-                       let mut outbounds = self.pending_outbound_payments.lock().unwrap();
-                       match outbounds.get_mut(&payment_id) {
-                               Some(payment) => {
-                                       let res = match payment {
-                                               PendingOutboundPayment::Retryable {
-                                                       total_msat, payment_hash, payment_secret, pending_amt_msat, ..
-                                               } => {
-                                                       let retry_amt_msat: u64 = route.paths.iter().map(|path| path.last().unwrap().fee_msat).sum();
-                                                       if retry_amt_msat + *pending_amt_msat > *total_msat * (100 + RETRY_OVERFLOW_PERCENTAGE) / 100 {
-                                                               return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                                                       err: format!("retry_amt_msat of {} will put pending_amt_msat (currently: {}) more than 10% over total_payment_amt_msat of {}", retry_amt_msat, pending_amt_msat, total_msat).to_string()
-                                                               }))
-                                                       }
-                                                       (*total_msat, *payment_hash, *payment_secret)
-                                               },
-                                               PendingOutboundPayment::Legacy { .. } => {
-                                                       return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                                               err: "Unable to retry payments that were initially sent on LDK versions prior to 0.0.102".to_string()
-                                                       }))
-                                               },
-                                               PendingOutboundPayment::Fulfilled { .. } => {
-                                                       return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                                               err: "Payment already completed".to_owned()
-                                                       }));
-                                               },
-                                               PendingOutboundPayment::Abandoned { .. } => {
-                                                       return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                                               err: "Payment already abandoned (with some HTLCs still pending)".to_owned()
-                                                       }));
-                                               },
-                                       };
-                                       for (path, session_priv_bytes) in route.paths.iter().zip(onion_session_privs.iter()) {
-                                               assert!(payment.insert(*session_priv_bytes, path));
-                                       }
-                                       res
-                               },
-                               None =>
-                                       return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                               err: format!("Payment with ID {} not found", log_bytes!(payment_id.0)),
-                                       })),
-                       }
-               };
-               self.send_payment_internal(route, payment_hash, &payment_secret, None, payment_id, Some(total_msat), onion_session_privs)
+               let best_block_height = self.best_block.read().unwrap().height();
+               self.pending_outbound_payments.retry_payment(route, payment_id, &self.keys_manager, best_block_height,
+                       |path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
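The retry bookkeeping removed here, including the 10% over-payment guard, moves into `OutboundPayments::retry_payment`. The guard, restated as a worked example:

const RETRY_OVERFLOW_PERCENTAGE: u64 = 10;

fn retry_allowed(retry_amt_msat: u64, pending_amt_msat: u64, total_msat: u64) -> bool {
	// A retry is rejected if it would put the amount in flight more than 10%
	// over the payment's total_msat.
	retry_amt_msat + pending_amt_msat <= total_msat * (100 + RETRY_OVERFLOW_PERCENTAGE) / 100
}

fn main() {
	// total_msat = 100_000 allows at most 110_000 msat in flight:
	assert!(retry_allowed(50_000, 60_000, 100_000));
	assert!(!retry_allowed(50_001, 60_000, 100_000));
}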
 
        /// Signals that no further retries for the given payment will occur.
        ///
-       /// After this method returns, any future calls to [`retry_payment`] for the given `payment_id`
-       /// will fail with [`PaymentSendFailure::ParameterError`]. If no such event has been generated,
-       /// an [`Event::PaymentFailed`] event will be generated as soon as there are no remaining
-       /// pending HTLCs for this payment.
+       /// After this method returns, no future calls to [`retry_payment`] for the given `payment_id`
+       /// are allowed. If no [`Event::PaymentFailed`] event had been generated before, one will be
+       /// generated as soon as there are no remaining pending HTLCs for this payment.
        ///
        /// Note that calling this method does *not* prevent a payment from succeeding. You must still
        /// wait until you receive either a [`Event::PaymentFailed`] or [`Event::PaymentSent`] event to
        /// determine the ultimate status of a payment.
        ///
+       /// If an [`Event::PaymentFailed`] event is generated and we restart without this
+       /// [`ChannelManager`] having been persisted, the payment may still be in the pending state
+	/// upon restart. This allows further calls to [`retry_payment`] (and requires a second call
+       /// to [`abandon_payment`] to mark the payment as failed again). Otherwise, future calls to
+       /// [`retry_payment`] will fail with [`PaymentSendFailure::ParameterError`].
+       ///
+       /// [`abandon_payment`]: Self::abandon_payment
        /// [`retry_payment`]: Self::retry_payment
        /// [`Event::PaymentFailed`]: events::Event::PaymentFailed
        /// [`Event::PaymentSent`]: events::Event::PaymentSent
        pub fn abandon_payment(&self, payment_id: PaymentId) {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
-
-               let mut outbounds = self.pending_outbound_payments.lock().unwrap();
-               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(payment_id) {
-                       if let Ok(()) = payment.get_mut().mark_abandoned() {
-                               if payment.get().remaining_parts() == 0 {
-                                       self.pending_events.lock().unwrap().push(events::Event::PaymentFailed {
-                                               payment_id,
-                                               payment_hash: payment.get().payment_hash().expect("PendingOutboundPayments::RetriesExceeded always has a payment hash set"),
-                                       });
-                                       payment.remove();
-                               }
-                       }
+               if let Some(payment_failed_ev) = self.pending_outbound_payments.abandon_payment(payment_id) {
+                       self.pending_events.lock().unwrap().push(payment_failed_ev);
                }
        }
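On the consuming side, per the updated docs above, a payment is only final once a terminal event arrives, regardless of when `abandon_payment` was called. A hedged sketch of that event handling, assuming the `lightning` crate paths shown:

use lightning::util::events::Event;

fn payment_is_resolved(event: &Event) -> bool {
	match event {
		// Succeeded, possibly despite an earlier abandon_payment call.
		Event::PaymentSent { .. } => true,
		// All HTLCs have irrevocably failed; safe to stop tracking the PaymentId.
		Event::PaymentFailed { .. } => true,
		_ => false,
	}
}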
 
@@ -2800,55 +2450,27 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        ///
        /// [`send_payment`]: Self::send_payment
        pub fn send_spontaneous_payment(&self, route: &Route, payment_preimage: Option<PaymentPreimage>, payment_id: PaymentId) -> Result<PaymentHash, PaymentSendFailure> {
-               let preimage = match payment_preimage {
-                       Some(p) => p,
-                       None => PaymentPreimage(self.keys_manager.get_secure_random_bytes()),
-               };
-               let payment_hash = PaymentHash(Sha256::hash(&preimage.0).into_inner());
-               let onion_session_privs = self.add_new_pending_payment(payment_hash, None, payment_id, &route)?;
-
-               match self.send_payment_internal(route, payment_hash, &None, Some(preimage), payment_id, None, onion_session_privs) {
-                       Ok(()) => Ok(payment_hash),
-                       Err(e) => Err(e)
-               }
+               let best_block_height = self.best_block.read().unwrap().height();
+               self.pending_outbound_payments.send_spontaneous_payment(route, payment_preimage, payment_id, &self.keys_manager, best_block_height,
+                       |path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
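The removed body above shows the keysend relationship that the new call preserves: the preimage is chosen (or randomly generated) up front and the returned `PaymentHash` is simply its SHA256, which is why the hash can be handed back to the caller immediately. Restated as a small sketch using the same `bitcoin::hashes` API as the surrounding code:

use bitcoin::hashes::{sha256::Hash as Sha256, Hash};

fn keysend_payment_hash(preimage: [u8; 32]) -> [u8; 32] {
	// PaymentHash(Sha256::hash(&preimage.0).into_inner()) in the removed code.
	Sha256::hash(&preimage).into_inner()
}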
 
        /// Send a payment that is probing the given route for liquidity. We calculate the
        /// [`PaymentHash`] of probes based on a static secret and a random [`PaymentId`], which allows
        /// us to easily discern them from real payments.
        pub fn send_probe(&self, hops: Vec<RouteHop>) -> Result<(PaymentHash, PaymentId), PaymentSendFailure> {
-               let payment_id = PaymentId(self.keys_manager.get_secure_random_bytes());
-
-               let payment_hash = self.probing_cookie_from_id(&payment_id);
-
-               if hops.len() < 2 {
-                       return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                               err: "No need probing a path with less than two hops".to_string()
-                       }))
-               }
-
-               let route = Route { paths: vec![hops], payment_params: None };
-               let onion_session_privs = self.add_new_pending_payment(payment_hash, None, payment_id, &route)?;
-
-               match self.send_payment_internal(&route, payment_hash, &None, None, payment_id, None, onion_session_privs) {
-                       Ok(()) => Ok((payment_hash, payment_id)),
-                       Err(e) => Err(e)
-               }
+               let best_block_height = self.best_block.read().unwrap().height();
+               self.pending_outbound_payments.send_probe(hops, self.probing_cookie_secret, &self.keys_manager, best_block_height,
+                       |path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv|
+                       self.send_payment_along_path(path, payment_params, payment_hash, payment_secret, total_value, cur_height, payment_id, keysend_preimage, session_priv))
        }
 
        /// Returns whether a payment with the given [`PaymentHash`] and [`PaymentId`] is, in fact, a
        /// payment probe.
+       #[cfg(test)]
        pub(crate) fn payment_is_probe(&self, payment_hash: &PaymentHash, payment_id: &PaymentId) -> bool {
-               let target_payment_hash = self.probing_cookie_from_id(payment_id);
-               target_payment_hash == *payment_hash
-       }
-
-       /// Returns the 'probing cookie' for the given [`PaymentId`].
-       fn probing_cookie_from_id(&self, payment_id: &PaymentId) -> PaymentHash {
-               let mut preimage = [0u8; 64];
-               preimage[..32].copy_from_slice(&self.probing_cookie_secret);
-               preimage[32..].copy_from_slice(&payment_id.0);
-               PaymentHash(Sha256::hash(&preimage).into_inner())
+               outbound_payment::payment_is_probe(payment_hash, payment_id, self.probing_cookie_secret)
        }
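The helper removed here documents how probes are recognized: the probe's `PaymentHash` is SHA256 over the node's `probing_cookie_secret` concatenated with the random `PaymentId`, so a returned failure can be re-checked without storing per-probe state. The derivation, restated (same logic as the removed `probing_cookie_from_id`, now in outbound_payment.rs):

use bitcoin::hashes::{sha256::Hash as Sha256, Hash};

fn probing_cookie(probing_cookie_secret: [u8; 32], payment_id: [u8; 32]) -> [u8; 32] {
	let mut preimage = [0u8; 64];
	preimage[..32].copy_from_slice(&probing_cookie_secret);
	preimage[32..].copy_from_slice(&payment_id);
	Sha256::hash(&preimage).into_inner()
}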
 
        /// Handles the generation of a funding transaction, optionally (for tests) with a function
@@ -3079,20 +2701,20 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let next_hop_scid = match self.channel_state.lock().unwrap().by_id.get(next_hop_channel_id) {
                        Some(chan) => {
                                if !chan.is_usable() {
-                                       return Err(APIError::APIMisuseError {
-                                               err: format!("Channel with id {:?} not fully established", next_hop_channel_id)
+                                       return Err(APIError::ChannelUnavailable {
+                                               err: format!("Channel with id {} not fully established", log_bytes!(*next_hop_channel_id))
                                        })
                                }
                                chan.get_short_channel_id().unwrap_or(chan.outbound_scid_alias())
                        },
-                       None => return Err(APIError::APIMisuseError {
-                               err: format!("Channel with id {:?} not found", next_hop_channel_id)
+                       None => return Err(APIError::ChannelUnavailable {
+                               err: format!("Channel with id {} not found", log_bytes!(*next_hop_channel_id))
                        })
                };
 
                let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
                        .ok_or_else(|| APIError::APIMisuseError {
-                               err: format!("Payment with intercept id {:?} not found", intercept_id.0)
+                               err: format!("Payment with intercept id {} not found", log_bytes!(intercept_id.0))
                        })?;
 
                let routing = match payment.forward_info.routing {
@@ -3127,7 +2749,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
                        .ok_or_else(|| APIError::APIMisuseError {
-                               err: format!("Payment with InterceptId {:?} not found", intercept_id)
+                               err: format!("Payment with intercept id {} not found", log_bytes!(intercept_id.0))
                        })?;
 
                if let PendingHTLCRouting::Forward { short_channel_id, .. } = payment.forward_info.routing {
@@ -3157,7 +2779,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let mut new_events = Vec::new();
                let mut failed_forwards = Vec::new();
                let mut phantom_receives: Vec<(u64, OutPoint, u128, Vec<(PendingHTLCInfo, u64)>)> = Vec::new();
-               let mut handle_errors = Vec::new();
                {
                        let mut forward_htlcs = HashMap::new();
                        mem::swap(&mut forward_htlcs, &mut self.forward_htlcs.lock().unwrap());
@@ -3273,8 +2894,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                        continue;
                                                },
                                                hash_map::Entry::Occupied(mut chan) => {
-                                                       let mut add_htlc_msgs = Vec::new();
-                                                       let mut fail_htlc_msgs = Vec::new();
                                                        for forward_info in pending_forwards.drain(..) {
                                                                match forward_info {
                                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
@@ -3293,34 +2912,21 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                        // Phantom payments are only PendingHTLCRouting::Receive.
                                                                                        phantom_shared_secret: None,
                                                                                });
-                                                                               match chan.get_mut().send_htlc(outgoing_amt_msat, payment_hash, outgoing_cltv_value, htlc_source.clone(), onion_packet, &self.logger) {
-                                                                                       Err(e) => {
-                                                                                               if let ChannelError::Ignore(msg) = e {
-                                                                                                       log_trace!(self.logger, "Failed to forward HTLC with payment_hash {}: {}", log_bytes!(payment_hash.0), msg);
-                                                                                               } else {
-                                                                                                       panic!("Stated return value requirements in send_htlc() were not met");
-                                                                                               }
-                                                                                               let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan.get());
-                                                                                               failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::reason(failure_code, data),
-                                                                                                       HTLCDestination::NextHopChannel { node_id: Some(chan.get().get_counterparty_node_id()), channel_id: forward_chan_id }
-                                                                                               ));
-                                                                                               continue;
-                                                                                       },
-                                                                                       Ok(update_add) => {
-                                                                                               match update_add {
-                                                                                                       Some(msg) => { add_htlc_msgs.push(msg); },
-                                                                                                       None => {
-                                                                                                               // Nothing to do here...we're waiting on a remote
-                                                                                                               // revoke_and_ack before we can add anymore HTLCs. The Channel
-                                                                                                               // will automatically handle building the update_add_htlc and
-                                                                                                               // commitment_signed messages when we can.
-                                                                                                               // TODO: Do some kind of timer to set the channel as !is_live()
-                                                                                                               // as we don't really want others relying on us relaying through
-                                                                                                               // this channel currently :/.
-                                                                                                       }
-                                                                                               }
+                                                                               if let Err(e) = chan.get_mut().queue_add_htlc(outgoing_amt_msat,
+                                                                                       payment_hash, outgoing_cltv_value, htlc_source.clone(),
+                                                                                       onion_packet, &self.logger)
+                                                                               {
+                                                                                       if let ChannelError::Ignore(msg) = e {
+                                                                                               log_trace!(self.logger, "Failed to forward HTLC with payment_hash {}: {}", log_bytes!(payment_hash.0), msg);
+                                                                                       } else {
+                                                                                               panic!("Stated return value requirements in send_htlc() were not met");
                                                                                        }
+                                                                                       let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan.get());
+                                                                                       failed_forwards.push((htlc_source, payment_hash,
+                                                                                               HTLCFailReason::reason(failure_code, data),
+                                                                                               HTLCDestination::NextHopChannel { node_id: Some(chan.get().get_counterparty_node_id()), channel_id: forward_chan_id }
+                                                                                       ));
+                                                                                       continue;
                                                                                }
                                                                        },
                                                                        HTLCForwardInfo::AddHTLC { .. } => {
@@ -3328,77 +2934,22 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                        },
                                                                        HTLCForwardInfo::FailHTLC { htlc_id, err_packet } => {
                                                                                log_trace!(self.logger, "Failing HTLC back to channel with short id {} (backward HTLC ID {}) after delay", short_chan_id, htlc_id);
-                                                                               match chan.get_mut().get_update_fail_htlc(htlc_id, err_packet, &self.logger) {
-                                                                                       Err(e) => {
-                                                                                               if let ChannelError::Ignore(msg) = e {
-                                                                                                       log_trace!(self.logger, "Failed to fail HTLC with ID {} backwards to short_id {}: {}", htlc_id, short_chan_id, msg);
-                                                                                               } else {
-                                                                                                       panic!("Stated return value requirements in get_update_fail_htlc() were not met");
-                                                                                               }
-                                                                                               // fail-backs are best-effort, we probably already have one
-                                                                                               // pending, and if not that's OK, if not, the channel is on
-                                                                                               // the chain and sending the HTLC-Timeout is their problem.
-                                                                                               continue;
-                                                                                       },
-                                                                                       Ok(Some(msg)) => { fail_htlc_msgs.push(msg); },
-                                                                                       Ok(None) => {
-                                                                                               // Nothing to do here...we're waiting on a remote
-                                                                                               // revoke_and_ack before we can update the commitment
-                                                                                               // transaction. The Channel will automatically handle
-                                                                                               // building the update_fail_htlc and commitment_signed
-                                                                                               // messages when we can.
-                                                                                               // We don't need any kind of timer here as they should fail
-                                                                                               // the channel onto the chain if they can't get our
-                                                                                               // update_fail_htlc in time, it's not our problem.
+                                                                               if let Err(e) = chan.get_mut().queue_fail_htlc(
+                                                                                       htlc_id, err_packet, &self.logger
+                                                                               ) {
+                                                                                       if let ChannelError::Ignore(msg) = e {
+                                                                                               log_trace!(self.logger, "Failed to fail HTLC with ID {} backwards to short_id {}: {}", htlc_id, short_chan_id, msg);
+                                                                                       } else {
+                                                                                               panic!("Stated return value requirements in queue_fail_htlc() were not met");
                                                                                        }
+                                                                                       // fail-backs are best-effort, we probably already have one
+                                                                                       // pending, and if not that's OK; if the fail-back doesn't go
+                                                                                       // through, the channel is on the chain and sending the
+                                                                                       // HTLC-Timeout is their problem.
+                                                                                       continue;
                                                                                }
                                                                        },
                                                                }
                                                        }
-
-                                                       if !add_htlc_msgs.is_empty() || !fail_htlc_msgs.is_empty() {
-                                                               let (commitment_msg, monitor_update) = match chan.get_mut().send_commitment(&self.logger) {
-                                                                       Ok(res) => res,
-                                                                       Err(e) => {
-                                                                               // We surely failed send_commitment due to bad keys, in that case
-                                                                               // close channel and then send error message to peer.
-                                                                               let counterparty_node_id = chan.get().get_counterparty_node_id();
-                                                                               let err: Result<(), _>  = match e {
-                                                                                       ChannelError::Ignore(_) | ChannelError::Warn(_) => {
-                                                                                               panic!("Stated return value requirements in send_commitment() were not met");
-                                                                                       }
-                                                                                       ChannelError::Close(msg) => {
-                                                                                               log_trace!(self.logger, "Closing channel {} due to Close-required error: {}", log_bytes!(chan.key()[..]), msg);
-                                                                                               let mut channel = remove_channel!(self, chan);
-                                                                                               // ChannelClosed event is generated by handle_error for us.
-                                                                                               Err(MsgHandleErrInternal::from_finish_shutdown(msg, channel.channel_id(), channel.get_user_id(), channel.force_shutdown(true), self.get_channel_update_for_broadcast(&channel).ok()))
-                                                                                       },
-                                                                               };
-                                                                               handle_errors.push((counterparty_node_id, err));
-                                                                               continue;
-                                                                       }
-                                                               };
-                                                               match self.chain_monitor.update_channel(chan.get().get_funding_txo().unwrap(), monitor_update) {
-                                                                       ChannelMonitorUpdateStatus::Completed => {},
-                                                                       e => {
-                                                                               handle_errors.push((chan.get().get_counterparty_node_id(), handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, true)));
-                                                                               continue;
-                                                                       }
-                                                               }
-                                                               log_debug!(self.logger, "Forwarding HTLCs resulted in a commitment update with {} HTLCs added and {} HTLCs failed for channel {}",
-                                                                       add_htlc_msgs.len(), fail_htlc_msgs.len(), log_bytes!(chan.get().channel_id()));
-                                                               channel_state.pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
-                                                                       node_id: chan.get().get_counterparty_node_id(),
-                                                                       updates: msgs::CommitmentUpdate {
-                                                                               update_add_htlcs: add_htlc_msgs,
-                                                                               update_fulfill_htlcs: Vec::new(),
-                                                                               update_fail_htlcs: fail_htlc_msgs,
-                                                                               update_fail_malformed_htlcs: Vec::new(),
-                                                                               update_fee: None,
-                                                                               commitment_signed: commitment_msg,
-                                                                       },
-                                                               });
-                                                       }
                                                }
                                        }
                                } else {
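With the per-batch `send_commitment` block removed above, queued adds and fails sit in the channel's holding cell until a later pass (`check_free_holding_cells`, called further down in this diff) turns them into a single commitment update per channel. A conceptual sketch of that batching, with simplified stand-in types rather than LDK's actual `Channel` internals:

    #[derive(Default)]
    struct HoldingCell {
        pending_adds: Vec<u64>,   // amounts of queued update_add_htlc messages
        pending_fails: Vec<u64>,  // htlc ids of queued update_fail_htlc messages
    }

    #[derive(Debug)]
    struct CommitmentUpdate { adds: usize, fails: usize }

    impl HoldingCell {
        // Later pass (analogous in spirit to check_free_holding_cells): if anything
        // was queued, emit one combined commitment update for the channel.
        fn free(&mut self) -> Option<CommitmentUpdate> {
            if self.pending_adds.is_empty() && self.pending_fails.is_empty() {
                return None;
            }
            Some(CommitmentUpdate {
                adds: self.pending_adds.drain(..).count(),
                fails: self.pending_fails.drain(..).count(),
            })
        }
    }

    fn main() {
        let mut cell = HoldingCell::default();
        cell.pending_adds.push(1_000);
        cell.pending_fails.push(42);
        // One update covers every HTLC queued since the last flush.
        println!("{:?}", cell.free());
        assert!(cell.free().is_none());
    }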
@@ -3438,9 +2989,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                                                                macro_rules! fail_htlc {
                                                                        ($htlc: expr, $payment_hash: expr) => {
-                                                                               let mut htlc_msat_height_data = byte_utils::be64_to_array($htlc.value).to_vec();
+                                                                               let mut htlc_msat_height_data = $htlc.value.to_be_bytes().to_vec();
                                                                                htlc_msat_height_data.extend_from_slice(
-                                                                                       &byte_utils::be32_to_array(self.best_block.read().unwrap().height()),
+                                                                                       &self.best_block.read().unwrap().height().to_be_bytes(),
                                                                                );
                                                                                failed_forwards.push((HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                                                                                short_channel_id: $htlc.prev_hop.short_channel_id,
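The `byte_utils` helpers are replaced by the standard library's `to_be_bytes`; the failure data layout (8-byte big-endian amount followed by the 4-byte big-endian block height) is unchanged. A small check of that encoding, with made-up values for illustration:

    fn main() {
        let htlc_value_msat: u64 = 1_000_000;
        let best_block_height: u32 = 750_000;

        // Same layout as the diff: big-endian amount, then big-endian height.
        let mut htlc_msat_height_data = htlc_value_msat.to_be_bytes().to_vec();
        htlc_msat_height_data.extend_from_slice(&best_block_height.to_be_bytes());

        assert_eq!(htlc_msat_height_data.len(), 12);
        assert_eq!(&htlc_msat_height_data[..8], &1_000_000u64.to_be_bytes());
        println!("{:02x?}", htlc_msat_height_data);
    }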
@@ -3463,15 +3014,19 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                                                                macro_rules! check_total_value {
                                                                        ($payment_data: expr, $payment_preimage: expr) => {{
-                                                                               let mut payment_received_generated = false;
+                                                                               let mut payment_claimable_generated = false;
                                                                                let purpose = || {
                                                                                        events::PaymentPurpose::InvoicePayment {
                                                                                                payment_preimage: $payment_preimage,
                                                                                                payment_secret: $payment_data.payment_secret,
                                                                                        }
                                                                                };
-                                                                               let mut claimable_htlcs = self.claimable_htlcs.lock().unwrap();
-                                                                               let (_, htlcs) = claimable_htlcs.entry(payment_hash)
+                                                                               let mut claimable_payments = self.claimable_payments.lock().unwrap();
+                                                                               if claimable_payments.pending_claiming_payments.contains_key(&payment_hash) {
+                                                                                       fail_htlc!(claimable_htlc, payment_hash);
+                                                                                       continue
+                                                                               }
+                                                                               let (_, htlcs) = claimable_payments.claimable_htlcs.entry(payment_hash)
                                                                                        .or_insert_with(|| (purpose(), Vec::new()));
                                                                                if htlcs.len() == 1 {
                                                                                        if let OnionPayload::Spontaneous(_) = htlcs[0].onion_payload {
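The new `pending_claiming_payments` check ensures that an HTLC arriving for a payment hash which is already being claimed is failed back instead of being added to the claimable set. A minimal model of the two-map scheme; the type and field names below are simplified stand-ins for `ClaimablePayments`:

    use std::collections::HashMap;

    type PaymentHash = [u8; 32];

    #[derive(Default)]
    struct ClaimablePaymentsModel {
        // Payments still accumulating HTLCs (hash -> received amounts).
        claimable_htlcs: HashMap<PaymentHash, Vec<u64>>,
        // Payments the user has already started claiming.
        pending_claiming_payments: HashMap<PaymentHash, u64>,
    }

    impl ClaimablePaymentsModel {
        // Returns true if the HTLC was accepted, false if it must be failed back
        // because a claim for this hash is already in flight.
        fn accept_htlc(&mut self, hash: PaymentHash, amt_msat: u64) -> bool {
            if self.pending_claiming_payments.contains_key(&hash) {
                return false;
            }
            self.claimable_htlcs.entry(hash).or_default().push(amt_msat);
            true
        }
    }

    fn main() {
        let mut payments = ClaimablePaymentsModel::default();
        let hash = [7u8; 32];
        assert!(payments.accept_htlc(hash, 500));
        // Claim begins: move the payment into the being-claimed map.
        let htlcs = payments.claimable_htlcs.remove(&hash).unwrap();
        payments.pending_claiming_payments.insert(hash, htlcs.iter().sum());
        // A late duplicate HTLC for the same hash is now rejected.
        assert!(!payments.accept_htlc(hash, 500));
    }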
@@ -3502,7 +3057,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                } else if total_value == $payment_data.total_msat {
                                                                                        let prev_channel_id = prev_funding_outpoint.to_channel_id();
                                                                                        htlcs.push(claimable_htlc);
-                                                                                       new_events.push(events::Event::PaymentReceived {
+                                                                                       new_events.push(events::Event::PaymentClaimable {
                                                                                                receiver_node_id: Some(receiver_node_id),
                                                                                                payment_hash,
                                                                                                purpose: purpose(),
@@ -3510,14 +3065,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                via_channel_id: Some(prev_channel_id),
                                                                                                via_user_channel_id: Some(prev_user_channel_id),
                                                                                        });
-                                                                                       payment_received_generated = true;
+                                                                                       payment_claimable_generated = true;
                                                                                } else {
                                                                                        // Nothing to do - we haven't reached the total
                                                                                        // payment value yet, wait until we receive more
                                                                                        // MPP parts.
                                                                                        htlcs.push(claimable_htlc);
                                                                                }
-                                                                               payment_received_generated
+                                                                               payment_claimable_generated
                                                                        }}
                                                                }
 
@@ -3543,12 +3098,17 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                check_total_value!(payment_data, payment_preimage);
                                                                                        },
                                                                                        OnionPayload::Spontaneous(preimage) => {
-                                                                                               match self.claimable_htlcs.lock().unwrap().entry(payment_hash) {
+                                                                                               let mut claimable_payments = self.claimable_payments.lock().unwrap();
+                                                                                               if claimable_payments.pending_claiming_payments.contains_key(&payment_hash) {
+                                                                                                       fail_htlc!(claimable_htlc, payment_hash);
+                                                                                                       continue
+                                                                                               }
+                                                                                               match claimable_payments.claimable_htlcs.entry(payment_hash) {
                                                                                                        hash_map::Entry::Vacant(e) => {
                                                                                                                let purpose = events::PaymentPurpose::SpontaneousPayment(preimage);
                                                                                                                e.insert((purpose.clone(), vec![claimable_htlc]));
                                                                                                                let prev_channel_id = prev_funding_outpoint.to_channel_id();
-                                                                                                               new_events.push(events::Event::PaymentReceived {
+                                                                                                               new_events.push(events::Event::PaymentClaimable {
                                                                                                                        receiver_node_id: Some(receiver_node_id),
                                                                                                                        payment_hash,
                                                                                                                        amount_msat: outgoing_amt_msat,
@@ -3580,8 +3140,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                log_bytes!(payment_hash.0), payment_data.total_msat, inbound_payment.get().min_value_msat.unwrap());
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                } else {
-                                                                                       let payment_received_generated = check_total_value!(payment_data, inbound_payment.get().payment_preimage);
-                                                                                       if payment_received_generated {
+                                                                                       let payment_claimable_generated = check_total_value!(payment_data, inbound_payment.get().payment_preimage);
+                                                                                       if payment_claimable_generated {
                                                                                                inbound_payment.remove_entry();
                                                                                        }
                                                                                }
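With `PaymentReceived` renamed to `PaymentClaimable`, downstream event handlers match on the new variant and then call `ChannelManager::claim_funds` with the preimage. A usage sketch, assuming a dependency on the `lightning` crate at a revision that includes this rename; module paths and field names are taken from this diff and may differ in other releases:

    use lightning::ln::PaymentPreimage;
    use lightning::util::events::{Event, PaymentPurpose};

    // Decide whether an incoming PaymentClaimable can be claimed immediately,
    // returning the preimage to hand to ChannelManager::claim_funds.
    fn preimage_to_claim(event: &Event) -> Option<PaymentPreimage> {
        if let Event::PaymentClaimable { purpose, .. } = event {
            if let PaymentPurpose::InvoicePayment { payment_preimage, .. } = purpose {
                // Preimage is only known to LDK for payments it generated itself.
                return *payment_preimage;
            }
            if let PaymentPurpose::SpontaneousPayment(preimage) = purpose {
                return Some(*preimage);
            }
        }
        None
    }

In an event handler, a `Some` return would be passed straight to `channel_manager.claim_funds(preimage)`; a `None` for an invoice payment means the preimage must be looked up out of band before claiming.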
@@ -3602,9 +3162,11 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                }
                self.forward_htlcs(&mut phantom_receives);
 
-               for (counterparty_node_id, err) in handle_errors.drain(..) {
-                       let _ = handle_error!(self, err, counterparty_node_id);
-               }
+               // Freeing the holding cell here is relatively redundant - in practice we'll do it when we
+               // next get a `get_and_clear_pending_msg_events` call, but some tests rely on it, and it's
+               // nice to do the work now if we can rather than while we're trying to get messages in the
+               // network stack.
+               self.check_free_holding_cells();
 
                if new_events.is_empty() { return }
                let mut events = self.pending_events.lock().unwrap();
@@ -3642,59 +3204,24 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                self.process_background_events();
        }
 
-       fn update_channel_fee(&self, pending_msg_events: &mut Vec<events::MessageSendEvent>, chan_id: &[u8; 32], chan: &mut Channel<<K::Target as KeysInterface>::Signer>, new_feerate: u32) -> (bool, NotifyOption, Result<(), MsgHandleErrInternal>) {
-               if !chan.is_outbound() { return (true, NotifyOption::SkipPersist, Ok(())); }
+       fn update_channel_fee(&self, chan_id: &[u8; 32], chan: &mut Channel<<K::Target as KeysInterface>::Signer>, new_feerate: u32) -> NotifyOption {
+               if !chan.is_outbound() { return NotifyOption::SkipPersist; }
                // If the feerate has decreased by less than half, don't bother
                if new_feerate <= chan.get_feerate() && new_feerate * 2 > chan.get_feerate() {
                        log_trace!(self.logger, "Channel {} does not qualify for a feerate change from {} to {}.",
                                log_bytes!(chan_id[..]), chan.get_feerate(), new_feerate);
-                       return (true, NotifyOption::SkipPersist, Ok(()));
+                       return NotifyOption::SkipPersist;
                }
                if !chan.is_live() {
                        log_trace!(self.logger, "Channel {} does not qualify for a feerate change from {} to {} as it cannot currently be updated (probably the peer is disconnected).",
                                log_bytes!(chan_id[..]), chan.get_feerate(), new_feerate);
-                       return (true, NotifyOption::SkipPersist, Ok(()));
+                       return NotifyOption::SkipPersist;
                }
                log_trace!(self.logger, "Channel {} qualifies for a feerate change from {} to {}.",
                        log_bytes!(chan_id[..]), chan.get_feerate(), new_feerate);
 
-               let mut retain_channel = true;
-               let res = match chan.send_update_fee_and_commit(new_feerate, &self.logger) {
-                       Ok(res) => Ok(res),
-                       Err(e) => {
-                               let (drop, res) = convert_chan_err!(self, e, chan, chan_id);
-                               if drop { retain_channel = false; }
-                               Err(res)
-                       }
-               };
-               let ret_err = match res {
-                       Ok(Some((update_fee, commitment_signed, monitor_update))) => {
-                               match self.chain_monitor.update_channel(chan.get_funding_txo().unwrap(), monitor_update) {
-                                       ChannelMonitorUpdateStatus::Completed => {
-                                               pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
-                                                       node_id: chan.get_counterparty_node_id(),
-                                                       updates: msgs::CommitmentUpdate {
-                                                               update_add_htlcs: Vec::new(),
-                                                               update_fulfill_htlcs: Vec::new(),
-                                                               update_fail_htlcs: Vec::new(),
-                                                               update_fail_malformed_htlcs: Vec::new(),
-                                                               update_fee: Some(update_fee),
-                                                               commitment_signed,
-                                                       },
-                                               });
-                                               Ok(())
-                                       },
-                                       e => {
-                                               let (res, drop) = handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, chan_id, COMMITMENT_UPDATE_ONLY);
-                                               if drop { retain_channel = false; }
-                                               res
-                                       }
-                               }
-                       },
-                       Ok(None) => Ok(()),
-                       Err(e) => Err(e),
-               };
-               (retain_channel, NotifyOption::DoPersist, ret_err)
+               chan.queue_update_fee(new_feerate, &self.logger);
+               NotifyOption::DoPersist
        }
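`update_channel_fee` now only decides whether a fee update is warranted and queues it; the gating condition it keeps can be read as a pure predicate. A stand-alone version of that check, with illustrative function and parameter names:

    // Returns true if a fee update should be queued for a channel. Mirrors the
    // check kept in update_channel_fee: only outbound, live channels update their
    // fee, and a decrease of less than half the current feerate is ignored.
    fn should_queue_fee_update(is_outbound: bool, is_live: bool, current: u32, new: u32) -> bool {
        if !is_outbound || !is_live {
            return false;
        }
        !(new <= current && new * 2 > current)
    }

    fn main() {
        assert!(!should_queue_fee_update(true, true, 1000, 900));  // small decrease: skip
        assert!(should_queue_fee_update(true, true, 1000, 400));   // halved or more: update
        assert!(should_queue_fee_update(true, true, 1000, 1500));  // any increase: update
        assert!(!should_queue_fee_update(false, true, 1000, 400)); // not outbound: never
    }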
 
        #[cfg(fuzzing)]
@@ -3708,64 +3235,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                        let new_feerate = self.fee_estimator.bounded_sat_per_1000_weight(ConfirmationTarget::Normal);
 
-                       let mut handle_errors = Vec::new();
-                       {
-                               let mut channel_state_lock = self.channel_state.lock().unwrap();
-                               let channel_state = &mut *channel_state_lock;
-                               let pending_msg_events = &mut channel_state.pending_msg_events;
-                               channel_state.by_id.retain(|chan_id, chan| {
-                                       let (retain_channel, chan_needs_persist, err) = self.update_channel_fee(pending_msg_events, chan_id, chan, new_feerate);
-                                       if chan_needs_persist == NotifyOption::DoPersist { should_persist = NotifyOption::DoPersist; }
-                                       if err.is_err() {
-                                               handle_errors.push(err);
-                                       }
-                                       retain_channel
-                               });
+                       let mut channel_state = self.channel_state.lock().unwrap();
+                       for (chan_id, chan) in channel_state.by_id.iter_mut() {
+                               let chan_needs_persist = self.update_channel_fee(chan_id, chan, new_feerate);
+                               if chan_needs_persist == NotifyOption::DoPersist { should_persist = NotifyOption::DoPersist; }
                        }
 
                        should_persist
                });
        }
 
-       fn remove_stale_resolved_payments(&self) {
-               // If an outbound payment was completed, and no pending HTLCs remain, we should remove it
-               // from the map. However, if we did that immediately when the last payment HTLC is claimed,
-               // this could race the user making a duplicate send_payment call and our idempotency
-               // guarantees would be violated. Instead, we wait a few timer ticks to do the actual
-               // removal. This should be more than sufficient to ensure the idempotency of any
-               // `send_payment` calls that were made at the same time the `PaymentSent` event was being
-               // processed.
-               let mut pending_outbound_payments = self.pending_outbound_payments.lock().unwrap();
-               let pending_events = self.pending_events.lock().unwrap();
-               pending_outbound_payments.retain(|payment_id, payment| {
-                       if let PendingOutboundPayment::Fulfilled { session_privs, timer_ticks_without_htlcs, .. } = payment {
-                               let mut no_remaining_entries = session_privs.is_empty();
-                               if no_remaining_entries {
-                                       for ev in pending_events.iter() {
-                                               match ev {
-                                                       events::Event::PaymentSent { payment_id: Some(ev_payment_id), .. } |
-                                                       events::Event::PaymentPathSuccessful { payment_id: ev_payment_id, .. } |
-                                                       events::Event::PaymentPathFailed { payment_id: Some(ev_payment_id), .. } => {
-                                                               if payment_id == ev_payment_id {
-                                                                       no_remaining_entries = false;
-                                                                       break;
-                                                               }
-                                                       },
-                                                       _ => {},
-                                               }
-                                       }
-                               }
-                               if no_remaining_entries {
-                                       *timer_ticks_without_htlcs += 1;
-                                       *timer_ticks_without_htlcs <= IDEMPOTENCY_TIMEOUT_TICKS
-                               } else {
-                                       *timer_ticks_without_htlcs = 0;
-                                       true
-                               }
-                       } else { true }
-               });
-       }
-
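The helper removed here moves onto `OutboundPayments` (invoked later in this diff as `remove_stale_resolved_payments`), and its idempotency window is unchanged: a fulfilled payment is only dropped once it has had no outstanding HTLCs and no matching pending events for several timer ticks. A simplified stand-alone model of that retention rule; the constant value and types below are assumptions for illustration:

    // Assumed value for illustration; LDK defines its own IDEMPOTENCY_TIMEOUT_TICKS.
    const IDEMPOTENCY_TIMEOUT_TICKS: u8 = 7;

    struct FulfilledPayment {
        remaining_htlcs: usize,
        has_pending_event: bool,
        timer_ticks_without_htlcs: u8,
    }

    // Called once per timer tick; returns true if the entry should be retained.
    fn retain_on_tick(payment: &mut FulfilledPayment) -> bool {
        if payment.remaining_htlcs == 0 && !payment.has_pending_event {
            payment.timer_ticks_without_htlcs += 1;
            payment.timer_ticks_without_htlcs <= IDEMPOTENCY_TIMEOUT_TICKS
        } else {
            payment.timer_ticks_without_htlcs = 0;
            true
        }
    }

    fn main() {
        let mut payment = FulfilledPayment {
            remaining_htlcs: 0,
            has_pending_event: false,
            timer_ticks_without_htlcs: 0,
        };
        let mut ticks: u8 = 0;
        while retain_on_tick(&mut payment) { ticks += 1; }
        // The entry survives the full idempotency window before being removed,
        // so a duplicate send_payment call in that window is still rejected.
        assert_eq!(ticks, IDEMPOTENCY_TIMEOUT_TICKS);
    }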
        /// Performs actions which should happen on startup and roughly once per minute thereafter.
        ///
        /// This currently includes:
@@ -3785,20 +3264,15 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                        let new_feerate = self.fee_estimator.bounded_sat_per_1000_weight(ConfirmationTarget::Normal);
 
-                       let mut handle_errors = Vec::new();
+                       let mut handle_errors: Vec<(Result<(), _>, _)> = Vec::new();
                        let mut timed_out_mpp_htlcs = Vec::new();
                        {
                                let mut channel_state_lock = self.channel_state.lock().unwrap();
                                let channel_state = &mut *channel_state_lock;
                                let pending_msg_events = &mut channel_state.pending_msg_events;
                                channel_state.by_id.retain(|chan_id, chan| {
-                                       let counterparty_node_id = chan.get_counterparty_node_id();
-                                       let (retain_channel, chan_needs_persist, err) = self.update_channel_fee(pending_msg_events, chan_id, chan, new_feerate);
+                                       let chan_needs_persist = self.update_channel_fee(chan_id, chan, new_feerate);
                                        if chan_needs_persist == NotifyOption::DoPersist { should_persist = NotifyOption::DoPersist; }
-                                       if err.is_err() {
-                                               handle_errors.push((err, counterparty_node_id));
-                                       }
-                                       if !retain_channel { return false; }
 
                                        if let Err(e) = chan.timer_check_closing_negotiation_progress() {
                                                let (needs_close, err) = convert_chan_err!(self, e, chan, chan_id);
@@ -3838,7 +3312,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                });
                        }
 
-                       self.claimable_htlcs.lock().unwrap().retain(|payment_hash, (_, htlcs)| {
+                       self.claimable_payments.lock().unwrap().claimable_htlcs.retain(|payment_hash, (_, htlcs)| {
                                if htlcs.is_empty() {
                                        // This should be unreachable
                                        debug_assert!(false);
@@ -3871,19 +3345,26 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                let _ = handle_error!(self, err, counterparty_node_id);
                        }
 
-                       self.remove_stale_resolved_payments();
+                       self.pending_outbound_payments.remove_stale_resolved_payments(&self.pending_events);
+
+                       // Technically we don't need to do this here, but if we have holding cell entries in a
+                       // channel that need freeing, it's better to do that here and block a background task
+                       // than block the message queueing pipeline.
+                       if self.check_free_holding_cells() {
+                               should_persist = NotifyOption::DoPersist;
+                       }
 
                        should_persist
                });
        }
 
        /// Indicates that the preimage for payment_hash is unknown or the received amount is incorrect
-       /// after a PaymentReceived event, failing the HTLC back to its origin and freeing resources
+       /// after a PaymentClaimable event, failing the HTLC back to its origin and freeing resources
        /// along the path (including in our own channel on which we received it).
        ///
        /// Note that in some cases around unclean shutdown, it is possible the payment may have
        /// already been claimed by you via [`ChannelManager::claim_funds`] prior to you seeing (a
-       /// second copy of) the [`events::Event::PaymentReceived`] event. Alternatively, the payment
+       /// second copy of) the [`events::Event::PaymentClaimable`] event. Alternatively, the payment
        /// may have already been failed automatically by LDK if it was nearing its expiration time.
        ///
        /// While LDK will never claim a payment automatically on your behalf (i.e. without you calling
@@ -3893,12 +3374,11 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        pub fn fail_htlc_backwards(&self, payment_hash: &PaymentHash) {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
-               let removed_source = self.claimable_htlcs.lock().unwrap().remove(payment_hash);
+               let removed_source = self.claimable_payments.lock().unwrap().claimable_htlcs.remove(payment_hash);
                if let Some((_, mut sources)) = removed_source {
                        for htlc in sources.drain(..) {
-                               let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
-                               htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
-                                               self.best_block.read().unwrap().height()));
+                               let mut htlc_msat_height_data = htlc.value.to_be_bytes().to_vec();
+                               htlc_msat_height_data.extend_from_slice(&self.best_block.read().unwrap().height().to_be_bytes());
                                let source = HTLCSource::PreviousHopData(htlc.prev_hop);
                                let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
                                let receiver = HTLCDestination::FailedPayment { payment_hash: *payment_hash };
@@ -3998,91 +3478,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                // being fully configured. See the docs for `ChannelManagerReadArgs` for more.
                match source {
                        HTLCSource::OutboundRoute { ref path, ref session_priv, ref payment_id, ref payment_params, .. } => {
-                               let mut session_priv_bytes = [0; 32];
-                               session_priv_bytes.copy_from_slice(&session_priv[..]);
-                               let mut outbounds = self.pending_outbound_payments.lock().unwrap();
-                               let mut all_paths_failed = false;
-                               let mut full_failure_ev = None;
-                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(*payment_id) {
-                                       if !payment.get_mut().remove(&session_priv_bytes, Some(&path)) {
-                                               log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
-                                               return;
-                                       }
-                                       if payment.get().is_fulfilled() {
-                                               log_trace!(self.logger, "Received failure of HTLC with payment_hash {} after payment completion", log_bytes!(payment_hash.0));
-                                               return;
-                                       }
-                                       if payment.get().remaining_parts() == 0 {
-                                               all_paths_failed = true;
-                                               if payment.get().abandoned() {
-                                                       full_failure_ev = Some(events::Event::PaymentFailed {
-                                                               payment_id: *payment_id,
-                                                               payment_hash: payment.get().payment_hash().expect("PendingOutboundPayments::RetriesExceeded always has a payment hash set"),
-                                                       });
-                                                       payment.remove();
-                                               }
-                                       }
-                               } else {
-                                       log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
-                                       return;
-                               }
-                               let mut retry = if let Some(payment_params_data) = payment_params {
-                                       let path_last_hop = path.last().expect("Outbound payments must have had a valid path");
-                                       Some(RouteParameters {
-                                               payment_params: payment_params_data.clone(),
-                                               final_value_msat: path_last_hop.fee_msat,
-                                               final_cltv_expiry_delta: path_last_hop.cltv_expiry_delta,
-                                       })
-                               } else { None };
-                               log_trace!(self.logger, "Failing outbound payment HTLC with payment_hash {}", log_bytes!(payment_hash.0));
-
-                               let path_failure = {
-#[cfg(test)]
-                                       let (network_update, short_channel_id, payment_retryable, onion_error_code, onion_error_data) = onion_error.decode_onion_failure(&self.secp_ctx, &self.logger, &source);
-#[cfg(not(test))]
-                                       let (network_update, short_channel_id, payment_retryable, _, _) = onion_error.decode_onion_failure(&self.secp_ctx, &self.logger, &source);
-
-                                       if self.payment_is_probe(payment_hash, &payment_id) {
-                                               if !payment_retryable {
-                                                       events::Event::ProbeSuccessful {
-                                                               payment_id: *payment_id,
-                                                               payment_hash: payment_hash.clone(),
-                                                               path: path.clone(),
-                                                       }
-                                               } else {
-                                                       events::Event::ProbeFailed {
-                                                               payment_id: *payment_id,
-                                                               payment_hash: payment_hash.clone(),
-                                                               path: path.clone(),
-                                                               short_channel_id,
-                                                       }
-                                               }
-                                       } else {
-                                               // TODO: If we decided to blame ourselves (or one of our channels) in
-                                               // process_onion_failure we should close that channel as it implies our
-                                               // next-hop is needlessly blaming us!
-                                               if let Some(scid) = short_channel_id {
-                                                       retry.as_mut().map(|r| r.payment_params.previously_failed_channels.push(scid));
-                                               }
-                                               events::Event::PaymentPathFailed {
-                                                       payment_id: Some(*payment_id),
-                                                       payment_hash: payment_hash.clone(),
-                                                       payment_failed_permanently: !payment_retryable,
-                                                       network_update,
-                                                       all_paths_failed,
-                                                       path: path.clone(),
-                                                       short_channel_id,
-                                                       retry,
-                                                       #[cfg(test)]
-                                                       error_code: onion_error_code,
-                                                       #[cfg(test)]
-                                                       error_data: onion_error_data
-                                               }
-                                       }
-                               };
-                               let mut pending_events = self.pending_events.lock().unwrap();
-                               pending_events.push(path_failure);
-                               if let Some(ev) = full_failure_ev { pending_events.push(ev); }
+                               self.pending_outbound_payments.fail_htlc(source, payment_hash, onion_error, path, session_priv, payment_id, payment_params, self.probing_cookie_secret, &self.secp_ctx, &self.pending_events, &self.logger);
                        },
                        HTLCSource::PreviousHopData(HTLCPreviousHopData { ref short_channel_id, ref htlc_id, ref incoming_packet_shared_secret, ref phantom_shared_secret, ref outpoint }) => {
                                log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with {:?}", log_bytes!(payment_hash.0), onion_error);
@@ -4116,7 +3512,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                }
        }
 
-       /// Provides a payment preimage in response to [`Event::PaymentReceived`], generating any
+       /// Provides a payment preimage in response to [`Event::PaymentClaimable`], generating any
        /// [`MessageSendEvent`]s needed to claim the payment.
        ///
        /// Note that calling this method does *not* guarantee that the payment has been claimed. You
@@ -4124,151 +3520,159 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// provided to your [`EventHandler`] when [`process_pending_events`] is next called.
        ///
        /// Note that if you did not set an `amount_msat` when calling [`create_inbound_payment`] or
-       /// [`create_inbound_payment_for_hash`] you must check that the amount in the `PaymentReceived`
+       /// [`create_inbound_payment_for_hash`] you must check that the amount in the `PaymentClaimable`
        /// event matches your expectation. If you fail to do so and call this method, you may provide
        /// the sender "proof-of-payment" when they did not fulfill the full expected payment.
        ///
-       /// [`Event::PaymentReceived`]: crate::util::events::Event::PaymentReceived
+       /// [`Event::PaymentClaimable`]: crate::util::events::Event::PaymentClaimable
        /// [`Event::PaymentClaimed`]: crate::util::events::Event::PaymentClaimed
        /// [`process_pending_events`]: EventsProvider::process_pending_events
        /// [`create_inbound_payment`]: Self::create_inbound_payment
        /// [`create_inbound_payment_for_hash`]: Self::create_inbound_payment_for_hash
-       /// [`get_and_clear_pending_msg_events`]: MessageSendEventsProvider::get_and_clear_pending_msg_events
        pub fn claim_funds(&self, payment_preimage: PaymentPreimage) {
                let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner());
 
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
-               let removed_source = self.claimable_htlcs.lock().unwrap().remove(&payment_hash);
-               if let Some((payment_purpose, mut sources)) = removed_source {
-                       assert!(!sources.is_empty());
-
-                       // If we are claiming an MPP payment, we have to take special care to ensure that each
-                       // channel exists before claiming all of the payments (inside one lock).
-                       // Note that channel existance is sufficient as we should always get a monitor update
-                       // which will take care of the real HTLC claim enforcement.
-                       //
-                       // If we find an HTLC which we would need to claim but for which we do not have a
-                       // channel, we will fail all parts of the MPP payment. While we could wait and see if
-                       // the sender retries the already-failed path(s), it should be a pretty rare case where
-                       // we got all the HTLCs and then a channel closed while we were waiting for the user to
-                       // provide the preimage, so worrying too much about the optimal handling isn't worth
-                       // it.
-                       let mut claimable_amt_msat = 0;
-                       let mut expected_amt_msat = None;
-                       let mut valid_mpp = true;
-                       let mut errs = Vec::new();
-                       let mut claimed_any_htlcs = false;
-                       let mut channel_state_lock = self.channel_state.lock().unwrap();
-                       let channel_state = &mut *channel_state_lock;
-                       let mut receiver_node_id = Some(self.our_network_pubkey);
-                       for htlc in sources.iter() {
-                               let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
-                                       Some((_cp_id, chan_id)) => chan_id.clone(),
-                                       None => {
-                                               valid_mpp = false;
+               let mut sources = {
+                       let mut claimable_payments = self.claimable_payments.lock().unwrap();
+                       if let Some((payment_purpose, sources)) = claimable_payments.claimable_htlcs.remove(&payment_hash) {
+                               let mut receiver_node_id = self.our_network_pubkey;
+                               for htlc in sources.iter() {
+                                       if htlc.prev_hop.phantom_shared_secret.is_some() {
+                                               let phantom_pubkey = self.keys_manager.get_node_id(Recipient::PhantomNode)
+                                                       .expect("Failed to get node_id for phantom node recipient");
+                                               receiver_node_id = phantom_pubkey;
                                                break;
                                        }
-                               };
+                               }
 
-                               if let None = channel_state.by_id.get(&chan_id) {
-                                       valid_mpp = false;
-                                       break;
+                               let dup_purpose = claimable_payments.pending_claiming_payments.insert(payment_hash,
+                                       ClaimingPayment { amount_msat: sources.iter().map(|source| source.value).sum(),
+                                       payment_purpose, receiver_node_id,
+                               });
+                               if dup_purpose.is_some() {
+                                       debug_assert!(false, "Shouldn't get a duplicate pending claim event ever");
+                                       log_error!(self.logger, "Got a duplicate pending claimable event on payment hash {}! Please report this bug",
+                                               log_bytes!(payment_hash.0));
                                }
+                               sources
+                       } else { return; }
+               };
+               debug_assert!(!sources.is_empty());
 
-                               if expected_amt_msat.is_some() && expected_amt_msat != Some(htlc.total_msat) {
-                                       log_error!(self.logger, "Somehow ended up with an MPP payment with different total amounts - this should not be reachable!");
-                                       debug_assert!(false);
+               // If we are claiming an MPP payment, we check that all channels which contain a claimable
+               // HTLC still exist. While this isn't guaranteed to remain true if a channel closes while
+               // we're claiming (or even after we claim, before the commitment update dance completes),
+               // it should be a relatively rare race, and we'd rather just reject the payment than claim
+               // HTLCs that require us to go on-chain (and lose the on-chain fee to do so).
+               //
+               // Note that we'll still always get our funds - as long as the generated
+               // `ChannelMonitorUpdate` makes it out to the relevant monitor we can claim on-chain.
+               //
+               // If we find an HTLC which we would need to claim but for which we do not have a
+               // channel, we will fail all parts of the MPP payment. While we could wait and see if
+               // the sender retries the already-failed path(s), it should be a pretty rare case where
+               // we got all the HTLCs and then a channel closed while we were waiting for the user to
+               // provide the preimage, so worrying too much about the optimal handling isn't worth
+               // it.
+               let mut claimable_amt_msat = 0;
+               let mut expected_amt_msat = None;
+               let mut valid_mpp = true;
+               let mut errs = Vec::new();
+               let mut channel_state = Some(self.channel_state.lock().unwrap());
+               for htlc in sources.iter() {
+                       let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
+                               Some((_cp_id, chan_id)) => chan_id.clone(),
+                               None => {
                                        valid_mpp = false;
                                        break;
                                }
-                               expected_amt_msat = Some(htlc.total_msat);
-                               if let OnionPayload::Spontaneous(_) = &htlc.onion_payload {
-                                       // We don't currently support MPP for spontaneous payments, so just check
-                                       // that there's one payment here and move on.
-                                       if sources.len() != 1 {
-                                               log_error!(self.logger, "Somehow ended up with an MPP spontaneous payment - this should not be reachable!");
-                                               debug_assert!(false);
-                                               valid_mpp = false;
-                                               break;
-                                       }
-                               }
-                               let phantom_shared_secret = htlc.prev_hop.phantom_shared_secret;
-                               if phantom_shared_secret.is_some() {
-                                       let phantom_pubkey = self.keys_manager.get_node_id(Recipient::PhantomNode)
-                                               .expect("Failed to get node_id for phantom node recipient");
-                                       receiver_node_id = Some(phantom_pubkey)
-                               }
+                       };
 
-                               claimable_amt_msat += htlc.value;
-                       }
-                       if sources.is_empty() || expected_amt_msat.is_none() {
-                               log_info!(self.logger, "Attempted to claim an incomplete payment which no longer had any available HTLCs!");
-                               return;
+                       if channel_state.as_ref().unwrap().by_id.get(&chan_id).is_none() {
+                               valid_mpp = false;
+                               break;
                        }
-                       if claimable_amt_msat != expected_amt_msat.unwrap() {
-                               log_info!(self.logger, "Attempted to claim an incomplete payment, expected {} msat, had {} available to claim.",
-                                       expected_amt_msat.unwrap(), claimable_amt_msat);
-                               return;
+
+                       if expected_amt_msat.is_some() && expected_amt_msat != Some(htlc.total_msat) {
+                               log_error!(self.logger, "Somehow ended up with an MPP payment with different total amounts - this should not be reachable!");
+                               debug_assert!(false);
+                               valid_mpp = false;
+                               break;
                        }
-                       if valid_mpp {
-                               for htlc in sources.drain(..) {
-                                       match self.claim_funds_from_hop(&mut channel_state_lock, htlc.prev_hop, payment_preimage) {
-                                               ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) => {
-                                                       if let msgs::ErrorAction::IgnoreError = err.err.action {
-                                                               // We got a temporary failure updating monitor, but will claim the
-                                                               // HTLC when the monitor updating is restored (or on chain).
-                                                               log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
-                                                               claimed_any_htlcs = true;
-                                                       } else { errs.push((pk, err)); }
-                                               },
-                                               ClaimFundsFromHop::PrevHopForceClosed => unreachable!("We already checked for channel existence, we can't fail here!"),
-                                               ClaimFundsFromHop::DuplicateClaim => {
-                                                       // While we should never get here in most cases, if we do, it likely
-                                                       // indicates that the HTLC was timed out some time ago and is no longer
-                                                       // available to be claimed. Thus, it does not make sense to set
-                                                       // `claimed_any_htlcs`.
-                                               },
-                                               ClaimFundsFromHop::Success(_) => claimed_any_htlcs = true,
-                                       }
+                       expected_amt_msat = Some(htlc.total_msat);
+                       if let OnionPayload::Spontaneous(_) = &htlc.onion_payload {
+                               // We don't currently support MPP for spontaneous payments, so just check
+                               // that there's one payment here and move on.
+                               if sources.len() != 1 {
+                                       log_error!(self.logger, "Somehow ended up with an MPP spontaneous payment - this should not be reachable!");
+                                       debug_assert!(false);
+                                       valid_mpp = false;
+                                       break;
                                }
                        }
-                       mem::drop(channel_state_lock);
-                       if !valid_mpp {
-                               for htlc in sources.drain(..) {
-                                       let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
-                                       htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
-                                               self.best_block.read().unwrap().height()));
-                                       let source = HTLCSource::PreviousHopData(htlc.prev_hop);
-                                       let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
-                                       let receiver = HTLCDestination::FailedPayment { payment_hash };
-                                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
+
+                       claimable_amt_msat += htlc.value;
+               }
+               if sources.is_empty() || expected_amt_msat.is_none() {
+                       mem::drop(channel_state);
+                       self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash);
+                       log_info!(self.logger, "Attempted to claim an incomplete payment which no longer had any available HTLCs!");
+                       return;
+               }
+               if claimable_amt_msat != expected_amt_msat.unwrap() {
+                       mem::drop(channel_state);
+                       self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash);
+                       log_info!(self.logger, "Attempted to claim an incomplete payment, expected {} msat, had {} available to claim.",
+                               expected_amt_msat.unwrap(), claimable_amt_msat);
+                       return;
+               }
+               if valid_mpp {
+                       for htlc in sources.drain(..) {
+                               if channel_state.is_none() { channel_state = Some(self.channel_state.lock().unwrap()); }
+                               if let Err((pk, err)) = self.claim_funds_from_hop(channel_state.take().unwrap(), htlc.prev_hop,
+                                       payment_preimage,
+                                       |_| Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash }))
+                               {
+                                       if let msgs::ErrorAction::IgnoreError = err.err.action {
+                                               // We got a temporary failure updating monitor, but will claim the
+                                               // HTLC when the monitor updating is restored (or on chain).
+                                               log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
+                                       } else { errs.push((pk, err)); }
                                }
                        }
-
-                       if claimed_any_htlcs {
-                               self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
-                                       receiver_node_id,
-                                       payment_hash,
-                                       purpose: payment_purpose,
-                                       amount_msat: claimable_amt_msat,
-                               });
+               }
+               mem::drop(channel_state);
+               if !valid_mpp {
+                       for htlc in sources.drain(..) {
+                               let mut htlc_msat_height_data = htlc.value.to_be_bytes().to_vec();
+                               htlc_msat_height_data.extend_from_slice(&self.best_block.read().unwrap().height().to_be_bytes());
+                               let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                               let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                               let receiver = HTLCDestination::FailedPayment { payment_hash };
+                               self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                        }
+                       self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash);
+               }
 
-                       // Now we can handle any errors which were generated.
-                       for (counterparty_node_id, err) in errs.drain(..) {
-                               let res: Result<(), _> = Err(err);
-                               let _ = handle_error!(self, res, counterparty_node_id);
-                       }
+               // Now we can handle any errors which were generated.
+               for (counterparty_node_id, err) in errs.drain(..) {
+                       let res: Result<(), _> = Err(err);
+                       let _ = handle_error!(self, res, counterparty_node_id);
                }
        }
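
As a usage sketch for the flow described in the `claim_funds` docs above (not part of this patch): the amount in the `PaymentClaimable` event should be verified before handing the preimage to `claim_funds`. The `Event::PaymentClaimable` and `PaymentPurpose` field names below are assumptions based on this release's events module and should be checked against the actual definitions.

    use lightning::ln::PaymentPreimage;
    use lightning::util::events::{Event, PaymentPurpose};

    // Hypothetical helper: return the preimage to pass to `ChannelManager::claim_funds`, or None
    // if the claimable amount is below expectations or the preimage is not held by LDK.
    fn preimage_to_claim(event: &Event, expected_amt_msat: u64) -> Option<PaymentPreimage> {
        match event {
            Event::PaymentClaimable { amount_msat, purpose, .. } => {
                // If no amount was set when registering the payment, it MUST be checked here,
                // otherwise we may hand out proof-of-payment to an underpaying sender.
                if *amount_msat < expected_amt_msat { return None; }
                match purpose {
                    PaymentPurpose::InvoicePayment { payment_preimage, .. } => *payment_preimage,
                    PaymentPurpose::SpontaneousPayment(preimage) => Some(*preimage),
                }
            },
            _ => None,
        }
    }

The payment is only fully locked in once a subsequent `Event::PaymentClaimed` is observed, as the documentation above notes.
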
 
-       fn claim_funds_from_hop(&self, channel_state_lock: &mut MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage) -> ClaimFundsFromHop {
+       fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>) -> Option<MonitorUpdateCompletionAction>>(&self,
+               mut channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>,
+               prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage, completion_action: ComplFunc)
+       -> Result<(), (PublicKey, MsgHandleErrInternal)> {
                //TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
                let chan_id = prev_hop.outpoint.to_channel_id();
-               let channel_state = &mut **channel_state_lock;
+               let channel_state = &mut *channel_state_lock;
                if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(chan_id) {
+                       let counterparty_node_id = chan.get().get_counterparty_node_id();
                        match chan.get_mut().get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &self.logger) {
                                Ok(msgs_monitor_option) => {
                                        if let UpdateFulfillCommitFetch::NewClaim { msgs, htlc_value_msat, monitor_update } = msgs_monitor_option {
@@ -4278,11 +3682,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                log_given_level!(self.logger, if e == ChannelMonitorUpdateStatus::PermanentFailure { Level::Error } else { Level::Debug },
                                                                        "Failed to update channel monitor with preimage {:?}: {:?}",
                                                                        payment_preimage, e);
-                                                               return ClaimFundsFromHop::MonitorUpdateFail(
-                                                                       chan.get().get_counterparty_node_id(),
-                                                                       handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, msgs.is_some()).unwrap_err(),
-                                                                       Some(htlc_value_msat)
-                                                               );
+                                                               let err = handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, msgs.is_some()).unwrap_err();
+                                                               mem::drop(channel_state_lock);
+                                                               self.handle_monitor_update_completion_actions(completion_action(Some(htlc_value_msat)));
+                                                               return Err((counterparty_node_id, err));
                                                        }
                                                }
                                                if let Some((msg, commitment_signed)) = msgs {
@@ -4300,157 +3703,98 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                }
                                                        });
                                                }
-                                               return ClaimFundsFromHop::Success(htlc_value_msat);
+                                               mem::drop(channel_state_lock);
+                                               self.handle_monitor_update_completion_actions(completion_action(Some(htlc_value_msat)));
+                                               Ok(())
                                        } else {
-                                               return ClaimFundsFromHop::DuplicateClaim;
+                                               Ok(())
                                        }
                                },
                                Err((e, monitor_update)) => {
                                        match self.chain_monitor.update_channel(chan.get().get_funding_txo().unwrap(), monitor_update) {
                                                ChannelMonitorUpdateStatus::Completed => {},
                                                e => {
+                                                       // TODO: This needs to be handled somehow - if we receive a monitor update
+                                                       // with a preimage we *must* somehow manage to propagate it to the upstream
+                                                       // channel, or we must have an ability to receive the same update and try
+                                                       // again on restart.
                                                        log_given_level!(self.logger, if e == ChannelMonitorUpdateStatus::PermanentFailure { Level::Error } else { Level::Info },
                                                                "Failed to update channel monitor with preimage {:?} immediately prior to force-close: {:?}",
                                                                payment_preimage, e);
                                                },
                                        }
-                                       let counterparty_node_id = chan.get().get_counterparty_node_id();
                                        let (drop, res) = convert_chan_err!(self, e, chan.get_mut(), &chan_id);
                                        if drop {
                                                chan.remove_entry();
                                        }
-                                       return ClaimFundsFromHop::MonitorUpdateFail(counterparty_node_id, res, None);
+                                       mem::drop(channel_state_lock);
+                                       self.handle_monitor_update_completion_actions(completion_action(None));
+                                       Err((counterparty_node_id, res))
                                },
                        }
-               } else { return ClaimFundsFromHop::PrevHopForceClosed }
-       }
-
-       fn finalize_claims(&self, mut sources: Vec<HTLCSource>) {
-               let mut outbounds = self.pending_outbound_payments.lock().unwrap();
-               let mut pending_events = self.pending_events.lock().unwrap();
-               for source in sources.drain(..) {
-                       if let HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } = source {
-                               let mut session_priv_bytes = [0; 32];
-                               session_priv_bytes.copy_from_slice(&session_priv[..]);
-                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(payment_id) {
-                                       assert!(payment.get().is_fulfilled());
-                                       if payment.get_mut().remove(&session_priv_bytes, None) {
-                                               pending_events.push(
-                                                       events::Event::PaymentPathSuccessful {
-                                                               payment_id,
-                                                               payment_hash: payment.get().payment_hash(),
-                                                               path,
-                                                       }
-                                               );
-                                       }
-                               }
+               } else {
+                       let preimage_update = ChannelMonitorUpdate {
+                               update_id: CLOSED_CHANNEL_UPDATE_ID,
+                               updates: vec![ChannelMonitorUpdateStep::PaymentPreimage {
+                                       payment_preimage,
+                               }],
+                       };
+                       // We update the ChannelMonitor on the backward link, after
+                       // receiving an `update_fulfill_htlc` from the forward link.
+                       let update_res = self.chain_monitor.update_channel(prev_hop.outpoint, preimage_update);
+                       if update_res != ChannelMonitorUpdateStatus::Completed {
+                               // TODO: This needs to be handled somehow - if we receive a monitor update
+                               // with a preimage we *must* somehow manage to propagate it to the upstream
+                               // channel, or we must have an ability to receive the same event and try
+                               // again on restart.
+                               log_error!(self.logger, "Critical error: failed to update channel monitor with preimage {:?}: {:?}",
+                                       payment_preimage, update_res);
                        }
+                       mem::drop(channel_state_lock);
+                       // Note that we do process the completion action here. This totally could be a
+                       // duplicate claim, but we have no way of knowing without interrogating the
+                       // `ChannelMonitor` we've provided the above update to. Instead, note that `Event`s are
+                       // always allowed to be duplicative (and this is specifically noted in
+                       // `PaymentForwarded`).
+                       self.handle_monitor_update_completion_actions(completion_action(None));
+                       Ok(())
                }
        }
 
-       fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_id: [u8; 32]) {
+       fn finalize_claims(&self, sources: Vec<HTLCSource>) {
+               self.pending_outbound_payments.finalize_claims(sources, &self.pending_events);
+       }
+
+       fn claim_funds_internal(&self, channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_id: [u8; 32]) {
                match source {
                        HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } => {
                                mem::drop(channel_state_lock);
-                               let mut session_priv_bytes = [0; 32];
-                               session_priv_bytes.copy_from_slice(&session_priv[..]);
-                               let mut outbounds = self.pending_outbound_payments.lock().unwrap();
-                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(payment_id) {
-                                       let mut pending_events = self.pending_events.lock().unwrap();
-                                       if !payment.get().is_fulfilled() {
-                                               let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner());
-                                               let fee_paid_msat = payment.get().get_pending_fee_msat();
-                                               pending_events.push(
-                                                       events::Event::PaymentSent {
-                                                               payment_id: Some(payment_id),
-                                                               payment_preimage,
-                                                               payment_hash,
-                                                               fee_paid_msat,
-                                                       }
-                                               );
-                                               payment.get_mut().mark_fulfilled();
-                                       }
-
-                                       if from_onchain {
-                                               // We currently immediately remove HTLCs which were fulfilled on-chain.
-                                               // This could potentially lead to removing a pending payment too early,
-                                               // with a reorg of one block causing us to re-add the fulfilled payment on
-                                               // restart.
-                                               // TODO: We should have a second monitor event that informs us of payments
-                                               // irrevocably fulfilled.
-                                               if payment.get_mut().remove(&session_priv_bytes, Some(&path)) {
-                                                       let payment_hash = Some(PaymentHash(Sha256::hash(&payment_preimage.0).into_inner()));
-                                                       pending_events.push(
-                                                               events::Event::PaymentPathSuccessful {
-                                                                       payment_id,
-                                                                       payment_hash,
-                                                                       path,
-                                                               }
-                                                       );
-                                               }
-                                       }
-                               } else {
-                                       log_trace!(self.logger, "Received duplicative fulfill for HTLC with payment_preimage {}", log_bytes!(payment_preimage.0));
-                               }
+                               self.pending_outbound_payments.claim_htlc(payment_id, payment_preimage, session_priv, path, from_onchain, &self.pending_events, &self.logger);
                        },
                        HTLCSource::PreviousHopData(hop_data) => {
                                let prev_outpoint = hop_data.outpoint;
-                               let res = self.claim_funds_from_hop(&mut channel_state_lock, hop_data, payment_preimage);
-                               let claimed_htlc = if let ClaimFundsFromHop::DuplicateClaim = res { false } else { true };
-                               let htlc_claim_value_msat = match res {
-                                       ClaimFundsFromHop::MonitorUpdateFail(_, _, amt_opt) => amt_opt,
-                                       ClaimFundsFromHop::Success(amt) => Some(amt),
-                                       _ => None,
-                               };
-                               if let ClaimFundsFromHop::PrevHopForceClosed = res {
-                                       let preimage_update = ChannelMonitorUpdate {
-                                               update_id: CLOSED_CHANNEL_UPDATE_ID,
-                                               updates: vec![ChannelMonitorUpdateStep::PaymentPreimage {
-                                                       payment_preimage: payment_preimage.clone(),
-                                               }],
-                                       };
-                                       // We update the ChannelMonitor on the backward link, after
-                                       // receiving an offchain preimage event from the forward link (the
-                                       // event being update_fulfill_htlc).
-                                       let update_res = self.chain_monitor.update_channel(prev_outpoint, preimage_update);
-                                       if update_res != ChannelMonitorUpdateStatus::Completed {
-                                               // TODO: This needs to be handled somehow - if we receive a monitor update
-                                               // with a preimage we *must* somehow manage to propagate it to the upstream
-                                               // channel, or we must have an ability to receive the same event and try
-                                               // again on restart.
-                                               log_error!(self.logger, "Critical error: failed to update channel monitor with preimage {:?}: {:?}",
-                                                       payment_preimage, update_res);
-                                       }
-                                       // Note that we do *not* set `claimed_htlc` to false here. In fact, this
-                                       // totally could be a duplicate claim, but we have no way of knowing
-                                       // without interrogating the `ChannelMonitor` we've provided the above
-                                       // update to. Instead, we simply document in `PaymentForwarded` that this
-                                       // can happen.
-                               }
-                               mem::drop(channel_state_lock);
-                               if let ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) = res {
+                               let res = self.claim_funds_from_hop(channel_state_lock, hop_data, payment_preimage,
+                                       |htlc_claim_value_msat| {
+                                               if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
+                                                       let fee_earned_msat = if let Some(claimed_htlc_value) = htlc_claim_value_msat {
+                                                               Some(claimed_htlc_value - forwarded_htlc_value)
+                                                       } else { None };
+
+                                                       let prev_channel_id = Some(prev_outpoint.to_channel_id());
+                                                       let next_channel_id = Some(next_channel_id);
+
+                                                       Some(MonitorUpdateCompletionAction::EmitEvent { event: events::Event::PaymentForwarded {
+                                                               fee_earned_msat,
+                                                               claim_from_onchain_tx: from_onchain,
+                                                               prev_channel_id,
+                                                               next_channel_id,
+                                                       }})
+                                               } else { None }
+                                       });
+                               if let Err((pk, err)) = res {
                                        let result: Result<(), _> = Err(err);
                                        let _ = handle_error!(self, result, pk);
                                }
-
-                               if claimed_htlc {
-                                       if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
-                                               let fee_earned_msat = if let Some(claimed_htlc_value) = htlc_claim_value_msat {
-                                                       Some(claimed_htlc_value - forwarded_htlc_value)
-                                               } else { None };
-
-                                               let mut pending_events = self.pending_events.lock().unwrap();
-                                               let prev_channel_id = Some(prev_outpoint.to_channel_id());
-                                               let next_channel_id = Some(next_channel_id);
-
-                                               pending_events.push(events::Event::PaymentForwarded {
-                                                       fee_earned_msat,
-                                                       claim_from_onchain_tx: from_onchain,
-                                                       prev_channel_id,
-                                                       next_channel_id,
-                                               });
-                                       }
-                               }
                        },
                }
        }
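
The `fee_earned_msat` reported above is simply the difference between the value claimed on the inbound (previous-hop) HTLC and the value forwarded on the outbound leg; a minimal illustration with hypothetical numbers:

    // Hypothetical forward: 1_000 msat received on the inbound HTLC, 990 msat sent onward.
    let htlc_claim_value_msat: Option<u64> = Some(1_000); // None when the claimed value is unknown
    let forwarded_htlc_value_msat: u64 = 990;
    let fee_earned_msat = htlc_claim_value_msat.map(|claimed| claimed - forwarded_htlc_value_msat);
    assert_eq!(fee_earned_msat, Some(10)); // surfaced via Event::PaymentForwarded
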
@@ -4460,6 +3804,24 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                self.our_network_pubkey.clone()
        }
 
+       fn handle_monitor_update_completion_actions<I: IntoIterator<Item=MonitorUpdateCompletionAction>>(&self, actions: I) {
+               for action in actions.into_iter() {
+                       match action {
+                               MonitorUpdateCompletionAction::PaymentClaimed { payment_hash } => {
+                                       let payment = self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash);
+                                       if let Some(ClaimingPayment { amount_msat, payment_purpose: purpose, receiver_node_id }) = payment {
+                                               self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
+                                                       payment_hash, purpose, amount_msat, receiver_node_id: Some(receiver_node_id),
+                                               });
+                                       }
+                               },
+                               MonitorUpdateCompletionAction::EmitEvent { event } => {
+                                       self.pending_events.lock().unwrap().push(event);
+                               },
+                       }
+               }
+       }
+
        /// Handles a channel reentering a functional state, either due to reconnect or a monitor
        /// update completion.
        fn handle_channel_resumption(&self, pending_msg_events: &mut Vec<MessageSendEvent>,
@@ -4746,7 +4108,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        if chan.get().get_counterparty_node_id() != *counterparty_node_id {
                                                return Err(MsgHandleErrInternal::send_err_msg_no_close("Got a message for a channel from the wrong node!".to_owned(), msg.temporary_channel_id));
                                        }
-                                       (try_chan_entry!(self, chan.get_mut().funding_created(msg, best_block, &self.logger), chan), chan.remove())
+                                       (try_chan_entry!(self, chan.get_mut().funding_created(msg, best_block, &self.keys_manager, &self.logger), chan), chan.remove())
                                },
                                hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.temporary_channel_id))
                        }
@@ -4817,7 +4179,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        if chan.get().get_counterparty_node_id() != *counterparty_node_id {
                                                return Err(MsgHandleErrInternal::send_err_msg_no_close("Got a message for a channel from the wrong node!".to_owned(), msg.channel_id));
                                        }
-                                       let (monitor, funding_tx, channel_ready) = match chan.get_mut().funding_signed(&msg, best_block, &self.logger) {
+                                       let (monitor, funding_tx, channel_ready) = match chan.get_mut().funding_signed(&msg, best_block, &self.keys_manager, &self.logger) {
                                                Ok(update) => update,
                                                Err(e) => try_chan_entry!(self, Err(e), chan),
                                        };
@@ -5498,11 +4860,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Check the holding cell in each channel and free any pending HTLCs in them if possible.
        /// Returns whether there were any updates such as if pending HTLCs were freed or a monitor
        /// update was applied.
-       ///
-       /// This should only apply to HTLCs which were added to the holding cell because we were
-       /// waiting on a monitor update to finish. In that case, we don't want to free the holding cell
-       /// directly in `channel_monitor_updated` as it may introduce deadlocks calling back into user
-       /// code to inform them of a channel monitor update.
        fn check_free_holding_cells(&self) -> bool {
                let mut has_monitor_update = false;
                let mut failed_htlcs = Vec::new();
@@ -5678,8 +5035,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// This differs from [`create_inbound_payment_for_hash`] only in that it generates the
        /// [`PaymentHash`] and [`PaymentPreimage`] for you.
        ///
-       /// The [`PaymentPreimage`] will ultimately be returned to you in the [`PaymentReceived`], which
-       /// will have the [`PaymentReceived::payment_preimage`] field filled in. That should then be
+       /// The [`PaymentPreimage`] will ultimately be returned to you in the [`PaymentClaimable`] event,
+       /// which will have the [`PaymentClaimable::payment_preimage`] field filled in. That should then be
        /// passed directly to [`claim_funds`].
        ///
        /// See [`create_inbound_payment_for_hash`] for detailed documentation on behavior and requirements.
@@ -5695,8 +5052,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Errors if `min_value_msat` is greater than total bitcoin supply.
        ///
        /// [`claim_funds`]: Self::claim_funds
-       /// [`PaymentReceived`]: events::Event::PaymentReceived
-       /// [`PaymentReceived::payment_preimage`]: events::Event::PaymentReceived::payment_preimage
+       /// [`PaymentClaimable`]: events::Event::PaymentClaimable
+       /// [`PaymentClaimable::payment_preimage`]: events::Event::PaymentClaimable::payment_preimage
        /// [`create_inbound_payment_for_hash`]: Self::create_inbound_payment_for_hash
        pub fn create_inbound_payment(&self, min_value_msat: Option<u64>, invoice_expiry_delta_secs: u32) -> Result<(PaymentHash, PaymentSecret), ()> {
                inbound_payment::create(&self.inbound_payment_key, min_value_msat, invoice_expiry_delta_secs, &self.keys_manager, self.highest_seen_timestamp.load(Ordering::Acquire) as u64)
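
A brief usage sketch (not part of the patch), assuming `channel_manager` is an already-initialized `ChannelManager`: register an inbound payment of at least 10,000 msat whose payment data is valid for one hour.

    // The returned hash/secret pair would then be embedded in a BOLT 11 invoice for the payer.
    let (payment_hash, payment_secret) = channel_manager
        .create_inbound_payment(Some(10_000), 3600)
        .expect("min_value_msat is far below the total bitcoin supply");
    let _ = (payment_hash, payment_secret);
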
@@ -5722,7 +5079,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Gets a [`PaymentSecret`] for a given [`PaymentHash`], for which the payment preimage is
        /// stored external to LDK.
        ///
-       /// A [`PaymentReceived`] event will only be generated if the [`PaymentSecret`] matches a
+       /// A [`PaymentClaimable`] event will only be generated if the [`PaymentSecret`] matches a
        /// payment secret fetched via this method or [`create_inbound_payment`], and only if the amount
        /// received is at least the `min_value_msat` provided here, if one is provided.
        ///
@@ -5732,7 +5089,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        ///
        /// `min_value_msat` should be set if the invoice being generated contains a value. Any payment
        /// received for the returned [`PaymentHash`] will be required to be at least `min_value_msat`
-       /// before a [`PaymentReceived`] event will be generated, ensuring that we do not provide the
+       /// before a [`PaymentClaimable`] event will be generated, ensuring that we do not provide the
        /// sender "proof-of-payment" unless they have paid the required amount.
        ///
        /// `invoice_expiry_delta_secs` describes the number of seconds that the invoice is valid for
@@ -5743,9 +5100,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        ///
        /// Note that we use block header time to time-out pending inbound payments (with some margin
        /// to compensate for the inaccuracy of block header timestamps). Thus, in practice we will
-       /// accept a payment and generate a [`PaymentReceived`] event for some time after the expiry.
+       /// accept a payment and generate a [`PaymentClaimable`] event for some time after the expiry.
        /// If you need exact expiry semantics, you should enforce them upon receipt of
-       /// [`PaymentReceived`].
+       /// [`PaymentClaimable`].
        ///
        /// Note that invoices generated for inbound payments should have their `min_final_cltv_expiry`
        /// set to at least [`MIN_FINAL_CLTV_EXPIRY`].
@@ -5761,7 +5118,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Errors if `min_value_msat` is greater than total bitcoin supply.
        ///
        /// [`create_inbound_payment`]: Self::create_inbound_payment
-       /// [`PaymentReceived`]: events::Event::PaymentReceived
+       /// [`PaymentClaimable`]: events::Event::PaymentClaimable
        pub fn create_inbound_payment_for_hash(&self, payment_hash: PaymentHash, min_value_msat: Option<u64>, invoice_expiry_delta_secs: u32) -> Result<PaymentSecret, ()> {
                inbound_payment::create_from_hash(&self.inbound_payment_key, min_value_msat, payment_hash, invoice_expiry_delta_secs, self.highest_seen_timestamp.load(Ordering::Acquire) as u64)
        }
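
Similarly, a hedged sketch of the externally-stored-preimage flow, again assuming an initialized `channel_manager`; the `[42; 32]` hash is a placeholder for a hash whose preimage the caller actually holds elsewhere:

    use lightning::ln::PaymentHash;

    // Hypothetical payment hash whose preimage is stored outside LDK.
    let external_payment_hash = PaymentHash([42; 32]);
    let payment_secret = channel_manager
        .create_inbound_payment_for_hash(external_payment_hash, Some(10_000), 3600)
        .expect("min_value_msat is far below the total bitcoin supply");
    // Once the corresponding PaymentClaimable event fires, the preimage is looked up by
    // payment_hash and passed to claim_funds.
    let _ = payment_secret;
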
@@ -5839,7 +5196,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let mut inflight_htlcs = InFlightHtlcs::new();
 
                for chan in self.channel_state.lock().unwrap().by_id.values() {
-                       for htlc_source in chan.inflight_htlc_sources() {
+                       for (htlc_source, _) in chan.inflight_htlc_sources() {
                                if let HTLCSource::OutboundRoute { path, .. } = htlc_source {
                                        inflight_htlcs.process_path(path, self.get_our_node_id());
                                }
@@ -5857,14 +5214,20 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                events.into_inner()
        }
 
+       #[cfg(test)]
+       pub fn pop_pending_event(&self) -> Option<events::Event> {
+               let mut events = self.pending_events.lock().unwrap();
+               if events.is_empty() { None } else { Some(events.remove(0)) }
+       }
+
        #[cfg(test)]
        pub fn has_pending_payments(&self) -> bool {
-               !self.pending_outbound_payments.lock().unwrap().is_empty()
+               self.pending_outbound_payments.has_pending_payments()
        }
 
        #[cfg(test)]
        pub fn clear_pending_payments(&self) {
-               self.pending_outbound_payments.lock().unwrap().clear()
+               self.pending_outbound_payments.clear_pending_payments()
        }
 
        /// Processes any events asynchronously in the order they were generated since the last call
@@ -6206,15 +5569,15 @@ where
                }
 
                if let Some(height) = height_opt {
-                       self.claimable_htlcs.lock().unwrap().retain(|payment_hash, (_, htlcs)| {
+                       self.claimable_payments.lock().unwrap().claimable_htlcs.retain(|payment_hash, (_, htlcs)| {
                                htlcs.retain(|htlc| {
                                        // If height is approaching the number of blocks we think it takes us to get
                                        // our commitment transaction confirmed before the HTLC expires, plus the
                                        // number of blocks we generally consider it to take to do a commitment update,
                                        // just give up on it and fail the HTLC.
                                        if height >= htlc.cltv_expiry - HTLC_FAIL_BACK_BUFFER {
-                                               let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
-                                               htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(height));
+                                               let mut htlc_msat_height_data = htlc.value.to_be_bytes().to_vec();
+                                               htlc_msat_height_data.extend_from_slice(&height.to_be_bytes());
 
                                                timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(),
                                                        HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
@@ -6987,30 +6350,6 @@ impl_writeable_tlv_based!(PendingInboundPayment, {
        (8, min_value_msat, required),
 });
 
-impl_writeable_tlv_based_enum_upgradable!(PendingOutboundPayment,
-       (0, Legacy) => {
-               (0, session_privs, required),
-       },
-       (1, Fulfilled) => {
-               (0, session_privs, required),
-               (1, payment_hash, option),
-               (3, timer_ticks_without_htlcs, (default_value, 0)),
-       },
-       (2, Retryable) => {
-               (0, session_privs, required),
-               (1, pending_fee_msat, option),
-               (2, payment_hash, required),
-               (4, payment_secret, option),
-               (6, total_msat, required),
-               (8, pending_amt_msat, required),
-               (10, starting_block_height, required),
-       },
-       (3, Abandoned) => {
-               (0, session_privs, required),
-               (2, payment_hash, required),
-       },
-);
-
 impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelManager<M, T, K, F, L>
        where M::Target: chain::Watch<<K::Target as KeysInterface>::Signer>,
         T::Target: BroadcasterInterface,
@@ -7061,12 +6400,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelMana
                }
 
                let pending_inbound_payments = self.pending_inbound_payments.lock().unwrap();
-               let claimable_htlcs = self.claimable_htlcs.lock().unwrap();
-               let pending_outbound_payments = self.pending_outbound_payments.lock().unwrap();
+               let claimable_payments = self.claimable_payments.lock().unwrap();
+               let pending_outbound_payments = self.pending_outbound_payments.pending_outbound_payments.lock().unwrap();
 
                let mut htlc_purposes: Vec<&events::PaymentPurpose> = Vec::new();
-               (claimable_htlcs.len() as u64).write(writer)?;
-               for (payment_hash, (purpose, previous_hops)) in claimable_htlcs.iter() {
+               (claimable_payments.claimable_htlcs.len() as u64).write(writer)?;
+               for (payment_hash, (purpose, previous_hops)) in claimable_payments.claimable_htlcs.iter() {
                        payment_hash.write(writer)?;
                        (previous_hops.len() as u64).write(writer)?;
                        for htlc in previous_hops.iter() {
@@ -7151,10 +6490,21 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelMana
                if our_pending_intercepts.len() != 0 {
                        pending_intercepted_htlcs = Some(our_pending_intercepts);
                }
+
+               let mut pending_claiming_payments = Some(&claimable_payments.pending_claiming_payments);
+               if pending_claiming_payments.as_ref().unwrap().is_empty() {
+                       // LDK versions prior to 0.0.113 do not know how to read the pending claimed payments
+                       // map. Thus, if there are no entries we skip writing a TLV for it.
+                       pending_claiming_payments = None;
+               } else {
+                       debug_assert!(false, "While we have code to serialize pending_claiming_payments, the map should always be empty until a later PR");
+               }
+
                write_tlv_fields!(writer, {
                        (1, pending_outbound_payments_no_retry, required),
                        (2, pending_intercepted_htlcs, option),
                        (3, pending_outbound_payments, required),
+                       (4, pending_claiming_payments, option),
                        (5, self.our_network_pubkey, required),
                        (7, self.fake_scid_rand_bytes, required),
                        (9, htlc_purposes, vec_type),
@@ -7340,6 +6690,25 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                                user_channel_id: channel.get_user_id(),
                                                reason: ClosureReason::OutdatedChannelManager
                                        });
+                                       for (channel_htlc_source, payment_hash) in channel.inflight_htlc_sources() {
+                                               let mut found_htlc = false;
+                                               for (monitor_htlc_source, _) in monitor.get_all_current_outbound_htlcs() {
+                                                       if *channel_htlc_source == monitor_htlc_source { found_htlc = true; break; }
+                                               }
+                                               if !found_htlc {
+                                                       // If we have some HTLCs in the channel which are not present in the newer
+                                                       // ChannelMonitor, they have been removed and should be failed back to
+                                                       // ensure we don't forget them entirely. Note that if the missing HTLC(s)
+                                                       // were actually claimed we'd have generated the previous-hop claim's
+                                                       // ChannelMonitor update and ensured it was persisted prior to persisting
+                                                       // the ChannelMonitor update for the forward leg, so attempting to fail the
+                                                       // backwards leg of the HTLC will simply be rejected.
+                                                       log_info!(args.logger,
+                                                               "Failing HTLC with hash {} as it is missing in the ChannelMonitor for channel {} but was present in the (stale) ChannelManager",
+                                                               log_bytes!(payment_hash.0), log_bytes!(channel.channel_id()));
+                                                       failed_htlcs.push((channel_htlc_source.clone(), *payment_hash, channel.get_counterparty_node_id(), channel.channel_id()));
+                                               }
+                                       }
                                } else {
                                        log_info!(args.logger, "Successfully loaded channel {}", log_bytes!(channel.channel_id()));
                                        if let Some(short_channel_id) = channel.get_short_channel_id() {
@@ -7420,16 +6789,6 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                None => continue,
                        }
                }
-               if forward_htlcs_count > 0 {
-                       // If we have pending HTLCs to forward, assume we either dropped a
-                       // `PendingHTLCsForwardable` or the user received it but never processed it as they
-                       // shut down before the timer hit. Either way, set the time_forwardable to a small
-                       // constant as enough time has likely passed that we should simply handle the forwards
-                       // now, or at least after the user gets a chance to reconnect to our peers.
-                       pending_events_read.push(events::Event::PendingHTLCsForwardable {
-                               time_forwardable: Duration::from_secs(2),
-                       });
-               }
 
                let background_event_count: u64 = Readable::read(reader)?;
                let mut pending_background_events_read: Vec<BackgroundEvent> = Vec::with_capacity(cmp::min(background_event_count as usize, MAX_ALLOC_SIZE/mem::size_of::<BackgroundEvent>()));
@@ -7472,10 +6831,12 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                let mut fake_scid_rand_bytes: Option<[u8; 32]> = None;
                let mut probing_cookie_secret: Option<[u8; 32]> = None;
                let mut claimable_htlc_purposes = None;
+               let mut pending_claiming_payments = Some(HashMap::new());
                read_tlv_fields!(reader, {
                        (1, pending_outbound_payments_no_retry, option),
                        (2, pending_intercepted_htlcs, option),
                        (3, pending_outbound_payments, option),
+                       (4, pending_claiming_payments, option),
                        (5, received_network_pubkey, option),
                        (7, fake_scid_rand_bytes, option),
                        (9, claimable_htlc_purposes, vec_type),
@@ -7540,10 +6901,58 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                                        }
                                                }
                                        }
+                                       for (htlc_source, htlc) in monitor.get_all_current_outbound_htlcs() {
+                                               if let HTLCSource::PreviousHopData(prev_hop_data) = htlc_source {
+                                                       let pending_forward_matches_htlc = |info: &PendingAddHTLCInfo| {
+                                                               info.prev_funding_outpoint == prev_hop_data.outpoint &&
+                                                                       info.prev_htlc_id == prev_hop_data.htlc_id
+                                                       };
+                                                       // The ChannelMonitor is now responsible for this HTLC's
+                                                       // failure/success and will let us know what its outcome is. If we
+                                                       // still have an entry for this HTLC in `forward_htlcs` or
+                                                       // `pending_intercepted_htlcs`, we were apparently not persisted after
+                                                       // the monitor was persisted when forwarding the payment, so drop the
+                                                       // stale entry here.
+                                                       forward_htlcs.retain(|_, forwards| {
+                                                               forwards.retain(|forward| {
+                                                                       if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {
+                                                                               if pending_forward_matches_htlc(&htlc_info) {
+                                                                                       log_info!(args.logger, "Removing pending to-forward HTLC with hash {} as it was forwarded to the closed channel {}",
+                                                                                               log_bytes!(htlc.payment_hash.0), log_bytes!(monitor.get_funding_txo().0.to_channel_id()));
+                                                                                       false
+                                                                               } else { true }
+                                                                       } else { true }
+                                                               });
+                                                               !forwards.is_empty()
+                                                       });
+                                                       pending_intercepted_htlcs.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
+                                                               if pending_forward_matches_htlc(&htlc_info) {
+                                                                       log_info!(args.logger, "Removing pending intercepted HTLC with hash {} as it was forwarded to the closed channel {}",
+                                                                               log_bytes!(htlc.payment_hash.0), log_bytes!(monitor.get_funding_txo().0.to_channel_id()));
+                                                                       pending_events_read.retain(|event| {
+                                                                               if let Event::HTLCIntercepted { intercept_id: ev_id, .. } = event {
+                                                                                       intercepted_id != ev_id
+                                                                               } else { true }
+                                                                       });
+                                                                       false
+                                                               } else { true }
+                                                       });
+                                               }
+                                       }
                                }
                        }
                }
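The block above prunes `forward_htlcs` with nested `retain` calls: matching forwards are dropped from each bucket, and any bucket left empty is dropped from the map entirely. A self-contained sketch of the same pattern on plain data (not LDK types):

    use std::collections::HashMap;

    // Remove every value matching `is_stale` from each bucket, then drop buckets
    // that become empty, mirroring the `forward_htlcs` pruning above.
    fn prune(map: &mut HashMap<u64, Vec<u64>>, is_stale: impl Fn(&u64) -> bool) {
        map.retain(|_, bucket| {
            bucket.retain(|v| !is_stale(v));
            !bucket.is_empty()
        });
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert(1u64, vec![10u64, 11]);
        map.insert(2, vec![20]);
        prune(&mut map, |v| *v == 20);
        assert!(!map.contains_key(&2)); // the now-empty bucket is gone entirely
    }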
 
+               if !forward_htlcs.is_empty() {
+                       // If we have pending HTLCs to forward, assume we either dropped a
+                       // `PendingHTLCsForwardable` or the user received it but never processed it as they
+                       // shut down before the timer hit. Either way, set the time_forwardable to a small
+                       // constant as enough time has likely passed that we should simply handle the forwards
+                       // now, or at least after the user gets a chance to reconnect to our peers.
+                       pending_events_read.push(events::Event::PendingHTLCsForwardable {
+                               time_forwardable: Duration::from_secs(2),
+                       });
+               }
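Note the trigger also changes: the removed block keyed off the serialized `forward_htlcs_count`, while the re-added one checks the map contents after the stale entries above have been pruned, so the event only fires when something is actually left to forward. When it does fire, the consumer is expected to call back into the manager after roughly `time_forwardable`. A minimal sketch of such a handler, where the closure stands in for a call like `ChannelManager::process_pending_htlc_forwards` and the scheduling mechanism is entirely the application's choice:

    use std::thread;
    use std::time::Duration;

    // Wait at least `time_forwardable`, then run one batched forwarding pass.
    // A spawned thread is the simplest possible scheduler; a real application
    // would hook this into whatever timer or executor it already runs.
    fn schedule_forwards(time_forwardable: Duration, forward: impl FnOnce() + Send + 'static) {
        thread::spawn(move || {
            thread::sleep(time_forwardable);
            forward();
        });
    }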
+
                let inbound_pmt_key_material = args.keys_manager.get_inbound_payment_key_material();
                let expanded_inbound_key = inbound_payment::ExpandedKey::new(&inbound_pmt_key_material);
 
@@ -7696,11 +7105,11 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                        }),
                        inbound_payment_key: expanded_inbound_key,
                        pending_inbound_payments: Mutex::new(pending_inbound_payments),
-                       pending_outbound_payments: Mutex::new(pending_outbound_payments.unwrap()),
+                       pending_outbound_payments: OutboundPayments { pending_outbound_payments: Mutex::new(pending_outbound_payments.unwrap()) },
                        pending_intercepted_htlcs: Mutex::new(pending_intercepted_htlcs.unwrap()),
 
                        forward_htlcs: Mutex::new(forward_htlcs),
-                       claimable_htlcs: Mutex::new(claimable_htlcs),
+                       claimable_payments: Mutex::new(ClaimablePayments { claimable_htlcs, pending_claiming_payments: pending_claiming_payments.unwrap() }),
                        outbound_scid_aliases: Mutex::new(outbound_scid_aliases),
                        id_to_peer: Mutex::new(id_to_peer),
                        short_to_chan_info: FairRwLock::new(short_to_chan_info),
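Both replaced fields are the same refactor: the bare `Mutex`-protected map moves behind a dedicated struct (`OutboundPayments`, `ClaimablePayments`) so the payment-tracking logic can live with its data rather than inside `ChannelManager`. A stripped-down sketch of that wrapper pattern, with simplified key/value types and an illustrative accessor rather than the real fields and methods:

    use std::collections::HashMap;
    use std::sync::Mutex;

    // One type owns the lock and the map; callers go through its methods instead
    // of reaching into the HashMap from ChannelManager directly.
    struct OutboundPayments {
        pending_outbound_payments: Mutex<HashMap<u64, u64>>,
    }

    impl OutboundPayments {
        fn from_read(restored: HashMap<u64, u64>) -> Self {
            Self { pending_outbound_payments: Mutex::new(restored) }
        }

        fn has_pending_payments(&self) -> bool {
            !self.pending_outbound_payments.lock().unwrap().is_empty()
        }
    }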
@@ -7854,7 +7263,7 @@ mod tests {
                // Use the utility function send_payment_along_path to send the payment with MPP data which
                // indicates there are more HTLCs coming.
                let cur_height = CHAN_CONFIRM_DEPTH + 1; // route_payment calls send_payment, which adds 1 to the current height. So we do the same here to match.
-               let session_privs = nodes[0].node.add_new_pending_payment(our_payment_hash, Some(payment_secret), payment_id, &mpp_route).unwrap();
+               let session_privs = nodes[0].node.test_add_new_pending_payment(our_payment_hash, Some(payment_secret), payment_id, &mpp_route).unwrap();
                nodes[0].node.send_payment_along_path(&mpp_route.paths[0], &route.payment_params, &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
@@ -8083,8 +7492,8 @@ mod tests {
 
                let test_preimage = PaymentPreimage([42; 32]);
                let mismatch_payment_hash = PaymentHash([43; 32]);
-               let session_privs = nodes[0].node.add_new_pending_payment(mismatch_payment_hash, None, PaymentId(mismatch_payment_hash.0), &route).unwrap();
-               nodes[0].node.send_payment_internal(&route, mismatch_payment_hash, &None, Some(test_preimage), PaymentId(mismatch_payment_hash.0), None, session_privs).unwrap();
+               let session_privs = nodes[0].node.test_add_new_pending_payment(mismatch_payment_hash, None, PaymentId(mismatch_payment_hash.0), &route).unwrap();
+               nodes[0].node.test_send_payment_internal(&route, mismatch_payment_hash, &None, Some(test_preimage), PaymentId(mismatch_payment_hash.0), None, session_privs).unwrap();
                check_added_monitors!(nodes[0], 1);
 
                let updates = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
@@ -8129,8 +7538,8 @@ mod tests {
                let test_preimage = PaymentPreimage([42; 32]);
                let test_secret = PaymentSecret([43; 32]);
                let payment_hash = PaymentHash(Sha256::hash(&test_preimage.0).into_inner());
-               let session_privs = nodes[0].node.add_new_pending_payment(payment_hash, Some(test_secret), PaymentId(payment_hash.0), &route).unwrap();
-               nodes[0].node.send_payment_internal(&route, payment_hash, &Some(test_secret), Some(test_preimage), PaymentId(payment_hash.0), None, session_privs).unwrap();
+               let session_privs = nodes[0].node.test_add_new_pending_payment(payment_hash, Some(test_secret), PaymentId(payment_hash.0), &route).unwrap();
+               nodes[0].node.test_send_payment_internal(&route, payment_hash, &Some(test_secret), Some(test_preimage), PaymentId(payment_hash.0), None, session_privs).unwrap();
                check_added_monitors!(nodes[0], 1);
 
                let updates = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
@@ -8477,7 +7886,7 @@ pub mod bench {
                                $node_b.handle_revoke_and_ack(&$node_a.get_our_node_id(), &get_event_msg!(NodeHolder { node: &$node_a }, MessageSendEvent::SendRevokeAndACK, $node_b.get_our_node_id()));
 
                                expect_pending_htlcs_forwardable!(NodeHolder { node: &$node_b });
-                               expect_payment_received!(NodeHolder { node: &$node_b }, payment_hash, payment_secret, 10_000);
+                               expect_payment_claimable!(NodeHolder { node: &$node_b }, payment_hash, payment_secret, 10_000);
                                $node_b.claim_funds(payment_preimage);
                                expect_payment_claimed!(NodeHolder { node: &$node_b }, payment_hash, 10_000);