Merge pull request #1857 from TheBlueMatt/2022-11-reload-htlc
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 7f65719a57ff3a531ad0c648dea0973b4726c3df..29553d17ffb3ac2dee788f59c2de7f5bb9a814c1 100644
@@ -92,8 +92,8 @@ use core::ops::Deref;
 pub(super) enum PendingHTLCRouting {
        Forward {
                onion_packet: msgs::OnionPacket,
-               /// The SCID from the onion that we should forward to. This could be a "real" SCID, an
-               /// outbound SCID alias, or a phantom node SCID.
+               /// The SCID from the onion that we should forward to. This could be a real SCID or a fake one
+               /// generated using `get_fake_scid` from the scid_utils::fake_scid module.
                short_channel_id: u64, // This should be NonZero<u64> eventually when we bump MSRV
        },
        Receive {
@@ -142,6 +142,7 @@ pub(super) struct PendingAddHTLCInfo {
        prev_short_channel_id: u64,
        prev_htlc_id: u64,
        prev_funding_outpoint: OutPoint,
+       prev_user_channel_id: u128,
 }
 
 pub(super) enum HTLCForwardInfo {
@@ -206,6 +207,24 @@ impl Readable for PaymentId {
                Ok(PaymentId(buf))
        }
 }
+
+/// An identifier used to uniquely identify an intercepted HTLC to LDK.
+/// (C-not exported) as we just use [u8; 32] directly
+#[derive(Hash, Copy, Clone, PartialEq, Eq, Debug)]
+pub struct InterceptId(pub [u8; 32]);
+
+impl Writeable for InterceptId {
+       fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+               self.0.write(w)
+       }
+}
+
+impl Readable for InterceptId {
+       fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+               let buf: [u8; 32] = Readable::read(r)?;
+               Ok(InterceptId(buf))
+       }
+}
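The two impls above are all that is needed to round-trip an InterceptId through LDK's serialization traits. A minimal sketch (illustrative, not part of the diff; assumes a std build where `&[u8]` provides the reader type `Readable` expects):

use lightning::ln::channelmanager::InterceptId;
use lightning::util::ser::{Readable, Writeable};

fn intercept_id_roundtrip() {
    let id = InterceptId([42u8; 32]);
    // `Writeable::encode` is a provided method that serializes into a Vec<u8>.
    let encoded = id.encode();
    // A `&mut &[u8]` acts as the reader in std builds.
    let decoded = InterceptId::read(&mut &encoded[..]).expect("32 bytes always decode");
    assert_eq!(id, decoded); // `InterceptId` derives `PartialEq`
}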
 /// Tracks the inbound corresponding to an outbound HTLC
 #[allow(clippy::derive_hash_xor_eq)] // Our Hash is faithful to the data, we just don't have SecretKey::hash
 #[derive(Clone, PartialEq, Eq)]
@@ -268,6 +287,16 @@ pub(super) enum HTLCFailReason {
        }
 }
 
+impl HTLCFailReason {
+       pub(super) fn reason(failure_code: u16, data: Vec<u8>) -> Self {
+               Self::Reason { failure_code, data }
+       }
+
+       pub(super) fn from_failure_code(failure_code: u16) -> Self {
+               Self::Reason { failure_code, data: Vec::new() }
+       }
+}
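The two constructors above replace the ad-hoc `HTLCFailReason::Reason { .. }` literals used throughout the rest of this diff. The `u16` values they take are BOLT 4 onion failure codes; for reference, the codes that appear below are (constant names here are illustrative, not LDK identifiers):

// BOLT 4 flag bits
const PERM: u16 = 0x4000;
const UPDATE: u16 = 0x1000;
// Codes used in this diff
const PERMANENT_CHANNEL_FAILURE: u16 = PERM | 8;             // channel closed or being shut down
const UNKNOWN_NEXT_PEER: u16 = PERM | 10;                    // no forwarding channel, e.g. a failed intercept
const INCORRECT_OR_UNKNOWN_PAYMENT_DETAILS: u16 = PERM | 15; // bad or expired inbound payment
const TEMPORARY_CHANNEL_FAILURE: u16 = UPDATE | 7;           // outbound channel cannot take the HTLC right now
const MPP_TIMEOUT: u16 = 23;                                 // multi-part payment never completed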
+
 struct ReceiveError {
        err_code: u16,
        err_data: Vec<u8>,
@@ -398,13 +427,6 @@ pub(super) enum RAACommitmentOrder {
 // Note this is only exposed in cfg(test):
 pub(super) struct ChannelHolder<Signer: Sign> {
        pub(super) by_id: HashMap<[u8; 32], Channel<Signer>>,
-       /// Map from payment hash to the payment data and any HTLCs which are to us and can be
-       /// failed/claimed by the user.
-       ///
-       /// Note that while this is held in the same mutex as the channels themselves, no consistency
-       /// guarantees are made about the channels given here actually existing anymore by the time you
-       /// go to read them!
-       claimable_htlcs: HashMap<PaymentHash, (events::PaymentPurpose, Vec<ClaimableHTLC>)>,
        /// Messages to send to peers - pushed to in the same lock that they are generated in (except
        /// for broadcast messages, where ordering isn't as strict).
        pub(super) pending_msg_events: Vec<MessageSendEvent>,
@@ -672,20 +694,24 @@ pub type SimpleRefChannelManager<'a, 'b, 'c, 'd, 'e, M, T, F, L> = ChannelManage
 // `total_consistency_lock`
 //  |
 //  |__`forward_htlcs`
-//  |
-//  |__`channel_state`
 //  |   |
-//  |   |__`id_to_peer`
+//  |   |__`pending_intercepted_htlcs`
+//  |
+//  |__`pending_inbound_payments`
 //  |   |
-//  |   |__`short_to_chan_info`
+//  |   |__`claimable_htlcs`
 //  |   |
-//  |   |__`per_peer_state`
+//  |   |__`pending_outbound_payments`
 //  |       |
-//  |       |__`outbound_scid_aliases`
-//  |       |
-//  |       |__`pending_inbound_payments`
+//  |       |__`channel_state`
+//  |           |
+//  |           |__`id_to_peer`
 //  |           |
-//  |           |__`pending_outbound_payments`
+//  |           |__`short_to_chan_info`
+//  |           |
+//  |           |__`per_peer_state`
+//  |               |
+//  |               |__`outbound_scid_aliases`
 //  |               |
 //  |               |__`best_block`
 //  |               |
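When more than one of these locks is needed at once, they must be taken outer-to-inner as drawn. A minimal sketch against the new layout (illustrative only), taking `forward_htlcs` before the `pending_intercepted_htlcs` map nested under it:

// Illustrative: honor the documented order and release inner guards first.
let forward_htlcs = self.forward_htlcs.lock().unwrap();
let pending_intercepted_htlcs = self.pending_intercepted_htlcs.lock().unwrap();
// ... inspect or update both maps ...
drop(pending_intercepted_htlcs);
drop(forward_htlcs);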
@@ -720,9 +746,9 @@ pub struct ChannelManager<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
        channel_state: Mutex<ChannelHolder<<K::Target as KeysInterface>::Signer>>,
 
        /// Storage for PaymentSecrets and any requirements on future inbound payments before we will
-       /// expose them to users via a PaymentReceived event. HTLCs which do not meet the requirements
+       /// expose them to users via a PaymentClaimable event. HTLCs which do not meet the requirements
        /// here are failed when we process them as pending-forwardable-HTLCs, and entries are removed
-       /// after we generate a PaymentReceived upon receipt of all MPP parts or when they time out.
+       /// after we generate a PaymentClaimable upon receipt of all MPP parts or when they time out.
        ///
        /// See `ChannelManager` struct-level documentation for lock order requirements.
        pending_inbound_payments: Mutex<HashMap<PaymentHash, PendingInboundPayment>>,
@@ -755,6 +781,20 @@ pub struct ChannelManager<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
        pub(super) forward_htlcs: Mutex<HashMap<u64, Vec<HTLCForwardInfo>>>,
        #[cfg(not(test))]
        forward_htlcs: Mutex<HashMap<u64, Vec<HTLCForwardInfo>>>,
+       /// Storage for HTLCs that have been intercepted and bubbled up to the user. We hold them here
+       /// until the user tells us what we should do with them.
+       ///
+       /// See `ChannelManager` struct-level documentation for lock order requirements.
+       pending_intercepted_htlcs: Mutex<HashMap<InterceptId, PendingAddHTLCInfo>>,
+
+       /// Map from payment hash to the payment data and any HTLCs which are to us and can be
+       /// failed/claimed by the user.
+       ///
+       /// Note that no consistency guarantees are made about the channels given here actually
+       /// existing anymore by the time you go to read them!
+       ///
+       /// See `ChannelManager` struct-level documentation for lock order requirements.
+       claimable_htlcs: Mutex<HashMap<PaymentHash, (events::PaymentPurpose, Vec<ClaimableHTLC>)>>,
 
        /// The set of outbound SCID aliases across all our channels, including unconfirmed channels
        /// and some closed channels which reached a usable state prior to being closed. This is used
@@ -1141,6 +1181,10 @@ pub struct ChannelDetails {
        /// [`ChannelHandshakeConfig::minimum_depth`]: crate::util::config::ChannelHandshakeConfig::minimum_depth
        /// [`ChannelHandshakeLimits::max_minimum_depth`]: crate::util::config::ChannelHandshakeLimits::max_minimum_depth
        pub confirmations_required: Option<u32>,
+       /// The current number of confirmations on the funding transaction.
+       ///
+       /// This value will be `None` for objects serialized with LDK versions prior to 0.0.113.
+       pub confirmations: Option<u32>,
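The new field is populated from the current best block when the details are built (see the `get_funding_tx_confirmations` hunk below), so callers can read it directly off the channel list. A minimal sketch, assuming a `channel_manager` in scope:

// Illustrative: report how deep each channel's funding transaction is buried.
for details in channel_manager.list_channels() {
    match details.confirmations {
        Some(confs) => println!("channel {:?}: {} confirmations", details.channel_id, confs),
        // `None` only occurs for `ChannelDetails` deserialized from LDK < 0.0.113.
        None => println!("channel {:?}: confirmation count unknown", details.channel_id),
    }
}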
        /// The number of blocks (after our commitment transaction confirms) that we will need to wait
        /// until we can claim our funds after we force-close the channel. During this time our
        /// counterparty is allowed to punish us if we broadcasted a stale state. If our counterparty
@@ -1517,134 +1561,6 @@ macro_rules! emit_channel_ready_event {
        }
 }
 
-macro_rules! handle_chan_restoration_locked {
-       ($self: ident, $channel_lock: expr, $channel_state: expr, $channel_entry: expr,
-        $raa: expr, $commitment_update: expr, $order: expr, $chanmon_update: expr,
-        $pending_forwards: expr, $funding_broadcastable: expr, $channel_ready: expr, $announcement_sigs: expr) => { {
-               let mut htlc_forwards = None;
-
-               let chanmon_update: Option<ChannelMonitorUpdate> = $chanmon_update; // Force type-checking to resolve
-               let chanmon_update_is_none = chanmon_update.is_none();
-               let counterparty_node_id = $channel_entry.get().get_counterparty_node_id();
-               let res = loop {
-                       let forwards: Vec<(PendingHTLCInfo, u64)> = $pending_forwards; // Force type-checking to resolve
-                       if !forwards.is_empty() {
-                               htlc_forwards = Some(($channel_entry.get().get_short_channel_id().unwrap_or($channel_entry.get().outbound_scid_alias()),
-                                       $channel_entry.get().get_funding_txo().unwrap(), forwards));
-                       }
-
-                       if chanmon_update.is_some() {
-                               // On reconnect, we, by definition, only resend a channel_ready if there have been
-                               // no commitment updates, so the only channel monitor update which could also be
-                               // associated with a channel_ready would be the funding_created/funding_signed
-                               // monitor update. That monitor update failing implies that we won't send
-                               // channel_ready until it's been updated, so we can't have a channel_ready and a
-                               // monitor update here (so we don't bother to handle it correctly below).
-                               assert!($channel_ready.is_none());
-                               // A channel monitor update makes no sense without either a channel_ready or a
-                               // commitment update to process after it. Since we can't have a channel_ready, we
-                               // only bother to handle the monitor-update + commitment_update case below.
-                               assert!($commitment_update.is_some());
-                       }
-
-                       if let Some(msg) = $channel_ready {
-                               // Similar to the above, this implies that we're letting the channel_ready fly
-                               // before it should be allowed to.
-                               assert!(chanmon_update.is_none());
-                               send_channel_ready!($self, $channel_state.pending_msg_events, $channel_entry.get(), msg);
-                       }
-                       if let Some(msg) = $announcement_sigs {
-                               $channel_state.pending_msg_events.push(events::MessageSendEvent::SendAnnouncementSignatures {
-                                       node_id: counterparty_node_id,
-                                       msg,
-                               });
-                       }
-
-                       emit_channel_ready_event!($self, $channel_entry.get_mut());
-
-                       let funding_broadcastable: Option<Transaction> = $funding_broadcastable; // Force type-checking to resolve
-                       if let Some(monitor_update) = chanmon_update {
-                               // We only ever broadcast a funding transaction in response to a funding_signed
-                               // message and the resulting monitor update. Thus, on channel_reestablish
-                               // message handling we can't have a funding transaction to broadcast. When
-                               // processing a monitor update finishing resulting in a funding broadcast, we
-                               // cannot have a second monitor update, thus this case would indicate a bug.
-                               assert!(funding_broadcastable.is_none());
-                               // Given we were just reconnected or finished updating a channel monitor, the
-                               // only case where we can get a new ChannelMonitorUpdate would be if we also
-                               // have some commitment updates to send as well.
-                               assert!($commitment_update.is_some());
-                               match $self.chain_monitor.update_channel($channel_entry.get().get_funding_txo().unwrap(), monitor_update) {
-                                       ChannelMonitorUpdateStatus::Completed => {},
-                                       e => {
-                                               // channel_reestablish doesn't guarantee the order it returns is sensical
-                                               // for the messages it returns, but if we're setting what messages to
-                                               // re-transmit on monitor update success, we need to make sure it is sane.
-                                               let mut order = $order;
-                                               if $raa.is_none() {
-                                                       order = RAACommitmentOrder::CommitmentFirst;
-                                               }
-                                               break handle_monitor_update_res!($self, e, $channel_entry, order, $raa.is_some(), true);
-                                       }
-                               }
-                       }
-
-                       macro_rules! handle_cs { () => {
-                               if let Some(update) = $commitment_update {
-                                       $channel_state.pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
-                                               node_id: counterparty_node_id,
-                                               updates: update,
-                                       });
-                               }
-                       } }
-                       macro_rules! handle_raa { () => {
-                               if let Some(revoke_and_ack) = $raa {
-                                       $channel_state.pending_msg_events.push(events::MessageSendEvent::SendRevokeAndACK {
-                                               node_id: counterparty_node_id,
-                                               msg: revoke_and_ack,
-                                       });
-                               }
-                       } }
-                       match $order {
-                               RAACommitmentOrder::CommitmentFirst => {
-                                       handle_cs!();
-                                       handle_raa!();
-                               },
-                               RAACommitmentOrder::RevokeAndACKFirst => {
-                                       handle_raa!();
-                                       handle_cs!();
-                               },
-                       }
-                       if let Some(tx) = funding_broadcastable {
-                               log_info!($self.logger, "Broadcasting funding transaction with txid {}", tx.txid());
-                               $self.tx_broadcaster.broadcast_transaction(&tx);
-                       }
-                       break Ok(());
-               };
-
-               if chanmon_update_is_none {
-                       // If there was no ChannelMonitorUpdate, we should never generate an Err in the res loop
-                       // above. Doing so would imply calling handle_err!() from channel_monitor_updated() which
-                       // should *never* end up calling back to `chain_monitor.update_channel()`.
-                       assert!(res.is_ok());
-               }
-
-               (htlc_forwards, res, counterparty_node_id)
-       } }
-}
-
-macro_rules! post_handle_chan_restoration {
-       ($self: ident, $locked_res: expr) => { {
-               let (htlc_forwards, res, counterparty_node_id) = $locked_res;
-
-               let _ = handle_error!($self, res, counterparty_node_id);
-
-               if let Some(forwards) = htlc_forwards {
-                       $self.forward_htlcs(&mut [forwards][..]);
-               }
-       } }
-}
-
 impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F, L>
        where M::Target: chain::Watch<<K::Target as KeysInterface>::Signer>,
         T::Target: BroadcasterInterface,
@@ -1678,13 +1594,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                        channel_state: Mutex::new(ChannelHolder{
                                by_id: HashMap::new(),
-                               claimable_htlcs: HashMap::new(),
                                pending_msg_events: Vec::new(),
                        }),
                        outbound_scid_aliases: Mutex::new(HashSet::new()),
                        pending_inbound_payments: Mutex::new(HashMap::new()),
                        pending_outbound_payments: Mutex::new(HashMap::new()),
                        forward_htlcs: Mutex::new(HashMap::new()),
+                       claimable_htlcs: Mutex::new(HashMap::new()),
+                       pending_intercepted_htlcs: Mutex::new(HashMap::new()),
                        id_to_peer: Mutex::new(HashMap::new()),
                        short_to_chan_info: FairRwLock::new(HashMap::new()),
 
@@ -1817,6 +1734,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let mut res = Vec::new();
                {
                        let channel_state = self.channel_state.lock().unwrap();
+                       let best_block_height = self.best_block.read().unwrap().height();
                        res.reserve(channel_state.by_id.len());
                        for (channel_id, channel) in channel_state.by_id.iter().filter(f) {
                                let balance = channel.get_available_balances();
@@ -1853,6 +1771,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        next_outbound_htlc_limit_msat: balance.next_outbound_htlc_limit_msat,
                                        user_channel_id: channel.get_user_id(),
                                        confirmations_required: channel.minimum_depth(),
+                                       confirmations: Some(channel.get_funding_tx_confirmations(best_block_height)),
                                        force_close_spend_delay: channel.get_counterparty_selected_contest_delay(),
                                        is_outbound: channel.is_outbound(),
                                        is_channel_ready: channel.is_usable(),
@@ -1922,14 +1841,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        if *counterparty_node_id != chan_entry.get().get_counterparty_node_id(){
                                                return Err(APIError::APIMisuseError { err: "The passed counterparty_node_id doesn't match the channel's counterparty node_id".to_owned() });
                                        }
-                                       let per_peer_state = self.per_peer_state.read().unwrap();
-                                       let (shutdown_msg, monitor_update, htlcs) = match per_peer_state.get(&counterparty_node_id) {
-                                               Some(peer_state) => {
-                                                       let peer_state = peer_state.lock().unwrap();
-                                                       let their_features = &peer_state.latest_features;
-                                                       chan_entry.get_mut().get_shutdown(&self.keys_manager, their_features, target_feerate_sats_per_1000_weight)?
-                                               },
-                                               None => return Err(APIError::ChannelUnavailable { err: format!("Not connected to node: {}", counterparty_node_id) }),
+                                       let (shutdown_msg, monitor_update, htlcs) = {
+                                               let per_peer_state = self.per_peer_state.read().unwrap();
+                                               match per_peer_state.get(&counterparty_node_id) {
+                                                       Some(peer_state) => {
+                                                               let peer_state = peer_state.lock().unwrap();
+                                                               let their_features = &peer_state.latest_features;
+                                                               chan_entry.get_mut().get_shutdown(&self.keys_manager, their_features, target_feerate_sats_per_1000_weight)?
+                                                       },
+                                                       None => return Err(APIError::ChannelUnavailable { err: format!("Not connected to node: {}", counterparty_node_id) }),
+                                               }
                                        };
                                        failed_htlcs = htlcs;
 
@@ -1965,8 +1886,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                };
 
                for htlc_source in failed_htlcs.drain(..) {
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
                }
 
                let _ = handle_error!(self, result, *counterparty_node_id);
@@ -2023,8 +1945,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                log_debug!(self.logger, "Finishing force-closure of channel with {} HTLCs to fail", failed_htlcs.len());
                for htlc_source in failed_htlcs.drain(..) {
                        let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
-                       self.fail_htlc_backwards_internal(source, &payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
                if let Some((funding_txo, monitor_update)) = monitor_update_option {
                        // There isn't anything we can do if we get an update failure - we're already
@@ -2321,8 +2244,11 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let forwarding_id_opt = match id_option {
                                                None => { // unknown_next_peer
                                                        // Note that this is likely a timing oracle for detecting whether an scid is a
-                                                       // phantom.
-                                                       if fake_scid::is_valid_phantom(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash) {
+                                                       // phantom or an intercept.
+                                                       if (self.default_configuration.accept_intercept_htlcs &&
+                                                          fake_scid::is_valid_intercept(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash)) ||
+                                                          fake_scid::is_valid_phantom(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash)
+                                                       {
                                                                None
                                                        } else {
                                                                break Some(("Don't have available channel for forwarding as requested.", 0x4000 | 10, None));
@@ -2493,10 +2419,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let session_priv = SecretKey::from_slice(&session_priv_bytes[..]).expect("RNG is busted");
 
                let onion_keys = onion_utils::construct_onion_keys(&self.secp_ctx, &path, &session_priv)
-                       .map_err(|_| APIError::RouteError{err: "Pubkey along hop was maliciously selected"})?;
+                       .map_err(|_| APIError::InvalidRoute{err: "Pubkey along hop was maliciously selected"})?;
                let (onion_payloads, htlc_msat, htlc_cltv) = onion_utils::build_onion_payloads(path, total_value, payment_secret, cur_height, keysend_preimage)?;
                if onion_utils::route_size_insane(&onion_payloads) {
-                       return Err(APIError::RouteError{err: "Route size too large considering onion data"});
+                       return Err(APIError::InvalidRoute{err: "Route size too large considering onion data"});
                }
                let onion_packet = onion_utils::construct_onion_packet(onion_payloads, onion_keys, prng_seed, payment_hash);
 
@@ -2513,7 +2439,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(id) {
                                match {
                                        if chan.get().get_counterparty_node_id() != path.first().unwrap().pubkey {
-                                               return Err(APIError::RouteError{err: "Node ID mismatch on first hop!"});
+                                               return Err(APIError::InvalidRoute{err: "Node ID mismatch on first hop!"});
                                        }
                                        if !chan.get().is_live() {
                                                return Err(APIError::ChannelUnavailable{err: "Peer for first hop currently disconnected/pending monitor update!".to_owned()});
@@ -2588,7 +2514,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// fields for more info.
        ///
        /// If a pending payment is currently in-flight with the same [`PaymentId`] provided, this
-       /// method will error with an [`APIError::RouteError`]. Note, however, that once a payment
+       /// method will error with an [`APIError::InvalidRoute`]. Note, however, that once a payment
        /// is no longer pending (either via [`ChannelManager::abandon_payment`], or handling of an
        /// [`Event::PaymentSent`]) LDK will not stop you from sending a second payment with the same
        /// [`PaymentId`].
@@ -2607,7 +2533,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// PaymentSendFailure for more info.
        ///
        /// In general, a path may raise:
-       ///  * [`APIError::RouteError`] when an invalid route or forwarding parameter (cltv_delta, fee,
+       ///  * [`APIError::InvalidRoute`] when an invalid route or forwarding parameter (cltv_delta, fee,
        ///    node public key) is specified.
        ///  * [`APIError::ChannelUnavailable`] if the next-hop channel is not available for updates
        ///    (including due to previous monitor update failure or new permanent monitor update
@@ -2672,7 +2598,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
        fn send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
                if route.paths.len() < 1 {
-                       return Err(PaymentSendFailure::ParameterError(APIError::RouteError{err: "There must be at least one path to send over"}));
+                       return Err(PaymentSendFailure::ParameterError(APIError::InvalidRoute{err: "There must be at least one path to send over"}));
                }
                if payment_secret.is_none() && route.paths.len() > 1 {
                        return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError{err: "Payment secret is required for multi-path payments".to_string()}));
@@ -2682,12 +2608,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let mut path_errs = Vec::with_capacity(route.paths.len());
                'path_check: for path in route.paths.iter() {
                        if path.len() < 1 || path.len() > 20 {
-                               path_errs.push(Err(APIError::RouteError{err: "Path didn't go anywhere/had bogus size"}));
+                               path_errs.push(Err(APIError::InvalidRoute{err: "Path didn't go anywhere/had bogus size"}));
                                continue 'path_check;
                        }
                        for (idx, hop) in path.iter().enumerate() {
                                if idx != path.len() - 1 && hop.pubkey == our_node_id {
-                                       path_errs.push(Err(APIError::RouteError{err: "Path went through us but wasn't a simple rebalance loop to us"}));
+                                       path_errs.push(Err(APIError::InvalidRoute{err: "Path went through us but wasn't a simple rebalance loop to us"}));
                                        continue 'path_check;
                                }
                        }
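These path checks surface to callers as the renamed `APIError::InvalidRoute` (previously `RouteError`). A minimal sketch of telling it apart from a transient failure when handling a send error (the helper itself is illustrative):

use lightning::util::errors::APIError;

// Illustrative: `InvalidRoute` means the route itself must be rebuilt, while
// `ChannelUnavailable` is typically worth retrying once the peer or monitor recovers.
fn describe_send_error(err: &APIError) -> &'static str {
    match err {
        APIError::InvalidRoute { .. } => "route malformed; rebuild it before retrying",
        APIError::ChannelUnavailable { .. } => "first hop unusable right now; retry or reroute",
        _ => "see the error's inner message",
    }
}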
@@ -3138,6 +3064,102 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                Ok(())
        }
 
+       /// Attempts to forward an intercepted HTLC over the provided channel id and with the provided
+       /// amount to forward. Should only be called in response to an [`HTLCIntercepted`] event.
+       ///
+       /// Intercepted HTLCs can be useful for Lightning Service Providers (LSPs) to open a just-in-time
+       /// channel to a receiving node if the node lacks sufficient inbound liquidity.
+       ///
+       /// To make use of intercepted HTLCs, set [`UserConfig::accept_intercept_htlcs`] and use
+       /// [`ChannelManager::get_intercept_scid`] to generate short channel id(s) to put in the
+       /// receiver's invoice route hints. These route hints will signal to LDK to generate an
+       /// [`HTLCIntercepted`] event when it receives the forwarded HTLC, and this method or
+       /// [`ChannelManager::fail_intercepted_htlc`] MUST be called in response to the event.
+       ///
+       /// Note that LDK does not enforce fee requirements in `amt_to_forward_msat`, and will not stop
+       /// you from forwarding more than you received.
+       ///
+       /// Errors if the event was not handled in time, in which case the HTLC was automatically failed
+       /// backwards.
+       ///
+       /// [`UserConfig::accept_intercept_htlcs`]: crate::util::config::UserConfig::accept_intercept_htlcs
+       /// [`HTLCIntercepted`]: events::Event::HTLCIntercepted
+       // TODO: when we move to deciding the best outbound channel at forward time, only take
+       // `next_node_id` and not `next_hop_channel_id`
+       pub fn forward_intercepted_htlc(&self, intercept_id: InterceptId, next_hop_channel_id: &[u8; 32], _next_node_id: PublicKey, amt_to_forward_msat: u64) -> Result<(), APIError> {
+               let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
+
+               let next_hop_scid = match self.channel_state.lock().unwrap().by_id.get(next_hop_channel_id) {
+                       Some(chan) => {
+                               if !chan.is_usable() {
+                                       return Err(APIError::ChannelUnavailable {
+                                               err: format!("Channel with id {} not fully established", log_bytes!(*next_hop_channel_id))
+                                       })
+                               }
+                               chan.get_short_channel_id().unwrap_or(chan.outbound_scid_alias())
+                       },
+                       None => return Err(APIError::ChannelUnavailable {
+                               err: format!("Channel with id {} not found", log_bytes!(*next_hop_channel_id))
+                       })
+               };
+
+               let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
+                       .ok_or_else(|| APIError::APIMisuseError {
+                               err: format!("Payment with intercept id {} not found", log_bytes!(intercept_id.0))
+                       })?;
+
+               let routing = match payment.forward_info.routing {
+                       PendingHTLCRouting::Forward { onion_packet, .. } => {
+                               PendingHTLCRouting::Forward { onion_packet, short_channel_id: next_hop_scid }
+                       },
+                       _ => unreachable!() // Only `PendingHTLCRouting::Forward`s are intercepted
+               };
+               let pending_htlc_info = PendingHTLCInfo {
+                       outgoing_amt_msat: amt_to_forward_msat, routing, ..payment.forward_info
+               };
+
+               let mut per_source_pending_forward = [(
+                       payment.prev_short_channel_id,
+                       payment.prev_funding_outpoint,
+                       payment.prev_user_channel_id,
+                       vec![(pending_htlc_info, payment.prev_htlc_id)]
+               )];
+               self.forward_htlcs(&mut per_source_pending_forward);
+               Ok(())
+       }
+
+       /// Fails the intercepted HTLC indicated by intercept_id. Should only be called in response to
+       /// an [`HTLCIntercepted`] event. See [`ChannelManager::forward_intercepted_htlc`].
+       ///
+       /// Errors if the event was not handled in time, in which case the HTLC was automatically failed
+       /// backwards.
+       ///
+       /// [`HTLCIntercepted`]: events::Event::HTLCIntercepted
+       pub fn fail_intercepted_htlc(&self, intercept_id: InterceptId) -> Result<(), APIError> {
+               let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
+
+               let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
+                       .ok_or_else(|| APIError::APIMisuseError {
+                               err: format!("Payment with intercept id {} not found", log_bytes!(intercept_id.0))
+                       })?;
+
+               if let PendingHTLCRouting::Forward { short_channel_id, .. } = payment.forward_info.routing {
+                       let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+                               short_channel_id: payment.prev_short_channel_id,
+                               outpoint: payment.prev_funding_outpoint,
+                               htlc_id: payment.prev_htlc_id,
+                               incoming_packet_shared_secret: payment.forward_info.incoming_shared_secret,
+                               phantom_shared_secret: None,
+                       });
+
+                       let failure_reason = HTLCFailReason::from_failure_code(0x4000 | 10);
+                       let destination = HTLCDestination::UnknownNextHop { requested_forward_scid: short_channel_id };
+                       self.fail_htlc_backwards_internal(&htlc_source, &payment.forward_info.payment_hash, &failure_reason, destination);
+               } else { unreachable!() } // Only `PendingHTLCRouting::Forward`s are intercepted
+
+               Ok(())
+       }
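Taken together with the `HTLCIntercepted` event, the two methods above make up the interception API. A minimal handler sketch (assumptions: `channel_manager` is in scope, the event's field names follow the 0.0.113 `Event::HTLCIntercepted` definition, and `channel_for_intercept_scid` is a hypothetical application-side lookup from an SCID handed out by `get_intercept_scid`):

use lightning::util::events::Event;

match event {
    Event::HTLCIntercepted { intercept_id, requested_next_hop_scid, expected_outbound_amount_msat, .. } => {
        if let Some((channel_id, counterparty_node_id)) = channel_for_intercept_scid(requested_next_hop_scid) {
            // Forward the full expected amount; per the docs above, no fee is enforced here.
            let _ = channel_manager.forward_intercepted_htlc(
                intercept_id, &channel_id, counterparty_node_id, expected_outbound_amount_msat);
        } else {
            // No usable outbound channel: hand the HTLC back so it is failed upstream.
            let _ = channel_manager.fail_intercepted_htlc(intercept_id);
        }
    },
    _ => {},
}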
+
        /// Processes HTLCs which are pending waiting on random forward delay.
        ///
        /// Should only really ever be called in response to a PendingHTLCsForwardable event.
@@ -3147,22 +3169,20 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                let mut new_events = Vec::new();
                let mut failed_forwards = Vec::new();
-               let mut phantom_receives: Vec<(u64, OutPoint, Vec<(PendingHTLCInfo, u64)>)> = Vec::new();
+               let mut phantom_receives: Vec<(u64, OutPoint, u128, Vec<(PendingHTLCInfo, u64)>)> = Vec::new();
                let mut handle_errors = Vec::new();
                {
                        let mut forward_htlcs = HashMap::new();
                        mem::swap(&mut forward_htlcs, &mut self.forward_htlcs.lock().unwrap());
 
                        for (short_chan_id, mut pending_forwards) in forward_htlcs {
-                               let mut channel_state_lock = self.channel_state.lock().unwrap();
-                               let channel_state = &mut *channel_state_lock;
                                if short_chan_id != 0 {
                                        macro_rules! forwarding_channel_not_found {
                                                () => {
                                                        for forward_info in pending_forwards.drain(..) {
                                                                match forward_info {
                                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint,
+                                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint, prev_user_channel_id,
                                                                                forward_info: PendingHTLCInfo {
                                                                                        routing, incoming_shared_secret, payment_hash, outgoing_amt_msat,
                                                                                        outgoing_cltv_value, incoming_amt_msat: _
@@ -3187,7 +3207,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                };
 
                                                                                                failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::Reason { failure_code: $err_code, data: $err_data },
+                                                                                                       HTLCFailReason::reason($err_code, $err_data),
                                                                                                        reason
                                                                                                ));
                                                                                                continue;
@@ -3228,7 +3248,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                match next_hop {
                                                                                                        onion_utils::Hop::Receive(hop_data) => {
                                                                                                                match self.construct_recv_pending_htlc_info(hop_data, incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value, Some(phantom_shared_secret)) {
-                                                                                                                       Ok(info) => phantom_receives.push((prev_short_channel_id, prev_funding_outpoint, vec![(info, prev_htlc_id)])),
+                                                                                                                       Ok(info) => phantom_receives.push((prev_short_channel_id, prev_funding_outpoint, prev_user_channel_id, vec![(info, prev_htlc_id)])),
                                                                                                                        Err(ReceiveError { err_code, err_data, msg }) => failed_payment!(msg, err_code, err_data, Some(phantom_shared_secret))
                                                                                                                }
                                                                                                        },
@@ -3258,6 +3278,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                        continue;
                                                }
                                        };
+                                       let mut channel_state_lock = self.channel_state.lock().unwrap();
+                                       let channel_state = &mut *channel_state_lock;
                                        match channel_state.by_id.entry(forward_chan_id) {
                                                hash_map::Entry::Vacant(_) => {
                                                        forwarding_channel_not_found!();
@@ -3269,7 +3291,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                        for forward_info in pending_forwards.drain(..) {
                                                                match forward_info {
                                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint ,
+                                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint, prev_user_channel_id: _,
                                                                                forward_info: PendingHTLCInfo {
                                                                                        incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value,
                                                                                        routing: PendingHTLCRouting::Forward { onion_packet, .. }, incoming_amt_msat: _,
@@ -3293,7 +3315,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                }
                                                                                                let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan.get());
                                                                                                failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::Reason { failure_code, data },
+                                                                                                       HTLCFailReason::reason(failure_code, data),
                                                                                                        HTLCDestination::NextHopChannel { node_id: Some(chan.get().get_counterparty_node_id()), channel_id: forward_chan_id }
                                                                                                ));
                                                                                                continue;
@@ -3396,7 +3418,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        for forward_info in pending_forwards.drain(..) {
                                                match forward_info {
                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint,
+                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint, prev_user_channel_id,
                                                                forward_info: PendingHTLCInfo {
                                                                        routing, incoming_shared_secret, payment_hash, outgoing_amt_msat, ..
                                                                }
@@ -3440,11 +3462,17 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                incoming_packet_shared_secret: $htlc.prev_hop.incoming_packet_shared_secret,
                                                                                                phantom_shared_secret,
                                                                                        }), payment_hash,
-                                                                                       HTLCFailReason::Reason { failure_code: 0x4000 | 15, data: htlc_msat_height_data },
+                                                                                       HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
                                                                                        HTLCDestination::FailedPayment { payment_hash: $payment_hash },
                                                                                ));
                                                                        }
                                                                }
+                                                               let phantom_shared_secret = claimable_htlc.prev_hop.phantom_shared_secret;
+                                                               let mut receiver_node_id = self.our_network_pubkey;
+                                                               if phantom_shared_secret.is_some() {
+                                                                       receiver_node_id = self.keys_manager.get_node_id(Recipient::PhantomNode)
+                                                                               .expect("Failed to get node_id for phantom node recipient");
+                                                               }
 
                                                                macro_rules! check_total_value {
                                                                        ($payment_data: expr, $payment_preimage: expr) => {{
@@ -3455,7 +3483,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                payment_secret: $payment_data.payment_secret,
                                                                                        }
                                                                                };
-                                                                               let (_, htlcs) = channel_state.claimable_htlcs.entry(payment_hash)
+                                                                               let mut claimable_htlcs = self.claimable_htlcs.lock().unwrap();
+                                                                               let (_, htlcs) = claimable_htlcs.entry(payment_hash)
                                                                                        .or_insert_with(|| (purpose(), Vec::new()));
                                                                                if htlcs.len() == 1 {
                                                                                        if let OnionPayload::Spontaneous(_) = htlcs[0].onion_payload {
@@ -3484,11 +3513,15 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                log_bytes!(payment_hash.0), total_value, $payment_data.total_msat);
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                } else if total_value == $payment_data.total_msat {
+                                                                                       let prev_channel_id = prev_funding_outpoint.to_channel_id();
                                                                                        htlcs.push(claimable_htlc);
-                                                                                       new_events.push(events::Event::PaymentReceived {
+                                                                                       new_events.push(events::Event::PaymentClaimable {
+                                                                                               receiver_node_id: Some(receiver_node_id),
                                                                                                payment_hash,
                                                                                                purpose: purpose(),
                                                                                                amount_msat: total_value,
+                                                                                               via_channel_id: Some(prev_channel_id),
+                                                                                               via_user_channel_id: Some(prev_user_channel_id),
                                                                                        });
                                                                                        payment_received_generated = true;
                                                                                } else {
@@ -3523,14 +3556,18 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                check_total_value!(payment_data, payment_preimage);
                                                                                        },
                                                                                        OnionPayload::Spontaneous(preimage) => {
-                                                                                               match channel_state.claimable_htlcs.entry(payment_hash) {
+                                                                                               match self.claimable_htlcs.lock().unwrap().entry(payment_hash) {
                                                                                                        hash_map::Entry::Vacant(e) => {
                                                                                                                let purpose = events::PaymentPurpose::SpontaneousPayment(preimage);
                                                                                                                e.insert((purpose.clone(), vec![claimable_htlc]));
-                                                                                                               new_events.push(events::Event::PaymentReceived {
+                                                                                                               let prev_channel_id = prev_funding_outpoint.to_channel_id();
+                                                                                                               new_events.push(events::Event::PaymentClaimable {
+                                                                                                                       receiver_node_id: Some(receiver_node_id),
                                                                                                                        payment_hash,
                                                                                                                        amount_msat: outgoing_amt_msat,
                                                                                                                        purpose,
+                                                                                                                       via_channel_id: Some(prev_channel_id),
+                                                                                                                       via_user_channel_id: Some(prev_user_channel_id),
                                                                                                                });
                                                                                                        },
                                                                                                        hash_map::Entry::Occupied(_) => {
@@ -3574,7 +3611,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                }
 
                for (htlc_source, payment_hash, failure_reason, destination) in failed_forwards.drain(..) {
-                       self.fail_htlc_backwards_internal(htlc_source, &payment_hash, failure_reason, destination);
+                       self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
                }
                self.forward_htlcs(&mut phantom_receives);
 
@@ -3812,33 +3849,35 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                                        true
                                });
+                       }
 
-                               channel_state.claimable_htlcs.retain(|payment_hash, (_, htlcs)| {
-                                       if htlcs.is_empty() {
-                                               // This should be unreachable
-                                               debug_assert!(false);
+                       self.claimable_htlcs.lock().unwrap().retain(|payment_hash, (_, htlcs)| {
+                               if htlcs.is_empty() {
+                                       // This should be unreachable
+                                       debug_assert!(false);
+                                       return false;
+                               }
+                               if let OnionPayload::Invoice { .. } = htlcs[0].onion_payload {
+                                       // Check if we've received all the parts we need for an MPP (the value of the parts adds to total_msat).
+                                       // In this case we're not going to handle any timeouts of the parts here.
+                                       if htlcs[0].total_msat == htlcs.iter().fold(0, |total, htlc| total + htlc.value) {
+                                               return true;
+                                       } else if htlcs.into_iter().any(|htlc| {
+                                               htlc.timer_ticks += 1;
+                                               return htlc.timer_ticks >= MPP_TIMEOUT_TICKS
+                                       }) {
+                                               timed_out_mpp_htlcs.extend(htlcs.drain(..).map(|htlc: ClaimableHTLC| (htlc.prev_hop, *payment_hash)));
                                                return false;
                                        }
-                                       if let OnionPayload::Invoice { .. } = htlcs[0].onion_payload {
-                                               // Check if we've received all the parts we need for an MPP (the value of the parts adds to total_msat).
-                                               // In this case we're not going to handle any timeouts of the parts here.
-                                               if htlcs[0].total_msat == htlcs.iter().fold(0, |total, htlc| total + htlc.value) {
-                                                       return true;
-                                               } else if htlcs.into_iter().any(|htlc| {
-                                                       htlc.timer_ticks += 1;
-                                                       return htlc.timer_ticks >= MPP_TIMEOUT_TICKS
-                                               }) {
-                                                       timed_out_mpp_htlcs.extend(htlcs.into_iter().map(|htlc| (htlc.prev_hop.clone(), payment_hash.clone())));
-                                                       return false;
-                                               }
-                                       }
-                                       true
-                               });
-                       }
+                               }
+                               true
+                       });
 
                        for htlc_source in timed_out_mpp_htlcs.drain(..) {
+                               let source = HTLCSource::PreviousHopData(htlc_source.0.clone());
+                               let reason = HTLCFailReason::from_failure_code(23);
                                let receiver = HTLCDestination::FailedPayment { payment_hash: htlc_source.1 };
-                               self.fail_htlc_backwards_internal(HTLCSource::PreviousHopData(htlc_source.0.clone()), &htlc_source.1, HTLCFailReason::Reason { failure_code: 23, data: Vec::new() }, receiver );
+                               self.fail_htlc_backwards_internal(&source, &htlc_source.1, &reason, receiver);
                        }
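                        // Illustrative sketch, not part of this diff: how the retain() above decides whether
                        // an MPP payment is still accumulating parts. `total_msat` comes from the onion
                        // payload; only when the received parts sum to it is the set kept without ticking
                        // the per-part timers toward MPP_TIMEOUT_TICKS.
                        struct MppPartSketch { value_msat: u64 }
                        fn mpp_parts_complete(total_msat: u64, parts: &[MppPartSketch]) -> bool {
                                parts.iter().map(|p| p.value_msat).sum::<u64>() == total_msat
                        }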
 
                        for (err, counterparty_node_id) in handle_errors.drain(..) {
@@ -3852,12 +3891,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        }
 
        /// Indicates that the preimage for payment_hash is unknown or the received amount is incorrect
-       /// after a PaymentReceived event, failing the HTLC back to its origin and freeing resources
+       /// after a PaymentClaimable event, failing the HTLC back to its origin and freeing resources
        /// along the path (including in our own channel on which we received it).
        ///
        /// Note that in some cases around unclean shutdown, it is possible the payment may have
        /// already been claimed by you via [`ChannelManager::claim_funds`] prior to you seeing (a
-       /// second copy of) the [`events::Event::PaymentReceived`] event. Alternatively, the payment
+       /// second copy of) the [`events::Event::PaymentClaimable`] event. Alternatively, the payment
        /// may have already been failed automatically by LDK if it was nearing its expiration time.
        ///
        /// While LDK will never claim a payment automatically on your behalf (i.e. without you calling
@@ -3867,19 +3906,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        pub fn fail_htlc_backwards(&self, payment_hash: &PaymentHash) {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
-               let removed_source = {
-                       let mut channel_state = self.channel_state.lock().unwrap();
-                       channel_state.claimable_htlcs.remove(payment_hash)
-               };
+               let removed_source = self.claimable_htlcs.lock().unwrap().remove(payment_hash);
                if let Some((_, mut sources)) = removed_source {
                        for htlc in sources.drain(..) {
                                let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
                                                self.best_block.read().unwrap().height()));
-                               self.fail_htlc_backwards_internal(
-                                               HTLCSource::PreviousHopData(htlc.prev_hop), payment_hash,
-                                               HTLCFailReason::Reason { failure_code: 0x4000 | 15, data: htlc_msat_height_data },
-                                               HTLCDestination::FailedPayment { payment_hash: *payment_hash });
+                               let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                               let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                               let receiver = HTLCDestination::FailedPayment { payment_hash: *payment_hash };
+                               self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                        }
                }
        }
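        // Illustrative sketch, not part of this diff: the failure data constructed above for
        // 0x4000|15 (incorrect_or_unknown_payment_details) is the HTLC amount in msat as 8
        // big-endian bytes followed by the current best block height as 4 big-endian bytes.
        fn incorrect_payment_details_data_sketch(htlc_value_msat: u64, best_block_height: u32) -> Vec<u8> {
                let mut data = htlc_value_msat.to_be_bytes().to_vec();
                data.extend_from_slice(&best_block_height.to_be_bytes());
                data
        }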
@@ -3938,23 +3974,24 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                &self, mut htlcs_to_fail: Vec<(HTLCSource, PaymentHash)>, channel_id: [u8; 32],
                counterparty_node_id: &PublicKey
        ) {
-               for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
-                       let (failure_code, onion_failure_data) =
-                               match self.channel_state.lock().unwrap().by_id.entry(channel_id) {
-                                       hash_map::Entry::Occupied(chan_entry) => {
-                                               self.get_htlc_inbound_temp_fail_err_and_data(0x1000|7, &chan_entry.get())
-                                       },
-                                       hash_map::Entry::Vacant(_) => (0x4000|10, Vec::new())
-                               };
+               let (failure_code, onion_failure_data) =
+                       match self.channel_state.lock().unwrap().by_id.entry(channel_id) {
+                               hash_map::Entry::Occupied(chan_entry) => {
+                                       self.get_htlc_inbound_temp_fail_err_and_data(0x1000|7, &chan_entry.get())
+                               },
+                               hash_map::Entry::Vacant(_) => (0x4000|10, Vec::new())
+                       };
 
+               for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
+                       let reason = HTLCFailReason::reason(failure_code, onion_failure_data.clone());
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id };
-                       self.fail_htlc_backwards_internal(htlc_src, &payment_hash, HTLCFailReason::Reason { failure_code, data: onion_failure_data }, receiver);
+                       self.fail_htlc_backwards_internal(&htlc_src, &payment_hash, &reason, receiver);
                }
        }
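        // Illustrative sketch, not part of this diff: the failure-code selection hoisted out of the
        // loop above. A channel we still know about yields temporary_channel_failure (0x1000|7)
        // along with its channel_update data; an unknown channel yields unknown_next_peer
        // (0x4000|10) with no data.
        fn holding_cell_failure_sketch(channel_known: bool, channel_update_data: Vec<u8>) -> (u16, Vec<u8>) {
                if channel_known { (0x1000 | 7, channel_update_data) } else { (0x4000 | 10, Vec::new()) }
        }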
 
        /// Fails an HTLC backwards to the node that sent it to us.
        /// Note that we do not assume that channels corresponding to failed HTLCs are still available.
-       fn fail_htlc_backwards_internal(&self, source: HTLCSource, payment_hash: &PaymentHash, onion_error: HTLCFailReason,destination: HTLCDestination) {
+       fn fail_htlc_backwards_internal(&self, source: &HTLCSource, payment_hash: &PaymentHash, onion_error: &HTLCFailReason, destination: HTLCDestination) {
                #[cfg(debug_assertions)]
                {
                        // Ensure that the `channel_state` lock is not held when calling this function.
@@ -3973,13 +4010,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                // from block_connected which may run during initialization prior to the chain_monitor
                // being fully configured. See the docs for `ChannelManagerReadArgs` for more.
                match source {
-                       HTLCSource::OutboundRoute { ref path, session_priv, payment_id, ref payment_params, .. } => {
+                       HTLCSource::OutboundRoute { ref path, ref session_priv, ref payment_id, ref payment_params, .. } => {
                                let mut session_priv_bytes = [0; 32];
                                session_priv_bytes.copy_from_slice(&session_priv[..]);
                                let mut outbounds = self.pending_outbound_payments.lock().unwrap();
                                let mut all_paths_failed = false;
                                let mut full_failure_ev = None;
-                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(payment_id) {
+                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(*payment_id) {
                                        if !payment.get_mut().remove(&session_priv_bytes, Some(&path)) {
                                                log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
                                                return;
@@ -3992,7 +4029,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                all_paths_failed = true;
                                                if payment.get().abandoned() {
                                                        full_failure_ev = Some(events::Event::PaymentFailed {
-                                                               payment_id,
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment.get().payment_hash().expect("PendingOutboundPayments::RetriesExceeded always has a payment hash set"),
                                                        });
                                                        payment.remove();
@@ -4022,13 +4059,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                if self.payment_is_probe(payment_hash, &payment_id) {
                                                        if !payment_retryable {
                                                                events::Event::ProbeSuccessful {
-                                                                       payment_id,
+                                                                       payment_id: *payment_id,
                                                                        payment_hash: payment_hash.clone(),
                                                                        path: path.clone(),
                                                                }
                                                        } else {
                                                                events::Event::ProbeFailed {
-                                                                       payment_id,
+                                                                       payment_id: *payment_id,
                                                                        payment_hash: payment_hash.clone(),
                                                                        path: path.clone(),
                                                                        short_channel_id,
@@ -4042,7 +4079,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                retry.as_mut().map(|r| r.payment_params.previously_failed_channels.push(scid));
                                                        }
                                                        events::Event::PaymentPathFailed {
-                                                               payment_id: Some(payment_id),
+                                                               payment_id: Some(*payment_id),
                                                                payment_hash: payment_hash.clone(),
                                                                payment_failed_permanently: !payment_retryable,
                                                                network_update,
@@ -4075,14 +4112,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                                                if self.payment_is_probe(payment_hash, &payment_id) {
                                                        events::Event::ProbeFailed {
-                                                               payment_id,
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment_hash.clone(),
                                                                path: path.clone(),
                                                                short_channel_id: Some(scid),
                                                        }
                                                } else {
                                                        events::Event::PaymentPathFailed {
-                                                               payment_id: Some(payment_id),
+                                                               payment_id: Some(*payment_id),
                                                                payment_hash: payment_hash.clone(),
                                                                payment_failed_permanently: false,
                                                                network_update: None,
@@ -4102,22 +4139,22 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                pending_events.push(path_failure);
                                if let Some(ev) = full_failure_ev { pending_events.push(ev); }
                        },
-                       HTLCSource::PreviousHopData(HTLCPreviousHopData { short_channel_id, htlc_id, incoming_packet_shared_secret, phantom_shared_secret, outpoint }) => {
+                       HTLCSource::PreviousHopData(HTLCPreviousHopData { ref short_channel_id, ref htlc_id, ref incoming_packet_shared_secret, ref phantom_shared_secret, ref outpoint }) => {
                                let err_packet = match onion_error {
-                                       HTLCFailReason::Reason { failure_code, data } => {
+                                       HTLCFailReason::Reason { ref failure_code, ref data } => {
                                                log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with code {}", log_bytes!(payment_hash.0), failure_code);
                                                if let Some(phantom_ss) = phantom_shared_secret {
-                                                       let phantom_packet = onion_utils::build_failure_packet(&phantom_ss, failure_code, &data[..]).encode();
-                                                       let encrypted_phantom_packet = onion_utils::encrypt_failure_packet(&phantom_ss, &phantom_packet);
-                                                       onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &encrypted_phantom_packet.data[..])
+                                                       let phantom_packet = onion_utils::build_failure_packet(phantom_ss, *failure_code, &data[..]).encode();
+                                                       let encrypted_phantom_packet = onion_utils::encrypt_failure_packet(phantom_ss, &phantom_packet);
+                                                       onion_utils::encrypt_failure_packet(incoming_packet_shared_secret, &encrypted_phantom_packet.data[..])
                                                } else {
-                                                       let packet = onion_utils::build_failure_packet(&incoming_packet_shared_secret, failure_code, &data[..]).encode();
-                                                       onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &packet)
+                                                       let packet = onion_utils::build_failure_packet(incoming_packet_shared_secret, *failure_code, &data[..]).encode();
+                                                       onion_utils::encrypt_failure_packet(incoming_packet_shared_secret, &packet)
                                                }
                                        },
                                        HTLCFailReason::LightningError { err } => {
                                                log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards with pre-built LightningError", log_bytes!(payment_hash.0));
-                                               onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &err.data)
+                                               onion_utils::encrypt_failure_packet(incoming_packet_shared_secret, &err.data)
                                        }
                                };
 
@@ -4126,12 +4163,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                if forward_htlcs.is_empty() {
                                        forward_event = Some(Duration::from_millis(MIN_HTLC_RELAY_HOLDING_CELL_MILLIS));
                                }
-                               match forward_htlcs.entry(short_channel_id) {
+                               match forward_htlcs.entry(*short_channel_id) {
                                        hash_map::Entry::Occupied(mut entry) => {
-                                               entry.get_mut().push(HTLCForwardInfo::FailHTLC { htlc_id, err_packet });
+                                               entry.get_mut().push(HTLCForwardInfo::FailHTLC { htlc_id: *htlc_id, err_packet });
                                        },
                                        hash_map::Entry::Vacant(entry) => {
-                                               entry.insert(vec!(HTLCForwardInfo::FailHTLC { htlc_id, err_packet }));
+                                               entry.insert(vec!(HTLCForwardInfo::FailHTLC { htlc_id: *htlc_id, err_packet }));
                                        }
                                }
                                mem::drop(forward_htlcs);
@@ -4143,13 +4180,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                }
                                pending_events.push(events::Event::HTLCHandlingFailed {
                                        prev_channel_id: outpoint.to_channel_id(),
-                                       failed_next_destination: destination
+                                       failed_next_destination: destination,
                                });
                        },
                }
        }
 
-       /// Provides a payment preimage in response to [`Event::PaymentReceived`], generating any
+       /// Provides a payment preimage in response to [`Event::PaymentClaimable`], generating any
        /// [`MessageSendEvent`]s needed to claim the payment.
        ///
        /// Note that calling this method does *not* guarantee that the payment has been claimed. You
@@ -4157,11 +4194,11 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// provided to your [`EventHandler`] when [`process_pending_events`] is next called.
        ///
        /// Note that if you did not set an `amount_msat` when calling [`create_inbound_payment`] or
-       /// [`create_inbound_payment_for_hash`] you must check that the amount in the `PaymentReceived`
+       /// [`create_inbound_payment_for_hash`] you must check that the amount in the `PaymentClaimable`
        /// event matches your expectation. If you fail to do so and call this method, you may provide
        /// the sender "proof-of-payment" when they did not fulfill the full expected payment.
        ///
-       /// [`Event::PaymentReceived`]: crate::util::events::Event::PaymentReceived
+       /// [`Event::PaymentClaimable`]: crate::util::events::Event::PaymentClaimable
        /// [`Event::PaymentClaimed`]: crate::util::events::Event::PaymentClaimed
        /// [`process_pending_events`]: EventsProvider::process_pending_events
        /// [`create_inbound_payment`]: Self::create_inbound_payment
@@ -4172,7 +4209,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
-               let removed_source = self.channel_state.lock().unwrap().claimable_htlcs.remove(&payment_hash);
+               let removed_source = self.claimable_htlcs.lock().unwrap().remove(&payment_hash);
                if let Some((payment_purpose, mut sources)) = removed_source {
                        assert!(!sources.is_empty());
 
@@ -4194,6 +4231,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        let mut claimed_any_htlcs = false;
                        let mut channel_state_lock = self.channel_state.lock().unwrap();
                        let channel_state = &mut *channel_state_lock;
+                       let mut receiver_node_id = Some(self.our_network_pubkey);
                        for htlc in sources.iter() {
                                let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
                                        Some((_cp_id, chan_id)) => chan_id.clone(),
@@ -4225,6 +4263,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                break;
                                        }
                                }
+                               let phantom_shared_secret = htlc.prev_hop.phantom_shared_secret;
+                               if phantom_shared_secret.is_some() {
+                                       let phantom_pubkey = self.keys_manager.get_node_id(Recipient::PhantomNode)
+                                               .expect("Failed to get node_id for phantom node recipient");
+                                       receiver_node_id = Some(phantom_pubkey)
+                               }
 
                                claimable_amt_msat += htlc.value;
                        }
@@ -4265,15 +4309,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                        htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
                                                self.best_block.read().unwrap().height()));
-                                       self.fail_htlc_backwards_internal(
-                                               HTLCSource::PreviousHopData(htlc.prev_hop), &payment_hash,
-                                               HTLCFailReason::Reason { failure_code: 0x4000|15, data: htlc_msat_height_data },
-                                               HTLCDestination::FailedPayment { payment_hash } );
+                                       let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                                       let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                                       let receiver = HTLCDestination::FailedPayment { payment_hash };
+                                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                                }
                        }
 
                        if claimed_any_htlcs {
                                self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
+                                       receiver_node_id,
                                        payment_hash,
                                        purpose: payment_purpose,
                                        amount_msat: claimable_amt_msat,
@@ -4290,14 +4335,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
        fn claim_funds_from_hop(&self, channel_state_lock: &mut MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage) -> ClaimFundsFromHop {
                //TODO: Delay the claimed_funds relaying just like we do outbound relay!
-               let channel_state = &mut **channel_state_lock;
-               let chan_id = match self.short_to_chan_info.read().unwrap().get(&prev_hop.short_channel_id) {
-                       Some((_cp_id, chan_id)) => chan_id.clone(),
-                       None => {
-                               return ClaimFundsFromHop::PrevHopForceClosed
-                       }
-               };
 
+               let chan_id = prev_hop.outpoint.to_channel_id();
+               let channel_state = &mut **channel_state_lock;
                if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(chan_id) {
                        match chan.get_mut().get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &self.logger) {
                                Ok(msgs_monitor_option) => {
@@ -4490,10 +4530,73 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                self.our_network_pubkey.clone()
        }
 
+       /// Handles a channel reentering a functional state, either due to reconnect or a monitor
+       /// update completion.
+       fn handle_channel_resumption(&self, pending_msg_events: &mut Vec<MessageSendEvent>,
+               channel: &mut Channel<<K::Target as KeysInterface>::Signer>, raa: Option<msgs::RevokeAndACK>,
+               commitment_update: Option<msgs::CommitmentUpdate>, order: RAACommitmentOrder,
+               pending_forwards: Vec<(PendingHTLCInfo, u64)>, funding_broadcastable: Option<Transaction>,
+               channel_ready: Option<msgs::ChannelReady>, announcement_sigs: Option<msgs::AnnouncementSignatures>)
+       -> Option<(u64, OutPoint, u128, Vec<(PendingHTLCInfo, u64)>)> {
+               let mut htlc_forwards = None;
+
+               let counterparty_node_id = channel.get_counterparty_node_id();
+               if !pending_forwards.is_empty() {
+                       htlc_forwards = Some((channel.get_short_channel_id().unwrap_or(channel.outbound_scid_alias()),
+                               channel.get_funding_txo().unwrap(), channel.get_user_id(), pending_forwards));
+               }
+
+               if let Some(msg) = channel_ready {
+                       send_channel_ready!(self, pending_msg_events, channel, msg);
+               }
+               if let Some(msg) = announcement_sigs {
+                       pending_msg_events.push(events::MessageSendEvent::SendAnnouncementSignatures {
+                               node_id: counterparty_node_id,
+                               msg,
+                       });
+               }
+
+               emit_channel_ready_event!(self, channel);
+
+               macro_rules! handle_cs { () => {
+                       if let Some(update) = commitment_update {
+                               pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
+                                       node_id: counterparty_node_id,
+                                       updates: update,
+                               });
+                       }
+               } }
+               macro_rules! handle_raa { () => {
+                       if let Some(revoke_and_ack) = raa {
+                               pending_msg_events.push(events::MessageSendEvent::SendRevokeAndACK {
+                                       node_id: counterparty_node_id,
+                                       msg: revoke_and_ack,
+                               });
+                       }
+               } }
+               match order {
+                       RAACommitmentOrder::CommitmentFirst => {
+                               handle_cs!();
+                               handle_raa!();
+                       },
+                       RAACommitmentOrder::RevokeAndACKFirst => {
+                               handle_raa!();
+                               handle_cs!();
+                       },
+               }
+
+               if let Some(tx) = funding_broadcastable {
+                       log_info!(self.logger, "Broadcasting funding transaction with txid {}", tx.txid());
+                       self.tx_broadcaster.broadcast_transaction(&tx);
+               }
+
+               htlc_forwards
+       }
+
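        // Illustrative sketch, not part of this diff: the ordering rule handle_channel_resumption
        // enforces via handle_cs!/handle_raa! above. Both messages are queued when present;
        // RAACommitmentOrder only controls which is pushed first.
        enum OrderSketch { CommitmentFirst, RevokeAndACKFirst }
        fn queue_in_order_sketch<T>(order: OrderSketch, commitment: Option<T>, raa: Option<T>, out: &mut Vec<T>) {
                match order {
                        OrderSketch::CommitmentFirst => { out.extend(commitment); out.extend(raa); },
                        OrderSketch::RevokeAndACKFirst => { out.extend(raa); out.extend(commitment); },
                }
        }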
        fn channel_monitor_updated(&self, funding_txo: &OutPoint, highest_applied_update_id: u64) {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
-               let chan_restoration_res;
+               let htlc_forwards;
                let (mut pending_failures, finalized_claims, counterparty_node_id) = {
                        let mut channel_lock = self.channel_state.lock().unwrap();
                        let channel_state = &mut *channel_lock;
@@ -4520,18 +4623,20 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        })
                                } else { None }
                        } else { None };
-                       chan_restoration_res = handle_chan_restoration_locked!(self, channel_lock, channel_state, channel, updates.raa, updates.commitment_update, updates.order, None, updates.accepted_htlcs, updates.funding_broadcastable, updates.channel_ready, updates.announcement_sigs);
+                       htlc_forwards = self.handle_channel_resumption(&mut channel_state.pending_msg_events, channel.get_mut(), updates.raa, updates.commitment_update, updates.order, updates.accepted_htlcs, updates.funding_broadcastable, updates.channel_ready, updates.announcement_sigs);
                        if let Some(upd) = channel_update {
                                channel_state.pending_msg_events.push(upd);
                        }
 
                        (updates.failed_htlcs, updates.finalized_claimed_htlcs, counterparty_node_id)
                };
-               post_handle_chan_restoration!(self, chan_restoration_res);
+               if let Some(forwards) = htlc_forwards {
+                       self.forward_htlcs(&mut [forwards][..]);
+               }
                self.finalize_claims(finalized_claims);
                for failure in pending_failures.drain(..) {
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id: funding_txo.to_channel_id() };
-                       self.fail_htlc_backwards_internal(failure.0, &failure.1, failure.2, receiver);
+                       self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
                }
        }
 
@@ -4899,7 +5004,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                };
                for htlc_source in dropped_htlcs.drain(..) {
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                       self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
                }
 
                let _ = handle_error!(self, result, *counterparty_node_id);
@@ -5044,7 +5150,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let chan_err: ChannelError = ChannelError::Close("Got update_fail_malformed_htlc with BADONION not set".to_owned());
                                        try_chan_entry!(self, Err(chan_err), chan);
                                }
-                               try_chan_entry!(self, chan.get_mut().update_fail_malformed_htlc(&msg, HTLCFailReason::Reason { failure_code: msg.failure_code, data: Vec::new() }), chan);
+                               try_chan_entry!(self, chan.get_mut().update_fail_malformed_htlc(&msg, HTLCFailReason::from_failure_code(msg.failure_code)), chan);
                                Ok(())
                        },
                        hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
@@ -5099,31 +5205,85 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        }
 
        #[inline]
-       fn forward_htlcs(&self, per_source_pending_forwards: &mut [(u64, OutPoint, Vec<(PendingHTLCInfo, u64)>)]) {
-               for &mut (prev_short_channel_id, prev_funding_outpoint, ref mut pending_forwards) in per_source_pending_forwards {
+       fn forward_htlcs(&self, per_source_pending_forwards: &mut [(u64, OutPoint, u128, Vec<(PendingHTLCInfo, u64)>)]) {
+               for &mut (prev_short_channel_id, prev_funding_outpoint, prev_user_channel_id, ref mut pending_forwards) in per_source_pending_forwards {
                        let mut forward_event = None;
+                       let mut new_intercept_events = Vec::new();
+                       let mut failed_intercept_forwards = Vec::new();
                        if !pending_forwards.is_empty() {
-                               let mut forward_htlcs = self.forward_htlcs.lock().unwrap();
-                               if forward_htlcs.is_empty() {
-                                       forward_event = Some(Duration::from_millis(MIN_HTLC_RELAY_HOLDING_CELL_MILLIS))
-                               }
                                for (forward_info, prev_htlc_id) in pending_forwards.drain(..) {
-                                       match forward_htlcs.entry(match forward_info.routing {
-                                                       PendingHTLCRouting::Forward { short_channel_id, .. } => short_channel_id,
-                                                       PendingHTLCRouting::Receive { .. } => 0,
-                                                       PendingHTLCRouting::ReceiveKeysend { .. } => 0,
-                                       }) {
+                                       let scid = match forward_info.routing {
+                                               PendingHTLCRouting::Forward { short_channel_id, .. } => short_channel_id,
+                                               PendingHTLCRouting::Receive { .. } => 0,
+                                               PendingHTLCRouting::ReceiveKeysend { .. } => 0,
+                                       };
+                                       // Pull this now to avoid introducing a lock order with `forward_htlcs`.
+                                       let is_our_scid = self.short_to_chan_info.read().unwrap().contains_key(&scid);
+
+                                       let mut forward_htlcs = self.forward_htlcs.lock().unwrap();
+                                       let forward_htlcs_empty = forward_htlcs.is_empty();
+                                       match forward_htlcs.entry(scid) {
                                                hash_map::Entry::Occupied(mut entry) => {
                                                        entry.get_mut().push(HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                               prev_short_channel_id, prev_funding_outpoint, prev_htlc_id, forward_info }));
+                                                               prev_short_channel_id, prev_funding_outpoint, prev_htlc_id, prev_user_channel_id, forward_info }));
                                                },
                                                hash_map::Entry::Vacant(entry) => {
-                                                       entry.insert(vec!(HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                               prev_short_channel_id, prev_funding_outpoint, prev_htlc_id, forward_info })));
+                                                       if !is_our_scid && forward_info.incoming_amt_msat.is_some() &&
+                                                          fake_scid::is_valid_intercept(&self.fake_scid_rand_bytes, scid, &self.genesis_hash)
+                                                       {
+                                                               let intercept_id = InterceptId(Sha256::hash(&forward_info.incoming_shared_secret).into_inner());
+                                                               let mut pending_intercepts = self.pending_intercepted_htlcs.lock().unwrap();
+                                                               match pending_intercepts.entry(intercept_id) {
+                                                                       hash_map::Entry::Vacant(entry) => {
+                                                                               new_intercept_events.push(events::Event::HTLCIntercepted {
+                                                                                       requested_next_hop_scid: scid,
+                                                                                       payment_hash: forward_info.payment_hash,
+                                                                                       inbound_amount_msat: forward_info.incoming_amt_msat.unwrap(),
+                                                                                       expected_outbound_amount_msat: forward_info.outgoing_amt_msat,
+                                                                                       intercept_id
+                                                                               });
+                                                                               entry.insert(PendingAddHTLCInfo {
+                                                                                       prev_short_channel_id, prev_funding_outpoint, prev_htlc_id, prev_user_channel_id, forward_info });
+                                                                       },
+                                                                       hash_map::Entry::Occupied(_) => {
+                                                                               log_info!(self.logger, "Failed to forward incoming HTLC: detected duplicate intercepted payment over short channel id {}", scid);
+                                                                               let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+                                                                                       short_channel_id: prev_short_channel_id,
+                                                                                       outpoint: prev_funding_outpoint,
+                                                                                       htlc_id: prev_htlc_id,
+                                                                                       incoming_packet_shared_secret: forward_info.incoming_shared_secret,
+                                                                                       phantom_shared_secret: None,
+                                                                               });
+
+                                                                               failed_intercept_forwards.push((htlc_source, forward_info.payment_hash,
+                                                                                               HTLCFailReason::from_failure_code(0x4000 | 10),
+                                                                                               HTLCDestination::InvalidForward { requested_forward_scid: scid },
+                                                                               ));
+                                                                       }
+                                                               }
+                                                       } else {
+                                                               // We don't want to generate a PendingHTLCsForwardable event if only intercepted
+                                                               // payments are being processed.
+                                                               if forward_htlcs_empty {
+                                                                       forward_event = Some(Duration::from_millis(MIN_HTLC_RELAY_HOLDING_CELL_MILLIS));
+                                                               }
+                                                               entry.insert(vec!(HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
+                                                                       prev_short_channel_id, prev_funding_outpoint, prev_htlc_id, prev_user_channel_id, forward_info })));
+                                                       }
                                                }
                                        }
                                }
                        }
+
+                       for (htlc_source, payment_hash, failure_reason, destination) in failed_intercept_forwards.drain(..) {
+                               self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
+                       }
+
+                       if !new_intercept_events.is_empty() {
+                               let mut events = self.pending_events.lock().unwrap();
+                               events.append(&mut new_intercept_events);
+                       }
+
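                        // Illustrative sketch, not part of this diff: an InterceptId, as constructed above,
                        // is just the SHA-256 of the HTLC's incoming onion shared secret, so a given HTLC
                        // always maps back to the same identifier. Assumes Sha256 and the Hash trait are in
                        // scope as they are elsewhere in this file.
                        fn intercept_id_sketch(incoming_shared_secret: &[u8; 32]) -> InterceptId {
                                InterceptId(Sha256::hash(incoming_shared_secret).into_inner())
                        }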
                        match forward_event {
                                Some(time) => {
                                        let mut pending_events = self.pending_events.lock().unwrap();
@@ -5178,7 +5338,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                        raa_updates.finalized_claimed_htlcs,
                                                        chan.get().get_short_channel_id()
                                                                .unwrap_or(chan.get().outbound_scid_alias()),
-                                                       chan.get().get_funding_txo().unwrap()))
+                                                       chan.get().get_funding_txo().unwrap(),
+                                                       chan.get().get_user_id()))
                                },
                                hash_map::Entry::Vacant(_) => break Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
                        }
@@ -5186,13 +5347,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                self.fail_holding_cell_htlcs(htlcs_to_fail, msg.channel_id, counterparty_node_id);
                match res {
                        Ok((pending_forwards, mut pending_failures, finalized_claim_htlcs,
-                               short_channel_id, channel_outpoint)) =>
+                               short_channel_id, channel_outpoint, user_channel_id)) =>
                        {
                                for failure in pending_failures.drain(..) {
                                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: channel_outpoint.to_channel_id() };
-                                       self.fail_htlc_backwards_internal(failure.0, &failure.1, failure.2, receiver);
+                                       self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
                                }
-                               self.forward_htlcs(&mut [(short_channel_id, channel_outpoint, pending_forwards)]);
+                               self.forward_htlcs(&mut [(short_channel_id, channel_outpoint, user_channel_id, pending_forwards)]);
                                self.finalize_claims(finalized_claim_htlcs);
                                Ok(())
                        },
@@ -5278,8 +5439,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        }
 
        fn internal_channel_reestablish(&self, counterparty_node_id: &PublicKey, msg: &msgs::ChannelReestablish) -> Result<(), MsgHandleErrInternal> {
-               let chan_restoration_res;
-               let (htlcs_failed_forward, need_lnd_workaround) = {
+               let htlc_forwards;
+               let need_lnd_workaround = {
                        let mut channel_state_lock = self.channel_state.lock().unwrap();
                        let channel_state = &mut *channel_state_lock;
 
@@ -5313,19 +5474,21 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                }
                                        }
                                        let need_lnd_workaround = chan.get_mut().workaround_lnd_bug_4006.take();
-                                       chan_restoration_res = handle_chan_restoration_locked!(
-                                               self, channel_state_lock, channel_state, chan, responses.raa, responses.commitment_update, responses.order,
-                                               responses.mon_update, Vec::new(), None, responses.channel_ready, responses.announcement_sigs);
+                                       htlc_forwards = self.handle_channel_resumption(
+                                               &mut channel_state.pending_msg_events, chan.get_mut(), responses.raa, responses.commitment_update, responses.order,
+                                               Vec::new(), None, responses.channel_ready, responses.announcement_sigs);
                                        if let Some(upd) = channel_update {
                                                channel_state.pending_msg_events.push(upd);
                                        }
-                                       (responses.holding_cell_failed_htlcs, need_lnd_workaround)
+                                       need_lnd_workaround
                                },
                                hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
                        }
                };
-               post_handle_chan_restoration!(self, chan_restoration_res);
-               self.fail_holding_cell_htlcs(htlcs_failed_forward, msg.channel_id, counterparty_node_id);
+
+               if let Some(forwards) = htlc_forwards {
+                       self.forward_htlcs(&mut [forwards][..]);
+               }
 
                if let Some(channel_ready_msg) = need_lnd_workaround {
                        self.internal_channel_ready(counterparty_node_id, &channel_ready_msg)?;
@@ -5348,7 +5511,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                } else {
                                                        log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
                                                        let receiver = HTLCDestination::NextHopChannel { node_id: counterparty_node_id, channel_id: funding_outpoint.to_channel_id() };
-                                                       self.fail_htlc_backwards_internal(htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                                                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                                                       self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
                                                }
                                        },
                                        MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
@@ -5584,8 +5748,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// This differs from [`create_inbound_payment_for_hash`] only in that it generates the
        /// [`PaymentHash`] and [`PaymentPreimage`] for you.
        ///
-       /// The [`PaymentPreimage`] will ultimately be returned to you in the [`PaymentReceived`], which
-       /// will have the [`PaymentReceived::payment_preimage`] field filled in. That should then be
+       /// The [`PaymentPreimage`] will ultimately be returned to you in the [`PaymentClaimable`] event,
+       /// which will have the [`PaymentClaimable::payment_preimage`] field filled in. That should then be
        /// passed directly to [`claim_funds`].
        ///
        /// See [`create_inbound_payment_for_hash`] for detailed documentation on behavior and requirements.
@@ -5601,8 +5765,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Errors if `min_value_msat` is greater than total bitcoin supply.
        ///
        /// [`claim_funds`]: Self::claim_funds
-       /// [`PaymentReceived`]: events::Event::PaymentReceived
-       /// [`PaymentReceived::payment_preimage`]: events::Event::PaymentReceived::payment_preimage
+       /// [`PaymentClaimable`]: events::Event::PaymentClaimable
+       /// [`PaymentClaimable::payment_preimage`]: events::Event::PaymentClaimable::payment_preimage
        /// [`create_inbound_payment_for_hash`]: Self::create_inbound_payment_for_hash
        pub fn create_inbound_payment(&self, min_value_msat: Option<u64>, invoice_expiry_delta_secs: u32) -> Result<(PaymentHash, PaymentSecret), ()> {
                inbound_payment::create(&self.inbound_payment_key, min_value_msat, invoice_expiry_delta_secs, &self.keys_manager, self.highest_seen_timestamp.load(Ordering::Acquire) as u64)
@@ -5628,7 +5792,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Gets a [`PaymentSecret`] for a given [`PaymentHash`], for which the payment preimage is
        /// stored external to LDK.
        ///
-       /// A [`PaymentReceived`] event will only be generated if the [`PaymentSecret`] matches a
+       /// A [`PaymentClaimable`] event will only be generated if the [`PaymentSecret`] matches a
        /// payment secret fetched via this method or [`create_inbound_payment`], and the payment amount
        /// is at least the `min_value_msat` provided here, if one is provided.
        ///
@@ -5638,7 +5802,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        ///
        /// `min_value_msat` should be set if the invoice being generated contains a value. Any payment
        /// received for the returned [`PaymentHash`] will be required to be at least `min_value_msat`
-       /// before a [`PaymentReceived`] event will be generated, ensuring that we do not provide the
+       /// before a [`PaymentClaimable`] event will be generated, ensuring that we do not provide the
        /// sender "proof-of-payment" unless they have paid the required amount.
        ///
        /// `invoice_expiry_delta_secs` describes the number of seconds that the invoice is valid for
@@ -5649,9 +5813,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        ///
        /// Note that we use block header time to time-out pending inbound payments (with some margin
        /// to compensate for the inaccuracy of block header timestamps). Thus, in practice we will
-       /// accept a payment and generate a [`PaymentReceived`] event for some time after the expiry.
+       /// accept a payment and generate a [`PaymentClaimable`] event for some time after the expiry.
        /// If you need exact expiry semantics, you should enforce them upon receipt of
-       /// [`PaymentReceived`].
+       /// [`PaymentClaimable`].
        ///
        /// Note that invoices generated for inbound payments should have their `min_final_cltv_expiry`
        /// set to at least [`MIN_FINAL_CLTV_EXPIRY`].
@@ -5667,7 +5831,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Errors if `min_value_msat` is greater than total bitcoin supply.
        ///
        /// [`create_inbound_payment`]: Self::create_inbound_payment
-       /// [`PaymentReceived`]: events::Event::PaymentReceived
+       /// [`PaymentClaimable`]: events::Event::PaymentClaimable
        pub fn create_inbound_payment_for_hash(&self, payment_hash: PaymentHash, min_value_msat: Option<u64>, invoice_expiry_delta_secs: u32) -> Result<PaymentSecret, ()> {
                inbound_payment::create_from_hash(&self.inbound_payment_key, min_value_msat, payment_hash, invoice_expiry_delta_secs, self.highest_seen_timestamp.load(Ordering::Acquire) as u64)
        }
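        // Minimal usage sketch, not part of this diff; assumes a configured `channel_manager` and a
        // `payment_hash` whose preimage is held outside LDK. The returned PaymentSecret goes into the
        // invoice, and a PaymentClaimable event is only generated for payments of at least 10_000 msat
        // received before the one-hour expiry:
        //
        //      let payment_secret = channel_manager
        //              .create_inbound_payment_for_hash(payment_hash, Some(10_000), 3600)
        //              .expect("min_value_msat is far below the total bitcoin supply");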
@@ -5722,6 +5886,23 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                }
        }
 
+       /// Gets a fake short channel id for use in receiving intercepted payments. These fake scids are
+       /// used when constructing the route hints for HTLCs intended to be intercepted. See
+       /// [`ChannelManager::forward_intercepted_htlc`].
+       ///
+       /// Note that this method is not guaranteed to return unique values; you may need to call it a few
+       /// times to get a unique scid.
+       pub fn get_intercept_scid(&self) -> u64 {
+               let best_block_height = self.best_block.read().unwrap().height();
+               let short_to_chan_info = self.short_to_chan_info.read().unwrap();
+               loop {
+                       let scid_candidate = fake_scid::Namespace::Intercept.get_fake_scid(best_block_height, &self.genesis_hash, &self.fake_scid_rand_bytes, &self.keys_manager);
+                       // Ensure the generated scid doesn't conflict with a real channel.
+                       if short_to_chan_info.contains_key(&scid_candidate) { continue }
+                       return scid_candidate
+               }
+       }
+
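        // Illustrative sketch, not part of this diff: the intercept scid is typically placed in an
        // invoice route hint pointing at our node, so the payer routes the HTLC to an scid we will
        // later see in Event::HTLCIntercepted. Assumes RouteHint/RouteHintHop/RoutingFees from
        // lightning::routing are in scope; the fee and cltv values here are placeholders.
        fn intercept_route_hint_sketch(our_node_id: PublicKey, intercept_scid: u64) -> RouteHint {
                RouteHint(vec![RouteHintHop {
                        src_node_id: our_node_id,
                        short_channel_id: intercept_scid,
                        fees: RoutingFees { base_msat: 0, proportional_millionths: 0 },
                        cltv_expiry_delta: 72,
                        htlc_minimum_msat: None,
                        htlc_maximum_msat: None,
                }])
        }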
        /// Gets inflight HTLC information by processing pending outbound payments that are in
        /// our channels. May be used during pathfinding to account for in-use channel liquidity.
        pub fn compute_inflight_htlcs(&self) -> InFlightHtlcs {
@@ -6023,9 +6204,8 @@ where
                                if let Ok((channel_ready_opt, mut timed_out_pending_htlcs, announcement_sigs)) = res {
                                        for (source, payment_hash) in timed_out_pending_htlcs.drain(..) {
                                                let (failure_code, data) = self.get_htlc_inbound_temp_fail_err_and_data(0x1000|14 /* expiry_too_soon */, &channel);
-                                               timed_out_htlcs.push((source, payment_hash, HTLCFailReason::Reason {
-                                                       failure_code, data,
-                                               }, HTLCDestination::NextHopChannel { node_id: Some(channel.get_counterparty_node_id()), channel_id: channel.channel_id() }));
+                                               timed_out_htlcs.push((source, payment_hash, HTLCFailReason::reason(failure_code, data),
+                                                       HTLCDestination::NextHopChannel { node_id: Some(channel.get_counterparty_node_id()), channel_id: channel.channel_id() }));
                                        }
                                        if let Some(channel_ready) = channel_ready_opt {
                                                send_channel_ready!(self, pending_msg_events, channel, channel_ready);
@@ -6099,34 +6279,56 @@ where
                                }
                                true
                        });
+               }
 
-                       if let Some(height) = height_opt {
-                               channel_state.claimable_htlcs.retain(|payment_hash, (_, htlcs)| {
-                                       htlcs.retain(|htlc| {
-                                               // If height is approaching the number of blocks we think it takes us to get
-                                               // our commitment transaction confirmed before the HTLC expires, plus the
-                                               // number of blocks we generally consider it to take to do a commitment update,
-                                               // just give up on it and fail the HTLC.
-                                               if height >= htlc.cltv_expiry - HTLC_FAIL_BACK_BUFFER {
-                                                       let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
-                                                       htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(height));
-
-                                                       timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(), HTLCFailReason::Reason {
-                                                               failure_code: 0x4000 | 15,
-                                                               data: htlc_msat_height_data
-                                                       }, HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
-                                                       false
-                                               } else { true }
-                                       });
-                                       !htlcs.is_empty() // Only retain this entry if htlcs has at least one entry.
+               if let Some(height) = height_opt {
+                       self.claimable_htlcs.lock().unwrap().retain(|payment_hash, (_, htlcs)| {
+                               htlcs.retain(|htlc| {
+                                       // If height is approaching the number of blocks we think it takes us to get
+                                       // our commitment transaction confirmed before the HTLC expires, plus the
+                                       // number of blocks we generally consider it to take to do a commitment update,
+                                       // just give up on it and fail the HTLC.
+                                       if height >= htlc.cltv_expiry - HTLC_FAIL_BACK_BUFFER {
+                                               let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
+                                               htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(height));
+
+                                               timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(),
+                                                       HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
+                                                       HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
+                                               false
+                                       } else { true }
                                });
-                       }
+                               !htlcs.is_empty() // Only retain this entry if htlcs has at least one entry.
+                       });
+
+                       let mut intercepted_htlcs = self.pending_intercepted_htlcs.lock().unwrap();
+                       intercepted_htlcs.retain(|_, htlc| {
+                               if height >= htlc.forward_info.outgoing_cltv_value - HTLC_FAIL_BACK_BUFFER {
+                                       let prev_hop_data = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+                                               short_channel_id: htlc.prev_short_channel_id,
+                                               htlc_id: htlc.prev_htlc_id,
+                                               incoming_packet_shared_secret: htlc.forward_info.incoming_shared_secret,
+                                               phantom_shared_secret: None,
+                                               outpoint: htlc.prev_funding_outpoint,
+                                       });
+
+                                       let requested_forward_scid /* intercept scid */ = match htlc.forward_info.routing {
+                                               PendingHTLCRouting::Forward { short_channel_id, .. } => short_channel_id,
+                                               _ => unreachable!(),
+                                       };
+                                       timed_out_htlcs.push((prev_hop_data, htlc.forward_info.payment_hash,
+                                                       HTLCFailReason::from_failure_code(0x2000 | 2),
+                                                       HTLCDestination::InvalidForward { requested_forward_scid }));
+                                       log_trace!(self.logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
+                                       false
+                               } else { true }
+                       });
                }
 
                self.handle_init_event_channel_failures(failed_channels);
 
                for (source, payment_hash, reason, destination) in timed_out_htlcs.drain(..) {
-                       self.fail_htlc_backwards_internal(source, &payment_hash, reason, destination);
+                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, destination);
                }
        }
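Both retain loops above apply the same cut-off: once the chain tip comes within `HTLC_FAIL_BACK_BUFFER` blocks of an HTLC's CLTV expiry, holding it any longer risks a forced on-chain claim, so it is failed back instead. A minimal sketch of that check, using an assumed buffer value purely for illustration (the real constant is derived elsewhere in this crate):

```rust
/// Assumed value for illustration only; the real HTLC_FAIL_BACK_BUFFER is derived from
/// LDK's claim and latency buffers elsewhere in the crate.
const HTLC_FAIL_BACK_BUFFER: u32 = 24;

/// True once an HTLC should be failed back rather than held: the current height is
/// within the buffer of its CLTV expiry.
fn should_fail_back(current_height: u32, htlc_cltv_expiry: u32) -> bool {
	current_height >= htlc_cltv_expiry.saturating_sub(HTLC_FAIL_BACK_BUFFER)
}

fn main() {
	// With the assumed 24-block buffer, an HTLC expiring at height 800_100 starts
	// being failed back at height 800_076.
	assert!(!should_fail_back(800_075, 800_100));
	assert!(should_fail_back(800_076, 800_100));
}
```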
 
@@ -6174,7 +6376,7 @@ where
        }
 }
 
-impl<M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
+impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
        ChannelMessageHandler for ChannelManager<M, T, K, F, L>
        where M::Target: chain::Watch<<K::Target as KeysInterface>::Signer>,
         T::Target: BroadcasterInterface,
@@ -6506,6 +6708,7 @@ impl Writeable for ChannelDetails {
                        (6, self.funding_txo, option),
                        (7, self.config, option),
                        (8, self.short_channel_id, option),
+                       (9, self.confirmations, option),
                        (10, self.channel_value_satoshis, required),
                        (12, self.unspendable_punishment_reserve, option),
                        (14, user_channel_id_low, required),
@@ -6540,6 +6743,7 @@ impl Readable for ChannelDetails {
                        (6, funding_txo, option),
                        (7, config, option),
                        (8, short_channel_id, option),
+                       (9, confirmations, option),
                        (10, channel_value_satoshis, required),
                        (12, unspendable_punishment_reserve, option),
                        (14, user_channel_id_low, required),
@@ -6583,6 +6787,7 @@ impl Readable for ChannelDetails {
                        next_outbound_htlc_limit_msat: next_outbound_htlc_limit_msat.0.unwrap(),
                        inbound_capacity_msat: inbound_capacity_msat.0.unwrap(),
                        confirmations_required,
+                       confirmations,
                        force_close_spend_delay,
                        is_outbound: is_outbound.0.unwrap(),
                        is_channel_ready: is_channel_ready.0.unwrap(),
@@ -6846,6 +7051,7 @@ impl_writeable_tlv_based_enum!(HTLCFailReason,
 
 impl_writeable_tlv_based!(PendingAddHTLCInfo, {
        (0, forward_info, required),
+       (1, prev_user_channel_id, (default_value, 0)),
        (2, prev_short_channel_id, required),
        (4, prev_htlc_id, required),
        (6, prev_funding_outpoint, required),
@@ -6940,10 +7146,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelMana
                        }
                }
 
-               let channel_state = self.channel_state.lock().unwrap();
+               let pending_inbound_payments = self.pending_inbound_payments.lock().unwrap();
+               let claimable_htlcs = self.claimable_htlcs.lock().unwrap();
+               let pending_outbound_payments = self.pending_outbound_payments.lock().unwrap();
+
                let mut htlc_purposes: Vec<&events::PaymentPurpose> = Vec::new();
-               (channel_state.claimable_htlcs.len() as u64).write(writer)?;
-               for (payment_hash, (purpose, previous_hops)) in channel_state.claimable_htlcs.iter() {
+               (claimable_htlcs.len() as u64).write(writer)?;
+               for (payment_hash, (purpose, previous_hops)) in claimable_htlcs.iter() {
                        payment_hash.write(writer)?;
                        (previous_hops.len() as u64).write(writer)?;
                        for htlc in previous_hops.iter() {
@@ -6960,8 +7169,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelMana
                        peer_state.latest_features.write(writer)?;
                }
 
-               let pending_inbound_payments = self.pending_inbound_payments.lock().unwrap();
-               let pending_outbound_payments = self.pending_outbound_payments.lock().unwrap();
                let events = self.pending_events.lock().unwrap();
                (events.len() as u64).write(writer)?;
                for event in events.iter() {
@@ -7024,8 +7231,15 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelMana
                                _ => {},
                        }
                }
+
+               let mut pending_intercepted_htlcs = None;
+               let our_pending_intercepts = self.pending_intercepted_htlcs.lock().unwrap();
+               if !our_pending_intercepts.is_empty() {
+                       pending_intercepted_htlcs = Some(our_pending_intercepts);
+               }
                write_tlv_fields!(writer, {
                        (1, pending_outbound_payments_no_retry, required),
+                       (2, pending_intercepted_htlcs, option),
                        (3, pending_outbound_payments, required),
                        (5, self.our_network_pubkey, required),
                        (7, self.fake_scid_rand_bytes, required),
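Note the gating above: TLV type 2 is even, so under the usual odd-types-are-optional convention a reader that does not understand the field must refuse to deserialize. Writing it only when the map is non-empty (and defaulting it to an empty map on read, as in the hunk below) keeps ChannelManager serializations readable by older versions whenever nothing is pending interception. A minimal sketch of that pattern, with a hypothetical helper name:

```rust
use std::collections::HashMap;

/// Hypothetical helper: hand an `option` TLV field `Some` only when there is something
/// to record, so the (even, hence required-if-present) type is simply omitted otherwise
/// and older readers remain able to deserialize.
fn nonempty_for_tlv<K, V>(pending: &HashMap<K, V>) -> Option<&HashMap<K, V>> {
	if pending.is_empty() { None } else { Some(pending) }
}
```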
@@ -7348,12 +7562,14 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                // pending_outbound_payments_no_retry is for compatibility with 0.0.101 clients.
                let mut pending_outbound_payments_no_retry: Option<HashMap<PaymentId, HashSet<[u8; 32]>>> = None;
                let mut pending_outbound_payments = None;
+               let mut pending_intercepted_htlcs: Option<HashMap<InterceptId, PendingAddHTLCInfo>> = Some(HashMap::new());
                let mut received_network_pubkey: Option<PublicKey> = None;
                let mut fake_scid_rand_bytes: Option<[u8; 32]> = None;
                let mut probing_cookie_secret: Option<[u8; 32]> = None;
                let mut claimable_htlc_purposes = None;
                read_tlv_fields!(reader, {
                        (1, pending_outbound_payments_no_retry, option),
+                       (2, pending_intercepted_htlcs, option),
                        (3, pending_outbound_payments, option),
                        (5, received_network_pubkey, option),
                        (7, fake_scid_rand_bytes, option),
@@ -7552,6 +7768,13 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                if let Some((payment_purpose, claimable_htlcs)) = claimable_htlcs.remove(&payment_hash) {
                                        log_info!(args.logger, "Re-claiming HTLCs with payment hash {} as we've released the preimage to a ChannelMonitor!", log_bytes!(payment_hash.0));
                                        let mut claimable_amt_msat = 0;
+                                       let mut receiver_node_id = Some(our_network_pubkey);
+                                       let phantom_shared_secret = claimable_htlcs[0].prev_hop.phantom_shared_secret;
+                                       if phantom_shared_secret.is_some() {
+                                               let phantom_pubkey = args.keys_manager.get_node_id(Recipient::PhantomNode)
+                                                       .expect("Failed to get node_id for phantom node recipient");
+                                               receiver_node_id = Some(phantom_pubkey);
+                                       }
                                        for claimable_htlc in claimable_htlcs {
                                                claimable_amt_msat += claimable_htlc.value;
 
@@ -7579,6 +7802,7 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                                }
                                        }
                                        pending_events_read.push(events::Event::PaymentClaimed {
+                                               receiver_node_id,
                                                payment_hash,
                                                purpose: payment_purpose,
                                                amount_msat: claimable_amt_msat,
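Consumers of the event queue see the new field on `PaymentClaimed`. A minimal sketch of a handler that uses it, assuming the event type still lives at `lightning::util::events::Event` as in this release line; `receiver_node_id` will be `None` for claims recorded before the field existed:

```rust
use lightning::util::events::Event;

/// Logs claimed payments, distinguishing claims against our regular node id from claims
/// against a phantom-node id (multi-node receive).
fn handle_event(event: &Event) {
	if let Event::PaymentClaimed { receiver_node_id, payment_hash, amount_msat, .. } = event {
		println!(
			"claimed {} msat for payment {:?} (receiver node id: {:?})",
			amount_msat, payment_hash, receiver_node_id
		);
	}
}
```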
@@ -7597,14 +7821,15 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
 
                        channel_state: Mutex::new(ChannelHolder {
                                by_id,
-                               claimable_htlcs,
                                pending_msg_events: Vec::new(),
                        }),
                        inbound_payment_key: expanded_inbound_key,
                        pending_inbound_payments: Mutex::new(pending_inbound_payments),
                        pending_outbound_payments: Mutex::new(pending_outbound_payments.unwrap()),
+                       pending_intercepted_htlcs: Mutex::new(pending_intercepted_htlcs.unwrap()),
 
                        forward_htlcs: Mutex::new(forward_htlcs),
+                       claimable_htlcs: Mutex::new(claimable_htlcs),
                        outbound_scid_aliases: Mutex::new(outbound_scid_aliases),
                        id_to_peer: Mutex::new(id_to_peer),
                        short_to_chan_info: FairRwLock::new(short_to_chan_info),
@@ -7633,7 +7858,8 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                for htlc_source in failed_htlcs.drain(..) {
                        let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
-                       channel_manager.fail_htlc_backwards_internal(source, &payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                       channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
 
                //TODO: Broadcast channel update for closed channels, but only after we've made a
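The numeric constants passed to `from_failure_code` and `reason` throughout this patch are BOLT 4 onion failure codes: a failure type OR'd with flag bits. For reference, a sketch of the flag bits and the specific codes used above, with names taken from the BOLT 4 table:

```rust
// BOLT 4 failure-code flag bits.
const BADONION: u16 = 0x8000; // the onion was unparsable by the failing node
const PERM: u16 = 0x4000;     // permanent failure
const NODE: u16 = 0x2000;     // node-level rather than channel-level failure
const UPDATE: u16 = 0x1000;   // failure message carries a channel_update

// Codes appearing in this patch:
const EXPIRY_TOO_SOON: u16 = UPDATE | 14;                     // 0x1000 | 14
const INCORRECT_OR_UNKNOWN_PAYMENT_DETAILS: u16 = PERM | 15;  // 0x4000 | 15
const TEMPORARY_NODE_FAILURE: u16 = NODE | 2;                 // 0x2000 | 2
const PERMANENT_CHANNEL_FAILURE: u16 = PERM | 8;              // 0x4000 | 8
```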
@@ -8380,7 +8606,7 @@ pub mod bench {
                                $node_b.handle_revoke_and_ack(&$node_a.get_our_node_id(), &get_event_msg!(NodeHolder { node: &$node_a }, MessageSendEvent::SendRevokeAndACK, $node_b.get_our_node_id()));
 
                                expect_pending_htlcs_forwardable!(NodeHolder { node: &$node_b });
-                               expect_payment_received!(NodeHolder { node: &$node_b }, payment_hash, payment_secret, 10_000);
+                               expect_payment_claimable!(NodeHolder { node: &$node_b }, payment_hash, payment_secret, 10_000);
                                $node_b.claim_funds(payment_preimage);
                                expect_payment_claimed!(NodeHolder { node: &$node_b }, payment_hash, 10_000);