Add a ChaCha20 utility for encrypting in place
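The in-place ChaCha20 helper named in the commit title lives outside the channelmanager.rs hunks shown below. As a rough, hypothetical sketch of the encrypt-in-place pattern, written against the RustCrypto chacha20 crate rather than LDK's internal chacha20 module (so the function name and crate choice here are illustrative assumptions, not the commit's actual code):

// Hypothetical sketch only: in-place ChaCha20 encryption via the RustCrypto
// `chacha20` crate (0.9.x); LDK's own utility is not shown in this file's diff.
use chacha20::cipher::{KeyIvInit, StreamCipher};
use chacha20::ChaCha20;

fn chacha20_encrypt_in_place(key: &[u8; 32], nonce: &[u8; 12], buf: &mut [u8]) {
	// ChaCha20 is a stream cipher: encrypting XORs the keystream into `buf`
	// directly, and applying the same keystream again decrypts it.
	let mut cipher = ChaCha20::new(key.into(), nonce.into());
	cipher.apply_keystream(buf);
}

fn main() {
	let key = [7u8; 32];
	let nonce = [0u8; 12];
	let mut data = *b"encrypt me in place";
	chacha20_encrypt_in_place(&key, &nonce, &mut data);
	assert_ne!(&data, b"encrypt me in place");
	chacha20_encrypt_in_place(&key, &nonce, &mut data); // same keystream round-trips
	assert_eq!(&data, b"encrypt me in place");
}
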
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 6739e5260f52335a52c8c5b0d4090fcc412c9382..6393117b7f0b0291d2a86e4e82c84ee310f2669b 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -55,7 +55,7 @@ use crate::ln::msgs::{ChannelMessageHandler, DecodeError, LightningError};
 use crate::ln::outbound_payment;
 use crate::ln::outbound_payment::{OutboundPayments, PaymentAttempts, PendingOutboundPayment, SendAlongPathArgs};
 use crate::ln::wire::Encode;
-use crate::sign::{EntropySource, KeysManager, NodeSigner, Recipient, SignerProvider, ChannelSigner, WriteableEcdsaChannelSigner};
+use crate::sign::{EntropySource, KeysManager, NodeSigner, Recipient, SignerProvider, WriteableEcdsaChannelSigner};
 use crate::util::config::{UserConfig, ChannelConfig, ChannelConfigUpdate};
 use crate::util::wakers::{Future, Notifier};
 use crate::util::scid_utils::fake_scid;
@@ -181,6 +181,7 @@ pub(super) enum HTLCForwardInfo {
 pub(crate) struct HTLCPreviousHopData {
        // Note that this may be an outbound SCID alias for the associated channel.
        short_channel_id: u64,
+       user_channel_id: Option<u128>,
        htlc_id: u64,
        incoming_packet_shared_secret: [u8; 32],
        phantom_shared_secret: Option<[u8; 32]>,
@@ -221,6 +222,17 @@ struct ClaimableHTLC {
        counterparty_skimmed_fee_msat: Option<u64>,
 }
 
+impl From<&ClaimableHTLC> for events::ClaimedHTLC {
+       fn from(val: &ClaimableHTLC) -> Self {
+               events::ClaimedHTLC {
+                       channel_id: val.prev_hop.outpoint.to_channel_id(),
+                       user_channel_id: val.prev_hop.user_channel_id.unwrap_or(0),
+                       cltv_expiry: val.cltv_expiry,
+                       value_msat: val.value,
+               }
+       }
+}
+
 /// A payment identifier used to uniquely identify a payment to LDK.
 ///
 /// This is not exported to bindings users as we just use [u8; 32] directly
@@ -240,6 +252,12 @@ impl Readable for PaymentId {
        }
 }
 
+impl core::fmt::Display for PaymentId {
+       fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
+               crate::util::logger::DebugBytes(&self.0).fmt(f)
+       }
+}
+
 /// An identifier used to uniquely identify an intercepted HTLC to LDK.
 ///
 /// This is not exported to bindings users as we just use [u8; 32] directly
@@ -496,11 +514,15 @@ struct ClaimingPayment {
        amount_msat: u64,
        payment_purpose: events::PaymentPurpose,
        receiver_node_id: PublicKey,
+       htlcs: Vec<events::ClaimedHTLC>,
+       sender_intended_value: Option<u64>,
 }
 impl_writeable_tlv_based!(ClaimingPayment, {
        (0, amount_msat, required),
        (2, payment_purpose, required),
        (4, receiver_node_id, required),
+       (5, htlcs, optional_vec),
+       (7, sender_intended_value, option),
 });
 
 struct ClaimablePayment {
@@ -643,23 +665,23 @@ impl_writeable_tlv_based_enum!(RAAMonitorUpdateBlockingAction,
 
 
 /// State we hold per-peer.
-pub(super) struct PeerState<Signer: ChannelSigner> {
+pub(super) struct PeerState<SP: Deref> where SP::Target: SignerProvider {
        /// `channel_id` -> `Channel`.
        ///
        /// Holds all funded channels where the peer is the counterparty.
-       pub(super) channel_by_id: HashMap<[u8; 32], Channel<Signer>>,
+       pub(super) channel_by_id: HashMap<[u8; 32], Channel<SP>>,
        /// `temporary_channel_id` -> `OutboundV1Channel`.
        ///
        /// Holds all outbound V1 channels where the peer is the counterparty. Once an outbound channel has
        /// been assigned a `channel_id`, the entry in this map is removed and one is created in
        /// `channel_by_id`.
-       pub(super) outbound_v1_channel_by_id: HashMap<[u8; 32], OutboundV1Channel<Signer>>,
+       pub(super) outbound_v1_channel_by_id: HashMap<[u8; 32], OutboundV1Channel<SP>>,
        /// `temporary_channel_id` -> `InboundV1Channel`.
        ///
        /// Holds all inbound V1 channels where the peer is the counterparty. Once an inbound channel has
        /// been assigned a `channel_id`, the entry in this map is removed and one is created in
        /// `channel_by_id`.
-       pub(super) inbound_v1_channel_by_id: HashMap<[u8; 32], InboundV1Channel<Signer>>,
+       pub(super) inbound_v1_channel_by_id: HashMap<[u8; 32], InboundV1Channel<SP>>,
        /// `temporary_channel_id` -> `InboundChannelRequest`.
        ///
        /// When manual channel acceptance is enabled, this holds all unaccepted inbound channels where
@@ -705,7 +727,7 @@ pub(super) struct PeerState<Signer: ChannelSigner> {
        is_connected: bool,
 }
 
-impl <Signer: ChannelSigner> PeerState<Signer> {
+impl <SP: Deref> PeerState<SP> where SP::Target: SignerProvider {
        /// Indicates that a peer meets the criteria where we're ok to remove it from our storage.
        /// If true is passed for `require_disconnected`, the function will return false if we haven't
        /// disconnected from the node already, ie. `PeerState::is_connected` is set to `true`.
@@ -1130,9 +1152,9 @@ where
        ///
        /// See `ChannelManager` struct-level documentation for lock order requirements.
        #[cfg(not(any(test, feature = "_test_utils")))]
-       per_peer_state: FairRwLock<HashMap<PublicKey, Mutex<PeerState<<SP::Target as SignerProvider>::Signer>>>>,
+       per_peer_state: FairRwLock<HashMap<PublicKey, Mutex<PeerState<SP>>>>,
        #[cfg(any(test, feature = "_test_utils"))]
-       pub(super) per_peer_state: FairRwLock<HashMap<PublicKey, Mutex<PeerState<<SP::Target as SignerProvider>::Signer>>>>,
+       pub(super) per_peer_state: FairRwLock<HashMap<PublicKey, Mutex<PeerState<SP>>>>,
 
        /// The set of events which we need to give to the user to handle. In some cases an event may
        /// require some further action after the user handles it (currently only blocking a monitor
@@ -1578,11 +1600,13 @@ impl ChannelDetails {
                self.short_channel_id.or(self.outbound_scid_alias)
        }
 
-       fn from_channel_context<Signer: WriteableEcdsaChannelSigner, F: Deref>(
-               context: &ChannelContext<Signer>, best_block_height: u32, latest_features: InitFeatures,
+       fn from_channel_context<SP: Deref, F: Deref>(
+               context: &ChannelContext<SP>, best_block_height: u32, latest_features: InitFeatures,
                fee_estimator: &LowerBoundedFeeEstimator<F>
        ) -> Self
-       where F::Target: FeeEstimator
+       where
+               SP::Target: SignerProvider,
+               F::Target: FeeEstimator
        {
                let balance = context.get_available_balances(fee_estimator);
                let (to_remote_reserve_satoshis, to_self_reserve_satoshis) =
@@ -2283,7 +2307,7 @@ where
                Ok(temporary_channel_id)
        }
 
-       fn list_funded_channels_with_filter<Fn: FnMut(&(&[u8; 32], &Channel<<SP::Target as SignerProvider>::Signer>)) -> bool + Copy>(&self, f: Fn) -> Vec<ChannelDetails> {
+       fn list_funded_channels_with_filter<Fn: FnMut(&(&[u8; 32], &Channel<SP>)) -> bool + Copy>(&self, f: Fn) -> Vec<ChannelDetails> {
                // Allocate our best estimate of the number of channels we have in the `res`
                // Vec. Sadly the `short_to_chan_info` map doesn't cover channels without
                // a scid or a scid alias, and the `id_to_peer` shouldn't be used outside
@@ -2409,7 +2433,7 @@ where
        }
 
        /// Helper function that issues the channel close events
-       fn issue_channel_close_events(&self, context: &ChannelContext<<SP::Target as SignerProvider>::Signer>, closure_reason: ClosureReason) {
+       fn issue_channel_close_events(&self, context: &ChannelContext<SP>, closure_reason: ClosureReason) {
                let mut pending_events_lock = self.pending_events.lock().unwrap();
                match context.unbroadcasted_funding() {
                        Some(transaction) => {
@@ -2898,9 +2922,9 @@ where
                                        short_channel_id, amt_to_forward, outgoing_cltv_value
                                }, ..
                        } => {
-                               let next_pk = onion_utils::next_hop_packet_pubkey(&self.secp_ctx,
+                               let next_packet_pk = onion_utils::next_hop_pubkey(&self.secp_ctx,
                                        msg.onion_routing_packet.public_key.unwrap(), &shared_secret);
-                               (short_channel_id, amt_to_forward, outgoing_cltv_value, Some(next_pk))
+                               (short_channel_id, amt_to_forward, outgoing_cltv_value, Some(next_packet_pk))
                        },
                        // We'll do receive checks in [`Self::construct_pending_htlc_info`] so we have access to the
                        // inbound channel's state.
@@ -3099,7 +3123,7 @@ where
        ///
        /// [`channel_update`]: msgs::ChannelUpdate
        /// [`internal_closing_signed`]: Self::internal_closing_signed
-       fn get_channel_update_for_broadcast(&self, chan: &Channel<<SP::Target as SignerProvider>::Signer>) -> Result<msgs::ChannelUpdate, LightningError> {
+       fn get_channel_update_for_broadcast(&self, chan: &Channel<SP>) -> Result<msgs::ChannelUpdate, LightningError> {
                if !chan.context.should_announce() {
                        return Err(LightningError {
                                err: "Cannot broadcast a channel_update for a private channel".to_owned(),
@@ -3124,7 +3148,7 @@ where
        ///
        /// [`channel_update`]: msgs::ChannelUpdate
        /// [`internal_closing_signed`]: Self::internal_closing_signed
-       fn get_channel_update_for_unicast(&self, chan: &Channel<<SP::Target as SignerProvider>::Signer>) -> Result<msgs::ChannelUpdate, LightningError> {
+       fn get_channel_update_for_unicast(&self, chan: &Channel<SP>) -> Result<msgs::ChannelUpdate, LightningError> {
                log_trace!(self.logger, "Attempting to generate channel update for channel {}", log_bytes!(chan.context.channel_id()));
                let short_channel_id = match chan.context.get_short_channel_id().or(chan.context.latest_inbound_scid_alias()) {
                        None => return Err(LightningError{err: "Channel not yet established".to_owned(), action: msgs::ErrorAction::IgnoreError}),
@@ -3134,7 +3158,7 @@ where
                self.get_channel_update_for_onion(short_channel_id, chan)
        }
 
-       fn get_channel_update_for_onion(&self, short_channel_id: u64, chan: &Channel<<SP::Target as SignerProvider>::Signer>) -> Result<msgs::ChannelUpdate, LightningError> {
+       fn get_channel_update_for_onion(&self, short_channel_id: u64, chan: &Channel<SP>) -> Result<msgs::ChannelUpdate, LightningError> {
                log_trace!(self.logger, "Generating channel update for channel {}", log_bytes!(chan.context.channel_id()));
                let were_node_one = self.our_network_pubkey.serialize()[..] < chan.context.get_counterparty_node_id().serialize()[..];
 
@@ -3428,7 +3452,7 @@ where
 
        /// Handles the generation of a funding transaction, optionally (for tests) with a function
        /// which checks the correctness of the funding transaction given the associated channel.
-       fn funding_transaction_generated_intern<FundingOutput: Fn(&OutboundV1Channel<<SP::Target as SignerProvider>::Signer>, &Transaction) -> Result<OutPoint, APIError>>(
+       fn funding_transaction_generated_intern<FundingOutput: Fn(&OutboundV1Channel<SP>, &Transaction) -> Result<OutPoint, APIError>>(
                &self, temporary_channel_id: &[u8; 32], counterparty_node_id: &PublicKey, funding_transaction: Transaction, find_funding_output: FundingOutput
        ) -> Result<(), APIError> {
                let per_peer_state = self.per_peer_state.read().unwrap();
@@ -3785,6 +3809,7 @@ where
                if let PendingHTLCRouting::Forward { short_channel_id, .. } = payment.forward_info.routing {
                        let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                short_channel_id: payment.prev_short_channel_id,
+                               user_channel_id: Some(payment.prev_user_channel_id),
                                outpoint: payment.prev_funding_outpoint,
                                htlc_id: payment.prev_htlc_id,
                                incoming_packet_shared_secret: payment.forward_info.incoming_shared_secret,
@@ -3832,6 +3857,7 @@ where
 
                                                                                                let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                                                                                        short_channel_id: prev_short_channel_id,
+                                                                                                       user_channel_id: Some(prev_user_channel_id),
                                                                                                        outpoint: prev_funding_outpoint,
                                                                                                        htlc_id: prev_htlc_id,
                                                                                                        incoming_packet_shared_secret: incoming_shared_secret,
@@ -3936,15 +3962,16 @@ where
                                                        for forward_info in pending_forwards.drain(..) {
                                                                match forward_info {
                                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
-                                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint, prev_user_channel_id: _,
+                                                                               prev_short_channel_id, prev_htlc_id, prev_funding_outpoint, prev_user_channel_id,
                                                                                forward_info: PendingHTLCInfo {
                                                                                        incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value,
                                                                                        routing: PendingHTLCRouting::Forward { onion_packet, .. }, skimmed_fee_msat, ..
                                                                                },
                                                                        }) => {
-                                                                               log_trace!(self.logger, "Adding HTLC from short id {} with payment_hash {} to channel with short id {} after delay", prev_short_channel_id, log_bytes!(payment_hash.0), short_chan_id);
+                                                                               log_trace!(self.logger, "Adding HTLC from short id {} with payment_hash {} to channel with short id {} after delay", prev_short_channel_id, &payment_hash, short_chan_id);
                                                                                let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                                                                        short_channel_id: prev_short_channel_id,
+                                                                                       user_channel_id: Some(prev_user_channel_id),
                                                                                        outpoint: prev_funding_outpoint,
                                                                                        htlc_id: prev_htlc_id,
                                                                                        incoming_packet_shared_secret: incoming_shared_secret,
@@ -3957,7 +3984,7 @@ where
                                                                                        &self.logger)
                                                                                {
                                                                                        if let ChannelError::Ignore(msg) = e {
-                                                                                               log_trace!(self.logger, "Failed to forward HTLC with payment_hash {}: {}", log_bytes!(payment_hash.0), msg);
+                                                                                               log_trace!(self.logger, "Failed to forward HTLC with payment_hash {}: {}", &payment_hash, msg);
                                                                                        } else {
                                                                                                panic!("Stated return value requirements in send_htlc() were not met");
                                                                                        }
@@ -4026,6 +4053,7 @@ where
                                                                let claimable_htlc = ClaimableHTLC {
                                                                        prev_hop: HTLCPreviousHopData {
                                                                                short_channel_id: prev_short_channel_id,
+                                                                               user_channel_id: Some(prev_user_channel_id),
                                                                                outpoint: prev_funding_outpoint,
                                                                                htlc_id: prev_htlc_id,
                                                                                incoming_packet_shared_secret: incoming_shared_secret,
@@ -4055,6 +4083,7 @@ where
                                                                                );
                                                                                failed_forwards.push((HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                                                                                short_channel_id: $htlc.prev_hop.short_channel_id,
+                                                                                               user_channel_id: $htlc.prev_hop.user_channel_id,
                                                                                                outpoint: prev_funding_outpoint,
                                                                                                htlc_id: $htlc.prev_hop.htlc_id,
                                                                                                incoming_packet_shared_secret: $htlc.prev_hop.incoming_packet_shared_secret,
@@ -4095,11 +4124,11 @@ where
                                                                                        });
                                                                                if $purpose != claimable_payment.purpose {
                                                                                        let log_keysend = |keysend| if keysend { "keysend" } else { "non-keysend" };
-                                                                                       log_trace!(self.logger, "Failing new {} HTLC with payment_hash {} as we already had an existing {} HTLC with the same payment hash", log_keysend(is_keysend), log_bytes!(payment_hash.0), log_keysend(!is_keysend));
+                                                                                       log_trace!(self.logger, "Failing new {} HTLC with payment_hash {} as we already had an existing {} HTLC with the same payment hash", log_keysend(is_keysend), &payment_hash, log_keysend(!is_keysend));
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                }
                                                                                if !self.default_configuration.accept_mpp_keysend && is_keysend && !claimable_payment.htlcs.is_empty() {
-                                                                                       log_trace!(self.logger, "Failing new keysend HTLC with payment_hash {} as we already had an existing keysend HTLC with the same payment hash and our config states we don't accept MPP keysend", log_bytes!(payment_hash.0));
+                                                                                       log_trace!(self.logger, "Failing new keysend HTLC with payment_hash {} as we already had an existing keysend HTLC with the same payment hash and our config states we don't accept MPP keysend", &payment_hash);
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                }
                                                                                if let Some(earlier_fields) = &mut claimable_payment.onion_fields {
@@ -4117,7 +4146,7 @@ where
                                                                                        earliest_expiry = cmp::min(earliest_expiry, htlc.cltv_expiry);
                                                                                        if htlc.total_msat != claimable_htlc.total_msat {
                                                                                                log_trace!(self.logger, "Failing HTLCs with payment_hash {} as the HTLCs had inconsistent total values (eg {} and {})",
-                                                                                                       log_bytes!(payment_hash.0), claimable_htlc.total_msat, htlc.total_msat);
+                                                                                                       &payment_hash, claimable_htlc.total_msat, htlc.total_msat);
                                                                                                total_value = msgs::MAX_VALUE_MSAT;
                                                                                        }
                                                                                        if total_value >= msgs::MAX_VALUE_MSAT { break; }
@@ -4128,7 +4157,7 @@ where
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                } else if total_value - claimable_htlc.sender_intended_value >= claimable_htlc.total_msat {
                                                                                        log_trace!(self.logger, "Failing HTLC with payment_hash {} as payment is already claimable",
-                                                                                               log_bytes!(payment_hash.0));
+                                                                                               &payment_hash);
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                } else if total_value >= claimable_htlc.total_msat {
                                                                                        #[allow(unused_assignments)] {
@@ -4182,7 +4211,7 @@ where
                                                                                                let (payment_preimage, min_final_cltv_expiry_delta) = match inbound_payment::verify(payment_hash, &payment_data, self.highest_seen_timestamp.load(Ordering::Acquire) as u64, &self.inbound_payment_key, &self.logger) {
                                                                                                        Ok(result) => result,
                                                                                                        Err(()) => {
-                                                                                                               log_trace!(self.logger, "Failing new HTLC with payment_hash {} as payment verification failed", log_bytes!(payment_hash.0));
+                                                                                                               log_trace!(self.logger, "Failing new HTLC with payment_hash {} as payment verification failed", &payment_hash);
                                                                                                                fail_htlc!(claimable_htlc, payment_hash);
                                                                                                        }
                                                                                                };
@@ -4190,7 +4219,7 @@ where
                                                                                                        let expected_min_expiry_height = (self.current_best_block().height() + min_final_cltv_expiry_delta as u32) as u64;
                                                                                                        if (cltv_expiry as u64) < expected_min_expiry_height {
                                                                                                                log_trace!(self.logger, "Failing new HTLC with payment_hash {} as its CLTV expiry was too soon (had {}, earliest expected {})",
-                                                                                                                       log_bytes!(payment_hash.0), cltv_expiry, expected_min_expiry_height);
+                                                                                                                       &payment_hash, cltv_expiry, expected_min_expiry_height);
                                                                                                                fail_htlc!(claimable_htlc, payment_hash);
                                                                                                        }
                                                                                                }
@@ -4208,16 +4237,16 @@ where
                                                                        },
                                                                        hash_map::Entry::Occupied(inbound_payment) => {
                                                                                if let OnionPayload::Spontaneous(_) = claimable_htlc.onion_payload {
-                                                                                       log_trace!(self.logger, "Failing new keysend HTLC with payment_hash {} because we already have an inbound payment with the same payment hash", log_bytes!(payment_hash.0));
+                                                                                       log_trace!(self.logger, "Failing new keysend HTLC with payment_hash {} because we already have an inbound payment with the same payment hash", &payment_hash);
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                }
                                                                                let payment_data = payment_data.unwrap();
                                                                                if inbound_payment.get().payment_secret != payment_data.payment_secret {
-                                                                                       log_trace!(self.logger, "Failing new HTLC with payment_hash {} as it didn't match our expected payment secret.", log_bytes!(payment_hash.0));
+                                                                                       log_trace!(self.logger, "Failing new HTLC with payment_hash {} as it didn't match our expected payment secret.", &payment_hash);
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                } else if inbound_payment.get().min_value_msat.is_some() && payment_data.total_msat < inbound_payment.get().min_value_msat.unwrap() {
                                                                                        log_trace!(self.logger, "Failing new HTLC with payment_hash {} as it didn't match our minimum value (had {}, needed {}).",
-                                                                                               log_bytes!(payment_hash.0), payment_data.total_msat, inbound_payment.get().min_value_msat.unwrap());
+                                                                                               &payment_hash, payment_data.total_msat, inbound_payment.get().min_value_msat.unwrap());
                                                                                        fail_htlc!(claimable_htlc, payment_hash);
                                                                                } else {
                                                                                        let purpose = events::PaymentPurpose::InvoicePayment {
@@ -4341,7 +4370,7 @@ where
                let _ = self.process_background_events();
        }
 
-       fn update_channel_fee(&self, chan_id: &[u8; 32], chan: &mut Channel<<SP::Target as SignerProvider>::Signer>, new_feerate: u32) -> NotifyOption {
+       fn update_channel_fee(&self, chan_id: &[u8; 32], chan: &mut Channel<SP>, new_feerate: u32) -> NotifyOption {
                if !chan.context.is_outbound() { return NotifyOption::SkipPersist; }
                // If the feerate has decreased by less than half, don't bother
                if new_feerate <= chan.context.get_feerate_sat_per_1000_weight() && new_feerate * 2 > chan.context.get_feerate_sat_per_1000_weight() {
@@ -4500,7 +4529,7 @@ where
 
                                        let process_unfunded_channel_tick = |
                                                chan_id: &[u8; 32],
-                                               chan_context: &mut ChannelContext<<SP::Target as SignerProvider>::Signer>,
+                                               chan_context: &mut ChannelContext<SP>,
                                                unfunded_chan_context: &mut UnfundedChannelContext,
                                                pending_msg_events: &mut Vec<MessageSendEvent>,
                                        | {
@@ -4690,7 +4719,7 @@ where
        ///
        /// This is for failures on the channel on which the HTLC was *received*, not failures
        /// forwarding
-       fn get_htlc_inbound_temp_fail_err_and_data(&self, desired_err_code: u16, chan: &Channel<<SP::Target as SignerProvider>::Signer>) -> (u16, Vec<u8>) {
+       fn get_htlc_inbound_temp_fail_err_and_data(&self, desired_err_code: u16, chan: &Channel<SP>) -> (u16, Vec<u8>) {
                // We can't be sure what SCID was used when relaying inbound towards us, so we have to
                // guess somewhat. If its a public channel, we figure best to just use the real SCID (as
                // we're not leaking that we have a channel with the counterparty), otherwise we try to use
@@ -4710,7 +4739,7 @@ where
 
        /// Gets an HTLC onion failure code and error data for an `UPDATE` error, given the error code
        /// that we want to return and a channel.
-       fn get_htlc_temp_fail_err_and_data(&self, desired_err_code: u16, scid: u64, chan: &Channel<<SP::Target as SignerProvider>::Signer>) -> (u16, Vec<u8>) {
+       fn get_htlc_temp_fail_err_and_data(&self, desired_err_code: u16, scid: u64, chan: &Channel<SP>) -> (u16, Vec<u8>) {
                debug_assert_eq!(desired_err_code & 0x1000, 0x1000);
                if let Ok(upd) = self.get_channel_update_for_onion(scid, chan) {
                        let mut enc = VecWriter(Vec::with_capacity(upd.serialized_length() + 6));
@@ -4786,8 +4815,8 @@ where
                                        &self.pending_events, &self.logger)
                                { self.push_pending_forwards_ev(); }
                        },
-                       HTLCSource::PreviousHopData(HTLCPreviousHopData { ref short_channel_id, ref htlc_id, ref incoming_packet_shared_secret, ref phantom_shared_secret, ref outpoint }) => {
-                               log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with {:?}", log_bytes!(payment_hash.0), onion_error);
+                       HTLCSource::PreviousHopData(HTLCPreviousHopData { ref short_channel_id, ref htlc_id, ref incoming_packet_shared_secret, ref phantom_shared_secret, ref outpoint, .. }) => {
+                               log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with {:?}", &payment_hash, onion_error);
                                let err_packet = onion_error.get_encrypted_failure_packet(incoming_packet_shared_secret, phantom_shared_secret);
 
                                let mut push_forward_ev = false;
@@ -4873,20 +4902,22 @@ where
                                        }
                                }
 
+                               let htlcs = payment.htlcs.iter().map(events::ClaimedHTLC::from).collect();
+                               let sender_intended_value = payment.htlcs.first().map(|htlc| htlc.total_msat);
                                let dup_purpose = claimable_payments.pending_claiming_payments.insert(payment_hash,
                                        ClaimingPayment { amount_msat: payment.htlcs.iter().map(|source| source.value).sum(),
-                                       payment_purpose: payment.purpose, receiver_node_id,
+                                       payment_purpose: payment.purpose, receiver_node_id, htlcs, sender_intended_value
                                });
                                if dup_purpose.is_some() {
                                        debug_assert!(false, "Shouldn't get a duplicate pending claim event ever");
                                        log_error!(self.logger, "Got a duplicate pending claimable event on payment hash {}! Please report this bug",
-                                               log_bytes!(payment_hash.0));
+                                               &payment_hash);
                                }
 
                                if let Some(RecipientOnionFields { ref custom_tlvs, .. }) = payment.onion_fields {
                                        if !custom_tlvs_known && custom_tlvs.iter().any(|(typ, _)| typ % 2 == 0) {
                                                log_info!(self.logger, "Rejecting payment with payment hash {} as we cannot accept payment with unknown even TLVs: {}",
-                                                       log_bytes!(payment_hash.0), log_iter!(custom_tlvs.iter().map(|(typ, _)| typ).filter(|typ| *typ % 2 == 0)));
+                                                       &payment_hash, log_iter!(custom_tlvs.iter().map(|(typ, _)| typ).filter(|typ| *typ % 2 == 0)));
                                                claimable_payments.pending_claiming_payments.remove(&payment_hash);
                                                mem::drop(claimable_payments);
                                                for htlc in payment.htlcs {
@@ -5139,9 +5170,20 @@ where
                        match action {
                                MonitorUpdateCompletionAction::PaymentClaimed { payment_hash } => {
                                        let payment = self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash);
-                                       if let Some(ClaimingPayment { amount_msat, payment_purpose: purpose, receiver_node_id }) = payment {
+                                       if let Some(ClaimingPayment {
+                                               amount_msat,
+                                               payment_purpose: purpose,
+                                               receiver_node_id,
+                                               htlcs,
+                                               sender_intended_value: sender_intended_total_msat,
+                                       }) = payment {
                                                self.pending_events.lock().unwrap().push_back((events::Event::PaymentClaimed {
-                                                       payment_hash, purpose, amount_msat, receiver_node_id: Some(receiver_node_id),
+                                                       payment_hash,
+                                                       purpose,
+                                                       amount_msat,
+                                                       receiver_node_id: Some(receiver_node_id),
+                                                       htlcs,
+                                                       sender_intended_total_msat,
                                                }, None));
                                        }
                                },
@@ -5160,7 +5202,7 @@ where
        /// Handles a channel reentering a functional state, either due to reconnect or a monitor
        /// update completion.
        fn handle_channel_resumption(&self, pending_msg_events: &mut Vec<MessageSendEvent>,
-               channel: &mut Channel<<SP::Target as SignerProvider>::Signer>, raa: Option<msgs::RevokeAndACK>,
+               channel: &mut Channel<SP>, raa: Option<msgs::RevokeAndACK>,
                commitment_update: Option<msgs::CommitmentUpdate>, order: RAACommitmentOrder,
                pending_forwards: Vec<(PendingHTLCInfo, u64)>, funding_broadcastable: Option<Transaction>,
                channel_ready: Option<msgs::ChannelReady>, announcement_sigs: Option<msgs::AnnouncementSignatures>)
@@ -5395,7 +5437,7 @@ where
        /// The filter is called for each peer and provided with the number of unfunded, inbound, and
        /// non-0-conf channels we have with the peer.
        fn peers_without_funded_channels<Filter>(&self, maybe_count_peer: Filter) -> usize
-       where Filter: Fn(&PeerState<<SP::Target as SignerProvider>::Signer>) -> bool {
+       where Filter: Fn(&PeerState<SP>) -> bool {
                let mut peers_without_funded_channels = 0;
                let best_block_height = self.best_block.read().unwrap().height();
                {
@@ -5413,7 +5455,7 @@ where
        }
 
        fn unfunded_channel_count(
-               peer: &PeerState<<SP::Target as SignerProvider>::Signer>, best_block_height: u32
+               peer: &PeerState<SP>, best_block_height: u32
        ) -> usize {
                let mut num_unfunded_channels = 0;
                for (_, chan) in peer.channel_by_id.iter() {
@@ -5859,7 +5901,7 @@ where
                                                        chan.get().context.config().accept_underpaying_htlcs, next_packet_pk_opt),
                                        Err(e) => PendingHTLCStatus::Fail(e)
                                };
-                               let create_pending_htlc_status = |chan: &Channel<<SP::Target as SignerProvider>::Signer>, pending_forward_info: PendingHTLCStatus, error_code: u16| {
+                               let create_pending_htlc_status = |chan: &Channel<SP>, pending_forward_info: PendingHTLCStatus, error_code: u16| {
                                        // If the update_add is completely bogus, the call will Err and we will close,
                                        // but if we've sent a shutdown and they haven't acknowledged it yet, we just
                                        // want to reject the new HTLC and fail it backwards instead of forwarding.
@@ -6019,6 +6061,7 @@ where
                                                                                log_info!(self.logger, "Failed to forward incoming HTLC: detected duplicate intercepted payment over short channel id {}", scid);
                                                                                let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                                                                        short_channel_id: prev_short_channel_id,
+                                                                                       user_channel_id: Some(prev_user_channel_id),
                                                                                        outpoint: prev_funding_outpoint,
                                                                                        htlc_id: prev_htlc_id,
                                                                                        incoming_packet_shared_secret: forward_info.incoming_shared_secret,
@@ -6290,10 +6333,10 @@ where
                                match monitor_event {
                                        MonitorEvent::HTLCEvent(htlc_update) => {
                                                if let Some(preimage) = htlc_update.payment_preimage {
-                                                       log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
+                                                       log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", &preimage);
                                                        self.claim_funds_internal(htlc_update.source, preimage, htlc_update.htlc_value_satoshis.map(|v| v * 1000), true, funding_outpoint);
                                                } else {
-                                                       log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
+                                                       log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
                                                        let receiver = HTLCDestination::NextHopChannel { node_id: counterparty_node_id, channel_id: funding_outpoint.to_channel_id() };
                                                        let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                                                        self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
@@ -7020,7 +7063,7 @@ where
        /// Calls a function which handles an on-chain event (blocks dis/connected, transactions
        /// un/confirmed, etc) on each channel, handling any resulting errors or messages generated by
        /// the function.
-       fn do_chain_event<FN: Fn(&mut Channel<<SP::Target as SignerProvider>::Signer>) -> Result<(Option<msgs::ChannelReady>, Vec<(HTLCSource, PaymentHash)>, Option<msgs::AnnouncementSignatures>), ClosureReason>>
+       fn do_chain_event<FN: Fn(&mut Channel<SP>) -> Result<(Option<msgs::ChannelReady>, Vec<(HTLCSource, PaymentHash)>, Option<msgs::AnnouncementSignatures>), ClosureReason>>
                        (&self, height_opt: Option<u32>, f: FN) {
                // Note that we MUST NOT end up calling methods on self.chain_monitor here - we're called
                // during initialization prior to the chain_monitor being fully configured in some cases.
@@ -7145,6 +7188,7 @@ where
                                if height >= htlc.forward_info.outgoing_cltv_value - HTLC_FAIL_BACK_BUFFER {
                                        let prev_hop_data = HTLCSource::PreviousHopData(HTLCPreviousHopData {
                                                short_channel_id: htlc.prev_short_channel_id,
+                                               user_channel_id: Some(htlc.prev_user_channel_id),
                                                htlc_id: htlc.prev_htlc_id,
                                                incoming_packet_shared_secret: htlc.forward_info.incoming_shared_secret,
                                                phantom_shared_secret: None,
@@ -7510,6 +7554,46 @@ where
        fn handle_error(&self, counterparty_node_id: &PublicKey, msg: &msgs::ErrorMessage) {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
 
+               match &msg.data as &str {
+                       "cannot co-op close channel w/ active htlcs"|
+                       "link failed to shutdown" =>
+                       {
+                               // LND hasn't properly handled shutdown messages ever, and force-closes any time we
+                               // send one while HTLCs are still present. The issue is tracked at
+                               // https://github.com/lightningnetwork/lnd/issues/6039 and has had multiple patches
+                               // to fix it but none so far have managed to land upstream. The issue appears to be
+                               // very low priority for the LND team despite being marked "P1".
+                               // We're not going to bother handling this in a sensible way, instead simply
+                               // repeating the Shutdown message on repeat until morale improves.
+                               if msg.channel_id != [0; 32] {
+                                       let per_peer_state = self.per_peer_state.read().unwrap();
+                                       let peer_state_mutex_opt = per_peer_state.get(counterparty_node_id);
+                                       if peer_state_mutex_opt.is_none() { return; }
+                                       let mut peer_state = peer_state_mutex_opt.unwrap().lock().unwrap();
+                                       if let Some(chan) = peer_state.channel_by_id.get(&msg.channel_id) {
+                                               if let Some(msg) = chan.get_outbound_shutdown() {
+                                                       peer_state.pending_msg_events.push(events::MessageSendEvent::SendShutdown {
+                                                               node_id: *counterparty_node_id,
+                                                               msg,
+                                                       });
+                                               }
+                                               peer_state.pending_msg_events.push(events::MessageSendEvent::HandleError {
+                                                       node_id: *counterparty_node_id,
+                                                       action: msgs::ErrorAction::SendWarningMessage {
+                                                               msg: msgs::WarningMessage {
+                                                                       channel_id: msg.channel_id,
+                                                                       data: "You appear to be exhibiting LND bug 6039, we'll keep sending you shutdown messages until you handle them correctly".to_owned()
+                                                               },
+                                                               log_level: Level::Trace,
+                                                       }
+                                               });
+                                       }
+                               }
+                               return;
+                       }
+                       _ => {}
+               }
+
                if msg.channel_id == [0; 32] {
                        let channel_ids: Vec<[u8; 32]> = {
                                let per_peer_state = self.per_peer_state.read().unwrap();
@@ -7733,7 +7817,7 @@ impl Writeable for ChannelDetails {
 
 impl Readable for ChannelDetails {
        fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
-               _init_and_read_tlv_fields!(reader, {
+               _init_and_read_len_prefixed_tlv_fields!(reader, {
                        (1, inbound_scid_alias, option),
                        (2, channel_id, required),
                        (3, channel_type, option),
@@ -7920,7 +8004,8 @@ impl_writeable_tlv_based!(HTLCPreviousHopData, {
        (1, phantom_shared_secret, option),
        (2, outpoint, required),
        (4, htlc_id, required),
-       (6, incoming_packet_shared_secret, required)
+       (6, incoming_packet_shared_secret, required),
+       (7, user_channel_id, option),
 });
 
 impl Writeable for ClaimableHTLC {
@@ -7946,7 +8031,7 @@ impl Writeable for ClaimableHTLC {
 
 impl Readable for ClaimableHTLC {
        fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
-               _init_and_read_tlv_fields!(reader, {
+               _init_and_read_len_prefixed_tlv_fields!(reader, {
                        (0, prev_hop, required),
                        (1, total_msat, option),
                        (2, value_ser, required),
@@ -8524,13 +8609,13 @@ where
 
                let channel_count: u64 = Readable::read(reader)?;
                let mut funding_txo_set = HashSet::with_capacity(cmp::min(channel_count as usize, 128));
-               let mut peer_channels: HashMap<PublicKey, HashMap<[u8; 32], Channel<<SP::Target as SignerProvider>::Signer>>> = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
+               let mut peer_channels: HashMap<PublicKey, HashMap<[u8; 32], Channel<SP>>> = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut id_to_peer = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut short_to_chan_info = HashMap::with_capacity(cmp::min(channel_count as usize, 128));
                let mut channel_closures = VecDeque::new();
                let mut close_background_events = Vec::new();
                for _ in 0..channel_count {
-                       let mut channel: Channel<<SP::Target as SignerProvider>::Signer> = Channel::read(reader, (
+                       let mut channel: Channel<SP> = Channel::read(reader, (
                                &args.entropy_source, &args.signer_provider, best_block_height, &provided_channel_type_features(&args.default_config)
                        ))?;
                        let funding_txo = channel.context.get_funding_txo().ok_or(DecodeError::InvalidValue)?;
@@ -8574,7 +8659,7 @@ where
                                                        // backwards leg of the HTLC will simply be rejected.
                                                        log_info!(args.logger,
                                                                "Failing HTLC with hash {} as it is missing in the ChannelMonitor for channel {} but was present in the (stale) ChannelManager",
-                                                               log_bytes!(channel.context.channel_id()), log_bytes!(payment_hash.0));
+                                                               log_bytes!(channel.context.channel_id()), &payment_hash);
                                                        failed_htlcs.push((channel_htlc_source.clone(), *payment_hash, channel.context.get_counterparty_node_id(), channel.context.channel_id()));
                                                }
                                        }
@@ -8675,7 +8760,7 @@ where
                };
 
                let peer_count: u64 = Readable::read(reader)?;
-               let mut per_peer_state = HashMap::with_capacity(cmp::min(peer_count as usize, MAX_ALLOC_SIZE/mem::size_of::<(PublicKey, Mutex<PeerState<<SP::Target as SignerProvider>::Signer>>)>()));
+               let mut per_peer_state = HashMap::with_capacity(cmp::min(peer_count as usize, MAX_ALLOC_SIZE/mem::size_of::<(PublicKey, Mutex<PeerState<SP>>)>()));
                for _ in 0..peer_count {
                        let peer_pubkey = Readable::read(reader)?;
                        let peer_chans = peer_channels.remove(&peer_pubkey).unwrap_or(HashMap::new());
@@ -8927,7 +9012,7 @@ where
                                                                hash_map::Entry::Occupied(mut entry) => {
                                                                        let newly_added = entry.get_mut().insert(session_priv_bytes, &path);
                                                                        log_info!(args.logger, "{} a pending payment path for {} msat for session priv {} on an existing pending payment with payment hash {}",
-                                                                               if newly_added { "Added" } else { "Had" }, path_amt, log_bytes!(session_priv_bytes), log_bytes!(htlc.payment_hash.0));
+                                                                               if newly_added { "Added" } else { "Had" }, path_amt, log_bytes!(session_priv_bytes), &htlc.payment_hash);
                                                                },
                                                                hash_map::Entry::Vacant(entry) => {
                                                                        let path_fee = path.fee_msat();
@@ -8947,7 +9032,7 @@ where
                                                                                starting_block_height: best_block_height,
                                                                        });
                                                                        log_info!(args.logger, "Added a pending payment for {} msat with payment hash {} for path with session priv {}",
-                                                                               path_amt, log_bytes!(htlc.payment_hash.0),  log_bytes!(session_priv_bytes));
+                                                                               path_amt, &htlc.payment_hash,  log_bytes!(session_priv_bytes));
                                                                }
                                                        }
                                                }
@@ -8969,7 +9054,7 @@ where
                                                                                if let HTLCForwardInfo::AddHTLC(htlc_info) = forward {
                                                                                        if pending_forward_matches_htlc(&htlc_info) {
                                                                                                log_info!(args.logger, "Removing pending to-forward HTLC with hash {} as it was forwarded to the closed channel {}",
-                                                                                                       log_bytes!(htlc.payment_hash.0), log_bytes!(monitor.get_funding_txo().0.to_channel_id()));
+                                                                                                       &htlc.payment_hash, log_bytes!(monitor.get_funding_txo().0.to_channel_id()));
                                                                                                false
                                                                                        } else { true }
                                                                                } else { true }
@@ -8979,7 +9064,7 @@ where
                                                                pending_intercepted_htlcs.as_mut().unwrap().retain(|intercepted_id, htlc_info| {
                                                                        if pending_forward_matches_htlc(&htlc_info) {
                                                                                log_info!(args.logger, "Removing pending intercepted HTLC with hash {} as it was forwarded to the closed channel {}",
-                                                                                       log_bytes!(htlc.payment_hash.0), log_bytes!(monitor.get_funding_txo().0.to_channel_id()));
+                                                                                       &htlc.payment_hash, log_bytes!(monitor.get_funding_txo().0.to_channel_id()));
                                                                                pending_events_read.retain(|(event, _)| {
                                                                                        if let Event::HTLCIntercepted { intercept_id: ev_id, .. } = event {
                                                                                                intercepted_id != ev_id
@@ -9098,7 +9183,7 @@ where
                                                                        None => match inbound_payment::verify(payment_hash, &hop_data, 0, &expanded_inbound_key, &args.logger) {
                                                                                Ok((payment_preimage, _)) => payment_preimage,
                                                                                Err(()) => {
-                                                                                       log_error!(args.logger, "Failed to read claimable payment data for HTLC with payment hash {} - was not a pending inbound payment and didn't match our payment key", log_bytes!(payment_hash.0));
+                                                                                       log_error!(args.logger, "Failed to read claimable payment data for HTLC with payment hash {} - was not a pending inbound payment and didn't match our payment key", &payment_hash);
                                                                                        return Err(DecodeError::InvalidValue);
                                                                                }
                                                                        }
@@ -9165,7 +9250,7 @@ where
                for (_, monitor) in args.channel_monitors.iter() {
                        for (payment_hash, payment_preimage) in monitor.get_stored_preimages() {
                                if let Some(payment) = claimable_payments.remove(&payment_hash) {
-                                       log_info!(args.logger, "Re-claiming HTLCs with payment hash {} as we've released the preimage to a ChannelMonitor!", log_bytes!(payment_hash.0));
+                                       log_info!(args.logger, "Re-claiming HTLCs with payment hash {} as we've released the preimage to a ChannelMonitor!", &payment_hash);
                                        let mut claimable_amt_msat = 0;
                                        let mut receiver_node_id = Some(our_network_pubkey);
                                        let phantom_shared_secret = payment.htlcs[0].prev_hop.phantom_shared_secret;
@@ -9174,7 +9259,7 @@ where
                                                        .expect("Failed to get node_id for phantom node recipient");
                                                receiver_node_id = Some(phantom_pubkey)
                                        }
-                                       for claimable_htlc in payment.htlcs {
+                                       for claimable_htlc in &payment.htlcs {
                                                claimable_amt_msat += claimable_htlc.value;
 
                                                // Add a holding-cell claim of the payment to the Channel, which should be
@@ -9210,6 +9295,8 @@ where
                                                payment_hash,
                                                purpose: payment.purpose,
                                                amount_msat: claimable_amt_msat,
+                                               htlcs: payment.htlcs.iter().map(events::ClaimedHTLC::from).collect(),
+                                               sender_intended_total_msat: payment.htlcs.first().map(|htlc| htlc.total_msat),
                                        }, None));
                                }
                        }
@@ -10463,6 +10550,16 @@ mod tests {
                let events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 0);
        }
+
+       #[test]
+       fn test_payment_display() {
+               let payment_id = PaymentId([42; 32]);
+               assert_eq!(format!("{}", &payment_id), "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a");
+               let payment_hash = PaymentHash([42; 32]);
+               assert_eq!(format!("{}", &payment_hash), "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a");
+               let payment_preimage = PaymentPreimage([42; 32]);
+               assert_eq!(format!("{}", &payment_preimage), "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a");
+       }
 }
 
 #[cfg(ldk_bench)]
@@ -10495,13 +10592,13 @@ pub mod bench {
                &'a test_utils::TestFeeEstimator, &'a test_utils::TestRouter<'a>,
                &'a test_utils::TestLogger>;
 
-       struct ANodeHolder<'a, P: Persist<InMemorySigner>> {
-               node: &'a Manager<'a, P>,
+       struct ANodeHolder<'node_cfg, 'chan_mon_cfg: 'node_cfg, P: Persist<InMemorySigner>> {
+               node: &'node_cfg Manager<'chan_mon_cfg, P>,
        }
-       impl<'a, P: Persist<InMemorySigner>> NodeHolder for ANodeHolder<'a, P> {
-               type CM = Manager<'a, P>;
+       impl<'node_cfg, 'chan_mon_cfg: 'node_cfg, P: Persist<InMemorySigner>> NodeHolder for ANodeHolder<'node_cfg, 'chan_mon_cfg, P> {
+               type CM = Manager<'chan_mon_cfg, P>;
                #[inline]
-               fn node(&self) -> &Manager<'a, P> { self.node }
+               fn node(&self) -> &Manager<'chan_mon_cfg, P> { self.node }
                #[inline]
                fn chain_monitor(&self) -> Option<&test_utils::TestChainMonitor> { None }
        }
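
For orientation on the new data threaded into PaymentClaimed above (the htlcs vector of events::ClaimedHTLC plus sender_intended_total_msat), a minimal consumer sketch follows. The handler function and log strings are hypothetical; only the event fields come from the diff above, and their visibility is assumed to match LDK's usual public event fields.

// Hypothetical event-consumer sketch, assuming the PaymentClaimed fields added
// in this diff (`htlcs`, `sender_intended_total_msat`) are public like other
// LDK event fields.
use lightning::events::Event;

fn handle_event(event: Event) {
	match event {
		Event::PaymentClaimed {
			payment_hash, amount_msat, htlcs, sender_intended_total_msat, ..
		} => {
			println!("claimed {} msat for {:?} over {} HTLC(s)",
				amount_msat, payment_hash, htlcs.len());
			for htlc in &htlcs {
				// Each ClaimedHTLC carries the inbound channel ids plus the HTLC's
				// value and CLTV expiry, as built in From<&ClaimableHTLC> above.
				println!("  {} msat via channel {:?} (user_channel_id {}), cltv {}",
					htlc.value_msat, htlc.channel_id, htlc.user_channel_id, htlc.cltv_expiry);
			}
			if let Some(total) = sender_intended_total_msat {
				println!("  sender intended total: {} msat", total);
			}
		},
		_ => {},
	}
}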