Rename onion util internal var
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 5e9b53606517f5d1d3564a068e580e50b80fb152..90451d59d66d635821ffbe3ce32790d471290c6c 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -177,7 +177,7 @@ pub(super) enum HTLCForwardInfo {
 }
 
 /// Tracks the inbound corresponding to an outbound HTLC
-#[derive(Clone, Hash, PartialEq, Eq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub(crate) struct HTLCPreviousHopData {
        // Note that this may be an outbound SCID alias for the associated channel.
        short_channel_id: u64,
@@ -233,7 +233,8 @@ impl From<&ClaimableHTLC> for events::ClaimedHTLC {
        }
 }
 
-/// A payment identifier used to uniquely identify a payment to LDK.
+/// A user-provided identifier in [`ChannelManager::send_payment`] used to uniquely identify
+/// a payment and ensure idempotency in LDK.
 ///
 /// This is not exported to bindings users as we just use [u8; 32] directly
 #[derive(Hash, Copy, Clone, PartialEq, Eq, Debug)]
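
Note: the rewritten doc comment makes the contract explicit: `PaymentId` is chosen by the caller, and reusing one is what makes `send_payment` idempotent. A minimal standalone sketch of deriving a stable id, assuming the caller keys retries off the payment hash (the `stable_payment_id` helper is hypothetical, not an LDK API):

    // Hypothetical helper: derive a deterministic 32-byte id from the payment
    // hash so a naive retry reuses the same PaymentId and cannot double-pay.
    fn stable_payment_id(payment_hash: &[u8; 32]) -> [u8; 32] {
        // Any deterministic function of the invoice works; reusing the hash
        // bytes directly is the simplest stable choice.
        *payment_hash
    }

    fn main() {
        let payment_hash = [0x42u8; 32];
        // Same invoice => same id, so a retried send is treated as a duplicate.
        assert_eq!(stable_payment_id(&payment_hash), stable_payment_id(&payment_hash));
    }
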
@@ -282,7 +283,7 @@ impl Readable for InterceptId {
        }
 }
 
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 /// Uniquely describes an HTLC by its source. Just the guaranteed-unique subset of [`HTLCSource`].
 pub(crate) enum SentHTLCId {
        PreviousHopData { short_channel_id: u64, htlc_id: u64 },
@@ -313,7 +314,7 @@ impl_writeable_tlv_based_enum!(SentHTLCId,
 
 /// Tracks the inbound corresponding to an outbound HTLC
 #[allow(clippy::derive_hash_xor_eq)] // Our Hash is faithful to the data, we just don't have SecretKey::hash
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub(crate) enum HTLCSource {
        PreviousHopData(HTLCPreviousHopData),
        OutboundRoute {
@@ -659,7 +660,6 @@ pub(crate) enum RAAMonitorUpdateBlockingAction {
 }
 
 impl RAAMonitorUpdateBlockingAction {
-       #[allow(unused)]
        fn from_prev_hop_data(prev_hop: &HTLCPreviousHopData) -> Self {
                Self::ForwardedPaymentInboundClaim {
                        channel_id: prev_hop.outpoint.to_channel_id(),
@@ -1707,11 +1707,15 @@ pub enum ChannelShutdownState {
 pub enum RecentPaymentDetails {
        /// When an invoice was requested and thus a payment has not yet been sent.
        AwaitingInvoice {
-               /// Identifier for the payment to ensure idempotency.
+               /// A user-provided identifier in [`ChannelManager::send_payment`] used to uniquely identify
+               /// a payment and ensure idempotency in LDK.
                payment_id: PaymentId,
        },
        /// When a payment is still being sent and awaiting successful delivery.
        Pending {
+               /// A user-provided identifier in [`ChannelManager::send_payment`] used to uniquely identify
+               /// a payment and ensure idempotency in LDK.
+               payment_id: PaymentId,
                /// Hash of the payment that is currently being sent but has yet to be fulfilled or
                /// abandoned.
                payment_hash: PaymentHash,
@@ -1723,6 +1727,9 @@ pub enum RecentPaymentDetails {
        /// been resolved. Upon receiving [`Event::PaymentSent`], we delay for a few minutes before the
        /// payment is removed from tracking.
        Fulfilled {
+               /// A user-provided identifier in [`ChannelManager::send_payment`] used to uniquely identify
+               /// a payment and ensure idempotency in LDK.
+               payment_id: PaymentId,
                /// Hash of the payment that was claimed. `None` for serializations of [`ChannelManager`]
                /// made before LDK version 0.0.104.
                payment_hash: Option<PaymentHash>,
@@ -1731,6 +1738,9 @@ pub enum RecentPaymentDetails {
        /// abandoned via [`ChannelManager::abandon_payment`], it is marked as abandoned until all
        /// pending HTLCs for this payment resolve and an [`Event::PaymentFailed`] is generated.
        Abandoned {
+               /// A user-provided identifier in [`ChannelManager::send_payment`] used to uniquely identify
+               /// a payment and ensure idempotency in LDK.
+               payment_id: PaymentId,
                /// Hash of the payment that we have given up trying to send.
                payment_hash: PaymentHash,
        },
@@ -2475,15 +2485,16 @@ where
                                },
                                PendingOutboundPayment::Retryable { payment_hash, total_msat, .. } => {
                                        Some(RecentPaymentDetails::Pending {
+                                               payment_id: *payment_id,
                                                payment_hash: *payment_hash,
                                                total_msat: *total_msat,
                                        })
                                },
                                PendingOutboundPayment::Abandoned { payment_hash, .. } => {
-                                       Some(RecentPaymentDetails::Abandoned { payment_hash: *payment_hash })
+                                       Some(RecentPaymentDetails::Abandoned { payment_id: *payment_id, payment_hash: *payment_hash })
                                },
                                PendingOutboundPayment::Fulfilled { payment_hash, .. } => {
-                                       Some(RecentPaymentDetails::Fulfilled { payment_hash: *payment_hash })
+                                       Some(RecentPaymentDetails::Fulfilled { payment_id: *payment_id, payment_hash: *payment_hash })
                                },
                                PendingOutboundPayment::Legacy { .. } => None
                        })
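
Note: with `payment_id` now on every `RecentPaymentDetails` variant, callers walking `ChannelManager::list_recent_payments` can act on a specific payment (retry, abandon) without keeping their own hash-to-id map. A minimal standalone sketch with stand-in types (the enum below mirrors the reshaped variants in this diff, trimmed to the relevant fields):

    // Stand-in mirror of the reshaped enum; not LDK's actual types.
    enum RecentPaymentDetails {
        AwaitingInvoice { payment_id: [u8; 32] },
        Pending { payment_id: [u8; 32], payment_hash: [u8; 32], total_msat: u64 },
        Fulfilled { payment_id: [u8; 32], payment_hash: Option<[u8; 32]> },
        Abandoned { payment_id: [u8; 32], payment_hash: [u8; 32] },
    }

    // Collect ids of in-flight payments so the caller can abandon them by id.
    fn pending_ids(recent: &[RecentPaymentDetails]) -> Vec<[u8; 32]> {
        recent.iter().filter_map(|p| match p {
            RecentPaymentDetails::Pending { payment_id, .. } => Some(*payment_id),
            _ => None,
        }).collect()
    }
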
@@ -2784,7 +2795,7 @@ where
                let (short_channel_id, amt_to_forward, outgoing_cltv_value) = match hop_data {
                        msgs::InboundOnionPayload::Forward { short_channel_id, amt_to_forward, outgoing_cltv_value } =>
                                (short_channel_id, amt_to_forward, outgoing_cltv_value),
-                       msgs::InboundOnionPayload::Receive { .. } =>
+                       msgs::InboundOnionPayload::Receive { .. } | msgs::InboundOnionPayload::BlindedReceive { .. } =>
                                return Err(InboundOnionErr {
                                        msg: "Final Node OnionHopData provided for us as an intermediary node",
                                        err_code: 0x4000 | 22,
@@ -2816,12 +2827,19 @@ where
                                payment_data, keysend_preimage, custom_tlvs, amt_msat, outgoing_cltv_value, payment_metadata, ..
                        } =>
                                (payment_data, keysend_preimage, custom_tlvs, amt_msat, outgoing_cltv_value, payment_metadata),
-                       _ =>
+                       msgs::InboundOnionPayload::BlindedReceive {
+                               amt_msat, total_msat, outgoing_cltv_value, payment_secret, ..
+                       } => {
+                               let payment_data = msgs::FinalOnionHopData { payment_secret, total_msat };
+                               (Some(payment_data), None, Vec::new(), amt_msat, outgoing_cltv_value, None)
+                       }
+                       msgs::InboundOnionPayload::Forward { .. } => {
                                return Err(InboundOnionErr {
                                        err_code: 0x4000|22,
                                        err_data: Vec::new(),
                                        msg: "Got non final data with an HMAC of 0",
-                               }),
+                               })
+                       },
                };
                // final_incorrect_cltv_expiry
                if outgoing_cltv_value > cltv_expiry {
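
Note: the new `BlindedReceive` arm avoids touching the downstream claim logic by synthesizing the same `FinalOnionHopData` an unblinded `Receive` carries, from the blinded payload's `payment_secret` and `total_msat`. A minimal standalone sketch of that normalization, with stand-in types:

    // Stand-in shapes mirroring the match above; not LDK's actual types.
    struct FinalOnionHopData { payment_secret: [u8; 32], total_msat: u64 }

    enum InboundPayload {
        Receive { payment_secret: [u8; 32], total_msat: u64, amt_msat: u64 },
        BlindedReceive { payment_secret: [u8; 32], total_msat: u64, amt_msat: u64 },
        Forward { short_channel_id: u64 },
    }

    // Both receive flavors normalize to one (payment_data, amount) pair, so
    // everything past this point is agnostic to blinding; forwards with a
    // zero HMAC remain an error, as in the diff.
    fn final_hop_data(p: InboundPayload) -> Result<(FinalOnionHopData, u64), &'static str> {
        match p {
            InboundPayload::Receive { payment_secret, total_msat, amt_msat }
            | InboundPayload::BlindedReceive { payment_secret, total_msat, amt_msat } =>
                Ok((FinalOnionHopData { payment_secret, total_msat }, amt_msat)),
            InboundPayload::Forward { .. } => Err("got non-final data with an HMAC of 0"),
        }
    }
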
@@ -2961,7 +2979,10 @@ where
                        }
                }
 
-               let next_hop = match onion_utils::decode_next_payment_hop(shared_secret, &msg.onion_routing_packet.hop_data[..], msg.onion_routing_packet.hmac, msg.payment_hash) {
+               let next_hop = match onion_utils::decode_next_payment_hop(
+                       shared_secret, &msg.onion_routing_packet.hop_data[..], msg.onion_routing_packet.hmac,
+                       msg.payment_hash, &self.node_signer
+               ) {
                        Ok(res) => res,
                        Err(onion_utils::OnionDecodeErr::Malformed { err_msg, err_code }) => {
                                return_malformed_err!(err_msg, err_code);
@@ -2983,7 +3004,9 @@ where
                        // We'll do receive checks in [`Self::construct_pending_htlc_info`] so we have access to the
                        // inbound channel's state.
                        onion_utils::Hop::Receive { .. } => return Ok((next_hop, shared_secret, None)),
-                       onion_utils::Hop::Forward { next_hop_data: msgs::InboundOnionPayload::Receive { .. }, .. } => {
+                       onion_utils::Hop::Forward { next_hop_data: msgs::InboundOnionPayload::Receive { .. }, .. } |
+                               onion_utils::Hop::Forward { next_hop_data: msgs::InboundOnionPayload::BlindedReceive { .. }, .. } =>
+                       {
                                return_err!("Final Node OnionHopData provided for us as an intermediary node", 0x4000 | 22, &[0; 0]);
                        }
                };
@@ -3970,7 +3993,10 @@ where
                                                                                        let phantom_pubkey_res = self.node_signer.get_node_id(Recipient::PhantomNode);
                                                                                        if phantom_pubkey_res.is_ok() && fake_scid::is_valid_phantom(&self.fake_scid_rand_bytes, short_chan_id, &self.genesis_hash) {
                                                                                                let phantom_shared_secret = self.node_signer.ecdh(Recipient::PhantomNode, &onion_packet.public_key.unwrap(), None).unwrap().secret_bytes();
-                                                                                               let next_hop = match onion_utils::decode_next_payment_hop(phantom_shared_secret, &onion_packet.hop_data, onion_packet.hmac, payment_hash) {
+                                                                                               let next_hop = match onion_utils::decode_next_payment_hop(
+                                                                                                       phantom_shared_secret, &onion_packet.hop_data, onion_packet.hmac,
+                                                                                                       payment_hash, &self.node_signer
+                                                                                               ) {
                                                                                                        Ok(res) => res,
                                                                                                        Err(onion_utils::OnionDecodeErr::Malformed { err_msg, err_code }) => {
                                                                                                                let sha256_of_onion = Sha256::hash(&onion_packet.hop_data).into_inner();
@@ -5209,11 +5235,17 @@ where
                self.pending_outbound_payments.finalize_claims(sources, &self.pending_events);
        }
 
-       fn claim_funds_internal(&self, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_outpoint: OutPoint) {
+       fn claim_funds_internal(&self, source: HTLCSource, payment_preimage: PaymentPreimage,
+               forwarded_htlc_value_msat: Option<u64>, from_onchain: bool,
+               next_channel_counterparty_node_id: Option<PublicKey>, next_channel_outpoint: OutPoint
+       ) {
                match source {
                        HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } => {
                                debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
                                        "We don't support claim_htlc claims during startup - monitors may not be available yet");
+                               if let Some(pubkey) = next_channel_counterparty_node_id {
+                                       debug_assert_eq!(pubkey, path.hops[0].pubkey);
+                               }
                                let ev_completion_action = EventCompletionAction::ReleaseRAAChannelMonitorUpdate {
                                        channel_funding_outpoint: next_channel_outpoint,
                                        counterparty_node_id: path.hops[0].pubkey,
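
Note: `claim_funds_internal` now threads through the (optional) counterparty of the downstream channel; for our own outbound payments it is only sanity-checked against the path's first hop, and only in debug builds, since `None` is legal for claims replayed from old `ChannelMonitor` data. A minimal standalone sketch of that check:

    // Standalone sketch of the debug-only consistency check above.
    fn check_counterparty(first_hop_pubkey: &[u8; 33], claimed: Option<[u8; 33]>) {
        if let Some(pubkey) = claimed {
            // Release builds skip this entirely; a mismatch would indicate an
            // internal bookkeeping bug, not a peer-triggerable condition.
            debug_assert_eq!(&pubkey, first_hop_pubkey);
        }
    }
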
@@ -5224,6 +5256,7 @@ where
                        },
                        HTLCSource::PreviousHopData(hop_data) => {
                                let prev_outpoint = hop_data.outpoint;
+                               let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
                                let res = self.claim_funds_from_hop(hop_data, payment_preimage,
                                        |htlc_claim_value_msat| {
                                                if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
@@ -5239,7 +5272,17 @@ where
                                                                        next_channel_id: Some(next_channel_outpoint.to_channel_id()),
                                                                        outbound_amount_forwarded_msat: forwarded_htlc_value_msat,
                                                                },
-                                                               downstream_counterparty_and_funding_outpoint: None,
+                                                               downstream_counterparty_and_funding_outpoint:
+                                                                       if let Some(node_id) = next_channel_counterparty_node_id {
+                                                                               Some((node_id, next_channel_outpoint, completed_blocker))
+                                                                       } else {
+                                                                               // We can only get `None` here if we are processing a
+                                                                               // `ChannelMonitor`-originated event, in which case we
+                                                                               // don't care about ensuring we wake the downstream
+                                                                               // channel's monitor updating - the channel is already
+                                                                               // closed.
+                                                                               None
+                                                                       },
                                                        })
                                                } else { None }
                                        });
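
Note: for forwarded HTLCs, the claim event now carries the downstream channel's `(node_id, funding_outpoint, blocker)` triple whenever the counterparty is known, so post-event processing can release the matching RAA-monitor-update blocker. A minimal standalone sketch of the Option threading, with stand-in types:

    // Stand-in shapes: pair the downstream channel with the blocker the event
    // handler must clear once the forward-claim event has been processed.
    struct Blocker { htlc_id: u64 }

    fn downstream_info(
        counterparty: Option<[u8; 33]>, funding_outpoint: [u8; 36], blocker: Blocker,
    ) -> Option<([u8; 33], [u8; 36], Blocker)> {
        // None only occurs for ChannelMonitor-originated events, where the
        // downstream channel is already closed and needs no monitor wakeup.
        counterparty.map(|node_id| (node_id, funding_outpoint, blocker))
    }
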
@@ -6087,6 +6130,17 @@ where
                                hash_map::Entry::Occupied(mut chan_phase_entry) => {
                                        if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
                                                let res = try_chan_phase_entry!(self, chan.update_fulfill_htlc(&msg), chan_phase_entry);
+                                               if let HTLCSource::PreviousHopData(prev_hop) = &res.0 {
+                                                       peer_state.actions_blocking_raa_monitor_updates.entry(msg.channel_id)
+                                                               .or_insert_with(Vec::new)
+                                                               .push(RAAMonitorUpdateBlockingAction::from_prev_hop_data(&prev_hop));
+                                               }
+                                               // Note that we do not need to push an `actions_blocking_raa_monitor_updates`
+                                               // entry here, even though we *do* need to block the next RAA monitor update.
+                                               // We do this instead in the `claim_funds_internal` by attaching a
+                                               // `ReleaseRAAChannelMonitorUpdate` action to the event generated when the
+                                               // outbound HTLC is claimed. This is guaranteed to all complete before we
+                                               // process the RAA as messages are processed from single peers serially.
                                                funding_txo = chan.context.get_funding_txo().expect("We won't accept a fulfill until funded");
                                                res
                                        } else {
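
Note: on `update_fulfill_htlc`, the inserted block registers a `RAAMonitorUpdateBlockingAction` keyed by the message's channel id, pinning the relevant RAA-triggered monitor update until the preimage has been durably claimed upstream; the comment then explains why the event path needs no second entry. A minimal standalone sketch of the per-channel action map, with stand-in types:

    use std::collections::HashMap;

    // Stand-in for RAAMonitorUpdateBlockingAction.
    struct BlockingAction { htlc_id: u64 }

    // Each channel id maps to the actions that must complete before its next
    // RAA-driven monitor update is allowed to run.
    fn block_raa_update(
        map: &mut HashMap<[u8; 32], Vec<BlockingAction>>,
        channel_id: [u8; 32], action: BlockingAction,
    ) {
        map.entry(channel_id).or_insert_with(Vec::new).push(action);
    }

    fn raa_updates_held(map: &HashMap<[u8; 32], Vec<BlockingAction>>, chan: &[u8; 32]) -> bool {
        map.get(chan).map_or(false, |actions| !actions.is_empty())
    }
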
@@ -6097,7 +6151,7 @@ where
                                hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close(format!("Got a message for a channel from the wrong node! No such channel for the passed counterparty_node_id {}", counterparty_node_id), msg.channel_id))
                        }
                };
-               self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, funding_txo);
+               self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, Some(*counterparty_node_id), funding_txo);
                Ok(())
        }
 
@@ -6303,6 +6357,23 @@ where
                })
        }
 
+       #[cfg(any(test, feature = "_test_utils"))]
+       pub(crate) fn test_raa_monitor_updates_held(&self,
+               counterparty_node_id: PublicKey, channel_id: ChannelId
+       ) -> bool {
+               let per_peer_state = self.per_peer_state.read().unwrap();
+               if let Some(peer_state_mtx) = per_peer_state.get(&counterparty_node_id) {
+                       let mut peer_state_lck = peer_state_mtx.lock().unwrap();
+                       let peer_state = &mut *peer_state_lck;
+
+                       if let Some(chan) = peer_state.channel_by_id.get(&channel_id) {
+                               return self.raa_monitor_updates_held(&peer_state.actions_blocking_raa_monitor_updates,
+                                       chan.context().get_funding_txo().unwrap(), counterparty_node_id);
+                       }
+               }
+               false
+       }
+
        fn internal_revoke_and_ack(&self, counterparty_node_id: &PublicKey, msg: &msgs::RevokeAndACK) -> Result<(), MsgHandleErrInternal> {
                let (htlcs_to_fail, res) = {
                        let per_peer_state = self.per_peer_state.read().unwrap();
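
Note: the new `test_raa_monitor_updates_held` helper exposes the blocking state to functional tests. A hedged usage fragment, assuming LDK's usual test scaffolding (`nodes` from `create_network`; `chan_id_1` standing in for a channel id from test setup):

    // After delivering update_fulfill_htlc but before the counterparty's RAA
    // is fully processed, the receiving node should report the update as held:
    assert!(nodes[1].node.test_raa_monitor_updates_held(
        nodes[0].node.get_our_node_id(), chan_id_1));
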
@@ -6446,7 +6517,7 @@ where
                Ok(NotifyOption::DoPersist)
        }
 
-       fn internal_channel_reestablish(&self, counterparty_node_id: &PublicKey, msg: &msgs::ChannelReestablish) -> Result<(), MsgHandleErrInternal> {
+       fn internal_channel_reestablish(&self, counterparty_node_id: &PublicKey, msg: &msgs::ChannelReestablish) -> Result<NotifyOption, MsgHandleErrInternal> {
                let htlc_forwards;
                let need_lnd_workaround = {
                        let per_peer_state = self.per_peer_state.read().unwrap();
@@ -6502,14 +6573,16 @@ where
                        }
                };
 
+               let mut persist = NotifyOption::SkipPersistHandleEvents;
                if let Some(forwards) = htlc_forwards {
                        self.forward_htlcs(&mut [forwards][..]);
+                       persist = NotifyOption::DoPersist;
                }
 
                if let Some(channel_ready_msg) = need_lnd_workaround {
                        self.internal_channel_ready(counterparty_node_id, &channel_ready_msg)?;
                }
-               Ok(())
+               Ok(persist)
        }
 
        /// Process pending events from the [`chain::Watch`], returning whether any events were processed.
@@ -6524,8 +6597,8 @@ where
                                match monitor_event {
                                        MonitorEvent::HTLCEvent(htlc_update) => {
                                                if let Some(preimage) = htlc_update.payment_preimage {
-                                                       log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", &preimage);
-                                                       self.claim_funds_internal(htlc_update.source, preimage, htlc_update.htlc_value_satoshis.map(|v| v * 1000), true, funding_outpoint);
+                                                       log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", preimage);
+                                                       self.claim_funds_internal(htlc_update.source, preimage, htlc_update.htlc_value_satoshis.map(|v| v * 1000), true, counterparty_node_id, funding_outpoint);
                                                } else {
                                                        log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
                                                        let receiver = HTLCDestination::NextHopChannel { node_id: counterparty_node_id, channel_id: funding_outpoint.to_channel_id() };
@@ -7674,12 +7747,22 @@ where
        }
 
        fn handle_channel_reestablish(&self, counterparty_node_id: &PublicKey, msg: &msgs::ChannelReestablish) {
-               let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
-               let _ = handle_error!(self, self.internal_channel_reestablish(counterparty_node_id, msg), *counterparty_node_id);
+               let _persistence_guard = PersistenceNotifierGuard::optionally_notify(self, || {
+                       let res = self.internal_channel_reestablish(counterparty_node_id, msg);
+                       let persist = match &res {
+                               Err(e) if e.closes_channel() => NotifyOption::DoPersist,
+                               Err(_) => NotifyOption::SkipPersistHandleEvents,
+                               Ok(persist) => *persist,
+                       };
+                       let _ = handle_error!(self, res, *counterparty_node_id);
+                       persist
+               });
        }
 
        fn peer_disconnected(&self, counterparty_node_id: &PublicKey) {
-               let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
+               let _persistence_guard = PersistenceNotifierGuard::optionally_notify(
+                       self, || NotifyOption::SkipPersistHandleEvents);
+
                let mut failed_channels = Vec::new();
                let mut per_peer_state = self.per_peer_state.write().unwrap();
                let remove_peer = {
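
Note: both handlers now decide persistence per call: `optionally_notify` runs the closure and only wakes the persister when it returns `DoPersist`, so a quiescent reestablish or a plain disconnect no longer forces a full `ChannelManager` write. A minimal standalone sketch of the pattern (names are stand-ins, not LDK's API):

    use std::sync::atomic::{AtomicBool, Ordering};

    #[derive(Clone, Copy, PartialEq)]
    enum NotifyOption { DoPersist, SkipPersistHandleEvents }

    struct Notifier { needs_persist: AtomicBool }

    impl Notifier {
        // Run the handler, then signal the background persister only if the
        // handler decided its changes actually require a write.
        fn optionally_notify<F: FnOnce() -> NotifyOption>(&self, handler: F) {
            if handler() == NotifyOption::DoPersist {
                self.needs_persist.store(true, Ordering::Release);
            }
        }
    }
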
@@ -9435,6 +9518,7 @@ where
                                                                        // downstream chan is closed (because we don't have a
                                                                        // channel_id -> peer map entry).
                                                                        counterparty_opt.is_none(),
+                                                                       counterparty_opt.cloned().or(monitor.get_counterparty_node_id()),
                                                                        monitor.get_funding_txo().0))
                                                        } else { None }
                                                } else {
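
Note: when replaying preimage claims on startup, the counterparty id is taken from the live channel map when the downstream channel still exists, falling back to what the `ChannelMonitor` recorded (`None` for monitors written before it was tracked). A minimal standalone sketch of the fallback:

    // Prefer the live channel's counterparty; otherwise use the monitor's
    // recollection, which may legitimately be None for old monitors.
    fn counterparty_for_claim(
        live: Option<&[u8; 33]>, from_monitor: Option<[u8; 33]>,
    ) -> Option<[u8; 33]> {
        live.cloned().or(from_monitor)
    }
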
@@ -9715,12 +9799,12 @@ where
                        channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
 
-               for (source, preimage, downstream_value, downstream_closed, downstream_funding) in pending_claims_to_replay {
+               for (source, preimage, downstream_value, downstream_closed, downstream_node_id, downstream_funding) in pending_claims_to_replay {
                        // We use `downstream_closed` in place of `from_onchain` here just as a guess - we
                        // don't remember in the `ChannelMonitor` where we got a preimage from, but if the
                        // channel is closed we just assume that it probably came from an on-chain claim.
                        channel_manager.claim_funds_internal(source, preimage, Some(downstream_value),
-                               downstream_closed, downstream_funding);
+                               downstream_closed, downstream_node_id, downstream_funding);
                }
 
                //TODO: Broadcast channel update for closed channels, but only after we've made a
@@ -9975,7 +10059,7 @@ mod tests {
 
                // To start (1), send a regular payment but don't claim it.
                let expected_route = [&nodes[1]];
-               let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &expected_route, 100_000);
+               let (payment_preimage, payment_hash, ..) = route_payment(&nodes[0], &expected_route, 100_000);
 
                // Next, attempt a keysend payment and make sure it fails.
                let route_params = RouteParameters::from_payment_params_and_value(