Merge pull request #1115 from TheBlueMatt/2021-10-expose-addr-vec
[rust-lightning] / lightning / src / ln / channelmanager.rs
index 66c785654ab3eb886658447a758eb30bc577d94b..298f88ce0cc018162cf5f2a3201512641b1cd8fd 100644 (file)
@@ -402,7 +402,7 @@ struct PendingInboundPayment {
 
 /// Stores the session_priv for each part of a payment that is still pending. For versions 0.0.102
 /// and later, also stores information for retrying the payment.
-enum PendingOutboundPayment {
+pub(crate) enum PendingOutboundPayment {
        Legacy {
                session_privs: HashSet<[u8; 32]>,
        },
@@ -413,6 +413,8 @@ enum PendingOutboundPayment {
                pending_amt_msat: u64,
                /// The total payment amount across all paths, used to verify that a retry is not overpaying.
                total_msat: u64,
+               /// Our best known block height at the time this payment was initiated.
+               starting_block_height: u32,
        },
 }
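As a rough illustration of the bookkeeping the Retryable variant above performs (a simplified, hypothetical sketch, not the LDK enum itself): each path of a multi-part payment contributes one session_priv and its in-flight msat to the entry, and the entry only counts as fully resolved once no parts remain.

```rust
// Simplified, hypothetical sketch of the per-part bookkeeping described above
// (not the LDK enum itself).
use std::collections::HashSet;

struct RetryablePayment {
    session_privs: HashSet<[u8; 32]>, // one entry per in-flight path
    pending_amt_msat: u64,            // msat currently in flight across all parts
    total_msat: u64,                  // total payment amount, to verify a retry is not overpaying
    starting_block_height: u32,       // best known block height when the payment started
}

impl RetryablePayment {
    /// Record a newly sent part; returns false if this session_priv was already tracked.
    fn insert(&mut self, session_priv: [u8; 32], part_amt_msat: u64) -> bool {
        let newly_inserted = self.session_privs.insert(session_priv);
        if newly_inserted { self.pending_amt_msat += part_amt_msat; }
        newly_inserted
    }

    /// A part resolved (fulfilled or failed); stop counting it as pending.
    fn remove(&mut self, session_priv: &[u8; 32], part_amt_msat: u64) -> bool {
        let removed = self.session_privs.remove(session_priv);
        if removed { self.pending_amt_msat -= part_amt_msat; }
        removed
    }

    fn remaining_parts(&self) -> usize { self.session_privs.len() }
}
```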
 
@@ -1382,6 +1384,18 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                self.list_channels_with_filter(|&(_, ref channel)| channel.is_live())
        }
 
+       /// Helper function that issues the channel close events
+       fn issue_channel_close_events(&self, channel: &Channel<Signer>, closure_reason: ClosureReason) {
+               let mut pending_events_lock = self.pending_events.lock().unwrap();
+               match channel.unbroadcasted_funding() {
+                       Some(transaction) => {
+                               pending_events_lock.push(events::Event::DiscardFunding { channel_id: channel.channel_id(), transaction })
+                       },
+                       None => {},
+               }
+               pending_events_lock.push(events::Event::ChannelClosed { channel_id: channel.channel_id(), reason: closure_reason });
+       }
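On the consumer side, a sketch of how an application might react to the two events this helper can push (variants and fields as shown above; the two callbacks are hypothetical application hooks, not LDK APIs):

```rust
// Sketch of reacting to the two close-related events the helper can push
// (variants and fields as shown in the diff). The two callbacks are
// hypothetical application hooks, not LDK APIs.
use lightning::util::events::Event;

fn wallet_unlock_utxos(_tx: &bitcoin::Transaction) { /* wallet-specific */ }
fn mark_channel_closed(_channel_id: &[u8; 32]) { /* application-specific */ }

fn on_close_related_event(event: Event) {
    match event {
        Event::DiscardFunding { transaction, .. } => {
            // The funding tx was never broadcast, so the wallet may respend its inputs.
            wallet_unlock_utxos(&transaction);
        },
        Event::ChannelClosed { channel_id, .. } => {
            // `reason` (elided here) says why: HolderForceClosed, CooperativeClosure, etc.
            mark_channel_closed(&channel_id);
        },
        _ => {},
    }
}
```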
+
        fn close_channel_internal(&self, channel_id: &[u8; 32], target_feerate_sats_per_1000_weight: Option<u32>) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
@@ -1428,12 +1442,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                msg: channel_update
                                                        });
                                                }
-                                               if let Ok(mut pending_events_lock) = self.pending_events.lock() {
-                                                       pending_events_lock.push(events::Event::ChannelClosed {
-                                                               channel_id: *channel_id,
-                                                               reason: ClosureReason::HolderForceClosed
-                                                       });
-                                               }
+                                               self.issue_channel_close_events(&channel, ClosureReason::HolderForceClosed);
                                        }
                                        break Ok(());
                                },
@@ -1524,13 +1533,12 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                if let Some(short_id) = chan.get().get_short_channel_id() {
                                        channel_state.short_to_id.remove(&short_id);
                                }
-                               let mut pending_events_lock = self.pending_events.lock().unwrap();
                                if peer_node_id.is_some() {
                                        if let Some(peer_msg) = peer_msg {
-                                               pending_events_lock.push(events::Event::ChannelClosed { channel_id: *channel_id, reason: ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() } });
+                                               self.issue_channel_close_events(chan.get(), ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() });
                                        }
                                } else {
-                                       pending_events_lock.push(events::Event::ChannelClosed { channel_id: *channel_id, reason: ClosureReason::HolderForceClosed });
+                                       self.issue_channel_close_events(chan.get(), ClosureReason::HolderForceClosed);
                                }
                                chan.remove_entry().1
                        } else {
@@ -1949,15 +1957,6 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                let onion_packet = onion_utils::construct_onion_packet(onion_payloads, onion_keys, prng_seed, payment_hash);
 
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
-               let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap();
-               let payment = pending_outbounds.entry(payment_id).or_insert_with(|| PendingOutboundPayment::Retryable {
-                       session_privs: HashSet::new(),
-                       pending_amt_msat: 0,
-                       payment_hash: *payment_hash,
-                       payment_secret: *payment_secret,
-                       total_msat: total_value,
-               });
-               assert!(payment.insert(session_priv_bytes, path.last().unwrap().fee_msat));
 
                let err: Result<(), _> = loop {
                        let mut channel_lock = self.channel_state.lock().unwrap();
@@ -1975,12 +1974,27 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        if !chan.get().is_live() {
                                                return Err(APIError::ChannelUnavailable{err: "Peer for first hop currently disconnected/pending monitor update!".to_owned()});
                                        }
-                                       break_chan_entry!(self, chan.get_mut().send_htlc_and_commit(htlc_msat, payment_hash.clone(), htlc_cltv, HTLCSource::OutboundRoute {
-                                               path: path.clone(),
-                                               session_priv: session_priv.clone(),
-                                               first_hop_htlc_msat: htlc_msat,
-                                               payment_id,
-                                       }, onion_packet, &self.logger), channel_state, chan)
+                                       let send_res = break_chan_entry!(self, chan.get_mut().send_htlc_and_commit(
+                                               htlc_msat, payment_hash.clone(), htlc_cltv, HTLCSource::OutboundRoute {
+                                                       path: path.clone(),
+                                                       session_priv: session_priv.clone(),
+                                                       first_hop_htlc_msat: htlc_msat,
+                                                       payment_id,
+                                               }, onion_packet, &self.logger),
+                                       channel_state, chan);
+
+                                       let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap();
+                                       let payment = pending_outbounds.entry(payment_id).or_insert_with(|| PendingOutboundPayment::Retryable {
+                                               session_privs: HashSet::new(),
+                                               pending_amt_msat: 0,
+                                               payment_hash: *payment_hash,
+                                               payment_secret: *payment_secret,
+                                               starting_block_height: self.best_block.read().unwrap().height(),
+                                               total_msat: total_value,
+                                       });
+                                       assert!(payment.insert(session_priv_bytes, path.last().unwrap().fee_msat));
+
+                                       send_res
                                } {
                                        Some((update_add, commitment_signed, monitor_update)) => {
                                                if let Err(e) = self.chain_monitor.update_channel(chan.get().get_funding_txo().unwrap(), monitor_update) {
@@ -2170,7 +2184,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                }
                        } else {
                                return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                       err: "Payment with ID {} not found".to_string()
+                                       err: format!("Payment with ID {} not found", log_bytes!(payment_id.0)),
                                }))
                        }
                };
@@ -3002,15 +3016,13 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                        network_update: None,
                                                                        all_paths_failed: payment.get().remaining_parts() == 0,
                                                                        path: path.clone(),
+                                                                       short_channel_id: None,
                                                                        #[cfg(test)]
                                                                        error_code: None,
                                                                        #[cfg(test)]
                                                                        error_data: None,
                                                                }
                                                        );
-                                                       if payment.get().remaining_parts() == 0 {
-                                                               payment.remove();
-                                                       }
                                                }
                                        } else {
                                                log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
@@ -3048,7 +3060,6 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        }
                                        if sessions.get().remaining_parts() == 0 {
                                                all_paths_failed = true;
-                                               sessions.remove();
                                        }
                                } else {
                                        log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
@@ -3059,9 +3070,9 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                match &onion_error {
                                        &HTLCFailReason::LightningError { ref err } => {
 #[cfg(test)]
-                                               let (network_update, payment_retryable, onion_error_code, onion_error_data) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
+                                               let (network_update, short_channel_id, payment_retryable, onion_error_code, onion_error_data) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
 #[cfg(not(test))]
-                                               let (network_update, payment_retryable, _, _) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
+                                               let (network_update, short_channel_id, payment_retryable, _, _) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
                                                // TODO: If we decided to blame ourselves (or one of our channels) in
                                                // process_onion_failure we should close that channel as it implies our
                                                // next-hop is needlessly blaming us!
@@ -3072,6 +3083,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                network_update,
                                                                all_paths_failed,
                                                                path: path.clone(),
+                                                               short_channel_id,
 #[cfg(test)]
                                                                error_code: onion_error_code,
 #[cfg(test)]
@@ -3099,6 +3111,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                network_update: None,
                                                                all_paths_failed,
                                                                path: path.clone(),
+                                                               short_channel_id: Some(path.first().unwrap().short_channel_id),
 #[cfg(test)]
                                                                error_code: Some(*failure_code),
 #[cfg(test)]
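A payer-side sketch of what the new short_channel_id field enables: when a path fails somewhere other than the destination, the offending channel can be identified and avoided on retry. `penalize_channel` is a hypothetical router/scorer hook, not an LDK API.

```rust
// Payer-side sketch: use the new short_channel_id to avoid the failing hop on
// retry. `penalize_channel` is a hypothetical router/scorer hook, not an LDK API.
use lightning::util::events::Event;

fn penalize_channel(_scid: u64) { /* scorer-specific */ }

fn on_path_failure(event: Event) {
    if let Event::PaymentPathFailed { rejected_by_dest, short_channel_id, .. } = event {
        // short_channel_id is Some(scid) when the failing hop could be identified
        // (including our own first hop, as set above) and None otherwise.
        if !rejected_by_dest {
            if let Some(scid) = short_channel_id {
                penalize_channel(scid);
            }
        }
    }
}
```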
@@ -3304,8 +3317,12 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        sessions.remove(&session_priv_bytes, path.last().unwrap().fee_msat)
                                } else { false };
                                if found_payment {
+                                       let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner());
                                        self.pending_events.lock().unwrap().push(
-                                               events::Event::PaymentSent { payment_preimage }
+                                               events::Event::PaymentSent {
+                                                       payment_preimage,
+                                                       payment_hash
+                                               }
                                        );
                                } else {
                                        log_trace!(self.logger, "Received duplicative fulfill for HTLC with payment_preimage {}", log_bytes!(payment_preimage.0));
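The payment_hash now carried in PaymentSent is simply the SHA-256 of the revealed preimage, exactly as computed above; a standalone restatement of that relationship:

```rust
// Standalone restatement: the payment_hash in PaymentSent is SHA-256(preimage),
// matching the Sha256::hash(&payment_preimage.0).into_inner() call above.
use bitcoin::hashes::{sha256, Hash};

fn payment_hash_from_preimage(preimage: &[u8; 32]) -> [u8; 32] {
    // An HTLC is claimed by revealing the preimage whose SHA-256 equals the
    // payment_hash committed to in the invoice.
    sha256::Hash::hash(&preimage[..]).into_inner()
}

fn main() {
    let preimage = [0x42u8; 32];
    let payment_hash = payment_hash_from_preimage(&preimage);
    assert_eq!(payment_hash.len(), 32);
}
```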
@@ -3545,7 +3562,16 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                Err(e) => try_chan_entry!(self, Err(e), channel_state, chan),
                                        };
                                        if let Err(e) = self.chain_monitor.watch_channel(chan.get().get_funding_txo().unwrap(), monitor) {
-                                               return_monitor_err!(self, e, channel_state, chan, RAACommitmentOrder::RevokeAndACKFirst, false, false);
+                                               let mut res = handle_monitor_err!(self, e, channel_state, chan, RAACommitmentOrder::RevokeAndACKFirst, false, false);
+                                               if let Err(MsgHandleErrInternal { ref mut shutdown_finish, .. }) = res {
+                                                       // We weren't able to watch the channel to begin with, so no updates should be made on
+                                                       // it. Previously, full_stack_target found an (unreachable) panic when the
+                                                       // monitor update contained within `shutdown_finish` was applied.
+                                                       if let Some((ref mut shutdown_finish, _)) = shutdown_finish {
+                                                               shutdown_finish.0.take();
+                                                       }
+                                               }
+                                               return res
                                        }
                                        funding_tx
                                },
@@ -3688,7 +3714,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        msg: update
                                });
                        }
-                       self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: msg.channel_id,  reason: ClosureReason::CooperativeClosure });
+                       self.issue_channel_close_events(&chan, ClosureReason::CooperativeClosure);
                }
                Ok(())
        }
@@ -4100,7 +4126,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                msg: update
                                                        });
                                                }
-                                               self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(),  reason: ClosureReason::CommitmentTxConfirmed });
+                                               self.issue_channel_close_events(&chan, ClosureReason::CommitmentTxConfirmed);
                                                pending_msg_events.push(events::MessageSendEvent::HandleError {
                                                        node_id: chan.get_counterparty_node_id(),
                                                        action: msgs::ErrorAction::SendErrorMessage {
@@ -4216,12 +4242,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                });
                                                        }
 
-                                                       if let Ok(mut pending_events_lock) = self.pending_events.lock() {
-                                                               pending_events_lock.push(events::Event::ChannelClosed {
-                                                                       channel_id: *channel_id,
-                                                                       reason: ClosureReason::CooperativeClosure
-                                                               });
-                                                       }
+                                                       self.issue_channel_close_events(chan, ClosureReason::CooperativeClosure);
 
                                                        log_info!(self.logger, "Broadcasting {}", log_tx!(tx));
                                                        self.tx_broadcaster.broadcast_transaction(&tx);
@@ -4375,6 +4396,11 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                self.process_pending_events(&event_handler);
                events.into_inner()
        }
+
+       #[cfg(test)]
+       pub fn has_pending_payments(&self) -> bool {
+               !self.pending_outbound_payments.lock().unwrap().is_empty()
+       }
 }
 
 impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> MessageSendEventsProvider for ChannelManager<Signer, M, T, K, F, L>
@@ -4550,6 +4576,16 @@ where
                payment_secrets.retain(|_, inbound_payment| {
                        inbound_payment.expiry_time > header.time as u64
                });
+
+               let mut outbounds = self.pending_outbound_payments.lock().unwrap();
+               outbounds.retain(|_, payment| {
+                       const PAYMENT_EXPIRY_BLOCKS: u32 = 3;
+                       if payment.remaining_parts() != 0 { return true }
+                       if let PendingOutboundPayment::Retryable { starting_block_height, .. } = payment {
+                               return *starting_block_height + PAYMENT_EXPIRY_BLOCKS > height
+                       }
+                       true
+               });
        }
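For intuition on the retention rule above: a payment with parts still in flight is always kept, while a fully resolved Retryable payment initiated at block H is dropped once the connected height reaches H + 3. A minimal standalone restatement of that predicate (simplified types, not the LDK enum):

```rust
// Standalone restatement of the retention predicate above. PAYMENT_EXPIRY_BLOCKS
// mirrors the constant in the diff; the Option stands in for the Retryable vs.
// Legacy distinction (Legacy entries carry no starting height and are kept).
const PAYMENT_EXPIRY_BLOCKS: u32 = 3;

fn keep_payment(remaining_parts: usize, starting_block_height: Option<u32>, height: u32) -> bool {
    if remaining_parts != 0 { return true; }
    match starting_block_height {
        Some(start) => start + PAYMENT_EXPIRY_BLOCKS > height,
        None => true,
    }
}

fn main() {
    assert!(keep_payment(0, Some(100), 102));  // fully resolved, still inside the window
    assert!(!keep_payment(0, Some(100), 103)); // 100 + 3 <= 103: pruned
    assert!(keep_payment(3, Some(100), 500));  // parts still pending: always kept
}
```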
 
        fn get_relevant_txids(&self) -> Vec<Txid> {
@@ -4643,7 +4679,7 @@ where
                                                        msg: update
                                                });
                                        }
-                                       self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: channel.channel_id(),  reason: ClosureReason::CommitmentTxConfirmed });
+                                       self.issue_channel_close_events(channel, ClosureReason::CommitmentTxConfirmed);
                                        pending_msg_events.push(events::MessageSendEvent::HandleError {
                                                node_id: channel.get_counterparty_node_id(),
                                                action: msgs::ErrorAction::SendErrorMessage { msg: e },
@@ -4834,7 +4870,7 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                                                                msg: update
                                                        });
                                                }
-                                               self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(),  reason: ClosureReason::DisconnectedPeer });
+                                               self.issue_channel_close_events(chan, ClosureReason::DisconnectedPeer);
                                                false
                                        } else {
                                                true
@@ -4849,7 +4885,7 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                                                        if let Some(short_id) = chan.get_short_channel_id() {
                                                                short_to_id.remove(&short_id);
                                                        }
-                                                       self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(),  reason: ClosureReason::DisconnectedPeer });
+                                                       self.issue_channel_close_events(chan, ClosureReason::DisconnectedPeer);
                                                        return false;
                                                } else {
                                                        no_channels_remain = false;
@@ -5281,6 +5317,7 @@ impl_writeable_tlv_based_enum!(PendingOutboundPayment,
                (4, payment_secret, option),
                (6, total_msat, required),
                (8, pending_amt_msat, required),
+               (10, starting_block_height, required),
        },
 ;);
 
@@ -5639,6 +5676,16 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                None => continue,
                        }
                }
+               if forward_htlcs_count > 0 {
+                       // If we have pending HTLCs to forward, assume we either dropped a
+                       // `PendingHTLCsForwardable` or the user received it but never processed it as they
+                       // shut down before the timer hit. Either way, set the time_forwardable to a small
+                       // constant as enough time has likely passed that we should simply handle the forwards
+                       // now, or at least after the user gets a chance to reconnect to our peers.
+                       pending_events_read.push(events::Event::PendingHTLCsForwardable {
+                               time_forwardable: Duration::from_secs(2),
+                       });
+               }
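The event re-injected here is the same PendingHTLCsForwardable users normally receive when forwards are queued, so the usual handling applies: wait roughly time_forwardable, then flush the forward queue. A simplified, blocking sketch (the closure stands in for calling `process_pending_htlc_forwards` on the deserialized ChannelManager):

```rust
// Simplified, blocking sketch of the usual PendingHTLCsForwardable handling.
// The closure stands in for channel_manager.process_pending_htlc_forwards().
use lightning::util::events::Event;

fn on_forwardable<F: FnOnce()>(event: Event, process_pending_htlc_forwards: F) {
    if let Event::PendingHTLCsForwardable { time_forwardable } = event {
        // Waiting at least time_forwardable (slightly randomized in practice)
        // helps obscure whether an HTLC was forwarded or originated locally.
        std::thread::sleep(time_forwardable);
        process_pending_htlc_forwards();
    }
}
```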
 
                let background_event_count: u64 = Readable::read(reader)?;
                let mut pending_background_events_read: Vec<BackgroundEvent> = Vec::with_capacity(cmp::min(background_event_count as usize, MAX_ALLOC_SIZE/mem::size_of::<BackgroundEvent>()));
@@ -5978,8 +6025,9 @@ mod tests {
                // further events will be generated for subsequent path successes.
                let events = nodes[0].node.get_and_clear_pending_events();
                match events[0] {
-                       Event::PaymentSent { payment_preimage: ref preimage } => {
+                       Event::PaymentSent { payment_preimage: ref preimage, payment_hash: ref hash } => {
                                assert_eq!(payment_preimage, *preimage);
+                               assert_eq!(our_payment_hash, *hash);
                        },
                        _ => panic!("Unexpected event"),
                }