X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Fln%2Fchannelmanager.rs;h=c231258eaa02809bdc99a12d6946eaf7200936bc;hb=a1fc379151063aec45a01a7b22441ba3c9593d5e;hp=c06fe0075ec4188768e4e390fa717b8ccd80e108;hpb=801d6e5256d6ac91d5d5668da1fa5a2b55303246;p=rust-lightning diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs index c06fe007..c231258e 100644 --- a/lightning/src/ln/channelmanager.rs +++ b/lightning/src/ln/channelmanager.rs @@ -52,7 +52,7 @@ use ln::onion_utils; use ln::msgs::{ChannelMessageHandler, DecodeError, LightningError, OptionalField}; use chain::keysinterface::{Sign, KeysInterface, KeysManager, InMemorySigner}; use util::config::UserConfig; -use util::events::{EventHandler, EventsProvider, MessageSendEvent, MessageSendEventsProvider}; +use util::events::{EventHandler, EventsProvider, MessageSendEvent, MessageSendEventsProvider, ClosureReason}; use util::{byte_utils, events}; use util::ser::{BigSize, FixedLengthReader, Readable, ReadableArgs, MaybeReadable, Writeable, Writer}; use util::chacha20::{ChaCha20, ChaChaReader}; @@ -172,20 +172,20 @@ struct ClaimableHTLC { onion_payload: OnionPayload, } -/// A payment identifier used to correlate an MPP payment's per-path HTLC sources internally. +/// A payment identifier used to uniquely identify a payment to LDK. #[derive(Hash, Copy, Clone, PartialEq, Eq, Debug)] -pub(crate) struct MppId(pub [u8; 32]); +pub struct PaymentId(pub [u8; 32]); -impl Writeable for MppId { +impl Writeable for PaymentId { fn write(&self, w: &mut W) -> Result<(), io::Error> { self.0.write(w) } } -impl Readable for MppId { +impl Readable for PaymentId { fn read(r: &mut R) -> Result { let buf: [u8; 32] = Readable::read(r)?; - Ok(MppId(buf)) + Ok(PaymentId(buf)) } } /// Tracks the inbound corresponding to an outbound HTLC @@ -198,7 +198,7 @@ pub(crate) enum HTLCSource { /// Technically we can recalculate this from the route, but we cache it here to avoid /// doing a double-pass on route when we get a failure back first_hop_htlc_msat: u64, - mpp_id: MppId, + payment_id: PaymentId, }, } #[cfg(test)] @@ -208,7 +208,7 @@ impl HTLCSource { path: Vec::new(), session_priv: SecretKey::from_slice(&[1; 32]).unwrap(), first_hop_htlc_msat: 0, - mpp_id: MppId([2; 32]), + payment_id: PaymentId([2; 32]), } } } @@ -242,6 +242,7 @@ type ShutdownResult = (Option<(OutPoint, ChannelMonitorUpdate)>, Vec<(HTLCSource struct MsgHandleErrInternal { err: msgs::LightningError, + chan_id: Option<[u8; 32]>, // If Some a channel of ours has been closed shutdown_finish: Option<(ShutdownResult, Option)>, } impl MsgHandleErrInternal { @@ -257,6 +258,7 @@ impl MsgHandleErrInternal { }, }, }, + chan_id: None, shutdown_finish: None, } } @@ -267,12 +269,13 @@ impl MsgHandleErrInternal { err, action: msgs::ErrorAction::IgnoreError, }, + chan_id: None, shutdown_finish: None, } } #[inline] fn from_no_close(err: msgs::LightningError) -> Self { - Self { err, shutdown_finish: None } + Self { err, chan_id: None, shutdown_finish: None } } #[inline] fn from_finish_shutdown(err: String, channel_id: [u8; 32], shutdown_res: ShutdownResult, channel_update: Option) -> Self { @@ -286,6 +289,7 @@ impl MsgHandleErrInternal { }, }, }, + chan_id: Some(channel_id), shutdown_finish: Some((shutdown_res, channel_update)), } } @@ -320,6 +324,7 @@ impl MsgHandleErrInternal { }, }, }, + chan_id: None, shutdown_finish: None, } } @@ -484,7 +489,7 @@ pub struct ChannelManager>>, + pending_outbound_payments: Mutex>>, our_network_key: SecretKey, 
our_network_pubkey: PublicKey, @@ -813,12 +818,13 @@ macro_rules! handle_error { ($self: ident, $internal: expr, $counterparty_node_id: expr) => { match $internal { Ok(msg) => Ok(msg), - Err(MsgHandleErrInternal { err, shutdown_finish }) => { + Err(MsgHandleErrInternal { err, chan_id, shutdown_finish }) => { #[cfg(debug_assertions)] { // In testing, ensure there are no deadlocks where the lock is already held upon // entering the macro. assert!($self.channel_state.try_lock().is_ok()); + assert!($self.pending_events.try_lock().is_ok()); } let mut msg_events = Vec::with_capacity(2); @@ -830,6 +836,9 @@ macro_rules! handle_error { msg: update }); } + if let Some(channel_id) = chan_id { + $self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id, reason: ClosureReason::ProcessingError { err: err.err.clone() } }); + } } log_error!($self.logger, "{}", err.err); @@ -1363,6 +1372,12 @@ impl ChannelMana msg: channel_update }); } + if let Ok(mut pending_events_lock) = self.pending_events.lock() { + pending_events_lock.push(events::Event::ChannelClosed { + channel_id: *channel_id, + reason: ClosureReason::HolderForceClosed + }); + } } break Ok(()); }, @@ -1438,7 +1453,9 @@ impl ChannelMana } } - fn force_close_channel_with_peer(&self, channel_id: &[u8; 32], peer_node_id: Option<&PublicKey>) -> Result { + /// `peer_node_id` should be set when we receive a message from a peer, but not set when the + /// user closes, which will be re-exposed as the `ChannelClosed` reason. + fn force_close_channel_with_peer(&self, channel_id: &[u8; 32], peer_node_id: Option<&PublicKey>, peer_msg: Option<&String>) -> Result { let mut chan = { let mut channel_state_lock = self.channel_state.lock().unwrap(); let channel_state = &mut *channel_state_lock; @@ -1451,6 +1468,14 @@ impl ChannelMana if let Some(short_id) = chan.get().get_short_channel_id() { channel_state.short_to_id.remove(&short_id); } + let mut pending_events_lock = self.pending_events.lock().unwrap(); + if peer_node_id.is_some() { + if let Some(peer_msg) = peer_msg { + pending_events_lock.push(events::Event::ChannelClosed { channel_id: *channel_id, reason: ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() } }); + } + } else { + pending_events_lock.push(events::Event::ChannelClosed { channel_id: *channel_id, reason: ClosureReason::HolderForceClosed }); + } chan.remove_entry().1 } else { return Err(APIError::ChannelUnavailable{err: "No such channel".to_owned()}); @@ -1472,7 +1497,7 @@ impl ChannelMana /// the chain and rejecting new HTLCs on the given channel. Fails if channel_id is unknown to the manager. 
pub fn force_close_channel(&self, channel_id: &[u8; 32]) -> Result<(), APIError> { let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier); - match self.force_close_channel_with_peer(channel_id, None) { + match self.force_close_channel_with_peer(channel_id, None, None) { Ok(counterparty_node_id) => { self.channel_state.lock().unwrap().pending_msg_events.push( events::MessageSendEvent::HandleError { @@ -1853,7 +1878,7 @@ impl ChannelMana } // Only public for testing, this should otherwise never be called direcly - pub(crate) fn send_payment_along_path(&self, path: &Vec, payment_hash: &PaymentHash, payment_secret: &Option, total_value: u64, cur_height: u32, mpp_id: MppId, keysend_preimage: &Option) -> Result<(), APIError> { + pub(crate) fn send_payment_along_path(&self, path: &Vec, payment_hash: &PaymentHash, payment_secret: &Option, total_value: u64, cur_height: u32, payment_id: PaymentId, keysend_preimage: &Option) -> Result<(), APIError> { log_trace!(self.logger, "Attempting to send payment for path with next hop {}", path.first().unwrap().short_channel_id); let prng_seed = self.keys_manager.get_secure_random_bytes(); let session_priv_bytes = self.keys_manager.get_secure_random_bytes(); @@ -1869,7 +1894,7 @@ impl ChannelMana let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier); let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap(); - let sessions = pending_outbounds.entry(mpp_id).or_insert(HashSet::new()); + let sessions = pending_outbounds.entry(payment_id).or_insert(HashSet::new()); assert!(sessions.insert(session_priv_bytes)); let err: Result<(), _> = loop { @@ -1892,7 +1917,7 @@ impl ChannelMana path: path.clone(), session_priv: session_priv.clone(), first_hop_htlc_msat: htlc_msat, - mpp_id, + payment_id, }, onion_packet, &self.logger), channel_state, chan) } { Some((update_add, commitment_signed, monitor_update)) => { @@ -1972,11 +1997,11 @@ impl ChannelMana /// If a payment_secret *is* provided, we assume that the invoice had the payment_secret feature /// bit set (either as required or as available). If multiple paths are present in the Route, /// we assume the invoice had the basic_mpp feature set. 
- pub fn send_payment(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option) -> Result<(), PaymentSendFailure> { + pub fn send_payment(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option) -> Result { self.send_payment_internal(route, payment_hash, payment_secret, None) } - fn send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option, keysend_preimage: Option) -> Result<(), PaymentSendFailure> { + fn send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option, keysend_preimage: Option) -> Result { if route.paths.len() < 1 { return Err(PaymentSendFailure::ParameterError(APIError::RouteError{err: "There must be at least one path to send over"})); } @@ -1992,7 +2017,7 @@ impl ChannelMana let mut total_value = 0; let our_node_id = self.get_our_node_id(); let mut path_errs = Vec::with_capacity(route.paths.len()); - let mpp_id = MppId(self.keys_manager.get_secure_random_bytes()); + let payment_id = PaymentId(self.keys_manager.get_secure_random_bytes()); 'path_check: for path in route.paths.iter() { if path.len() < 1 || path.len() > 20 { path_errs.push(Err(APIError::RouteError{err: "Path didn't go anywhere/had bogus size"})); @@ -2014,7 +2039,7 @@ impl ChannelMana let cur_height = self.best_block.read().unwrap().height() + 1; let mut results = Vec::new(); for path in route.paths.iter() { - results.push(self.send_payment_along_path(&path, &payment_hash, payment_secret, total_value, cur_height, mpp_id, &keysend_preimage)); + results.push(self.send_payment_along_path(&path, &payment_hash, payment_secret, total_value, cur_height, payment_id, &keysend_preimage)); } let mut has_ok = false; let mut has_err = false; @@ -2034,7 +2059,7 @@ impl ChannelMana } else if has_err { Err(PaymentSendFailure::AllFailedRetrySafe(results.drain(..).map(|r| r.unwrap_err()).collect())) } else { - Ok(()) + Ok(payment_id) } } @@ -2052,14 +2077,14 @@ impl ChannelMana /// Note that `route` must have exactly one path. /// /// [`send_payment`]: Self::send_payment - pub fn send_spontaneous_payment(&self, route: &Route, payment_preimage: Option) -> Result { + pub fn send_spontaneous_payment(&self, route: &Route, payment_preimage: Option) -> Result<(PaymentHash, PaymentId), PaymentSendFailure> { let preimage = match payment_preimage { Some(p) => p, None => PaymentPreimage(self.keys_manager.get_secure_random_bytes()), }; let payment_hash = PaymentHash(Sha256::hash(&preimage.0).into_inner()); match self.send_payment_internal(route, payment_hash, &None, Some(preimage)) { - Ok(()) => Ok(payment_hash), + Ok(payment_id) => Ok((payment_hash, payment_id)), Err(e) => Err(e) } } @@ -2416,6 +2441,7 @@ impl ChannelMana if let Some(short_id) = channel.get_short_channel_id() { channel_state.short_to_id.remove(&short_id); } + // ChannelClosed event is generated by handle_error for us. Err(MsgHandleErrInternal::from_finish_shutdown(msg, channel_id, channel.force_shutdown(true), self.get_channel_update_for_broadcast(&channel).ok())) }, ChannelError::CloseDelayBroadcast(_) => { panic!("Wait is only generated on receipt of channel_reestablish, which is handled by try_chan_entry, we don't bother to support it here"); } @@ -2849,18 +2875,19 @@ impl ChannelMana self.fail_htlc_backwards_internal(channel_state, htlc_src, &payment_hash, HTLCFailReason::Reason { failure_code, data: onion_failure_data}); }, - HTLCSource::OutboundRoute { session_priv, mpp_id, .. } => { + HTLCSource::OutboundRoute { session_priv, payment_id, path, .. 
} => { let mut session_priv_bytes = [0; 32]; session_priv_bytes.copy_from_slice(&session_priv[..]); let mut outbounds = self.pending_outbound_payments.lock().unwrap(); - if let hash_map::Entry::Occupied(mut sessions) = outbounds.entry(mpp_id) { + if let hash_map::Entry::Occupied(mut sessions) = outbounds.entry(payment_id) { if sessions.get_mut().remove(&session_priv_bytes) { self.pending_events.lock().unwrap().push( - events::Event::PaymentFailed { + events::Event::PaymentPathFailed { payment_hash, rejected_by_dest: false, network_update: None, all_paths_failed: sessions.get().len() == 0, + path: path.clone(), #[cfg(test)] error_code: None, #[cfg(test)] @@ -2895,12 +2922,12 @@ impl ChannelMana // from block_connected which may run during initialization prior to the chain_monitor // being fully configured. See the docs for `ChannelManagerReadArgs` for more. match source { - HTLCSource::OutboundRoute { ref path, session_priv, mpp_id, .. } => { + HTLCSource::OutboundRoute { ref path, session_priv, payment_id, .. } => { let mut session_priv_bytes = [0; 32]; session_priv_bytes.copy_from_slice(&session_priv[..]); let mut outbounds = self.pending_outbound_payments.lock().unwrap(); let mut all_paths_failed = false; - if let hash_map::Entry::Occupied(mut sessions) = outbounds.entry(mpp_id) { + if let hash_map::Entry::Occupied(mut sessions) = outbounds.entry(payment_id) { if !sessions.get_mut().remove(&session_priv_bytes) { log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0)); return; @@ -2925,11 +2952,12 @@ impl ChannelMana // process_onion_failure we should close that channel as it implies our // next-hop is needlessly blaming us! self.pending_events.lock().unwrap().push( - events::Event::PaymentFailed { + events::Event::PaymentPathFailed { payment_hash: payment_hash.clone(), rejected_by_dest: !payment_retryable, network_update, all_paths_failed, + path: path.clone(), #[cfg(test)] error_code: onion_error_code, #[cfg(test)] @@ -2951,11 +2979,12 @@ impl ChannelMana // TODO: For non-temporary failures, we really should be closing the // channel here as we apparently can't relay through them anyway. self.pending_events.lock().unwrap().push( - events::Event::PaymentFailed { + events::Event::PaymentPathFailed { payment_hash: payment_hash.clone(), rejected_by_dest: path.len() == 1, network_update: None, all_paths_failed, + path: path.clone(), #[cfg(test)] error_code: Some(*failure_code), #[cfg(test)] @@ -3152,12 +3181,12 @@ impl ChannelMana fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option, from_onchain: bool) { match source { - HTLCSource::OutboundRoute { session_priv, mpp_id, .. } => { + HTLCSource::OutboundRoute { session_priv, payment_id, .. 
} => { mem::drop(channel_state_lock); let mut session_priv_bytes = [0; 32]; session_priv_bytes.copy_from_slice(&session_priv[..]); let mut outbounds = self.pending_outbound_payments.lock().unwrap(); - let found_payment = if let Some(mut sessions) = outbounds.remove(&mpp_id) { + let found_payment = if let Some(mut sessions) = outbounds.remove(&payment_id) { sessions.remove(&session_priv_bytes) } else { false }; if found_payment { @@ -3545,6 +3574,7 @@ impl ChannelMana msg: update }); } + self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: msg.channel_id, reason: ClosureReason::CooperativeClosure }); } Ok(()) } @@ -3940,7 +3970,7 @@ impl ChannelMana self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }); } }, - MonitorEvent::CommitmentTxBroadcasted(funding_outpoint) => { + MonitorEvent::CommitmentTxConfirmed(funding_outpoint) => { let mut channel_lock = self.channel_state.lock().unwrap(); let channel_state = &mut *channel_lock; let by_id = &mut channel_state.by_id; @@ -3956,6 +3986,7 @@ impl ChannelMana msg: update }); } + self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(), reason: ClosureReason::CommitmentTxConfirmed }); pending_msg_events.push(events::MessageSendEvent::HandleError { node_id: chan.get_counterparty_node_id(), action: msgs::ErrorAction::SendErrorMessage { @@ -4017,6 +4048,7 @@ impl ChannelMana Err(e) => { let (close_channel, res) = convert_chan_err!(self, e, short_to_id, chan, channel_id); handle_errors.push((chan.get_counterparty_node_id(), Err(res))); + // ChannelClosed event is generated by handle_error for us !close_channel } } @@ -4070,6 +4102,13 @@ impl ChannelMana }); } + if let Ok(mut pending_events_lock) = self.pending_events.lock() { + pending_events_lock.push(events::Event::ChannelClosed { + channel_id: *channel_id, + reason: ClosureReason::CooperativeClosure + }); + } + log_info!(self.logger, "Broadcasting {}", log_tx!(tx)); self.tx_broadcaster.broadcast_transaction(&tx); false @@ -4490,6 +4529,7 @@ where msg: update }); } + self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: channel.channel_id(), reason: ClosureReason::CommitmentTxConfirmed }); pending_msg_events.push(events::MessageSendEvent::HandleError { node_id: channel.get_counterparty_node_id(), action: msgs::ErrorAction::SendErrorMessage { msg: e }, @@ -4680,6 +4720,7 @@ impl msg: update }); } + self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(), reason: ClosureReason::DisconnectedPeer }); false } else { true @@ -4694,6 +4735,7 @@ impl if let Some(short_id) = chan.get_short_channel_id() { short_to_id.remove(&short_id); } + self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(), reason: ClosureReason::DisconnectedPeer }); return false; } else { no_channels_remain = false; @@ -4784,12 +4826,12 @@ impl for chan in self.list_channels() { if chan.counterparty.node_id == *counterparty_node_id { // Untrusted messages from peer, we throw away the error if id points to a non-existent channel - let _ = self.force_close_channel_with_peer(&chan.channel_id, Some(counterparty_node_id)); + let _ = self.force_close_channel_with_peer(&chan.channel_id, Some(counterparty_node_id), Some(&msg.data)); } } } else { // Untrusted messages from peer, we throw away the error if id points to a 
non-existent channel - let _ = self.force_close_channel_with_peer(&msg.channel_id, Some(counterparty_node_id)); + let _ = self.force_close_channel_with_peer(&msg.channel_id, Some(counterparty_node_id), Some(&msg.data)); } } } @@ -5037,23 +5079,23 @@ impl Readable for HTLCSource { let mut session_priv: ::util::ser::OptionDeserWrapper = ::util::ser::OptionDeserWrapper(None); let mut first_hop_htlc_msat: u64 = 0; let mut path = Some(Vec::new()); - let mut mpp_id = None; + let mut payment_id = None; read_tlv_fields!(reader, { (0, session_priv, required), - (1, mpp_id, option), + (1, payment_id, option), (2, first_hop_htlc_msat, required), (4, path, vec_type), }); - if mpp_id.is_none() { - // For backwards compat, if there was no mpp_id written, use the session_priv bytes + if payment_id.is_none() { + // For backwards compat, if there was no payment_id written, use the session_priv bytes // instead. - mpp_id = Some(MppId(*session_priv.0.unwrap().as_ref())); + payment_id = Some(PaymentId(*session_priv.0.unwrap().as_ref())); } Ok(HTLCSource::OutboundRoute { session_priv: session_priv.0.unwrap(), first_hop_htlc_msat: first_hop_htlc_msat, path: path.unwrap(), - mpp_id: mpp_id.unwrap(), + payment_id: payment_id.unwrap(), }) } 1 => Ok(HTLCSource::PreviousHopData(Readable::read(reader)?)), @@ -5065,12 +5107,12 @@ impl Readable for HTLCSource { impl Writeable for HTLCSource { fn write(&self, writer: &mut W) -> Result<(), ::io::Error> { match self { - HTLCSource::OutboundRoute { ref session_priv, ref first_hop_htlc_msat, ref path, mpp_id } => { + HTLCSource::OutboundRoute { ref session_priv, ref first_hop_htlc_msat, ref path, payment_id } => { 0u8.write(writer)?; - let mpp_id_opt = Some(mpp_id); + let payment_id_opt = Some(payment_id); write_tlv_fields!(writer, { (0, session_priv, required), - (1, mpp_id_opt, option), + (1, payment_id_opt, option), (2, first_hop_htlc_msat, required), (4, path, vec_type), }); @@ -5354,6 +5396,7 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> let mut funding_txo_set = HashSet::with_capacity(cmp::min(channel_count as usize, 128)); let mut by_id = HashMap::with_capacity(cmp::min(channel_count as usize, 128)); let mut short_to_id = HashMap::with_capacity(cmp::min(channel_count as usize, 128)); + let mut channel_closures = Vec::new(); for _ in 0..channel_count { let mut channel: Channel = Channel::read(reader, &args.keys_manager)?; let funding_txo = channel.get_funding_txo().ok_or(DecodeError::InvalidValue)?; @@ -5384,6 +5427,10 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> let (_, mut new_failed_htlcs) = channel.force_shutdown(true); failed_htlcs.append(&mut new_failed_htlcs); monitor.broadcast_latest_holder_commitment_txn(&args.tx_broadcaster, &args.logger); + channel_closures.push(events::Event::ChannelClosed { + channel_id: channel.channel_id(), + reason: ClosureReason::OutdatedChannelManager + }); } else { if let Some(short_channel_id) = channel.get_short_channel_id() { short_to_id.insert(short_channel_id, channel.channel_id()); @@ -5471,11 +5518,11 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> } let pending_outbound_payments_count_compat: u64 = Readable::read(reader)?; - let mut pending_outbound_payments_compat: HashMap> = + let mut pending_outbound_payments_compat: HashMap> = HashMap::with_capacity(cmp::min(pending_outbound_payments_count_compat as usize, MAX_ALLOC_SIZE/32)); for _ in 0..pending_outbound_payments_count_compat { let session_priv = Readable::read(reader)?; - if 
pending_outbound_payments_compat.insert(MppId(session_priv), [session_priv].iter().cloned().collect()).is_some() { + if pending_outbound_payments_compat.insert(PaymentId(session_priv), [session_priv].iter().cloned().collect()).is_some() { return Err(DecodeError::InvalidValue) }; } @@ -5491,6 +5538,10 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> let mut secp_ctx = Secp256k1::new(); secp_ctx.seeded_randomize(&args.keys_manager.get_secure_random_bytes()); + if !channel_closures.is_empty() { + pending_events_read.append(&mut channel_closures); + } + let channel_manager = ChannelManager { genesis_hash, fee_estimator: args.fee_estimator, @@ -5545,7 +5596,7 @@ mod tests { use bitcoin::hashes::sha256::Hash as Sha256; use core::time::Duration; use ln::{PaymentPreimage, PaymentHash, PaymentSecret}; - use ln::channelmanager::{MppId, PaymentSendFailure}; + use ln::channelmanager::{PaymentId, PaymentSendFailure}; use ln::features::{InitFeatures, InvoiceFeatures}; use ln::functional_test_utils::*; use ln::msgs; @@ -5696,11 +5747,11 @@ mod tests { let net_graph_msg_handler = &nodes[0].net_graph_msg_handler; let route = get_route(&nodes[0].node.get_our_node_id(), &net_graph_msg_handler.network_graph, &nodes[1].node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger).unwrap(); let (payment_preimage, our_payment_hash, payment_secret) = get_payment_preimage_hash!(&nodes[1]); - let mpp_id = MppId([42; 32]); + let payment_id = PaymentId([42; 32]); // Use the utility function send_payment_along_path to send the payment with MPP data which // indicates there are more HTLCs coming. let cur_height = CHAN_CONFIRM_DEPTH + 1; // route_payment calls send_payment, which adds 1 to the current height. So we do the same here to match. - nodes[0].node.send_payment_along_path(&route.paths[0], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, mpp_id, &None).unwrap(); + nodes[0].node.send_payment_along_path(&route.paths[0], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None).unwrap(); check_added_monitors!(nodes[0], 1); let mut events = nodes[0].node.get_and_clear_pending_msg_events(); assert_eq!(events.len(), 1); @@ -5730,7 +5781,7 @@ mod tests { expect_payment_failed!(nodes[0], our_payment_hash, true); // Send the second half of the original MPP payment. - nodes[0].node.send_payment_along_path(&route.paths[0], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, mpp_id, &None).unwrap(); + nodes[0].node.send_payment_along_path(&route.paths[0], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None).unwrap(); check_added_monitors!(nodes[0], 1); let mut events = nodes[0].node.get_and_clear_pending_msg_events(); assert_eq!(events.len(), 1); @@ -5826,7 +5877,7 @@ mod tests { // To start (2), send a keysend payment but don't claim it. let payment_preimage = PaymentPreimage([42; 32]); let route = get_route(&nodes[0].node.get_our_node_id(), &nodes[0].net_graph_msg_handler.network_graph, &expected_route.last().unwrap().node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger).unwrap(); - let payment_hash = nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage)).unwrap(); + let (payment_hash, _) = nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage)).unwrap(); check_added_monitors!(nodes[0], 1); let mut events = nodes[0].node.get_and_clear_pending_msg_events(); assert_eq!(events.len(), 1);
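
The diff above makes send_payment return the PaymentId it assigns (and makes send_spontaneous_payment return it alongside the generated payment hash), so callers can keep their own record of in-flight payments. A minimal caller-side sketch is below; the OutboundPayments struct and its record method are hypothetical illustration, not LDK API, and rely only on PaymentId being public and deriving Hash/Eq as shown above.

// Sketch only: hypothetical caller-side bookkeeping keyed by the newly public PaymentId.
use std::collections::HashMap;

use lightning::ln::PaymentHash;
use lightning::ln::channelmanager::PaymentId;

#[derive(Default)]
struct OutboundPayments {
	by_id: HashMap<PaymentId, PaymentHash>,
}

impl OutboundPayments {
	// Store the id returned by ChannelManager::send_payment, e.g.
	// let payment_id = channel_manager.send_payment(&route, payment_hash, &payment_secret)?;
	fn record(&mut self, payment_id: PaymentId, payment_hash: PaymentHash) {
		self.by_id.insert(payment_id, payment_hash);
	}
}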
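
The diff also surfaces channel closures as Event::ChannelClosed carrying a ClosureReason, and renames PaymentFailed to PaymentPathFailed with an added path field. A minimal sketch of consuming these events follows; it assumes only the variants and fields visible above, and the handle_event function name and println! reporting are illustrative, not part of LDK.

// Sketch only: assumes the Event/ClosureReason shapes shown in the diff above.
use lightning::util::events::{ClosureReason, Event};

fn handle_event(event: &Event) {
	match event {
		Event::ChannelClosed { channel_id, reason, .. } => {
			// `reason` now records why the channel went away.
			match reason {
				ClosureReason::CounterpartyForceClosed { peer_msg } =>
					println!("channel {:?} force-closed by peer: {}", channel_id, peer_msg),
				ClosureReason::HolderForceClosed =>
					println!("channel {:?} force-closed locally", channel_id),
				ClosureReason::CooperativeClosure =>
					println!("channel {:?} closed cooperatively", channel_id),
				_ => println!("channel {:?} closed", channel_id),
			}
		},
		Event::PaymentPathFailed { payment_hash, rejected_by_dest, all_paths_failed, path, .. } => {
			// `path` is new here: the hops of the specific path that failed.
			println!("payment {:?}: a {}-hop path failed (rejected by destination: {}, all paths failed: {})",
				payment_hash, path.len(), rejected_by_dest, all_paths_failed);
		},
		_ => {},
	}
}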