Renamed script_for_p2wpkh to get_p2wpkh_redeemscript to match convention
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index b29199e862442109963b8372a7bdc207ead7142a..5b29828ebaad09042dd65923512569ce47a31ef8 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -36,9 +36,9 @@ use bitcoin::secp256k1::ecdh::SharedSecret;
 use bitcoin::secp256k1;
 
 use chain;
-use chain::{Confirm, Watch, BestBlock};
+use chain::{Confirm, ChannelMonitorUpdateErr, Watch, BestBlock};
 use chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator};
-use chain::channelmonitor::{ChannelMonitor, ChannelMonitorUpdate, ChannelMonitorUpdateStep, ChannelMonitorUpdateErr, HTLC_FAIL_BACK_BUFFER, CLTV_CLAIM_BUFFER, LATENCY_GRACE_PERIOD_BLOCKS, ANTI_REORG_DELAY, MonitorEvent, CLOSED_CHANNEL_UPDATE_ID};
+use chain::channelmonitor::{ChannelMonitor, ChannelMonitorUpdate, ChannelMonitorUpdateStep, HTLC_FAIL_BACK_BUFFER, CLTV_CLAIM_BUFFER, LATENCY_GRACE_PERIOD_BLOCKS, ANTI_REORG_DELAY, MonitorEvent, CLOSED_CHANNEL_UPDATE_ID};
 use chain::transaction::{OutPoint, TransactionData};
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
@@ -242,7 +242,7 @@ type ShutdownResult = (Option<(OutPoint, ChannelMonitorUpdate)>, Vec<(HTLCSource
 
 struct MsgHandleErrInternal {
        err: msgs::LightningError,
-       chan_id: Option<[u8; 32]>, // If Some a channel of ours has been closed
+       chan_id: Option<([u8; 32], u64)>, // If Some a channel of ours has been closed
        shutdown_finish: Option<(ShutdownResult, Option<msgs::ChannelUpdate>)>,
 }
 impl MsgHandleErrInternal {
@@ -278,7 +278,7 @@ impl MsgHandleErrInternal {
                Self { err, chan_id: None, shutdown_finish: None }
        }
        #[inline]
-       fn from_finish_shutdown(err: String, channel_id: [u8; 32], shutdown_res: ShutdownResult, channel_update: Option<msgs::ChannelUpdate>) -> Self {
+       fn from_finish_shutdown(err: String, channel_id: [u8; 32], user_channel_id: u64, shutdown_res: ShutdownResult, channel_update: Option<msgs::ChannelUpdate>) -> Self {
                Self {
                        err: LightningError {
                                err: err.clone(),
@@ -289,7 +289,7 @@ impl MsgHandleErrInternal {
                                        },
                                },
                        },
-                       chan_id: Some(channel_id),
+                       chan_id: Some((channel_id, user_channel_id)),
                        shutdown_finish: Some((shutdown_res, channel_update)),
                }
        }
@@ -402,7 +402,7 @@ struct PendingInboundPayment {
 
 /// Stores the session_priv for each part of a payment that is still pending. For versions 0.0.102
 /// and later, also stores information for retrying the payment.
-enum PendingOutboundPayment {
+pub(crate) enum PendingOutboundPayment {
        Legacy {
                session_privs: HashSet<[u8; 32]>,
        },
@@ -776,8 +776,8 @@ pub struct ChannelDetails {
        ///
        /// [`outbound_capacity_msat`]: ChannelDetails::outbound_capacity_msat
        pub unspendable_punishment_reserve: Option<u64>,
-       /// The user_id passed in to create_channel, or 0 if the channel was inbound.
-       pub user_id: u64,
+       /// The `user_channel_id` passed in to `create_channel`, or 0 if the channel was inbound.
+       pub user_channel_id: u64,
        /// The available outbound capacity for sending HTLCs to the remote peer. This does not include
        /// any pending HTLCs which are not yet fully resolved (and, thus, who's balance is not
        /// available for inclusion in new outbound HTLCs). This further does not include any pending
@@ -894,8 +894,11 @@ macro_rules! handle_error {
                                                        msg: update
                                                });
                                        }
-                                       if let Some(channel_id) = chan_id {
-                                               $self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id,  reason: ClosureReason::ProcessingError { err: err.err.clone() } });
+                                       if let Some((channel_id, user_channel_id)) = chan_id {
+                                               $self.pending_events.lock().unwrap().push(events::Event::ChannelClosed {
+                                                       channel_id, user_channel_id,
+                                                       reason: ClosureReason::ProcessingError { err: err.err.clone() }
+                                               });
                                        }
                                }
 
@@ -937,7 +940,8 @@ macro_rules! convert_chan_err {
                                        $short_to_id.remove(&short_id);
                                }
                                let shutdown_res = $channel.force_shutdown(true);
-                               (true, MsgHandleErrInternal::from_finish_shutdown(msg, *$channel_id, shutdown_res, $self.get_channel_update_for_broadcast(&$channel).ok()))
+                               (true, MsgHandleErrInternal::from_finish_shutdown(msg, *$channel_id, $channel.get_user_id(),
+                                       shutdown_res, $self.get_channel_update_for_broadcast(&$channel).ok()))
                        },
                        ChannelError::CloseDelayBroadcast(msg) => {
                                log_error!($self.logger, "Channel {} need to be shutdown but closing transactions not broadcast due to {}", log_bytes!($channel_id[..]), msg);
@@ -945,7 +949,8 @@ macro_rules! convert_chan_err {
                                        $short_to_id.remove(&short_id);
                                }
                                let shutdown_res = $channel.force_shutdown(false);
-                               (true, MsgHandleErrInternal::from_finish_shutdown(msg, *$channel_id, shutdown_res, $self.get_channel_update_for_broadcast(&$channel).ok()))
+                               (true, MsgHandleErrInternal::from_finish_shutdown(msg, *$channel_id, $channel.get_user_id(),
+                                       shutdown_res, $self.get_channel_update_for_broadcast(&$channel).ok()))
                        }
                }
        }
@@ -1013,7 +1018,7 @@ macro_rules! handle_monitor_err {
                                // splitting hairs we'd prefer to claim payments that were to us, but we haven't
                                // given up the preimage yet, so might as well just wait until the payment is
                                // retried, avoiding the on-chain fees.
-                               let res: Result<(), _> = Err(MsgHandleErrInternal::from_finish_shutdown("ChannelMonitor storage failure".to_owned(), *$chan_id,
+                               let res: Result<(), _> = Err(MsgHandleErrInternal::from_finish_shutdown("ChannelMonitor storage failure".to_owned(), *$chan_id, $chan.get_user_id(),
                                                $chan.force_shutdown(true), $self.get_channel_update_for_broadcast(&$chan).ok() ));
                                (res, true)
                        },
@@ -1267,22 +1272,31 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
 
        /// Creates a new outbound channel to the given remote node and with the given value.
        ///
-       /// user_id will be provided back as user_channel_id in FundingGenerationReady events to allow
-       /// tracking of which events correspond with which create_channel call. Note that the
-       /// user_channel_id defaults to 0 for inbound channels, so you may wish to avoid using 0 for
-       /// user_id here. user_id has no meaning inside of LDK, it is simply copied to events and
-       /// otherwise ignored.
-       ///
-       /// If successful, will generate a SendOpenChannel message event, so you should probably poll
-       /// PeerManager::process_events afterwards.
+       /// `user_channel_id` will be provided back in
+       /// [`Event::FundingGenerationReady::user_channel_id`] to allow tracking of which events
+       /// correspond with which `create_channel` call. Note that the `user_channel_id` defaults to 0
+       /// for inbound channels, so you may wish to avoid using 0 for `user_channel_id` here.
+       /// `user_channel_id` has no meaning inside of LDK, it is simply copied to events and otherwise
+       /// ignored.
        ///
-       /// Raises APIError::APIMisuseError when channel_value_satoshis > 2**24 or push_msat is
-       /// greater than channel_value_satoshis * 1k or channel_value_satoshis is < 1000.
+       /// Raises [`APIError::APIMisuseError`] when `channel_value_satoshis` > 2**24 or `push_msat` is
+       /// greater than `channel_value_satoshis * 1k` or `channel_value_satoshis < 1000`.
        ///
        /// Note that we do not check if you are currently connected to the given peer. If no
        /// connection is available, the outbound `open_channel` message may fail to send, resulting in
-       /// the channel eventually being silently forgotten.
-       pub fn create_channel(&self, their_network_key: PublicKey, channel_value_satoshis: u64, push_msat: u64, user_id: u64, override_config: Option<UserConfig>) -> Result<(), APIError> {
+       /// the channel eventually being silently forgotten (dropped on reload).
+       ///
+       /// Returns the new Channel's temporary `channel_id`. This ID will appear as
+       /// [`Event::FundingGenerationReady::temporary_channel_id`] and in
+       /// [`ChannelDetails::channel_id`] until after
+       /// [`ChannelManager::funding_transaction_generated`] is called, swapping the Channel's ID for
+       /// one derived from the funding transaction's TXID. If the counterparty rejects the channel
+       /// immediately, this temporary ID will appear in [`Event::ChannelClosed::channel_id`].
+       ///
+       /// [`Event::FundingGenerationReady::user_channel_id`]: events::Event::FundingGenerationReady::user_channel_id
+       /// [`Event::FundingGenerationReady::temporary_channel_id`]: events::Event::FundingGenerationReady::temporary_channel_id
+       /// [`Event::ChannelClosed::channel_id`]: events::Event::ChannelClosed::channel_id
+       pub fn create_channel(&self, their_network_key: PublicKey, channel_value_satoshis: u64, push_msat: u64, user_channel_id: u64, override_config: Option<UserConfig>) -> Result<[u8; 32], APIError> {
                if channel_value_satoshis < 1000 {
                        return Err(APIError::APIMisuseError { err: format!("Channel value must be at least 1000 satoshis. It was {}", channel_value_satoshis) });
                }
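
Note: a usage sketch of the new return value, assuming a `channel_manager` and a connected
`counterparty_node_id` exist in the caller's scope; names and values are illustrative only.

    // Keep the returned temporary id so the later FundingGenerationReady (and any
    // early ChannelClosed) event can be tied back to this create_channel call.
    let user_channel_id: u64 = 42; // opaque to LDK; avoid 0, which marks inbound channels
    let temporary_channel_id = channel_manager
        .create_channel(counterparty_node_id, 100_000, 0, user_channel_id, None)?;
    // In the event handler, match on both ids:
    // Event::FundingGenerationReady { temporary_channel_id: id, user_channel_id: uid, .. }
    //     if id == temporary_channel_id && uid == user_channel_id => { /* build the funding tx */ }
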
@@ -1294,7 +1308,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        let peer_state = peer_state.lock().unwrap();
                                        let their_features = &peer_state.latest_features;
                                        let config = if override_config.is_some() { override_config.as_ref().unwrap() } else { &self.default_configuration };
-                                       Channel::new_outbound(&self.fee_estimator, &self.keys_manager, their_network_key, their_features, channel_value_satoshis, push_msat, user_id, config)?
+                                       Channel::new_outbound(&self.fee_estimator, &self.keys_manager, their_network_key, their_features, channel_value_satoshis, push_msat, user_channel_id, config)?
                                },
                                None => return Err(APIError::ChannelUnavailable { err: format!("Not connected to node: {}", their_network_key) }),
                        }
@@ -1305,8 +1319,9 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                // We want to make sure the lock is actually acquired by PersistenceNotifierGuard.
                debug_assert!(&self.total_consistency_lock.try_write().is_err());
 
+               let temporary_channel_id = channel.channel_id();
                let mut channel_state = self.channel_state.lock().unwrap();
-               match channel_state.by_id.entry(channel.channel_id()) {
+               match channel_state.by_id.entry(temporary_channel_id) {
                        hash_map::Entry::Occupied(_) => {
                                if cfg!(feature = "fuzztarget") {
                                        return Err(APIError::APIMisuseError { err: "Fuzzy bad RNG".to_owned() });
@@ -1320,7 +1335,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                        node_id: their_network_key,
                        msg: res,
                });
-               Ok(())
+               Ok(temporary_channel_id)
        }
 
        fn list_channels_with_filter<Fn: FnMut(&(&[u8; 32], &Channel<Signer>)) -> bool>(&self, f: Fn) -> Vec<ChannelDetails> {
@@ -1346,7 +1361,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        unspendable_punishment_reserve: to_self_reserve_satoshis,
                                        inbound_capacity_msat,
                                        outbound_capacity_msat,
-                                       user_id: channel.get_user_id(),
+                                       user_channel_id: channel.get_user_id(),
                                        confirmations_required: channel.minimum_depth(),
                                        force_close_spend_delay: channel.get_counterparty_selected_contest_delay(),
                                        is_outbound: channel.is_outbound(),
@@ -1384,6 +1399,22 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                self.list_channels_with_filter(|&(_, ref channel)| channel.is_live())
        }
 
+       /// Helper function that issues the channel close events
+       fn issue_channel_close_events(&self, channel: &Channel<Signer>, closure_reason: ClosureReason) {
+               let mut pending_events_lock = self.pending_events.lock().unwrap();
+               match channel.unbroadcasted_funding() {
+                       Some(transaction) => {
+                               pending_events_lock.push(events::Event::DiscardFunding { channel_id: channel.channel_id(), transaction })
+                       },
+                       None => {},
+               }
+               pending_events_lock.push(events::Event::ChannelClosed {
+                       channel_id: channel.channel_id(),
+                       user_channel_id: channel.get_user_id(),
+                       reason: closure_reason
+               });
+       }
+
        fn close_channel_internal(&self, channel_id: &[u8; 32], target_feerate_sats_per_1000_weight: Option<u32>) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
@@ -1430,12 +1461,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                msg: channel_update
                                                        });
                                                }
-                                               if let Ok(mut pending_events_lock) = self.pending_events.lock() {
-                                                       pending_events_lock.push(events::Event::ChannelClosed {
-                                                               channel_id: *channel_id,
-                                                               reason: ClosureReason::HolderForceClosed
-                                                       });
-                                               }
+                                               self.issue_channel_close_events(&channel, ClosureReason::HolderForceClosed);
                                        }
                                        break Ok(());
                                },
@@ -1526,13 +1552,12 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                if let Some(short_id) = chan.get().get_short_channel_id() {
                                        channel_state.short_to_id.remove(&short_id);
                                }
-                               let mut pending_events_lock = self.pending_events.lock().unwrap();
                                if peer_node_id.is_some() {
                                        if let Some(peer_msg) = peer_msg {
-                                               pending_events_lock.push(events::Event::ChannelClosed { channel_id: *channel_id, reason: ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() } });
+                                               self.issue_channel_close_events(chan.get(), ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() });
                                        }
                                } else {
-                                       pending_events_lock.push(events::Event::ChannelClosed { channel_id: *channel_id, reason: ClosureReason::HolderForceClosed });
+                                       self.issue_channel_close_events(chan.get(), ClosureReason::HolderForceClosed);
                                }
                                chan.remove_entry().1
                        } else {
@@ -1951,16 +1976,6 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                let onion_packet = onion_utils::construct_onion_packet(onion_payloads, onion_keys, prng_seed, payment_hash);
 
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
-               let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap();
-               let payment = pending_outbounds.entry(payment_id).or_insert_with(|| PendingOutboundPayment::Retryable {
-                       session_privs: HashSet::new(),
-                       pending_amt_msat: 0,
-                       payment_hash: *payment_hash,
-                       payment_secret: *payment_secret,
-                       starting_block_height: self.best_block.read().unwrap().height(),
-                       total_msat: total_value,
-               });
-               assert!(payment.insert(session_priv_bytes, path.last().unwrap().fee_msat));
 
                let err: Result<(), _> = loop {
                        let mut channel_lock = self.channel_state.lock().unwrap();
@@ -1978,12 +1993,27 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        if !chan.get().is_live() {
                                                return Err(APIError::ChannelUnavailable{err: "Peer for first hop currently disconnected/pending monitor update!".to_owned()});
                                        }
-                                       break_chan_entry!(self, chan.get_mut().send_htlc_and_commit(htlc_msat, payment_hash.clone(), htlc_cltv, HTLCSource::OutboundRoute {
-                                               path: path.clone(),
-                                               session_priv: session_priv.clone(),
-                                               first_hop_htlc_msat: htlc_msat,
-                                               payment_id,
-                                       }, onion_packet, &self.logger), channel_state, chan)
+                                       let send_res = break_chan_entry!(self, chan.get_mut().send_htlc_and_commit(
+                                               htlc_msat, payment_hash.clone(), htlc_cltv, HTLCSource::OutboundRoute {
+                                                       path: path.clone(),
+                                                       session_priv: session_priv.clone(),
+                                                       first_hop_htlc_msat: htlc_msat,
+                                                       payment_id,
+                                               }, onion_packet, &self.logger),
+                                       channel_state, chan);
+
+                                       let mut pending_outbounds = self.pending_outbound_payments.lock().unwrap();
+                                       let payment = pending_outbounds.entry(payment_id).or_insert_with(|| PendingOutboundPayment::Retryable {
+                                               session_privs: HashSet::new(),
+                                               pending_amt_msat: 0,
+                                               payment_hash: *payment_hash,
+                                               payment_secret: *payment_secret,
+                                               starting_block_height: self.best_block.read().unwrap().height(),
+                                               total_msat: total_value,
+                                       });
+                                       assert!(payment.insert(session_priv_bytes, path.last().unwrap().fee_msat));
+
+                                       send_res
                                } {
                                        Some((update_add, commitment_signed, monitor_update)) => {
                                                if let Err(e) = self.chain_monitor.update_channel(chan.get().get_funding_txo().unwrap(), monitor_update) {
@@ -2173,7 +2203,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                }
                        } else {
                                return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError {
-                                       err: "Payment with ID {} not found".to_string()
+                                       err: format!("Payment with ID {} not found", log_bytes!(payment_id.0)),
                                }))
                        }
                };
@@ -2217,7 +2247,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
 
                                        (chan.get_outbound_funding_created(funding_transaction, funding_txo, &self.logger)
                                                .map_err(|e| if let ChannelError::Close(msg) = e {
-                                                       MsgHandleErrInternal::from_finish_shutdown(msg, chan.channel_id(), chan.force_shutdown(true), None)
+                                                       MsgHandleErrInternal::from_finish_shutdown(msg, chan.channel_id(), chan.get_user_id(), chan.force_shutdown(true), None)
                                                } else { unreachable!(); })
                                        , chan)
                                },
@@ -2559,7 +2589,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                                                channel_state.short_to_id.remove(&short_id);
                                                                                        }
                                                                                        // ChannelClosed event is generated by handle_error for us.
-                                                                                       Err(MsgHandleErrInternal::from_finish_shutdown(msg, channel_id, channel.force_shutdown(true), self.get_channel_update_for_broadcast(&channel).ok()))
+                                                                                       Err(MsgHandleErrInternal::from_finish_shutdown(msg, channel_id, channel.get_user_id(), channel.force_shutdown(true), self.get_channel_update_for_broadcast(&channel).ok()))
                                                                                },
                                                                                ChannelError::CloseDelayBroadcast(_) => { panic!("Wait is only generated on receipt of channel_reestablish, which is handled by try_chan_entry, we don't bother to support it here"); }
                                                                        };
@@ -3005,6 +3035,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                        network_update: None,
                                                                        all_paths_failed: payment.get().remaining_parts() == 0,
                                                                        path: path.clone(),
+                                                                       short_channel_id: None,
                                                                        #[cfg(test)]
                                                                        error_code: None,
                                                                        #[cfg(test)]
@@ -3058,9 +3089,9 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                match &onion_error {
                                        &HTLCFailReason::LightningError { ref err } => {
 #[cfg(test)]
-                                               let (network_update, payment_retryable, onion_error_code, onion_error_data) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
+                                               let (network_update, short_channel_id, payment_retryable, onion_error_code, onion_error_data) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
 #[cfg(not(test))]
-                                               let (network_update, payment_retryable, _, _) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
+                                               let (network_update, short_channel_id, payment_retryable, _, _) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
                                                // TODO: If we decided to blame ourselves (or one of our channels) in
                                                // process_onion_failure we should close that channel as it implies our
                                                // next-hop is needlessly blaming us!
@@ -3071,6 +3102,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                network_update,
                                                                all_paths_failed,
                                                                path: path.clone(),
+                                                               short_channel_id,
 #[cfg(test)]
                                                                error_code: onion_error_code,
 #[cfg(test)]
@@ -3098,6 +3130,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                network_update: None,
                                                                all_paths_failed,
                                                                path: path.clone(),
+                                                               short_channel_id: Some(path.first().unwrap().short_channel_id),
 #[cfg(test)]
                                                                error_code: Some(*failure_code),
 #[cfg(test)]
@@ -3303,8 +3336,12 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        sessions.remove(&session_priv_bytes, path.last().unwrap().fee_msat)
                                } else { false };
                                if found_payment {
+                                       let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner());
                                        self.pending_events.lock().unwrap().push(
-                                               events::Event::PaymentSent { payment_preimage }
+                                               events::Event::PaymentSent {
+                                                       payment_preimage,
+                                                       payment_hash,
+                                               }
                                        );
                                } else {
                                        log_trace!(self.logger, "Received duplicative fulfill for HTLC with payment_preimage {}", log_bytes!(payment_preimage.0));
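
Note: a payment hash is just the SHA256 of its preimage (exactly the computation above),
so an event consumer can cross-check the new field; a sketch under that assumption.

    // Recompute the hash from the preimage delivered in Event::PaymentSent and
    // compare it against the payment_hash field added by this patch.
    use bitcoin::hashes::{Hash, sha256::Hash as Sha256};
    let expected_hash = PaymentHash(Sha256::hash(&payment_preimage.0).into_inner());
    assert_eq!(expected_hash, payment_hash);
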
@@ -3696,7 +3733,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        msg: update
                                });
                        }
-                       self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: msg.channel_id,  reason: ClosureReason::CooperativeClosure });
+                       self.issue_channel_close_events(&chan, ClosureReason::CooperativeClosure);
                }
                Ok(())
        }
@@ -4108,7 +4145,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                msg: update
                                                        });
                                                }
-                                               self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(),  reason: ClosureReason::CommitmentTxConfirmed });
+                                               self.issue_channel_close_events(&chan, ClosureReason::CommitmentTxConfirmed);
                                                pending_msg_events.push(events::MessageSendEvent::HandleError {
                                                        node_id: chan.get_counterparty_node_id(),
                                                        action: msgs::ErrorAction::SendErrorMessage {
@@ -4224,12 +4261,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                                });
                                                        }
 
-                                                       if let Ok(mut pending_events_lock) = self.pending_events.lock() {
-                                                               pending_events_lock.push(events::Event::ChannelClosed {
-                                                                       channel_id: *channel_id,
-                                                                       reason: ClosureReason::CooperativeClosure
-                                                               });
-                                                       }
+                                                       self.issue_channel_close_events(chan, ClosureReason::CooperativeClosure);
 
                                                        log_info!(self.logger, "Broadcasting {}", log_tx!(tx));
                                                        self.tx_broadcaster.broadcast_transaction(&tx);
@@ -4383,6 +4415,11 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                self.process_pending_events(&event_handler);
                events.into_inner()
        }
+
+       #[cfg(test)]
+       pub fn has_pending_payments(&self) -> bool {
+               !self.pending_outbound_payments.lock().unwrap().is_empty()
+       }
 }
 
 impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> MessageSendEventsProvider for ChannelManager<Signer, M, T, K, F, L>
@@ -4661,7 +4698,7 @@ where
                                                        msg: update
                                                });
                                        }
-                                       self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: channel.channel_id(),  reason: ClosureReason::CommitmentTxConfirmed });
+                                       self.issue_channel_close_events(channel, ClosureReason::CommitmentTxConfirmed);
                                        pending_msg_events.push(events::MessageSendEvent::HandleError {
                                                node_id: channel.get_counterparty_node_id(),
                                                action: msgs::ErrorAction::SendErrorMessage { msg: e },
@@ -4852,7 +4889,7 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                                                                msg: update
                                                        });
                                                }
-                                               self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(),  reason: ClosureReason::DisconnectedPeer });
+                                               self.issue_channel_close_events(chan, ClosureReason::DisconnectedPeer);
                                                false
                                        } else {
                                                true
@@ -4867,7 +4904,7 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                                                        if let Some(short_id) = chan.get_short_channel_id() {
                                                                short_to_id.remove(&short_id);
                                                        }
-                                                       self.pending_events.lock().unwrap().push(events::Event::ChannelClosed { channel_id: chan.channel_id(),  reason: ClosureReason::DisconnectedPeer });
+                                                       self.issue_channel_close_events(chan, ClosureReason::DisconnectedPeer);
                                                        return false;
                                                } else {
                                                        no_channels_remain = false;
@@ -5591,6 +5628,7 @@ impl<'a, Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                                        monitor.broadcast_latest_holder_commitment_txn(&args.tx_broadcaster, &args.logger);
                                        channel_closures.push(events::Event::ChannelClosed {
                                                channel_id: channel.channel_id(),
+                                               user_channel_id: channel.get_user_id(),
                                                reason: ClosureReason::OutdatedChannelManager
                                        });
                                } else {
@@ -5786,6 +5824,7 @@ mod tests {
        use ln::msgs;
        use ln::msgs::ChannelMessageHandler;
        use routing::router::{get_keysend_route, get_route};
+       use routing::scorer::Scorer;
        use util::errors::APIError;
        use util::events::{Event, MessageSendEvent, MessageSendEventsProvider};
        use util::test_utils;
@@ -5925,12 +5964,9 @@ mod tests {
                let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
                let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
                create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-               let logger = test_utils::TestLogger::new();
 
                // First, send a partial MPP payment.
-               let net_graph_msg_handler = &nodes[0].net_graph_msg_handler;
-               let route = get_route(&nodes[0].node.get_our_node_id(), &net_graph_msg_handler.network_graph, &nodes[1].node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger).unwrap();
-               let (payment_preimage, our_payment_hash, payment_secret) = get_payment_preimage_hash!(&nodes[1]);
+               let (route, our_payment_hash, payment_preimage, payment_secret) = get_route_and_payment_hash!(&nodes[0], nodes[1], 100_000);
                let payment_id = PaymentId([42; 32]);
                // Use the utility function send_payment_along_path to send the payment with MPP data which
                // indicates there are more HTLCs coming.
@@ -6007,8 +6043,9 @@ mod tests {
                // further events will be generated for subsequence path successes.
                let events = nodes[0].node.get_and_clear_pending_events();
                match events[0] {
-                       Event::PaymentSent { payment_preimage: ref preimage } => {
+                       Event::PaymentSent { payment_preimage: ref preimage, payment_hash: ref hash } => {
                                assert_eq!(payment_preimage, *preimage);
+                               assert_eq!(our_payment_hash, *hash);
                        },
                        _ => panic!("Unexpected event"),
                }
@@ -6026,13 +6063,14 @@ mod tests {
                let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
                create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
                let logger = test_utils::TestLogger::new();
+               let scorer = Scorer::new(0);
 
                // To start (1), send a regular payment but don't claim it.
                let expected_route = [&nodes[1]];
                let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &expected_route, 100_000);
 
                // Next, attempt a keysend payment and make sure it fails.
-               let route = get_route(&nodes[0].node.get_our_node_id(), &nodes[0].net_graph_msg_handler.network_graph, &expected_route.last().unwrap().node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger).unwrap();
+               let route = get_route(&nodes[0].node.get_our_node_id(), &nodes[0].net_graph_msg_handler.network_graph, &expected_route.last().unwrap().node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger, &scorer).unwrap();
                nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage)).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
@@ -6060,7 +6098,7 @@ mod tests {
 
                // To start (2), send a keysend payment but don't claim it.
                let payment_preimage = PaymentPreimage([42; 32]);
-               let route = get_route(&nodes[0].node.get_our_node_id(), &nodes[0].net_graph_msg_handler.network_graph, &expected_route.last().unwrap().node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger).unwrap();
+               let route = get_route(&nodes[0].node.get_our_node_id(), &nodes[0].net_graph_msg_handler.network_graph, &expected_route.last().unwrap().node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &Vec::new(), 100_000, TEST_FINAL_CLTV, &logger, &scorer).unwrap();
                let (payment_hash, _) = nodes[0].node.send_spontaneous_payment(&route, Some(payment_preimage)).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
@@ -6114,9 +6152,10 @@ mod tests {
                let _chan = create_chan_between_nodes(&nodes[0], &nodes[1], InitFeatures::known(), InitFeatures::known());
                let network_graph = &nodes[0].net_graph_msg_handler.network_graph;
                let first_hops = nodes[0].node.list_usable_channels();
+               let scorer = Scorer::new(0);
                let route = get_keysend_route(&payer_pubkey, network_graph, &payee_pubkey,
                                   Some(&first_hops.iter().collect::<Vec<_>>()), &vec![], 10000, 40,
-                                  nodes[0].logger).unwrap();
+                                  nodes[0].logger, &scorer).unwrap();
 
                let test_preimage = PaymentPreimage([42; 32]);
                let mismatch_payment_hash = PaymentHash([43; 32]);
@@ -6150,9 +6189,10 @@ mod tests {
                let _chan = create_chan_between_nodes(&nodes[0], &nodes[1], InitFeatures::known(), InitFeatures::known());
                let network_graph = &nodes[0].net_graph_msg_handler.network_graph;
                let first_hops = nodes[0].node.list_usable_channels();
+               let scorer = Scorer::new(0);
                let route = get_keysend_route(&payer_pubkey, network_graph, &payee_pubkey,
                                   Some(&first_hops.iter().collect::<Vec<_>>()), &vec![], 10000, 40,
-                                  nodes[0].logger).unwrap();
+                                  nodes[0].logger, &scorer).unwrap();
 
                let test_preimage = PaymentPreimage([42; 32]);
                let test_secret = PaymentSecret([43; 32]);
@@ -6182,12 +6222,9 @@ mod tests {
                let chan_2_id = create_announced_chan_between_nodes(&nodes, 0, 2, InitFeatures::known(), InitFeatures::known()).0.contents.short_channel_id;
                let chan_3_id = create_announced_chan_between_nodes(&nodes, 1, 3, InitFeatures::known(), InitFeatures::known()).0.contents.short_channel_id;
                let chan_4_id = create_announced_chan_between_nodes(&nodes, 2, 3, InitFeatures::known(), InitFeatures::known()).0.contents.short_channel_id;
-               let logger = test_utils::TestLogger::new();
 
                // Marshall an MPP route.
-               let (_, payment_hash, _) = get_payment_preimage_hash!(&nodes[3]);
-               let net_graph_msg_handler = &nodes[0].net_graph_msg_handler;
-               let mut route = get_route(&nodes[0].node.get_our_node_id(), &net_graph_msg_handler.network_graph, &nodes[3].node.get_our_node_id(), Some(InvoiceFeatures::known()), None, &[], 100000, TEST_FINAL_CLTV, &logger).unwrap();
+               let (mut route, payment_hash, _, _) = get_route_and_payment_hash!(&nodes[0], nodes[3], 100000);
                let path = route.paths[0].clone();
                route.paths.push(path);
                route.paths[0][0].pubkey = nodes[1].node.get_our_node_id();
@@ -6208,8 +6245,7 @@ mod tests {
 #[cfg(all(any(test, feature = "_test_utils"), feature = "unstable"))]
 pub mod bench {
        use chain::Listen;
-       use chain::chainmonitor::ChainMonitor;
-       use chain::channelmonitor::Persist;
+       use chain::chainmonitor::{ChainMonitor, Persist};
        use chain::keysinterface::{KeysManager, InMemorySigner};
        use ln::channelmanager::{BestBlock, ChainParameters, ChannelManager, PaymentHash, PaymentPreimage};
        use ln::features::{InitFeatures, InvoiceFeatures};
@@ -6217,6 +6253,7 @@ pub mod bench {
        use ln::msgs::{ChannelMessageHandler, Init};
        use routing::network_graph::NetworkGraph;
        use routing::router::get_route;
+       use routing::scorer::Scorer;
        use util::test_utils;
        use util::config::UserConfig;
        use util::events::{Event, MessageSendEvent, MessageSendEventsProvider, PaymentPurpose};
@@ -6324,8 +6361,9 @@ pub mod bench {
                macro_rules! send_payment {
                        ($node_a: expr, $node_b: expr) => {
                                let usable_channels = $node_a.list_usable_channels();
+                               let scorer = Scorer::new(0);
                                let route = get_route(&$node_a.get_our_node_id(), &dummy_graph, &$node_b.get_our_node_id(), Some(InvoiceFeatures::known()),
-                                       Some(&usable_channels.iter().map(|r| r).collect::<Vec<_>>()), &[], 10_000, TEST_FINAL_CLTV, &logger_a).unwrap();
+                                       Some(&usable_channels.iter().map(|r| r).collect::<Vec<_>>()), &[], 10_000, TEST_FINAL_CLTV, &logger_a, &scorer).unwrap();
 
                                let mut payment_preimage = PaymentPreimage([0; 32]);
                                payment_preimage.0[0..8].copy_from_slice(&payment_count.to_le_bytes());