Merge pull request #1418 from bruteforcecat/timeout-outbound-paymnet-retires-instead...
author Matt Corallo <649246+TheBlueMatt@users.noreply.github.com>
Tue, 17 May 2022 17:32:46 +0000 (17:32 +0000)
committer GitHub <noreply@github.com>
Tue, 17 May 2022 17:32:46 +0000 (17:32 +0000)
add timeout retry strategy to outbound payment
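
A minimal caller-side sketch of the new retry strategy, assuming the timeout is exposed as a `Retry::Timeout(Duration)` variant in lightning-invoice's payment module (an assumption based on the branch name; the lightning-invoice changes themselves are not part of the diff below):

    // Assumed API: lightning_invoice::payment::Retry with a std-only Timeout variant.
    use lightning_invoice::payment::Retry;
    use std::time::Duration;

    fn retry_strategy() -> Retry {
        // Keep retrying failed payment paths for up to 60 seconds,
        // instead of capping retries at a fixed attempt count.
        Retry::Timeout(Duration::from_secs(60))
    }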

18 files changed:
fuzz/src/chanmon_consistency.rs
fuzz/src/full_stack.rs
lightning-background-processor/src/lib.rs
lightning-persister/src/lib.rs
lightning/src/chain/chainmonitor.rs
lightning/src/chain/mod.rs
lightning/src/ln/chanmon_update_fail_tests.rs
lightning/src/ln/channelmanager.rs
lightning/src/ln/functional_test_utils.rs
lightning/src/ln/functional_tests.rs
lightning/src/ln/monitor_tests.rs
lightning/src/ln/payment_tests.rs
lightning/src/ln/peer_handler.rs
lightning/src/ln/priv_short_conf_tests.rs
lightning/src/ln/reorg_tests.rs
lightning/src/ln/shutdown_tests.rs
lightning/src/util/events.rs
lightning/src/util/test_utils.rs

diff --git a/fuzz/src/chanmon_consistency.rs b/fuzz/src/chanmon_consistency.rs
index 8472f8fa6278a4e158e810a600895545cbbd27ae..1b662c0e721c05b49b55bd65a35af52026988481 100644
@@ -148,7 +148,7 @@ impl chain::Watch<EnforcingSigner> for TestChainMonitor {
                self.chain_monitor.update_channel(funding_txo, update)
        }
 
-       fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+       fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
                return self.chain_monitor.release_pending_monitor_events();
        }
 }
@@ -442,7 +442,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
                                                value: *channel_value_satoshis, script_pubkey: output_script.clone(),
                                        }]};
                                        funding_output = OutPoint { txid: tx.txid(), index: 0 };
-                                       $source.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+                                       $source.funding_transaction_generated(&temporary_channel_id, &$dest.get_our_node_id(), tx.clone()).unwrap();
                                        channel_txn.push(tx);
                                } else { panic!("Wrong event type"); }
                        }
diff --git a/fuzz/src/full_stack.rs b/fuzz/src/full_stack.rs
index 330124f8a8942a23585517148096a80bc984dd4b..e3c8290b16ea820e4a26bf585a5684384f1f46c3 100644
@@ -408,7 +408,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
        let mut should_forward = false;
        let mut payments_received: Vec<PaymentHash> = Vec::new();
        let mut payments_sent = 0;
-       let mut pending_funding_generation: Vec<([u8; 32], u64, Script)> = Vec::new();
+       let mut pending_funding_generation: Vec<([u8; 32], PublicKey, u64, Script)> = Vec::new();
        let mut pending_funding_signatures = HashMap::new();
 
        loop {
@@ -516,7 +516,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
                                let channel_id = get_slice!(1)[0] as usize;
                                if channel_id >= channels.len() { return; }
                                channels.sort_by(|a, b| { a.channel_id.cmp(&b.channel_id) });
-                               if channelmanager.close_channel(&channels[channel_id].channel_id).is_err() { return; }
+                               if channelmanager.close_channel(&channels[channel_id].channel_id, &channels[channel_id].counterparty.node_id).is_err() { return; }
                        },
                        7 => {
                                if should_forward {
@@ -556,7 +556,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
                        10 => {
                                'outer_loop: for funding_generation in pending_funding_generation.drain(..) {
                                        let mut tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: vec![TxOut {
-                                                       value: funding_generation.1, script_pubkey: funding_generation.2,
+                                                       value: funding_generation.2, script_pubkey: funding_generation.3,
                                                }] };
                                        let funding_output = 'search_loop: loop {
                                                let funding_txid = tx.txid();
@@ -575,7 +575,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
                                                        continue 'outer_loop;
                                                }
                                        };
-                                       if let Err(e) = channelmanager.funding_transaction_generated(&funding_generation.0, tx.clone()) {
+                                       if let Err(e) = channelmanager.funding_transaction_generated(&funding_generation.0, &funding_generation.1, tx.clone()) {
                                                // It's possible the channel has been closed in the mean time, but any other
                                                // failure may be a bug.
                                                if let APIError::ChannelUnavailable { err } = e {
@@ -624,7 +624,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
                                let channel_id = get_slice!(1)[0] as usize;
                                if channel_id >= channels.len() { return; }
                                channels.sort_by(|a, b| { a.channel_id.cmp(&b.channel_id) });
-                               channelmanager.force_close_channel(&channels[channel_id].channel_id).unwrap();
+                               channelmanager.force_close_channel(&channels[channel_id].channel_id, &channels[channel_id].counterparty.node_id).unwrap();
                        },
                        // 15 is above
                        _ => return,
@@ -632,8 +632,8 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
                loss_detector.handler.process_events();
                for event in loss_detector.manager.get_and_clear_pending_events() {
                        match event {
-                               Event::FundingGenerationReady { temporary_channel_id, channel_value_satoshis, output_script, .. } => {
-                                       pending_funding_generation.push((temporary_channel_id, channel_value_satoshis, output_script));
+                               Event::FundingGenerationReady { temporary_channel_id, counterparty_node_id, channel_value_satoshis, output_script, .. } => {
+                                       pending_funding_generation.push((temporary_channel_id, counterparty_node_id, channel_value_satoshis, output_script));
                                },
                                Event::PaymentReceived { payment_hash, .. } => {
                                        //TODO: enhance by fetching random amounts from fuzz input?
diff --git a/lightning-background-processor/src/lib.rs b/lightning-background-processor/src/lib.rs
index c61eb37d27dd71b5d1b58750590c955598d693bb..dab376178488654d70322155400c0bd263b740a4 100644
@@ -540,7 +540,7 @@ mod tests {
        macro_rules! handle_funding_generation_ready {
                ($event: expr, $channel_value: expr) => {{
                        match $event {
-                               &Event::FundingGenerationReady { temporary_channel_id, channel_value_satoshis, ref output_script, user_channel_id } => {
+                               &Event::FundingGenerationReady { temporary_channel_id, channel_value_satoshis, ref output_script, user_channel_id, .. } => {
                                        assert_eq!(channel_value_satoshis, $channel_value);
                                        assert_eq!(user_channel_id, 42);
 
@@ -556,7 +556,7 @@ mod tests {
 
        macro_rules! end_open_channel {
                ($node_a: expr, $node_b: expr, $temporary_channel_id: expr, $tx: expr) => {{
-                       $node_a.node.funding_transaction_generated(&$temporary_channel_id, $tx.clone()).unwrap();
+                       $node_a.node.funding_transaction_generated(&$temporary_channel_id, &$node_b.node.get_our_node_id(), $tx.clone()).unwrap();
                        $node_b.node.handle_funding_created(&$node_a.node.get_our_node_id(), &get_event_msg!($node_a, MessageSendEvent::SendFundingCreated, $node_b.node.get_our_node_id()));
                        $node_a.node.handle_funding_signed(&$node_b.node.get_our_node_id(), &get_event_msg!($node_b, MessageSendEvent::SendFundingSigned, $node_a.node.get_our_node_id()));
                }}
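
A hedged sketch of the user-facing flow the macro above exercises: `Event::FundingGenerationReady` now carries the counterparty's node id, which is passed straight back to `funding_transaction_generated` along with the funding transaction. Here `channel_manager` is assumed to be an initialized `ChannelManager` and `build_funding_tx` a hypothetical helper that pays `output_script` the requested amount:

    use lightning::util::events::Event;

    if let Event::FundingGenerationReady {
        temporary_channel_id, counterparty_node_id, channel_value_satoshis, output_script, ..
    } = event {
        // `build_funding_tx` is a placeholder for wallet logic constructing the funding output.
        let funding_tx = build_funding_tx(channel_value_satoshis, output_script);
        channel_manager
            .funding_transaction_generated(&temporary_channel_id, &counterparty_node_id, funding_tx)
            .expect("channel was closed before funding was provided");
    }
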
@@ -637,7 +637,7 @@ mod tests {
                }
 
                // Force-close the channel.
-               nodes[0].node.force_close_channel(&OutPoint { txid: tx.txid(), index: 0 }.to_channel_id()).unwrap();
+               nodes[0].node.force_close_channel(&OutPoint { txid: tx.txid(), index: 0 }.to_channel_id(), &nodes[1].node.get_our_node_id()).unwrap();
 
                // Check that the force-close updates are persisted.
                check_persisted_data!(nodes[0].node, filepath.clone());
@@ -776,7 +776,7 @@ mod tests {
                let bg_processor = BackgroundProcessor::start(persister, event_handler, nodes[0].chain_monitor.clone(), nodes[0].node.clone(), nodes[0].net_graph_msg_handler.clone(), nodes[0].peer_manager.clone(), nodes[0].logger.clone(), Some(nodes[0].scorer.clone()));
 
                // Force close the channel and check that the SpendableOutputs event was handled.
-               nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id).unwrap();
+               nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
                let commitment_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
                confirm_transaction_depth(&mut nodes[0], &commitment_tx, BREAKDOWN_TIMEOUT as u32);
                let event = receiver
diff --git a/lightning-persister/src/lib.rs b/lightning-persister/src/lib.rs
index c23baf8ad34bccb7ea7efcaa32639a43f4b23c42..28845150c44606f79aced14c8984e72c10317d37 100644
@@ -213,7 +213,7 @@ mod tests {
 
                // Force close because cooperative close doesn't result in any persisted
                // updates.
-               nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id).unwrap();
+               nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
                check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
                check_closed_broadcast!(nodes[0], true);
                check_added_monitors!(nodes[0], 1);
@@ -247,7 +247,7 @@ mod tests {
                let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
                let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
                let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-               nodes[1].node.force_close_channel(&chan.2).unwrap();
+               nodes[1].node.force_close_channel(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
                check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
                let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
                let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
@@ -286,7 +286,7 @@ mod tests {
                let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
                let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
                let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-               nodes[1].node.force_close_channel(&chan.2).unwrap();
+               nodes[1].node.force_close_channel(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
                check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
                let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
                let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
diff --git a/lightning/src/chain/chainmonitor.rs b/lightning/src/chain/chainmonitor.rs
index aae260e735bdbae5c0a538af8024e7e2ef2a78cb..503e6bdee0669551d1853932447a5db08fc92c17 100644
@@ -235,7 +235,7 @@ pub struct ChainMonitor<ChannelSigner: Sign, C: Deref, T: Deref, F: Deref, L: De
        persister: P,
        /// "User-provided" (ie persistence-completion/-failed) [`MonitorEvent`]s. These came directly
        /// from the user and not from a [`ChannelMonitor`].
-       pending_monitor_events: Mutex<Vec<MonitorEvent>>,
+       pending_monitor_events: Mutex<Vec<(OutPoint, Vec<MonitorEvent>)>>,
        /// The best block height seen, used as a proxy for the passage of time.
        highest_chain_height: AtomicUsize,
 }
@@ -299,7 +299,7 @@ where C::Target: chain::Filter,
                                                        log_trace!(self.logger, "Finished syncing Channel Monitor for channel {}", log_funding_info!(monitor)),
                                                Err(ChannelMonitorUpdateErr::PermanentFailure) => {
                                                        monitor_state.channel_perm_failed.store(true, Ordering::Release);
-                                                       self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateFailed(*funding_outpoint));
+                                                       self.pending_monitor_events.lock().unwrap().push((*funding_outpoint, vec![MonitorEvent::UpdateFailed(*funding_outpoint)]));
                                                },
                                                Err(ChannelMonitorUpdateErr::TemporaryFailure) => {
                                                        log_debug!(self.logger, "Channel Monitor sync for channel {} in progress, holding events until completion!", log_funding_info!(monitor));
@@ -455,10 +455,10 @@ where C::Target: chain::Filter,
                                        // UpdateCompleted event.
                                        return Ok(());
                                }
-                               self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateCompleted {
+                               self.pending_monitor_events.lock().unwrap().push((funding_txo, vec![MonitorEvent::UpdateCompleted {
                                        funding_txo,
                                        monitor_update_id: monitor_data.monitor.get_latest_update_id(),
-                               });
+                               }]));
                        },
                        MonitorUpdateId { contents: UpdateOrigin::ChainSync(_) } => {
                                if !monitor_data.has_pending_chainsync_updates(&pending_monitor_updates) {
@@ -476,10 +476,10 @@ where C::Target: chain::Filter,
        /// channel_monitor_updated once with the highest ID.
        #[cfg(any(test, fuzzing))]
        pub fn force_channel_monitor_updated(&self, funding_txo: OutPoint, monitor_update_id: u64) {
-               self.pending_monitor_events.lock().unwrap().push(MonitorEvent::UpdateCompleted {
+               self.pending_monitor_events.lock().unwrap().push((funding_txo, vec![MonitorEvent::UpdateCompleted {
                        funding_txo,
                        monitor_update_id,
-               });
+               }]));
        }
 
        #[cfg(any(test, fuzzing, feature = "_test_utils"))]
@@ -666,7 +666,7 @@ where C::Target: chain::Filter,
                }
        }
 
-       fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+       fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
                let mut pending_monitor_events = self.pending_monitor_events.lock().unwrap().split_off(0);
                for monitor_state in self.monitors.read().unwrap().values() {
                        let is_pending_monitor_update = monitor_state.has_pending_chainsync_updates(&monitor_state.pending_monitor_updates.lock().unwrap());
@@ -692,7 +692,11 @@ where C::Target: chain::Filter,
                                        log_error!(self.logger, "   To avoid funds-loss, we are allowing monitor updates to be released.");
                                        log_error!(self.logger, "   This may cause duplicate payment events to be generated.");
                                }
-                               pending_monitor_events.append(&mut monitor_state.monitor.get_and_clear_pending_monitor_events());
+                               let monitor_events = monitor_state.monitor.get_and_clear_pending_monitor_events();
+                               if monitor_events.len() > 0 {
+                                       let monitor_outpoint = monitor_state.monitor.get_funding_txo().0;
+                                       pending_monitor_events.push((monitor_outpoint, monitor_events));
+                               }
                        }
                }
                pending_monitor_events
diff --git a/lightning/src/chain/mod.rs b/lightning/src/chain/mod.rs
index 24eb09e7090b212ea9867aba9844ed6993c83735..48aa712f39c1d55263f7cd5129e0b3cce3690d41 100644
@@ -302,7 +302,7 @@ pub trait Watch<ChannelSigner: Sign> {
        ///
        /// For details on asynchronous [`ChannelMonitor`] updating and returning
        /// [`MonitorEvent::UpdateCompleted`] here, see [`ChannelMonitorUpdateErr::TemporaryFailure`].
-       fn release_pending_monitor_events(&self) -> Vec<MonitorEvent>;
+       fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)>;
 }
 
 /// The `Filter` trait defines behavior for indicating chain activity of interest pertaining to
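
A hedged consumer sketch of the new return shape of `release_pending_monitor_events`, using the trait as declared above (import paths assumed for this LDK version):

    use lightning::chain::{self, keysinterface::Sign};

    // Each batch of MonitorEvents is now tagged with the funding outpoint of the
    // ChannelMonitor that produced it.
    fn drain_monitor_events<S: Sign, W: chain::Watch<S>>(watch: &W) -> usize {
        let mut count = 0;
        for (funding_outpoint, events) in watch.release_pending_monitor_events() {
            // The outpoint identifies the originating channel, e.g. for per-channel dispatch.
            let _channel_id = funding_outpoint.to_channel_id();
            count += events.len();
        }
        count
    }
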
diff --git a/lightning/src/ln/chanmon_update_fail_tests.rs b/lightning/src/ln/chanmon_update_fail_tests.rs
index 58fe30ba2618026311066f347da4a99f2d5f4c3f..91688cc4fa3c3eed26b46b991c00dee94b2106e6 100644
@@ -224,7 +224,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
        }
 
        // ...and make sure we can force-close a frozen channel
-       nodes[0].node.force_close_channel(&channel_id).unwrap();
+       nodes[0].node.force_close_channel(&channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[0], 1);
        check_closed_broadcast!(nodes[0], true);
 
@@ -1102,7 +1102,7 @@ fn test_monitor_update_fail_reestablish() {
        assert!(updates.update_fee.is_none());
        assert_eq!(updates.update_fulfill_htlcs.len(), 1);
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
        check_added_monitors!(nodes[1], 1);
        assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
        commitment_signed_dance!(nodes[1], nodes[2], updates.commitment_signed, false);
@@ -1809,9 +1809,9 @@ fn do_during_funding_monitor_fail(confirm_a_first: bool, restore_b_before_conf:
        nodes[1].node.handle_open_channel(&nodes[0].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id()));
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id()));
 
-       let (temporary_channel_id, funding_tx, funding_output) = create_funding_transaction(&nodes[0], 100000, 43);
+       let (temporary_channel_id, funding_tx, funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 43);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, funding_tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), funding_tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
 
        chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
@@ -2087,7 +2087,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &cs_updates.update_fulfill_htlcs[0]);
        let bs_updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        check_added_monitors!(nodes[1], 1);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
 
        mine_transaction(&nodes[1], &bs_txn[0]);
        check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
@@ -2468,7 +2468,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
                assert_eq!(fulfill_msg, cs_updates.update_fulfill_htlcs[0]);
        }
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &fulfill_msg);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
        check_added_monitors!(nodes[1], 1);
 
        let mut bs_updates = None;
@@ -2538,7 +2538,7 @@ fn test_temporary_error_during_shutdown() {
        chanmon_cfgs[0].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
        chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
 
-       nodes[0].node.close_channel(&channel_id).unwrap();
+       nodes[0].node.close_channel(&channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
 
@@ -2591,7 +2591,7 @@ fn test_permanent_error_during_sending_shutdown() {
        let channel_id = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).2;
        chanmon_cfgs[0].persister.set_update_ret(Err(ChannelMonitorUpdateErr::PermanentFailure));
 
-       assert!(nodes[0].node.close_channel(&channel_id).is_ok());
+       assert!(nodes[0].node.close_channel(&channel_id, &nodes[1].node.get_our_node_id()).is_ok());
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 2);
        check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
@@ -2612,7 +2612,7 @@ fn test_permanent_error_during_handling_shutdown() {
        let channel_id = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).2;
        chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::PermanentFailure));
 
-       assert!(nodes[0].node.close_channel(&channel_id).is_ok());
+       assert!(nodes[0].node.close_channel(&channel_id, &nodes[1].node.get_our_node_id()).is_ok());
        let shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &shutdown);
        check_closed_broadcast!(nodes[1], true);
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index 2ce466c9ac6d4efe7c0cea8a2af324a7d4ecaf19..8725059c360c47435698c71672f81f545b8bab77 100644
@@ -1769,17 +1769,18 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                });
        }
 
-       fn close_channel_internal(&self, channel_id: &[u8; 32], target_feerate_sats_per_1000_weight: Option<u32>) -> Result<(), APIError> {
+       fn close_channel_internal(&self, channel_id: &[u8; 32], counterparty_node_id: &PublicKey, target_feerate_sats_per_1000_weight: Option<u32>) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
-               let counterparty_node_id;
                let mut failed_htlcs: Vec<(HTLCSource, PaymentHash)>;
                let result: Result<(), _> = loop {
                        let mut channel_state_lock = self.channel_state.lock().unwrap();
                        let channel_state = &mut *channel_state_lock;
                        match channel_state.by_id.entry(channel_id.clone()) {
                                hash_map::Entry::Occupied(mut chan_entry) => {
-                                       counterparty_node_id = chan_entry.get().get_counterparty_node_id();
+                                       if *counterparty_node_id != chan_entry.get().get_counterparty_node_id(){
+                                               return Err(APIError::APIMisuseError { err: "The passed counterparty_node_id doesn't match the channel's counterparty node_id".to_owned() });
+                                       }
                                        let per_peer_state = self.per_peer_state.read().unwrap();
                                        let (shutdown_msg, monitor_update, htlcs) = match per_peer_state.get(&counterparty_node_id) {
                                                Some(peer_state) => {
@@ -1804,7 +1805,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                        }
 
                                        channel_state.pending_msg_events.push(events::MessageSendEvent::SendShutdown {
-                                               node_id: counterparty_node_id,
+                                               node_id: *counterparty_node_id,
                                                msg: shutdown_msg
                                        });
 
@@ -1827,7 +1828,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                        self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
                }
 
-               let _ = handle_error!(self, result, counterparty_node_id);
+               let _ = handle_error!(self, result, *counterparty_node_id);
                Ok(())
        }
 
@@ -1848,8 +1849,8 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        /// [`ChannelConfig::force_close_avoidance_max_fee_satoshis`]: crate::util::config::ChannelConfig::force_close_avoidance_max_fee_satoshis
        /// [`Background`]: crate::chain::chaininterface::ConfirmationTarget::Background
        /// [`Normal`]: crate::chain::chaininterface::ConfirmationTarget::Normal
-       pub fn close_channel(&self, channel_id: &[u8; 32]) -> Result<(), APIError> {
-               self.close_channel_internal(channel_id, None)
+       pub fn close_channel(&self, channel_id: &[u8; 32], counterparty_node_id: &PublicKey) -> Result<(), APIError> {
+               self.close_channel_internal(channel_id, counterparty_node_id, None)
        }
 
        /// Begins the process of closing a channel. After this call (plus some timeout), no new HTLCs
@@ -1871,8 +1872,8 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        /// [`ChannelConfig::force_close_avoidance_max_fee_satoshis`]: crate::util::config::ChannelConfig::force_close_avoidance_max_fee_satoshis
        /// [`Background`]: crate::chain::chaininterface::ConfirmationTarget::Background
        /// [`Normal`]: crate::chain::chaininterface::ConfirmationTarget::Normal
-       pub fn close_channel_with_target_feerate(&self, channel_id: &[u8; 32], target_feerate_sats_per_1000_weight: u32) -> Result<(), APIError> {
-               self.close_channel_internal(channel_id, Some(target_feerate_sats_per_1000_weight))
+       pub fn close_channel_with_target_feerate(&self, channel_id: &[u8; 32], counterparty_node_id: &PublicKey, target_feerate_sats_per_1000_weight: u32) -> Result<(), APIError> {
+               self.close_channel_internal(channel_id, counterparty_node_id, Some(target_feerate_sats_per_1000_weight))
        }
 
        #[inline]
@@ -1891,22 +1892,18 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                }
        }
 
-       /// `peer_node_id` should be set when we receive a message from a peer, but not set when the
+       /// `peer_msg` should be set when we receive a message from a peer, but not set when the
        /// user closes, which will be re-exposed as the `ChannelClosed` reason.
-       fn force_close_channel_with_peer(&self, channel_id: &[u8; 32], peer_node_id: Option<&PublicKey>, peer_msg: Option<&String>) -> Result<PublicKey, APIError> {
+       fn force_close_channel_with_peer(&self, channel_id: &[u8; 32], peer_node_id: &PublicKey, peer_msg: Option<&String>) -> Result<PublicKey, APIError> {
                let mut chan = {
                        let mut channel_state_lock = self.channel_state.lock().unwrap();
                        let channel_state = &mut *channel_state_lock;
                        if let hash_map::Entry::Occupied(chan) = channel_state.by_id.entry(channel_id.clone()) {
-                               if let Some(node_id) = peer_node_id {
-                                       if chan.get().get_counterparty_node_id() != *node_id {
-                                               return Err(APIError::ChannelUnavailable{err: "No such channel".to_owned()});
-                                       }
+                               if chan.get().get_counterparty_node_id() != *peer_node_id {
+                                       return Err(APIError::ChannelUnavailable{err: "No such channel".to_owned()});
                                }
-                               if peer_node_id.is_some() {
-                                       if let Some(peer_msg) = peer_msg {
-                                               self.issue_channel_close_events(chan.get(),ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() });
-                                       }
+                               if let Some(peer_msg) = peer_msg {
+                                       self.issue_channel_close_events(chan.get(),ClosureReason::CounterpartyForceClosed { peer_msg: peer_msg.to_string() });
                                } else {
                                        self.issue_channel_close_events(chan.get(),ClosureReason::HolderForceClosed);
                                }
@@ -1928,10 +1925,12 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        }
 
        /// Force closes a channel, immediately broadcasting the latest local commitment transaction to
-       /// the chain and rejecting new HTLCs on the given channel. Fails if channel_id is unknown to the manager.
-       pub fn force_close_channel(&self, channel_id: &[u8; 32]) -> Result<(), APIError> {
+       /// the chain and rejecting new HTLCs on the given channel. Fails if `channel_id` is unknown to
+       /// the manager, or if the `counterparty_node_id` isn't the counterparty of the corresponding
+       /// channel.
+       pub fn force_close_channel(&self, channel_id: &[u8; 32], counterparty_node_id: &PublicKey) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
-               match self.force_close_channel_with_peer(channel_id, None, None) {
+               match self.force_close_channel_with_peer(channel_id, counterparty_node_id, None) {
                        Ok(counterparty_node_id) => {
                                self.channel_state.lock().unwrap().pending_msg_events.push(
                                        events::MessageSendEvent::HandleError {
@@ -1951,7 +1950,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        /// for each to the chain and rejecting new HTLCs on each.
        pub fn force_close_all_channels(&self) {
                for chan in self.list_channels() {
-                       let _ = self.force_close_channel(&chan.channel_id);
+                       let _ = self.force_close_channel(&chan.channel_id, &chan.counterparty.node_id);
                }
        }
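
A hedged caller-side sketch of the updated close APIs: the channel id alone no longer identifies the call, and the passed counterparty node id must match the channel's counterparty (pulled here from `list_channels()`, as `force_close_all_channels` does above). `channel_manager` is assumed to be an initialized `ChannelManager`:

    // Cooperatively close every channel, falling back to a unilateral close on error.
    for chan in channel_manager.list_channels() {
        if channel_manager.close_channel(&chan.channel_id, &chan.counterparty.node_id).is_err() {
            let _ = channel_manager.force_close_channel(&chan.channel_id, &chan.counterparty.node_id);
        }
    }
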
 
@@ -2687,8 +2686,9 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
 
        /// Handles the generation of a funding transaction, optionally (for tests) with a function
        /// which checks the correctness of the funding transaction given the associated channel.
-       fn funding_transaction_generated_intern<FundingOutput: Fn(&Channel<Signer>, &Transaction) -> Result<OutPoint, APIError>>
-                       (&self, temporary_channel_id: &[u8; 32], funding_transaction: Transaction, find_funding_output: FundingOutput) -> Result<(), APIError> {
+       fn funding_transaction_generated_intern<FundingOutput: Fn(&Channel<Signer>, &Transaction) -> Result<OutPoint, APIError>>(
+               &self, temporary_channel_id: &[u8; 32], _counterparty_node_id: &PublicKey, funding_transaction: Transaction, find_funding_output: FundingOutput
+       ) -> Result<(), APIError> {
                let (chan, msg) = {
                        let (res, chan) = match self.channel_state.lock().unwrap().by_id.remove(temporary_channel_id) {
                                Some(mut chan) => {
@@ -2729,8 +2729,8 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        }
 
        #[cfg(test)]
-       pub(crate) fn funding_transaction_generated_unchecked(&self, temporary_channel_id: &[u8; 32], funding_transaction: Transaction, output_index: u16) -> Result<(), APIError> {
-               self.funding_transaction_generated_intern(temporary_channel_id, funding_transaction, |_, tx| {
+       pub(crate) fn funding_transaction_generated_unchecked(&self, temporary_channel_id: &[u8; 32], counterparty_node_id: &PublicKey, funding_transaction: Transaction, output_index: u16) -> Result<(), APIError> {
+               self.funding_transaction_generated_intern(temporary_channel_id, counterparty_node_id, funding_transaction, |_, tx| {
                        Ok(OutPoint { txid: tx.txid(), index: output_index })
                })
        }
@@ -2757,7 +2757,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        ///
        /// [`Event::FundingGenerationReady`]: crate::util::events::Event::FundingGenerationReady
        /// [`Event::ChannelClosed`]: crate::util::events::Event::ChannelClosed
-       pub fn funding_transaction_generated(&self, temporary_channel_id: &[u8; 32], funding_transaction: Transaction) -> Result<(), APIError> {
+       pub fn funding_transaction_generated(&self, temporary_channel_id: &[u8; 32], counterparty_node_id: &PublicKey, funding_transaction: Transaction) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
                for inp in funding_transaction.input.iter() {
@@ -2767,7 +2767,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                });
                        }
                }
-               self.funding_transaction_generated_intern(temporary_channel_id, funding_transaction, |chan, tx| {
+               self.funding_transaction_generated_intern(temporary_channel_id, counterparty_node_id, funding_transaction, |chan, tx| {
                        let mut output_index = None;
                        let expected_spk = chan.get_funding_redeemscript().to_v0_p2wsh();
                        for (idx, outp) in tx.output.iter().enumerate() {
@@ -3958,7 +3958,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                }
        }
 
-       fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool) {
+       fn claim_funds_internal(&self, mut channel_state_lock: MutexGuard<ChannelHolder<Signer>>, source: HTLCSource, payment_preimage: PaymentPreimage, forwarded_htlc_value_msat: Option<u64>, from_onchain: bool, next_channel_id: [u8; 32]) {
                match source {
                        HTLCSource::OutboundRoute { session_priv, payment_id, path, .. } => {
                                mem::drop(channel_state_lock);
@@ -4049,12 +4049,14 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                                } else { None };
 
                                                let mut pending_events = self.pending_events.lock().unwrap();
+                                               let prev_channel_id = Some(prev_outpoint.to_channel_id());
+                                               let next_channel_id = Some(next_channel_id);
 
-                                               let source_channel_id = Some(prev_outpoint.to_channel_id());
                                                pending_events.push(events::Event::PaymentForwarded {
-                                                       source_channel_id,
                                                        fee_earned_msat,
                                                        claim_from_onchain_tx: from_onchain,
+                                                       prev_channel_id,
+                                                       next_channel_id,
                                                });
                                        }
                                }
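
A hedged event-handler sketch for the renamed fields, where `prev_channel_id`/`next_channel_id` replace the old `source_channel_id`:

    use lightning::util::events::Event;

    fn log_forward(event: &Event) {
        if let Event::PaymentForwarded { prev_channel_id, next_channel_id, fee_earned_msat, .. } = event {
            // prev_channel_id: the channel the HTLC arrived on; next_channel_id: the channel
            // it was forwarded out on; both are Option-wrapped, as pushed above.
            println!("forwarded {:?} -> {:?}, fee {:?} msat", prev_channel_id, next_channel_id, fee_earned_msat);
        }
    }
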
@@ -4112,7 +4114,9 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        /// Called to accept a request to open a channel after [`Event::OpenChannelRequest`] has been
        /// triggered.
        ///
-       /// The `temporary_channel_id` parameter indicates which inbound channel should be accepted.
+       /// The `temporary_channel_id` parameter indicates which inbound channel should be accepted,
+       /// and the `counterparty_node_id` parameter is the id of the peer which has requested to open
+       /// the channel.
        ///
        /// For inbound channels, the `user_channel_id` parameter will be provided back in
        /// [`Event::ChannelClosed::user_channel_id`] to allow tracking of which events correspond
@@ -4120,7 +4124,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
        ///
        /// [`Event::OpenChannelRequest`]: events::Event::OpenChannelRequest
        /// [`Event::ChannelClosed::user_channel_id`]: events::Event::ChannelClosed::user_channel_id
-       pub fn accept_inbound_channel(&self, temporary_channel_id: &[u8; 32], user_channel_id: u64) -> Result<(), APIError> {
+       pub fn accept_inbound_channel(&self, temporary_channel_id: &[u8; 32], counterparty_node_id: &PublicKey, user_channel_id: u64) -> Result<(), APIError> {
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
                let mut channel_state_lock = self.channel_state.lock().unwrap();
@@ -4130,6 +4134,9 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                if !channel.get().inbound_is_awaiting_accept() {
                                        return Err(APIError::APIMisuseError { err: "The channel isn't currently awaiting to be accepted.".to_owned() });
                                }
+                               if *counterparty_node_id != channel.get().get_counterparty_node_id() {
+                                       return Err(APIError::APIMisuseError { err: "The passed counterparty_node_id doesn't match the channel's counterparty node_id".to_owned() });
+                               }
                                channel_state.pending_msg_events.push(events::MessageSendEvent::SendAcceptChannel {
                                        node_id: channel.get().get_counterparty_node_id(),
                                        msg: channel.get_mut().accept_inbound_channel(user_channel_id),
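
A hedged sketch of accepting a manually-approved inbound channel: `Event::OpenChannelRequest` supplies both ids needed by the new signature (its field names are assumed here, not shown in this diff), and `channel_manager` is assumed initialized with manual channel acceptance enabled:

    use lightning::util::events::Event;

    if let Event::OpenChannelRequest { temporary_channel_id, counterparty_node_id, .. } = event {
        // 42 is an arbitrary user_channel_id, echoed back in later events for this channel.
        channel_manager
            .accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, 42)
            .expect("counterparty id must match the requesting peer");
    }
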
@@ -4212,6 +4219,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                let mut pending_events = self.pending_events.lock().unwrap();
                pending_events.push(events::Event::FundingGenerationReady {
                        temporary_channel_id: msg.temporary_channel_id,
+                       counterparty_node_id: *counterparty_node_id,
                        channel_value_satoshis: value,
                        output_script,
                        user_channel_id: user_id,
@@ -4507,7 +4515,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                                hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
                        }
                };
-               self.claim_funds_internal(channel_lock, htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false);
+               self.claim_funds_internal(channel_lock, htlc_source, msg.payment_preimage.clone(), Some(forwarded_htlc_value), false, msg.channel_id);
                Ok(())
        }
 
@@ -4827,48 +4835,50 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                let mut failed_channels = Vec::new();
                let mut pending_monitor_events = self.chain_monitor.release_pending_monitor_events();
                let has_pending_monitor_events = !pending_monitor_events.is_empty();
-               for monitor_event in pending_monitor_events.drain(..) {
-                       match monitor_event {
-                               MonitorEvent::HTLCEvent(htlc_update) => {
-                                       if let Some(preimage) = htlc_update.payment_preimage {
-                                               log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
-                                               self.claim_funds_internal(self.channel_state.lock().unwrap(), htlc_update.source, preimage, htlc_update.onchain_value_satoshis.map(|v| v * 1000), true);
-                                       } else {
-                                               log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
-                                               self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
-                                       }
-                               },
-                               MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
-                               MonitorEvent::UpdateFailed(funding_outpoint) => {
-                                       let mut channel_lock = self.channel_state.lock().unwrap();
-                                       let channel_state = &mut *channel_lock;
-                                       let by_id = &mut channel_state.by_id;
-                                       let pending_msg_events = &mut channel_state.pending_msg_events;
-                                       if let hash_map::Entry::Occupied(chan_entry) = by_id.entry(funding_outpoint.to_channel_id()) {
-                                               let mut chan = remove_channel!(self, channel_state, chan_entry);
-                                               failed_channels.push(chan.force_shutdown(false));
-                                               if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
-                                                       pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
-                                                               msg: update
+               for (funding_outpoint, mut monitor_events) in pending_monitor_events.drain(..) {
+                       for monitor_event in monitor_events.drain(..) {
+                               match monitor_event {
+                                       MonitorEvent::HTLCEvent(htlc_update) => {
+                                               if let Some(preimage) = htlc_update.payment_preimage {
+                                                       log_trace!(self.logger, "Claiming HTLC with preimage {} from our monitor", log_bytes!(preimage.0));
+                                                       self.claim_funds_internal(self.channel_state.lock().unwrap(), htlc_update.source, preimage, htlc_update.onchain_value_satoshis.map(|v| v * 1000), true, funding_outpoint.to_channel_id());
+                                               } else {
+                                                       log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
+                                                       self.fail_htlc_backwards_internal(self.channel_state.lock().unwrap(), htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() });
+                                               }
+                                       },
+                                       MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
+                                       MonitorEvent::UpdateFailed(funding_outpoint) => {
+                                               let mut channel_lock = self.channel_state.lock().unwrap();
+                                               let channel_state = &mut *channel_lock;
+                                               let by_id = &mut channel_state.by_id;
+                                               let pending_msg_events = &mut channel_state.pending_msg_events;
+                                               if let hash_map::Entry::Occupied(chan_entry) = by_id.entry(funding_outpoint.to_channel_id()) {
+                                                       let mut chan = remove_channel!(self, channel_state, chan_entry);
+                                                       failed_channels.push(chan.force_shutdown(false));
+                                                       if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
+                                                               pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
+                                                                       msg: update
+                                                               });
+                                                       }
+                                                       let reason = if let MonitorEvent::UpdateFailed(_) = monitor_event {
+                                                               ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() }
+                                                       } else {
+                                                               ClosureReason::CommitmentTxConfirmed
+                                                       };
+                                                       self.issue_channel_close_events(&chan, reason);
+                                                       pending_msg_events.push(events::MessageSendEvent::HandleError {
+                                                               node_id: chan.get_counterparty_node_id(),
+                                                               action: msgs::ErrorAction::SendErrorMessage {
+                                                                       msg: msgs::ErrorMessage { channel_id: chan.channel_id(), data: "Channel force-closed".to_owned() }
+                                                               },
                                                        });
                                                }
-                                               let reason = if let MonitorEvent::UpdateFailed(_) = monitor_event {
-                                                       ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() }
-                                               } else {
-                                                       ClosureReason::CommitmentTxConfirmed
-                                               };
-                                               self.issue_channel_close_events(&chan, reason);
-                                               pending_msg_events.push(events::MessageSendEvent::HandleError {
-                                                       node_id: chan.get_counterparty_node_id(),
-                                                       action: msgs::ErrorAction::SendErrorMessage {
-                                                               msg: msgs::ErrorMessage { channel_id: chan.channel_id(), data: "Channel force-closed".to_owned() }
-                                                       },
-                                               });
-                                       }
-                               },
-                               MonitorEvent::UpdateCompleted { funding_txo, monitor_update_id } => {
-                                       self.channel_monitor_updated(&funding_txo, monitor_update_id);
-                               },
+                                       },
+                                       MonitorEvent::UpdateCompleted { funding_txo, monitor_update_id } => {
+                                               self.channel_monitor_updated(&funding_txo, monitor_update_id);
+                                       },
+                               }
                        }
                }
 
@@ -5698,39 +5708,21 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                        let channel_state = &mut *channel_state_lock;
                        let pending_msg_events = &mut channel_state.pending_msg_events;
                        let short_to_id = &mut channel_state.short_to_id;
-                       if no_connection_possible {
-                               log_debug!(self.logger, "Failing all channels with {} due to no_connection_possible", log_pubkey!(counterparty_node_id));
-                               channel_state.by_id.retain(|_, chan| {
-                                       if chan.get_counterparty_node_id() == *counterparty_node_id {
+                       log_debug!(self.logger, "Marking channels with {} disconnected and generating channel_updates. We believe we {} make future connections to this peer.",
+                               log_pubkey!(counterparty_node_id), if no_connection_possible { "cannot" } else { "can" });
+                       channel_state.by_id.retain(|_, chan| {
+                               if chan.get_counterparty_node_id() == *counterparty_node_id {
+                                       chan.remove_uncommitted_htlcs_and_mark_paused(&self.logger);
+                                       if chan.is_shutdown() {
                                                update_maps_on_chan_removal!(self, short_to_id, chan);
-                                               failed_channels.push(chan.force_shutdown(true));
-                                               if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
-                                                       pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
-                                                               msg: update
-                                                       });
-                                               }
                                                self.issue_channel_close_events(chan, ClosureReason::DisconnectedPeer);
-                                               false
+                                               return false;
                                        } else {
-                                               true
+                                               no_channels_remain = false;
                                        }
-                               });
-                       } else {
-                               log_debug!(self.logger, "Marking channels with {} disconnected and generating channel_updates", log_pubkey!(counterparty_node_id));
-                               channel_state.by_id.retain(|_, chan| {
-                                       if chan.get_counterparty_node_id() == *counterparty_node_id {
-                                               chan.remove_uncommitted_htlcs_and_mark_paused(&self.logger);
-                                               if chan.is_shutdown() {
-                                                       update_maps_on_chan_removal!(self, short_to_id, chan);
-                                                       self.issue_channel_close_events(chan, ClosureReason::DisconnectedPeer);
-                                                       return false;
-                                               } else {
-                                                       no_channels_remain = false;
-                                               }
-                                       }
-                                       true
-                               })
-                       }
+                               }
+                               true
+                       });
                        pending_msg_events.retain(|msg| {
                                match msg {
                                        &events::MessageSendEvent::SendAcceptChannel { ref node_id, .. } => node_id != counterparty_node_id,
@@ -5814,7 +5806,7 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                        for chan in self.list_channels() {
                                if chan.counterparty.node_id == *counterparty_node_id {
                                        // Untrusted messages from peer, we throw away the error if id points to a non-existent channel
-                                       let _ = self.force_close_channel_with_peer(&chan.channel_id, Some(counterparty_node_id), Some(&msg.data));
+                                       let _ = self.force_close_channel_with_peer(&chan.channel_id, counterparty_node_id, Some(&msg.data));
                                }
                        }
                } else {
@@ -5836,7 +5828,7 @@ impl<Signer: Sign, M: Deref , T: Deref , K: Deref , F: Deref , L: Deref >
                        }
 
                        // Untrusted messages from peer, we throw away the error if id points to a non-existent channel
-                       let _ = self.force_close_channel_with_peer(&msg.channel_id, Some(counterparty_node_id), Some(&msg.data));
+                       let _ = self.force_close_channel_with_peer(&msg.channel_id, counterparty_node_id, Some(&msg.data));
                }
        }
 }
@@ -7424,7 +7416,7 @@ pub mod bench {
                        tx = Transaction { version: 2, lock_time: 0, input: Vec::new(), output: vec![TxOut {
                                value: 8_000_000, script_pubkey: output_script,
                        }]};
-                       node_a.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+                       node_a.funding_transaction_generated(&temporary_channel_id, &node_b.get_our_node_id(), tx.clone()).unwrap();
                } else { panic!(); }
 
                node_b.handle_funding_created(&node_a.get_our_node_id(), &get_event_msg!(node_a_holder, MessageSendEvent::SendFundingCreated, node_b.get_our_node_id()));
diff --git a/lightning/src/ln/functional_test_utils.rs b/lightning/src/ln/functional_test_utils.rs
index b4807ea688629a5a1940a1bb37e9b9ba56aa9760..c0e33e5b93a51a4d620f6c19eec016a15468f70e 100644
@@ -553,13 +553,14 @@ macro_rules! check_added_monitors {
        }
 }
 
-pub fn create_funding_transaction<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, expected_chan_value: u64, expected_user_chan_id: u64) -> ([u8; 32], Transaction, OutPoint) {
+pub fn create_funding_transaction<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, expected_counterparty_node_id: &PublicKey, expected_chan_value: u64, expected_user_chan_id: u64) -> ([u8; 32], Transaction, OutPoint) {
        let chan_id = *node.network_chan_count.borrow();
 
        let events = node.node.get_and_clear_pending_events();
        assert_eq!(events.len(), 1);
        match events[0] {
-               Event::FundingGenerationReady { ref temporary_channel_id, ref channel_value_satoshis, ref output_script, user_channel_id } => {
+               Event::FundingGenerationReady { ref temporary_channel_id, ref counterparty_node_id, ref channel_value_satoshis, ref output_script, user_channel_id } => {
+                       assert_eq!(counterparty_node_id, expected_counterparty_node_id);
                        assert_eq!(*channel_value_satoshis, expected_chan_value);
                        assert_eq!(user_channel_id, expected_user_chan_id);
 
@@ -573,10 +574,10 @@ pub fn create_funding_transaction<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, expected_
        }
 }
 pub fn sign_funding_transaction<'a, 'b, 'c>(node_a: &Node<'a, 'b, 'c>, node_b: &Node<'a, 'b, 'c>, channel_value: u64, expected_temporary_channel_id: [u8; 32]) -> Transaction {
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(node_a, channel_value, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(node_a, &node_b.node.get_our_node_id(), channel_value, 42);
        assert_eq!(temporary_channel_id, expected_temporary_channel_id);
 
-       assert!(node_a.node.funding_transaction_generated(&temporary_channel_id, tx.clone()).is_ok());
+       assert!(node_a.node.funding_transaction_generated(&temporary_channel_id, &node_b.node.get_our_node_id(), tx.clone()).is_ok());
        check_added_monitors!(node_a, 0);
 
        let funding_created_msg = get_event_msg!(node_a, MessageSendEvent::SendFundingCreated, node_b.node.get_our_node_id());
@@ -605,7 +606,7 @@ pub fn sign_funding_transaction<'a, 'b, 'c>(node_a: &Node<'a, 'b, 'c>, node_b: &
        node_a.tx_broadcaster.txn_broadcasted.lock().unwrap().clear();
 
        // Ensure that funding_transaction_generated is idempotent.
-       assert!(node_a.node.funding_transaction_generated(&temporary_channel_id, tx.clone()).is_err());
+       assert!(node_a.node.funding_transaction_generated(&temporary_channel_id, &node_b.node.get_our_node_id(), tx.clone()).is_err());
        assert!(node_a.node.get_and_clear_pending_msg_events().is_empty());
        check_added_monitors!(node_a, 0);
 
@@ -722,8 +723,8 @@ pub fn create_unannounced_chan_between_nodes_with_value<'a, 'b, 'c, 'd>(nodes: &
        let accept_channel = get_event_msg!(nodes[b], MessageSendEvent::SendAcceptChannel, nodes[a].node.get_our_node_id());
        nodes[a].node.handle_accept_channel(&nodes[b].node.get_our_node_id(), b_flags, &accept_channel);
 
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[a], channel_value, 42);
-       nodes[a].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[a], &nodes[b].node.get_our_node_id(), channel_value, 42);
+       nodes[a].node.funding_transaction_generated(&temporary_channel_id, &nodes[b].node.get_our_node_id(), tx.clone()).unwrap();
        nodes[b].node.handle_funding_created(&nodes[a].node.get_our_node_id(), &get_event_msg!(nodes[a], MessageSendEvent::SendFundingCreated, nodes[b].node.get_our_node_id()));
        check_added_monitors!(nodes[b], 1);
 
@@ -968,7 +969,7 @@ pub fn close_channel<'a, 'b, 'c>(outbound_node: &Node<'a, 'b, 'c>, inbound_node:
        let (node_b, broadcaster_b, struct_b) = if close_inbound_first { (&outbound_node.node, &outbound_node.tx_broadcaster, outbound_node) } else { (&inbound_node.node, &inbound_node.tx_broadcaster, inbound_node) };
        let (tx_a, tx_b);
 
-       node_a.close_channel(channel_id).unwrap();
+       node_a.close_channel(channel_id, &node_b.get_our_node_id()).unwrap();
        node_b.handle_shutdown(&node_a.get_our_node_id(), &InitFeatures::known(), &get_event_msg!(struct_a, MessageSendEvent::SendShutdown, node_b.get_our_node_id()));
 
        let events_1 = node_b.get_and_clear_pending_msg_events();
@@ -1334,15 +1335,20 @@ macro_rules! expect_payment_path_successful {
 }
 
 macro_rules! expect_payment_forwarded {
-       ($node: expr, $source_node: expr, $expected_fee: expr, $upstream_force_closed: expr) => {
+       ($node: expr, $prev_node: expr, $next_node: expr, $expected_fee: expr, $upstream_force_closed: expr, $downstream_force_closed: expr) => {
                let events = $node.node.get_and_clear_pending_events();
                assert_eq!(events.len(), 1);
                match events[0] {
-                       Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+                       Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                                assert_eq!(fee_earned_msat, $expected_fee);
                                if fee_earned_msat.is_some() {
-                                       // Is the event channel_id in one of the channels between the two nodes?
-                                       assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $source_node.node.get_our_node_id() && x.channel_id == source_channel_id.unwrap()));
+                                       // Is the event prev_channel_id in one of the channels between the two nodes?
+                                       assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $prev_node.node.get_our_node_id() && x.channel_id == prev_channel_id.unwrap()));
+                               }
+                               // We check for force closures since a force closed channel is removed from the
+                               // node's channel list
+                               if !$downstream_force_closed {
+                                       assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $next_node.node.get_our_node_id() && x.channel_id == next_channel_id.unwrap()));
                                }
                                assert_eq!(claim_from_onchain_tx, $upstream_force_closed);
                        },
@@ -1586,7 +1592,7 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                                {
                                        $node.node.handle_update_fulfill_htlc(&$prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
                                        let fee = $node.node.channel_state.lock().unwrap().by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap().config.forwarding_fee_base_msat;
-                                       expect_payment_forwarded!($node, $next_node, Some(fee as u64), false);
+                                       expect_payment_forwarded!($node, $next_node, $prev_node, Some(fee as u64), false, false);
                                        expected_total_fee_msat += fee as u64;
                                        check_added_monitors!($node, 1);
                                        let new_next_msgs = if $new_msgs {
index aa68454063a596e6f5895632fe9f1c0763453d66..48b4b07c7d76d1f73238b27eb851ac32e720adbb 100644 (file)
@@ -515,10 +515,10 @@ fn do_test_sanity_on_in_flight_opens(steps: u8) {
        if steps & 0x0f == 2 { return; }
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_channel);
 
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
        if steps & 0x0f == 3 { return; }
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
        let funding_created = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
 
@@ -2178,9 +2178,9 @@ fn channel_monitor_network_test() {
        send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2], &nodes[3], &nodes[4])[..], 8000000);
 
        // Simple case with no pending HTLCs:
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), true);
+       nodes[1].node.force_close_channel(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
-       check_closed_broadcast!(nodes[1], false);
+       check_closed_broadcast!(nodes[1], true);
        {
                let mut node_txn = test_txn_broadcast(&nodes[1], &chan_1, None, HTLCType::NONE);
                assert_eq!(node_txn.len(), 1);
@@ -2192,15 +2192,15 @@ fn channel_monitor_network_test() {
        assert_eq!(nodes[0].node.list_channels().len(), 0);
        assert_eq!(nodes[1].node.list_channels().len(), 1);
        check_closed_event!(nodes[0], 1, ClosureReason::CommitmentTxConfirmed);
-       check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer);
+       check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
 
        // One pending HTLC is discarded by the force-close:
        let payment_preimage_1 = route_payment(&nodes[1], &vec!(&nodes[2], &nodes[3])[..], 3000000).0;
 
        // Simple case of one pending HTLC to HTLC-Timeout (note that the HTLC-Timeout is not
        // broadcasted until we reach the timelock time).
-       nodes[1].node.peer_disconnected(&nodes[2].node.get_our_node_id(), true);
-       check_closed_broadcast!(nodes[1], false);
+       nodes[1].node.force_close_channel(&chan_2.2, &nodes[2].node.get_our_node_id()).unwrap();
+       check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
        {
                let mut node_txn = test_txn_broadcast(&nodes[1], &chan_2, None, HTLCType::NONE);
@@ -2213,7 +2213,7 @@ fn channel_monitor_network_test() {
        check_closed_broadcast!(nodes[2], true);
        assert_eq!(nodes[1].node.list_channels().len(), 0);
        assert_eq!(nodes[2].node.list_channels().len(), 1);
-       check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer);
+       check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
        check_closed_event!(nodes[2], 1, ClosureReason::CommitmentTxConfirmed);
 
        macro_rules! claim_funds {
@@ -2238,9 +2238,9 @@ fn channel_monitor_network_test() {
 
        // nodes[3] gets the preimage, but nodes[2] already disconnected, resulting in a nodes[2]
        // HTLC-Timeout and a nodes[3] claim against it (+ its own announces)
-       nodes[2].node.peer_disconnected(&nodes[3].node.get_our_node_id(), true);
+       nodes[2].node.force_close_channel(&chan_3.2, &nodes[3].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[2], 1);
-       check_closed_broadcast!(nodes[2], false);
+       check_closed_broadcast!(nodes[2], true);
        let node2_commitment_txid;
        {
                let node_txn = test_txn_broadcast(&nodes[2], &chan_3, None, HTLCType::NONE);
@@ -2257,7 +2257,7 @@ fn channel_monitor_network_test() {
        check_closed_broadcast!(nodes[3], true);
        assert_eq!(nodes[2].node.list_channels().len(), 0);
        assert_eq!(nodes[3].node.list_channels().len(), 1);
-       check_closed_event!(nodes[2], 1, ClosureReason::DisconnectedPeer);
+       check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed);
        check_closed_event!(nodes[3], 1, ClosureReason::CommitmentTxConfirmed);
 
        // Drop the ChannelMonitor for the previous channel to avoid it broadcasting transactions and
@@ -2708,18 +2708,20 @@ fn test_htlc_on_chain_success() {
        }
        let chan_id = Some(chan_1.2);
        match forwarded_events[1] {
-               Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                        assert_eq!(fee_earned_msat, Some(1000));
-                       assert_eq!(source_channel_id, chan_id);
+                       assert_eq!(prev_channel_id, chan_id);
                        assert_eq!(claim_from_onchain_tx, true);
+                       assert_eq!(next_channel_id, Some(chan_2.2));
                },
                _ => panic!()
        }
        match forwarded_events[2] {
-               Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                        assert_eq!(fee_earned_msat, Some(1000));
-                       assert_eq!(source_channel_id, chan_id);
+                       assert_eq!(prev_channel_id, chan_id);
                        assert_eq!(claim_from_onchain_tx, true);
+                       assert_eq!(next_channel_id, Some(chan_2.2));
                },
                _ => panic!()
        }
@@ -3362,7 +3364,7 @@ fn test_htlc_ignore_latest_remote_commitment() {
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
        route_payment(&nodes[0], &[&nodes[1]], 10000000);
-       nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id).unwrap();
+       nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
@@ -3425,7 +3427,7 @@ fn test_force_close_fail_back() {
        // state or updated nodes[1]' state. Now force-close and broadcast that commitment/HTLC
        // transaction and ensure nodes[1] doesn't fail-backwards (this was originally a bug!).
 
-       nodes[2].node.force_close_channel(&payment_event.commitment_msg.channel_id).unwrap();
+       nodes[2].node.force_close_channel(&payment_event.commitment_msg.channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[2], true);
        check_added_monitors!(nodes[2], 1);
        check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed);
@@ -3506,10 +3508,10 @@ fn test_peer_disconnected_before_funding_broadcasted() {
        let accept_channel = get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id());
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_channel);
 
-       let (temporary_channel_id, tx, _funding_output) = create_funding_transaction(&nodes[0], 1_000_000, 42);
+       let (temporary_channel_id, tx, _funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 1_000_000, 42);
        assert_eq!(temporary_channel_id, expected_temporary_channel_id);
 
-       assert!(nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).is_ok());
+       assert!(nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).is_ok());
 
        let funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
        assert_eq!(funding_created_msg.temporary_channel_id, expected_temporary_channel_id);
@@ -4426,9 +4428,9 @@ fn test_manager_serialize_deserialize_events() {
        node_b.node.handle_open_channel(&node_a.node.get_our_node_id(), a_flags, &get_event_msg!(node_a, MessageSendEvent::SendOpenChannel, node_b.node.get_our_node_id()));
        node_a.node.handle_accept_channel(&node_b.node.get_our_node_id(), b_flags, &get_event_msg!(node_b, MessageSendEvent::SendAcceptChannel, node_a.node.get_our_node_id()));
 
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&node_a, channel_value, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&node_a, &node_b.node.get_our_node_id(), channel_value, 42);
 
-       node_a.node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       node_a.node.funding_transaction_generated(&temporary_channel_id, &node_b.node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(node_a, 0);
 
        node_b.node.handle_funding_created(&node_a.node.get_our_node_id(), &get_event_msg!(node_a, MessageSendEvent::SendFundingCreated, node_b.node.get_our_node_id()));
@@ -4749,7 +4751,7 @@ fn test_claim_sizeable_push_msat() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 98_000_000, InitFeatures::known(), InitFeatures::known());
-       nodes[1].node.force_close_channel(&chan.2).unwrap();
+       nodes[1].node.force_close_channel(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
        check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
@@ -4778,7 +4780,7 @@ fn test_claim_on_remote_sizeable_push_msat() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 98_000_000, InitFeatures::known(), InitFeatures::known());
-       nodes[0].node.force_close_channel(&chan.2).unwrap();
+       nodes[0].node.force_close_channel(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
        check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
@@ -5180,10 +5182,11 @@ fn test_onchain_to_onchain_claim() {
                _ => panic!("Unexpected event"),
        }
        match events[1] {
-               Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                        assert_eq!(fee_earned_msat, Some(1000));
-                       assert_eq!(source_channel_id, Some(chan_1.2));
+                       assert_eq!(prev_channel_id, Some(chan_1.2));
                        assert_eq!(claim_from_onchain_tx, true);
+                       assert_eq!(next_channel_id, Some(chan_2.2));
                },
                _ => panic!("Unexpected event"),
        }
@@ -5350,7 +5353,7 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
        // Note that the fee paid is effectively double as the HTLC value (including the nodes[1] fee
        // and nodes[2] fee) is rounded down and then claimed in full.
        mine_transaction(&nodes[1], &htlc_success_txn[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(196*2), true);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(196*2), true, true);
        let updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        assert!(updates.update_add_htlcs.is_empty());
        assert!(updates.update_fail_htlcs.is_empty());
@@ -8283,7 +8286,7 @@ fn test_manually_accept_inbound_channel_request() {
        let events = nodes[1].node.get_and_clear_pending_events();
        match events[0] {
                Event::OpenChannelRequest { temporary_channel_id, .. } => {
-                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, 23).unwrap();
+                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 23).unwrap();
                }
                _ => panic!("Unexpected event"),
        }
@@ -8298,7 +8301,7 @@ fn test_manually_accept_inbound_channel_request() {
                _ => panic!("Unexpected event"),
        }
 
-       nodes[1].node.force_close_channel(&temp_channel_id).unwrap();
+       nodes[1].node.force_close_channel(&temp_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
 
        let close_msg_ev = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(close_msg_ev.len(), 1);
@@ -8333,7 +8336,7 @@ fn test_manually_reject_inbound_channel_request() {
        let events = nodes[1].node.get_and_clear_pending_events();
        match events[0] {
                Event::OpenChannelRequest { temporary_channel_id, .. } => {
-                       nodes[1].node.force_close_channel(&temporary_channel_id).unwrap();
+                       nodes[1].node.force_close_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
                }
                _ => panic!("Unexpected event"),
        }
@@ -8388,9 +8391,9 @@ fn test_reject_funding_before_inbound_channel_accepted() {
                nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_chan_msg);
        }
 
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        let funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
 
        // The `funding_created_msg` should be rejected by `nodes[1]` as it hasn't accepted the channel
@@ -8433,8 +8436,8 @@ fn test_can_not_accept_inbound_channel_twice() {
        let events = nodes[1].node.get_and_clear_pending_events();
        match events[0] {
                Event::OpenChannelRequest { temporary_channel_id, .. } => {
-                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, 0).unwrap();
-                       let api_res = nodes[1].node.accept_inbound_channel(&temporary_channel_id, 0);
+                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 0).unwrap();
+                       let api_res = nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 0);
                        match api_res {
                                Err(APIError::APIMisuseError { err }) => {
                                        assert_eq!(err, "The channel isn't currently awaiting to be accepted.");
@@ -8460,13 +8463,13 @@ fn test_can_not_accept_inbound_channel_twice() {
 
 #[test]
 fn test_can_not_accept_unknown_inbound_channel() {
-       let chanmon_cfg = create_chanmon_cfgs(1);
-       let node_cfg = create_node_cfgs(1, &chanmon_cfg);
-       let node_chanmgr = create_node_chanmgrs(1, &node_cfg, &[None]);
-       let node = create_network(1, &node_cfg, &node_chanmgr)[0].node;
+       let chanmon_cfg = create_chanmon_cfgs(2);
+       let node_cfg = create_node_cfgs(2, &chanmon_cfg);
+       let node_chanmgr = create_node_chanmgrs(2, &node_cfg, &[None, None]);
+       let nodes = create_network(2, &node_cfg, &node_chanmgr);
 
        let unknown_channel_id = [0; 32];
-       let api_res = node.accept_inbound_channel(&unknown_channel_id, 0);
+       let api_res = nodes[0].node.accept_inbound_channel(&unknown_channel_id, &nodes[1].node.get_our_node_id(), 0);
        match api_res {
                Err(APIError::ChannelUnavailable { err }) => {
                        assert_eq!(err, "Can't accept a channel that doesn't exist");
@@ -8875,9 +8878,9 @@ fn test_pre_lockin_no_chan_closed_update() {
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_chan_msg);
 
        // Move the first channel through the funding flow...
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
 
        let funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
@@ -8975,8 +8978,14 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
        // If `go_onchain_before_fufill`, broadcast the relevant commitment transaction and check that Bob
        // responds by (1) broadcasting a channel update and (2) adding a new ChannelMonitor.
        let mut force_closing_node = 0; // Alice force-closes
-       if !broadcast_alice { force_closing_node = 1; } // Bob force-closes
-       nodes[force_closing_node].node.force_close_channel(&chan_ab.2).unwrap();
+       let mut counterparty_node = 1; // Bob if Alice force-closes
+
+       // Bob force-closes
+       if !broadcast_alice {
+               force_closing_node = 1;
+               counterparty_node = 0;
+       }
+       nodes[force_closing_node].node.force_close_channel(&chan_ab.2, &nodes[counterparty_node].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[force_closing_node], true);
        check_added_monitors!(nodes[force_closing_node], 1);
        check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed);
@@ -9010,7 +9019,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
        assert_eq!(carol_updates.update_fulfill_htlcs.len(), 1);
 
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &carol_updates.update_fulfill_htlcs[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false, false);
        // If Alice broadcasted but Bob doesn't know yet, here he prepares to tell her about the preimage.
        if !go_onchain_before_fulfill && broadcast_alice {
                let events = nodes[1].node.get_and_clear_pending_msg_events();
@@ -9164,9 +9173,9 @@ fn test_duplicate_chan_id() {
        }
 
        // Move the first channel through the funding flow...
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
 
        let mut funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
@@ -9209,7 +9218,7 @@ fn test_duplicate_chan_id() {
        let open_chan_2_msg = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_open_channel(&nodes[0].node.get_our_node_id(), InitFeatures::known(), &open_chan_2_msg);
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id()));
-       create_funding_transaction(&nodes[0], 100000, 42); // Get and check the FundingGenerationReady event
+       create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42); // Get and check the FundingGenerationReady event
 
        let funding_created = {
                let mut a_channel_lock = nodes[0].node.channel_state.lock().unwrap();
@@ -9342,13 +9351,13 @@ fn test_invalid_funding_tx() {
        nodes[1].node.handle_open_channel(&nodes[0].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id()));
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id()));
 
-       let (temporary_channel_id, mut tx, _) = create_funding_transaction(&nodes[0], 100_000, 42);
+       let (temporary_channel_id, mut tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100_000, 42);
        for output in tx.output.iter_mut() {
                // Make the confirmed funding transaction have a bogus script_pubkey
                output.script_pubkey = bitcoin::Script::new();
        }
 
-       nodes[0].node.funding_transaction_generated_unchecked(&temporary_channel_id, tx.clone(), 0).unwrap();
+       nodes[0].node.funding_transaction_generated_unchecked(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone(), 0).unwrap();
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
 
@@ -9406,7 +9415,7 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
        nodes[1].node.peer_disconnected(&nodes[2].node.get_our_node_id(), false);
        nodes[2].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
 
-       nodes[1].node.force_close_channel(&channel_id).unwrap();
+       nodes[1].node.force_close_channel(&channel_id, &nodes[2].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[1], true);
        check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
        check_added_monitors!(nodes[1], 1);
@@ -9878,7 +9887,7 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
 
        let opt_anchors = false;
 
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 1_000_000, 42);
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 1_000_000, 42);
 
        if on_holder_tx {
                if let Some(mut chan) = nodes[0].node.channel_state.lock().unwrap().by_id.get_mut(&temporary_channel_id) {
@@ -9886,7 +9895,7 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
                }
        }
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
 
index 6fa0aab9d311fb2ed5f9dff09ac3b487b2298051..af42644dd710164a8c1a633ef54975aa2a238873 100644 (file)
@@ -108,7 +108,7 @@ fn chanmon_claim_value_coop_close() {
        assert_eq!(vec![Balance::ClaimableOnChannelClose { claimable_amount_satoshis: 1_000, }],
                nodes[1].chain_monitor.chain_monitor.get_monitor(funding_outpoint).unwrap().get_claimable_balances());
 
-       nodes[0].node.close_channel(&chan_id).unwrap();
+       nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
index 46d5d22b49a07112f680086c3e701d902088fcd1..2d57950029e319fcabfb9532b87cda421b283dcf 100644 (file)
@@ -495,7 +495,7 @@ fn do_retry_with_no_persist(confirm_before_reload: bool) {
        let bs_htlc_claim_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
        assert_eq!(bs_htlc_claim_txn.len(), 1);
        check_spends!(bs_htlc_claim_txn[0], as_commitment_tx);
-       expect_payment_forwarded!(nodes[1], nodes[0], None, false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], None, false, false);
 
        if !confirm_before_reload {
                mine_transaction(&nodes[0], &as_commitment_tx);
@@ -565,7 +565,7 @@ fn do_test_dup_htlc_onchain_fails_on_reload(persist_manager_post_event: bool, co
        // Route a payment, but force-close the channel before the HTLC fulfill message arrives at
        // nodes[0].
        let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 10000000);
-       nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id).unwrap();
+       nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
        check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
index 633876ee375124ae2f9008fe8c5f0194bb60deb3..4b777b0b99446e0d4781e6029592b60ee0e24ff7 100644 (file)
@@ -258,8 +258,13 @@ pub trait SocketDescriptor : cmp::Eq + hash::Hash + Clone {
 /// descriptor.
 #[derive(Clone)]
 pub struct PeerHandleError {
-       /// Used to indicate that we probably can't make any future connections to this peer, implying
-       /// we should go ahead and force-close any channels we have with it.
+       /// Used to indicate that we probably can't make any future connections to this peer (e.g.
+       /// because we required features that our peer was missing, or vice versa).
+       ///
+       /// While LDK's [`ChannelManager`] will not do it automatically, you likely wish to force-close
+       /// any channels with this peer or check for new versions of LDK.
+       ///
+       /// [`ChannelManager`]: crate::ln::channelmanager::ChannelManager
        pub no_connection_possible: bool,
 }
 impl fmt::Debug for PeerHandleError {
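
Since the `ChannelManager` no longer force-closes channels on its own when a peer is deemed unreachable, callers that want the old behaviour have to do it themselves. A minimal sketch, assuming `channel_manager` is your concrete `ChannelManager` (the `MyChannelManager` alias and the helper name are hypothetical) and `their_node_id` is the peer the failed `PeerManager` operation was for:

	fn on_peer_handle_error(
		err: &PeerHandleError, their_node_id: &PublicKey, channel_manager: &MyChannelManager,
	) {
		if err.no_connection_possible {
			// LDK will not force-close for us any more; do it explicitly if that is our policy.
			for chan in channel_manager.list_channels() {
				if chan.counterparty.node_id == *their_node_id {
					let _ = channel_manager.force_close_channel(&chan.channel_id, their_node_id);
				}
			}
		}
	}
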
index d7b940eb90e09dfce8e6557656327440fb987159..47e2fb33e3c7321103350865f97b5f5cc5897d14 100644 (file)
@@ -389,8 +389,8 @@ fn test_inbound_scid_privacy() {
        let accept_channel = get_event_msg!(nodes[2], MessageSendEvent::SendAcceptChannel, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_accept_channel(&nodes[2].node.get_our_node_id(), InitFeatures::known(), &accept_channel);
 
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[1], 100_000, 42);
-       nodes[1].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[1], &nodes[2].node.get_our_node_id(), 100_000, 42);
+       nodes[1].node.funding_transaction_generated(&temporary_channel_id, &nodes[2].node.get_our_node_id(), tx.clone()).unwrap();
        nodes[2].node.handle_funding_created(&nodes[1].node.get_our_node_id(), &get_event_msg!(nodes[1], MessageSendEvent::SendFundingCreated, nodes[2].node.get_our_node_id()));
        check_added_monitors!(nodes[2], 1);
 
index 96fda526d68d2d46369b9c9ec831d96551e86112..8a4ec2dc3518a4d9ca67ae7040c500563f7d9e3c 100644 (file)
@@ -138,7 +138,7 @@ fn do_test_onchain_htlc_reorg(local_commitment: bool, claim: bool) {
                // ChannelManager only polls chain::Watch::release_pending_monitor_events when we
                // probe it for events, so we probe non-message events here (which should just be the
                // PaymentForwarded event).
-               expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), true);
+               expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), true, true);
        } else {
                // Confirm the timeout tx and check that we fail the HTLC backwards
                let block = Block {
index 59004547b1489afe21f0a88ae5b71788b1f90fe1..063fa01608679e71011dbb39d89afbdfff1a4774 100644 (file)
@@ -46,7 +46,7 @@ fn pre_funding_lock_shutdown_test() {
        mine_transaction(&nodes[0], &tx);
        mine_transaction(&nodes[1], &tx);
 
-       nodes[0].node.close_channel(&OutPoint { txid: tx.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[0].node.close_channel(&OutPoint { txid: tx.txid(), index: 0 }.to_channel_id(), &nodes[1].node.get_our_node_id()).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
@@ -83,7 +83,7 @@ fn updates_shutdown_wait() {
 
        let (payment_preimage, _, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 100000);
 
-       nodes[0].node.close_channel(&chan_1.2).unwrap();
+       nodes[0].node.close_channel(&chan_1.2, &nodes[1].node.get_our_node_id()).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
@@ -110,7 +110,7 @@ fn updates_shutdown_wait() {
        assert!(updates.update_fee.is_none());
        assert_eq!(updates.update_fulfill_htlcs.len(), 1);
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
        check_added_monitors!(nodes[1], 1);
        let updates_2 = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        commitment_signed_dance!(nodes[1], nodes[2], updates.commitment_signed, false);
@@ -166,7 +166,7 @@ fn htlc_fail_async_shutdown() {
        assert!(updates.update_fail_malformed_htlcs.is_empty());
        assert!(updates.update_fee.is_none());
 
-       nodes[1].node.close_channel(&chan_1.2).unwrap();
+       nodes[1].node.close_channel(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
        nodes[0].node.handle_shutdown(&nodes[1].node.get_our_node_id(), &InitFeatures::known(), &node_1_shutdown);
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
@@ -232,7 +232,7 @@ fn do_test_shutdown_rebroadcast(recv_count: u8) {
 
        let (payment_preimage, _, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 100000);
 
-       nodes[1].node.close_channel(&chan_1.2).unwrap();
+       nodes[1].node.close_channel(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
        if recv_count > 0 {
                nodes[0].node.handle_shutdown(&nodes[1].node.get_our_node_id(), &InitFeatures::known(), &node_1_shutdown);
@@ -279,7 +279,7 @@ fn do_test_shutdown_rebroadcast(recv_count: u8) {
        assert!(updates.update_fee.is_none());
        assert_eq!(updates.update_fulfill_htlcs.len(), 1);
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(1000), false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
        check_added_monitors!(nodes[1], 1);
        let updates_2 = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        commitment_signed_dance!(nodes[1], nodes[2], updates.commitment_signed, false);
@@ -417,7 +417,7 @@ fn test_upfront_shutdown_script() {
        // We test that in case of peer committing upfront to a script, if it changes at closing, we refuse to sign
        let flags = InitFeatures::known();
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 2, 1000000, 1000000, flags.clone(), flags.clone());
-       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[2].node.get_our_node_id()).unwrap();
        let node_0_orig_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[2].node.get_our_node_id());
        let mut node_0_shutdown = node_0_orig_shutdown.clone();
        node_0_shutdown.scriptpubkey = Builder::new().push_opcode(opcodes::all::OP_RETURN).into_script().to_p2sh();
@@ -432,7 +432,7 @@ fn test_upfront_shutdown_script() {
 
        // We test that in case of peer committing upfront to a script, if it doesn't change at closing, we sign
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 2, 1000000, 1000000, flags.clone(), flags.clone());
-       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[2].node.get_our_node_id()).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[2].node.get_our_node_id());
 	// We test that in case of peer committing upfront to a script, if it doesn't change at closing, we sign
        nodes[2].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
@@ -446,7 +446,7 @@ fn test_upfront_shutdown_script() {
 	// We test that in case of peer non-signaling we don't enforce committed script at channel opening
        let flags_no = InitFeatures::known().clear_upfront_shutdown_script();
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1000000, 1000000, flags_no, flags.clone());
-       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[1].node.get_our_node_id()).unwrap();
        let node_1_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_1_shutdown);
        check_added_monitors!(nodes[1], 1);
@@ -460,7 +460,7 @@ fn test_upfront_shutdown_script() {
        // We test that if user opt-out, we provide a zero-length script at channel opening and we are able to close
        // channel smoothly, opt-out is from channel initiator here
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 1, 0, 1000000, 1000000, flags.clone(), flags.clone());
-       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
        let node_0_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
        nodes[0].node.handle_shutdown(&nodes[1].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
@@ -474,7 +474,7 @@ fn test_upfront_shutdown_script() {
        //// We test that if user opt-out, we provide a zero-length script at channel opening and we are able to close
        //// channel smoothly
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1000000, 1000000, flags.clone(), flags.clone());
-       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
        let node_0_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
        nodes[0].node.handle_shutdown(&nodes[1].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
@@ -580,7 +580,7 @@ fn test_segwit_v0_shutdown_script() {
        let nodes = create_network(3, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
 
        // Use a segwit v0 script supported even without option_shutdown_anysegwit
@@ -615,7 +615,7 @@ fn test_anysegwit_shutdown_script() {
        let nodes = create_network(3, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
 
        // Use a non-v0 segwit script supported by option_shutdown_anysegwit
@@ -660,14 +660,14 @@ fn test_unsupported_anysegwit_shutdown_script() {
                .expect(OnGetShutdownScriptpubkey { returns: supported_shutdown_script });
 
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, node_cfgs[0].features.clone(), node_cfgs[1].features.clone());
-       match nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()) {
+       match nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()) {
                Err(APIError::IncompatibleShutdownScript { script }) => {
                        assert_eq!(script.into_inner(), unsupported_shutdown_script.clone().into_inner());
                },
                Err(e) => panic!("Unexpected error: {:?}", e),
                Ok(_) => panic!("Expected error"),
        }
-       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
 
        // Use a non-v0 segwit script unsupported without option_shutdown_anysegwit
@@ -692,7 +692,7 @@ fn test_invalid_shutdown_script() {
        let nodes = create_network(3, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[1].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
 
        // Use a segwit v0 script with an unsupported witness program
@@ -730,7 +730,7 @@ fn do_test_closing_signed_reinit_timeout(timeout_step: TimeoutStep) {
 
        send_payment(&nodes[0], &[&nodes[1]], 8_000_000);
 
-       nodes[0].node.close_channel(&chan_id).unwrap();
+       nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
@@ -831,7 +831,7 @@ fn do_simple_legacy_shutdown_test(high_initiator_fee: bool) {
                *feerate_lock *= 10;
        }
 
-       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id()).unwrap();
+       nodes[0].node.close_channel(&OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id(), &nodes[1].node.get_our_node_id()).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
@@ -873,9 +873,9 @@ fn simple_target_feerate_shutdown() {
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
        let chan_id = OutPoint { txid: chan.3.txid(), index: 0 }.to_channel_id();
 
-       nodes[0].node.close_channel_with_target_feerate(&chan_id, 253 * 10).unwrap();
+       nodes[0].node.close_channel_with_target_feerate(&chan_id, &nodes[1].node.get_our_node_id(), 253 * 10).unwrap();
        let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
-       nodes[1].node.close_channel_with_target_feerate(&chan_id, 253 * 5).unwrap();
+       nodes[1].node.close_channel_with_target_feerate(&chan_id, &nodes[0].node.get_our_node_id(), 253 * 5).unwrap();
        let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
 
        nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &InitFeatures::known(), &node_0_shutdown);
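
The cooperative-close path changes the same way: `close_channel` (and `close_channel_with_target_feerate`) now takes the counterparty's node_id alongside the channel_id. A minimal sketch of closing every open channel, assuming `channel_manager` is your `ChannelManager`:

	for chan in channel_manager.list_channels() {
		if let Err(e) = channel_manager.close_channel(&chan.channel_id, &chan.counterparty.node_id) {
			// e.g. APIError::IncompatibleShutdownScript, as exercised in the tests above
			eprintln!("failed to begin cooperative close of {:?}: {:?}", chan.channel_id, e);
		}
	}
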
index b67c2ec77b3e3ed0bac42335e3bd9fb07209392e..ef0c619b02e9f7a1b03cd6dd14b439e7799264dc 100644 (file)
@@ -100,12 +100,11 @@ pub enum ClosureReason {
                /// A developer-readable error message which we generated.
                err: String,
        },
-       /// The `PeerManager` informed us that we've disconnected from the peer. We close channels
-       /// if the `PeerManager` informed us that it is unlikely we'll be able to connect to the
-       /// peer again in the future or if the peer disconnected before we finished negotiating
-       /// the channel open. The first case may be caused by incompatible features which our
-       /// counterparty, or we, require.
-       //TODO: split between PeerUnconnectable/PeerDisconnected ?
+       /// The peer disconnected prior to funding completing. In this case the spec mandates that we
+       /// forget the channel entirely - we can attempt again if the peer reconnects.
+       ///
+       /// In LDK versions prior to 0.0.107 this could also occur if we were unable to connect to the
+       /// peer because of mutual incompatibility between us and our channel counterparty.
        DisconnectedPeer,
        /// Closure generated from `ChannelManager::read` if the ChannelMonitor is newer than
        /// the ChannelManager deserialized.
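
With the narrowed meaning, `DisconnectedPeer` only ever signals a channel that was abandoned before funding completed, so it can reasonably be treated as retryable. A small sketch of an event-handler arm under that assumption:

	if let Event::ChannelClosed { channel_id, reason: ClosureReason::DisconnectedPeer, .. } = event {
		// Funding never completed and both sides forget the channel, so a fresh
		// open to the same peer can simply be attempted once it reconnects.
		println!("channel {:?} abandoned pre-funding; will retry on reconnect", channel_id);
	}
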
@@ -162,8 +161,15 @@ pub enum Event {
        /// [`ChannelManager::funding_transaction_generated`]: crate::ln::channelmanager::ChannelManager::funding_transaction_generated
        FundingGenerationReady {
                /// The random channel_id we picked which you'll need to pass into
-               /// ChannelManager::funding_transaction_generated.
+               /// [`ChannelManager::funding_transaction_generated`].
+               ///
+               /// [`ChannelManager::funding_transaction_generated`]: crate::ln::channelmanager::ChannelManager::funding_transaction_generated
                temporary_channel_id: [u8; 32],
+               /// The counterparty's node_id, which you'll need to pass back into
+               /// [`ChannelManager::funding_transaction_generated`].
+               ///
+               /// [`ChannelManager::funding_transaction_generated`]: crate::ln::channelmanager::ChannelManager::funding_transaction_generated
+               counterparty_node_id: PublicKey,
                /// The value, in satoshis, that the output should have.
                channel_value_satoshis: u64,
                /// The script which should be used in the transaction output.
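
Putting the new field together with the call-site changes above: the `counterparty_node_id` carried by the event is what gets handed back to `funding_transaction_generated`. A sketch of a handler, where `build_funding_tx` is a hypothetical helper producing a transaction that pays `channel_value_satoshis` to `output_script`:

	if let Event::FundingGenerationReady {
		temporary_channel_id, counterparty_node_id, channel_value_satoshis, output_script, ..
	} = event {
		let funding_tx = build_funding_tx(channel_value_satoshis, &output_script);
		// Both the temporary channel id and the counterparty's node_id are now required.
		channel_manager
			.funding_transaction_generated(&temporary_channel_id, &counterparty_node_id, funding_tx)
			.expect("unknown or already-funded temporary_channel_id");
	}
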
@@ -363,9 +369,12 @@ pub enum Event {
        /// This event is generated when a payment has been successfully forwarded through us and a
        /// forwarding fee earned.
        PaymentForwarded {
-               /// The channel between the source node and us. Optional because versions prior to 0.0.107
-               /// do not serialize this field.
-               source_channel_id: Option<[u8; 32]>,
+               /// The incoming channel between the previous node and us. This is only `None` for events
+               /// generated or serialized by versions prior to 0.0.107.
+               prev_channel_id: Option<[u8; 32]>,
+               /// The outgoing channel between the next node and us. This is only `None` for events
+               /// generated or serialized by versions prior to 0.0.107.
+               next_channel_id: Option<[u8; 32]>,
                /// The fee, in milli-satoshis, which was earned as a result of the payment.
                ///
                /// Note that if we force-closed the channel over which we forwarded an HTLC while the HTLC
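
A consumer that previously read `source_channel_id` now sees both sides of the forward; each id is an `Option` because events written by versions prior to 0.0.107 did not carry them. A minimal sketch:

	if let Event::PaymentForwarded { fee_earned_msat, prev_channel_id, next_channel_id, claim_from_onchain_tx } = event {
		println!(
			"forwarded: fee={:?} msat, inbound channel={:?}, outbound channel={:?}, claimed on-chain={}",
			fee_earned_msat, prev_channel_id, next_channel_id, claim_from_onchain_tx,
		);
	}
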
@@ -425,13 +434,21 @@ pub enum Event {
                /// The temporary channel ID of the channel requested to be opened.
                ///
                /// When responding to the request, the `temporary_channel_id` should be passed
-               /// back to the ChannelManager with [`ChannelManager::accept_inbound_channel`] to accept,
-               /// or to [`ChannelManager::force_close_channel`] to reject.
+               /// back to the ChannelManager through [`ChannelManager::accept_inbound_channel`] to accept,
+               /// or through [`ChannelManager::force_close_channel`] to reject.
                ///
                /// [`ChannelManager::accept_inbound_channel`]: crate::ln::channelmanager::ChannelManager::accept_inbound_channel
                /// [`ChannelManager::force_close_channel`]: crate::ln::channelmanager::ChannelManager::force_close_channel
                temporary_channel_id: [u8; 32],
                /// The node_id of the counterparty requesting to open the channel.
+               ///
+               /// When responding to the request, the `counterparty_node_id` should be passed
+               /// back to the `ChannelManager` through [`ChannelManager::accept_inbound_channel`] to
+               /// accept the request, or through [`ChannelManager::force_close_channel`] to reject the
+               /// request.
+               ///
+               /// [`ChannelManager::accept_inbound_channel`]: crate::ln::channelmanager::ChannelManager::accept_inbound_channel
+               /// [`ChannelManager::force_close_channel`]: crate::ln::channelmanager::ChannelManager::force_close_channel
                counterparty_node_id: PublicKey,
                /// The channel value of the requested channel.
                funding_satoshis: u64,
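
As with the other APIs, both responses to an `OpenChannelRequest` now need the counterparty's node_id. A sketch of a manual-acceptance policy (this event only fires when manual acceptance is enabled; the `MIN_FUNDING_SATS` threshold is made up for illustration):

	if let Event::OpenChannelRequest { temporary_channel_id, counterparty_node_id, funding_satoshis, .. } = event {
		if funding_satoshis >= MIN_FUNDING_SATS {
			channel_manager
				.accept_inbound_channel(&temporary_channel_id, &counterparty_node_id, 0)
				.expect("channel no longer awaiting acceptance");
		} else {
			let _ = channel_manager.force_close_channel(&temporary_channel_id, &counterparty_node_id);
		}
	}
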
@@ -523,12 +540,13 @@ impl Writeable for Event {
                                        (0, VecWriteWrapper(outputs), required),
                                });
                        },
-                       &Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+                       &Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                                7u8.write(writer)?;
                                write_tlv_fields!(writer, {
                                        (0, fee_earned_msat, option),
-                                       (1, source_channel_id, option),
+                                       (1, prev_channel_id, option),
                                        (2, claim_from_onchain_tx, required),
+                                       (3, next_channel_id, option),
                                });
                        },
                        &Event::ChannelClosed { ref channel_id, ref user_channel_id, ref reason } => {
@@ -688,14 +706,16 @@ impl MaybeReadable for Event {
                        7u8 => {
                                let f = || {
                                        let mut fee_earned_msat = None;
-                                       let mut source_channel_id = None;
+                                       let mut prev_channel_id = None;
                                        let mut claim_from_onchain_tx = false;
+                                       let mut next_channel_id = None;
                                        read_tlv_fields!(reader, {
                                                (0, fee_earned_msat, option),
-                                               (1, source_channel_id, option),
+                                               (1, prev_channel_id, option),
                                                (2, claim_from_onchain_tx, required),
+                                               (3, next_channel_id, option),
                                        });
-                                       Ok(Some(Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx }))
+                                       Ok(Some(Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id }))
                                };
                                f()
                        },
index 5e9591d8eba76652b4b2820959c0fd5e8cb5d67c..3682a0e8f0af02d1ac654853bff16e3096f4025a 100644 (file)
@@ -164,7 +164,7 @@ impl<'a> chain::Watch<EnforcingSigner> for TestChainMonitor<'a> {
                update_res
        }
 
-       fn release_pending_monitor_events(&self) -> Vec<MonitorEvent> {
+       fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>)> {
                return self.chain_monitor.release_pending_monitor_events();
        }
 }
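
The `chain::Watch` change mirrored in both test harnesses groups pending monitor events by the funding outpoint of the channel that produced them. A sketch of iterating the new shape, with `handle_monitor_event` a hypothetical dispatcher:

	for (funding_outpoint, monitor_events) in chain_monitor.release_pending_monitor_events() {
		for monitor_event in monitor_events {
			// `funding_outpoint` now tells us which channel each event belongs to.
			handle_monitor_event(&funding_outpoint, monitor_event);
		}
	}
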