Merge pull request #1504 from TheBlueMatt/2022-05-pub-io
diff --git a/lightning/src/ln/functional_tests.rs b/lightning/src/ln/functional_tests.rs
index 2d52d56aaed46745ce30c5e0de5284b3b1a8efbd..d1dbad4720b814db3fb89476e0e8e8b7cee22193 100644
--- a/lightning/src/ln/functional_tests.rs
+++ b/lightning/src/ln/functional_tests.rs
@@ -515,10 +515,10 @@ fn do_test_sanity_on_in_flight_opens(steps: u8) {
        if steps & 0x0f == 2 { return; }
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_channel);
 
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
        if steps & 0x0f == 3 { return; }
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
        let funding_created = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
 
@@ -1259,6 +1259,7 @@ fn test_duplicate_htlc_different_direction_onchain() {
 
        // Provide preimage to node 0 by claiming payment
        nodes[0].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[0], payment_hash, 800_000);
        check_added_monitors!(nodes[0], 1);
 
        // Broadcast node 1 commitment txn
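`claim_funds` no longer returns a `bool`; tests now call it and assert the claim via the new `expect_payment_claimed!` macro, which checks the claimed payment hash and amount. A sketch of the pattern, assuming `payment_preimage` and `payment_hash` came from an 800_000 msat `route_payment`:

    // Claim the payment, then assert the claim event and the monitor update.
    nodes[0].node.claim_funds(payment_preimage);
    expect_payment_claimed!(nodes[0], payment_hash, 800_000);
    check_added_monitors!(nodes[0], 1);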
@@ -1971,9 +1972,9 @@ fn test_channel_reserve_holding_cell_htlcs() {
        let events = nodes[2].node.get_and_clear_pending_events();
        assert_eq!(events.len(), 2);
        match events[0] {
-               Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
+               Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
                        assert_eq!(our_payment_hash_21, *payment_hash);
-                       assert_eq!(recv_value_21, amt);
+                       assert_eq!(recv_value_21, amount_msat);
                        match &purpose {
                                PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
                                        assert!(payment_preimage.is_none());
@@ -1985,9 +1986,9 @@ fn test_channel_reserve_holding_cell_htlcs() {
                _ => panic!("Unexpected event"),
        }
        match events[1] {
-               Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
+               Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
                        assert_eq!(our_payment_hash_22, *payment_hash);
-                       assert_eq!(recv_value_22, amt);
+                       assert_eq!(recv_value_22, amount_msat);
                        match &purpose {
                                PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
                                        assert!(payment_preimage.is_none());
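`Event::PaymentReceived` renames its amount field from `amt` to `amount_msat`. A sketch of a match arm on the renamed field, using the hunk's own variables and eliding the other fields with `..`:

    match events[0] {
        Event::PaymentReceived { ref payment_hash, amount_msat, .. } => {
            assert_eq!(our_payment_hash_21, *payment_hash);
            assert_eq!(recv_value_21, amount_msat);
        },
        _ => panic!("Unexpected event"),
    }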
@@ -2049,8 +2050,9 @@ fn channel_reserve_in_flight_removes() {
 
        let b_chan_values = get_channel_value_stat!(nodes[1], chan_1.2);
        // Route the first two HTLCs.
-       let (payment_preimage_1, _, _) = route_payment(&nodes[0], &[&nodes[1]], b_chan_values.channel_reserve_msat - b_chan_values.value_to_self_msat - 10000);
-       let (payment_preimage_2, _, _) = route_payment(&nodes[0], &[&nodes[1]], 20000);
+       let payment_value_1 = b_chan_values.channel_reserve_msat - b_chan_values.value_to_self_msat - 10000;
+       let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], payment_value_1);
+       let (payment_preimage_2, payment_hash_2, _) = route_payment(&nodes[0], &[&nodes[1]], 20_000);
 
        // Start routing the third HTLC (this is just used to get everyone in the right state).
        let (route, payment_hash_3, payment_preimage_3, payment_secret_3) = get_route_and_payment_hash!(nodes[0], nodes[1], 100000);
@@ -2064,13 +2066,15 @@ fn channel_reserve_in_flight_removes() {
 
        // Now claim both of the first two HTLCs on B's end, putting B in AwaitingRAA and generating an
        // initial fulfill/CS.
-       assert!(nodes[1].node.claim_funds(payment_preimage_1));
+       nodes[1].node.claim_funds(payment_preimage_1);
+       expect_payment_claimed!(nodes[1], payment_hash_1, payment_value_1);
        check_added_monitors!(nodes[1], 1);
        let bs_removes = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
 
        // This claim goes in B's holding cell, allowing us to have a pending B->A RAA which does not
        // remove the second HTLC when we send the HTLC back from B to A.
-       assert!(nodes[1].node.claim_funds(payment_preimage_2));
+       nodes[1].node.claim_funds(payment_preimage_2);
+       expect_payment_claimed!(nodes[1], payment_hash_2, 20_000);
        check_added_monitors!(nodes[1], 1);
        assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
 
@@ -2196,7 +2200,7 @@ fn channel_monitor_network_test() {
        send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2], &nodes[3], &nodes[4])[..], 8000000);
 
        // Simple case with no pending HTLCs:
-       nodes[1].node.force_close_channel(&chan_1.2).unwrap();
+       nodes[1].node.force_close_channel(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[1], 1);
        check_closed_broadcast!(nodes[1], true);
        {
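`force_close_channel` now also takes the counterparty's node id alongside the channel id. A sketch mirroring the call sites in this diff:

    // Force-close chan_1, naming nodes[0] as the counterparty on that channel.
    nodes[1].node.force_close_channel(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
    check_added_monitors!(nodes[1], 1);
    check_closed_broadcast!(nodes[1], true);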
@@ -2213,11 +2217,11 @@ fn channel_monitor_network_test() {
        check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
 
        // One pending HTLC is discarded by the force-close:
-       let payment_preimage_1 = route_payment(&nodes[1], &vec!(&nodes[2], &nodes[3])[..], 3000000).0;
+       let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[1], &[&nodes[2], &nodes[3]], 3_000_000);
 
        // Simple case of one pending HTLC to HTLC-Timeout (note that the HTLC-Timeout is not
        // broadcasted until we reach the timelock time).
-       nodes[1].node.force_close_channel(&chan_2.2).unwrap();
+       nodes[1].node.force_close_channel(&chan_2.2, &nodes[2].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
        {
@@ -2235,9 +2239,10 @@ fn channel_monitor_network_test() {
        check_closed_event!(nodes[2], 1, ClosureReason::CommitmentTxConfirmed);
 
        macro_rules! claim_funds {
-               ($node: expr, $prev_node: expr, $preimage: expr) => {
+               ($node: expr, $prev_node: expr, $preimage: expr, $payment_hash: expr) => {
                        {
-                               assert!($node.node.claim_funds($preimage));
+                               $node.node.claim_funds($preimage);
+                               expect_payment_claimed!($node, $payment_hash, 3_000_000);
                                check_added_monitors!($node, 1);
 
                                let events = $node.node.get_and_clear_pending_msg_events();
@@ -2256,7 +2261,7 @@ fn channel_monitor_network_test() {
 
        // nodes[3] gets the preimage, but nodes[2] already disconnected, resulting in a nodes[2]
        // HTLC-Timeout and a nodes[3] claim against it (+ its own announces)
-       nodes[2].node.force_close_channel(&chan_3.2).unwrap();
+       nodes[2].node.force_close_channel(&chan_3.2, &nodes[3].node.get_our_node_id()).unwrap();
        check_added_monitors!(nodes[2], 1);
        check_closed_broadcast!(nodes[2], true);
        let node2_commitment_txid;
@@ -2267,7 +2272,7 @@ fn channel_monitor_network_test() {
                node2_commitment_txid = node_txn[0].txid();
 
                // Claim the payment on nodes[3], giving it knowledge of the preimage
-               claim_funds!(nodes[3], nodes[2], payment_preimage_1);
+               claim_funds!(nodes[3], nodes[2], payment_preimage_1, payment_hash_1);
                mine_transaction(&nodes[3], &node_txn[0]);
                check_added_monitors!(nodes[3], 1);
                check_preimage_claim(&nodes[3], &node_txn);
@@ -2283,7 +2288,7 @@ fn channel_monitor_network_test() {
        let chan_3_mon = nodes[3].chain_monitor.chain_monitor.remove_monitor(&OutPoint { txid: chan_3.3.txid(), index: 0 });
 
        // One pending HTLC to time out:
-       let payment_preimage_2 = route_payment(&nodes[3], &vec!(&nodes[4])[..], 3000000).0;
+       let (payment_preimage_2, payment_hash_2, _) = route_payment(&nodes[3], &[&nodes[4]], 3_000_000);
        // CLTV expires at TEST_FINAL_CLTV + 1 (current height) + 1 (added in send_payment for
        // buffer space).
 
@@ -2318,7 +2323,7 @@ fn channel_monitor_network_test() {
                let node_txn = test_txn_broadcast(&nodes[3], &chan_4, None, HTLCType::TIMEOUT);
 
                // Claim the payment on nodes[4], giving it knowledge of the preimage
-               claim_funds!(nodes[4], nodes[3], payment_preimage_2);
+               claim_funds!(nodes[4], nodes[3], payment_preimage_2, payment_hash_2);
 
                connect_blocks(&nodes[4], TEST_FINAL_CLTV - CLTV_CLAIM_BUFFER + 2);
                let events = nodes[4].node.get_and_clear_pending_msg_events();
@@ -2670,8 +2675,8 @@ fn test_htlc_on_chain_success() {
        send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 8000000);
        send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 8000000);
 
-       let (our_payment_preimage, payment_hash_1, _payment_secret) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), 3000000);
-       let (our_payment_preimage_2, payment_hash_2, _payment_secret_2) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), 3000000);
+       let (our_payment_preimage, payment_hash_1, _payment_secret) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 3_000_000);
+       let (our_payment_preimage_2, payment_hash_2, _payment_secret_2) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 3_000_000);
 
        // Broadcast legit commitment tx from C on B's chain
        // Broadcast HTLC Success transaction by C on received output from C's commitment tx on B's chain
@@ -2679,7 +2684,9 @@ fn test_htlc_on_chain_success() {
        assert_eq!(commitment_tx.len(), 1);
        check_spends!(commitment_tx[0], chan_2.3);
        nodes[2].node.claim_funds(our_payment_preimage);
+       expect_payment_claimed!(nodes[2], payment_hash_1, 3_000_000);
        nodes[2].node.claim_funds(our_payment_preimage_2);
+       expect_payment_claimed!(nodes[2], payment_hash_2, 3_000_000);
        check_added_monitors!(nodes[2], 2);
        let updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
        assert!(updates.update_add_htlcs.is_empty());
@@ -2723,18 +2730,20 @@ fn test_htlc_on_chain_success() {
        }
        let chan_id = Some(chan_1.2);
        match forwarded_events[1] {
-               Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                        assert_eq!(fee_earned_msat, Some(1000));
-                       assert_eq!(source_channel_id, chan_id);
+                       assert_eq!(prev_channel_id, chan_id);
                        assert_eq!(claim_from_onchain_tx, true);
+                       assert_eq!(next_channel_id, Some(chan_2.2));
                },
                _ => panic!()
        }
        match forwarded_events[2] {
-               Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                        assert_eq!(fee_earned_msat, Some(1000));
-                       assert_eq!(source_channel_id, chan_id);
+                       assert_eq!(prev_channel_id, chan_id);
                        assert_eq!(claim_from_onchain_tx, true);
+                       assert_eq!(next_channel_id, Some(chan_2.2));
                },
                _ => panic!()
        }
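`Event::PaymentForwarded` renames `source_channel_id` to `prev_channel_id` and gains a `next_channel_id` field, letting tests check both the inbound and outbound channels of a forward. A sketch of the updated destructuring, with the hunk's own values:

    match forwarded_events[1] {
        Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
            assert_eq!(fee_earned_msat, Some(1000));
            assert_eq!(prev_channel_id, chan_id);        // channel the HTLC arrived over (chan_1)
            assert_eq!(next_channel_id, Some(chan_2.2)); // channel it was forwarded out on
            assert_eq!(claim_from_onchain_tx, true);
        },
        _ => panic!()
    }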
@@ -3095,7 +3104,7 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
        let (_, second_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], value);
        let (_, third_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], value);
 
-       assert!(nodes[2].node.fail_htlc_backwards(&first_payment_hash));
+       nodes[2].node.fail_htlc_backwards(&first_payment_hash);
        expect_pending_htlcs_forwardable!(nodes[2]);
        check_added_monitors!(nodes[2], 1);
        let updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
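`fail_htlc_backwards` similarly no longer returns a `bool`, so the surrounding `assert!` is dropped and the failure is driven as before. A sketch of the pattern from the hunk above:

    // Queue the backwards failure, then process it and expect one monitor update.
    nodes[2].node.fail_htlc_backwards(&first_payment_hash);
    expect_pending_htlcs_forwardable!(nodes[2]);
    check_added_monitors!(nodes[2], 1);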
@@ -3108,7 +3117,7 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
        let bs_raa = commitment_signed_dance!(nodes[1], nodes[2], updates.commitment_signed, false, true, false, true);
        // Drop the last RAA from 3 -> 2
 
-       assert!(nodes[2].node.fail_htlc_backwards(&second_payment_hash));
+       nodes[2].node.fail_htlc_backwards(&second_payment_hash);
        expect_pending_htlcs_forwardable!(nodes[2]);
        check_added_monitors!(nodes[2], 1);
        let updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
@@ -3125,7 +3134,7 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
        nodes[2].node.handle_revoke_and_ack(&nodes[1].node.get_our_node_id(), &as_raa);
        check_added_monitors!(nodes[2], 1);
 
-       assert!(nodes[2].node.fail_htlc_backwards(&third_payment_hash));
+       nodes[2].node.fail_htlc_backwards(&third_payment_hash);
        expect_pending_htlcs_forwardable!(nodes[2]);
        check_added_monitors!(nodes[2], 1);
        let updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
@@ -3377,7 +3386,7 @@ fn test_htlc_ignore_latest_remote_commitment() {
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
        route_payment(&nodes[0], &[&nodes[1]], 10000000);
-       nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id).unwrap();
+       nodes[0].node.force_close_channel(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
@@ -3440,7 +3449,7 @@ fn test_force_close_fail_back() {
        // state or updated nodes[1]' state. Now force-close and broadcast that commitment/HTLC
        // transaction and ensure nodes[1] doesn't fail-backwards (this was originally a bug!).
 
-       nodes[2].node.force_close_channel(&payment_event.commitment_msg.channel_id).unwrap();
+       nodes[2].node.force_close_channel(&payment_event.commitment_msg.channel_id, &nodes[1].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[2], true);
        check_added_monitors!(nodes[2], 1);
        check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed);
@@ -3489,9 +3498,10 @@ fn test_dup_events_on_peer_disconnect() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let payment_preimage = route_payment(&nodes[0], &[&nodes[1]], 1000000).0;
+       let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
 
-       assert!(nodes[1].node.claim_funds(payment_preimage));
+       nodes[1].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[1], payment_hash, 1_000_000);
        check_added_monitors!(nodes[1], 1);
        let claim_msgs = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &claim_msgs.update_fulfill_htlcs[0]);
@@ -3521,10 +3531,10 @@ fn test_peer_disconnected_before_funding_broadcasted() {
        let accept_channel = get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id());
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_channel);
 
-       let (temporary_channel_id, tx, _funding_output) = create_funding_transaction(&nodes[0], 1_000_000, 42);
+       let (temporary_channel_id, tx, _funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 1_000_000, 42);
        assert_eq!(temporary_channel_id, expected_temporary_channel_id);
 
-       assert!(nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).is_ok());
+       assert!(nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).is_ok());
 
        let funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
        assert_eq!(funding_created_msg.temporary_channel_id, expected_temporary_channel_id);
@@ -3614,18 +3624,18 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
        let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
        let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
-       let mut as_funding_locked = None;
+       let mut as_channel_ready = None;
        if messages_delivered == 0 {
-               let (funding_locked, _, _) = create_chan_between_nodes_with_value_a(&nodes[0], &nodes[1], 100000, 10001, InitFeatures::known(), InitFeatures::known());
-               as_funding_locked = Some(funding_locked);
-               // nodes[1] doesn't receive the funding_locked message (it'll be re-sent on reconnect)
+               let (channel_ready, _, _) = create_chan_between_nodes_with_value_a(&nodes[0], &nodes[1], 100000, 10001, InitFeatures::known(), InitFeatures::known());
+               as_channel_ready = Some(channel_ready);
+               // nodes[1] doesn't receive the channel_ready message (it'll be re-sent on reconnect)
                // Note that we store it so that if we're running with `simulate_broken_lnd` we can deliver
                // it before the channel_reestablish message.
        } else {
                create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
        }
 
-       let (route, payment_hash_1, payment_preimage_1, payment_secret_1) = get_route_and_payment_hash!(nodes[0], nodes[1], 1000000);
+       let (route, payment_hash_1, payment_preimage_1, payment_secret_1) = get_route_and_payment_hash!(nodes[0], nodes[1], 1_000_000);
 
        let payment_event = {
                nodes[0].node.send_payment(&route, payment_hash_1, &Some(payment_secret_1)).unwrap();
@@ -3671,17 +3681,17 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
        nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
        if messages_delivered < 3 {
                if simulate_broken_lnd {
-                       // lnd has a long-standing bug where they send a funding_locked prior to a
-                       // channel_reestablish if you reconnect prior to funding_locked time.
+                       // lnd has a long-standing bug where they send a channel_ready prior to a
+                       // channel_reestablish if you reconnect prior to channel_ready time.
                        //
-                       // Here we simulate that behavior, delivering a funding_locked immediately on
-                       // reconnect. Note that we don't bother skipping the now-duplicate funding_locked sent
+                       // Here we simulate that behavior, delivering a channel_ready immediately on
+                       // reconnect. Note that we don't bother skipping the now-duplicate channel_ready sent
                        // in `reconnect_nodes` but we currently don't fail based on that.
                        //
                        // See-also <https://github.com/lightningnetwork/lnd/issues/4006>
-                       nodes[1].node.handle_funding_locked(&nodes[0].node.get_our_node_id(), &as_funding_locked.as_ref().unwrap().0);
+                       nodes[1].node.handle_channel_ready(&nodes[0].node.get_our_node_id(), &as_channel_ready.as_ref().unwrap().0);
                }
-               // Even if the funding_locked messages get exchanged, as long as nothing further was
+               // Even if the channel_ready messages get exchanged, as long as nothing further was
                // received on either side, both sides will need to resend them.
                reconnect_nodes(&nodes[0], &nodes[1], (true, true), (0, 1), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
        } else if messages_delivered == 3 {
@@ -3714,9 +3724,9 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
        let events_2 = nodes[1].node.get_and_clear_pending_events();
        assert_eq!(events_2.len(), 1);
        match events_2[0] {
-               Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
+               Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
                        assert_eq!(payment_hash_1, *payment_hash);
-                       assert_eq!(amt, 1000000);
+                       assert_eq!(amount_msat, 1_000_000);
                        match &purpose {
                                PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
                                        assert!(payment_preimage.is_none());
@@ -3730,6 +3740,7 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
 
        nodes[1].node.claim_funds(payment_preimage_1);
        check_added_monitors!(nodes[1], 1);
+       expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);
 
        let events_3 = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(events_3.len(), 1);
@@ -3875,26 +3886,26 @@ fn test_funding_peer_disconnect() {
        nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
        let bs_reestablish = get_event_msg!(nodes[1], MessageSendEvent::SendChannelReestablish, nodes[0].node.get_our_node_id());
 
-       // nodes[0] hasn't yet received a funding_locked, so it only sends that on reconnect.
+       // nodes[0] hasn't yet received a channel_ready, so it only sends that on reconnect.
        nodes[0].node.handle_channel_reestablish(&nodes[1].node.get_our_node_id(), &bs_reestablish);
        let events_3 = nodes[0].node.get_and_clear_pending_msg_events();
        assert_eq!(events_3.len(), 1);
-       let as_funding_locked = match events_3[0] {
-               MessageSendEvent::SendFundingLocked { ref node_id, ref msg } => {
+       let as_channel_ready = match events_3[0] {
+               MessageSendEvent::SendChannelReady { ref node_id, ref msg } => {
                        assert_eq!(*node_id, nodes[1].node.get_our_node_id());
                        msg.clone()
                },
                _ => panic!("Unexpected event {:?}", events_3[0]),
        };
 
-       // nodes[1] received nodes[0]'s funding_locked on the first reconnect above, so it should send
+       // nodes[1] received nodes[0]'s channel_ready on the first reconnect above, so it should send
        // announcement_signatures as well as channel_update.
        nodes[1].node.handle_channel_reestablish(&nodes[0].node.get_our_node_id(), &as_reestablish);
        let events_4 = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(events_4.len(), 3);
        let chan_id;
-       let bs_funding_locked = match events_4[0] {
-               MessageSendEvent::SendFundingLocked { ref node_id, ref msg } => {
+       let bs_channel_ready = match events_4[0] {
+               MessageSendEvent::SendChannelReady { ref node_id, ref msg } => {
                        assert_eq!(*node_id, nodes[0].node.get_our_node_id());
                        chan_id = msg.channel_id;
                        msg.clone()
@@ -3915,9 +3926,9 @@ fn test_funding_peer_disconnect() {
                _ => panic!("Unexpected event {:?}", events_4[2]),
        }
 
-       // Re-deliver nodes[0]'s funding_locked, which nodes[1] can safely ignore. It currently
+       // Re-deliver nodes[0]'s channel_ready, which nodes[1] can safely ignore. It currently
        // generates a duplicative private channel_update
-       nodes[1].node.handle_funding_locked(&nodes[0].node.get_our_node_id(), &as_funding_locked);
+       nodes[1].node.handle_channel_ready(&nodes[0].node.get_our_node_id(), &as_channel_ready);
        let events_5 = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(events_5.len(), 1);
        match events_5[0] {
@@ -3927,9 +3938,9 @@ fn test_funding_peer_disconnect() {
                _ => panic!("Unexpected event {:?}", events_5[0]),
        };
 
-       // When we deliver nodes[1]'s funding_locked, however, nodes[0] will generate its
+       // When we deliver nodes[1]'s channel_ready, however, nodes[0] will generate its
        // announcement_signatures.
-       nodes[0].node.handle_funding_locked(&nodes[1].node.get_our_node_id(), &bs_funding_locked);
+       nodes[0].node.handle_channel_ready(&nodes[1].node.get_our_node_id(), &bs_channel_ready);
        let events_6 = nodes[0].node.get_and_clear_pending_msg_events();
        assert_eq!(events_6.len(), 1);
        let as_announcement_sigs = match events_6[0] {
@@ -4032,11 +4043,11 @@ fn test_funding_peer_disconnect() {
 }
 
 #[test]
-fn test_funding_locked_without_best_block_updated() {
+fn test_channel_ready_without_best_block_updated() {
        // Previously, if we were offline when a funding transaction was locked in, and then we came
        // back online, calling best_block_updated once followed by transactions_confirmed, we'd not
-       // generate a funding_locked until a later best_block_updated. This tests that we generate the
-       // funding_locked immediately instead.
+       // generate a channel_ready until a later best_block_updated. This tests that we generate the
+       // channel_ready immediately instead.
        let chanmon_cfgs = create_chanmon_cfgs(2);
        let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
        let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
@@ -4052,9 +4063,9 @@ fn test_funding_locked_without_best_block_updated() {
        let conf_block_header = nodes[0].get_block_header(conf_height);
        nodes[0].node.transactions_confirmed(&conf_block_header, &conf_txn[..], conf_height);
 
-       // Ensure nodes[0] generates a funding_locked after the transactions_confirmed
-       let as_funding_locked = get_event_msg!(nodes[0], MessageSendEvent::SendFundingLocked, nodes[1].node.get_our_node_id());
-       nodes[1].node.handle_funding_locked(&nodes[0].node.get_our_node_id(), &as_funding_locked);
+       // Ensure nodes[0] generates a channel_ready after the transactions_confirmed
+       let as_channel_ready = get_event_msg!(nodes[0], MessageSendEvent::SendChannelReady, nodes[1].node.get_our_node_id());
+       nodes[1].node.handle_channel_ready(&nodes[0].node.get_our_node_id(), &as_channel_ready);
 }
 
 #[test]
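The `funding_locked` message is renamed to `channel_ready` throughout, following the spec rename, so `MessageSendEvent::SendFundingLocked` becomes `SendChannelReady` and `handle_funding_locked` becomes `handle_channel_ready`. A sketch of delivering a regenerated `channel_ready`, as in the hunk above:

    // nodes[0] regenerates channel_ready after transactions_confirmed; deliver it to nodes[1].
    let as_channel_ready = get_event_msg!(nodes[0], MessageSendEvent::SendChannelReady, nodes[1].node.get_our_node_id());
    nodes[1].node.handle_channel_ready(&nodes[0].node.get_our_node_id(), &as_channel_ready);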
@@ -4067,7 +4078,7 @@ fn test_drop_messages_peer_disconnect_dual_htlc() {
        let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
+       let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
 
        // Now try to send a second payment which will fail to send
        let (route, payment_hash_2, payment_preimage_2, payment_secret_2) = get_route_and_payment_hash!(nodes[0], nodes[1], 1000000);
@@ -4081,7 +4092,8 @@ fn test_drop_messages_peer_disconnect_dual_htlc() {
                _ => panic!("Unexpected event"),
        }
 
-       assert!(nodes[1].node.claim_funds(payment_preimage_1));
+       nodes[1].node.claim_funds(payment_preimage_1);
+       expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);
        check_added_monitors!(nodes[1], 1);
 
        let events_2 = nodes[1].node.get_and_clear_pending_msg_events();
@@ -4406,8 +4418,8 @@ fn test_no_txn_manager_serialize_deserialize() {
        nodes[0].node.handle_channel_reestablish(&nodes[1].node.get_our_node_id(), &reestablish_2[0]);
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
 
-       let (funding_locked, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
-       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &funding_locked);
+       let (channel_ready, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
+       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
        for node in nodes.iter() {
                assert!(node.net_graph_msg_handler.handle_channel_announcement(&announcement).unwrap());
                node.net_graph_msg_handler.handle_channel_update(&as_update).unwrap();
@@ -4441,9 +4453,9 @@ fn test_manager_serialize_deserialize_events() {
        node_b.node.handle_open_channel(&node_a.node.get_our_node_id(), a_flags, &get_event_msg!(node_a, MessageSendEvent::SendOpenChannel, node_b.node.get_our_node_id()));
        node_a.node.handle_accept_channel(&node_b.node.get_our_node_id(), b_flags, &get_event_msg!(node_b, MessageSendEvent::SendAcceptChannel, node_a.node.get_our_node_id()));
 
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&node_a, channel_value, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&node_a, &node_b.node.get_our_node_id(), channel_value, 42);
 
-       node_a.node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       node_a.node.funding_transaction_generated(&temporary_channel_id, &node_b.node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(node_a, 0);
 
        node_b.node.handle_funding_created(&node_a.node.get_our_node_id(), &get_event_msg!(node_a, MessageSendEvent::SendFundingCreated, node_b.node.get_our_node_id()));
@@ -4526,8 +4538,8 @@ fn test_manager_serialize_deserialize_events() {
        nodes[0].node.handle_channel_reestablish(&nodes[1].node.get_our_node_id(), &reestablish_2[0]);
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
 
-       let (funding_locked, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
-       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &funding_locked);
+       let (channel_ready, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
+       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
        for node in nodes.iter() {
                assert!(node.net_graph_msg_handler.handle_channel_announcement(&announcement).unwrap());
                node.net_graph_msg_handler.handle_channel_update(&as_update).unwrap();
@@ -4764,7 +4776,7 @@ fn test_claim_sizeable_push_msat() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 98_000_000, InitFeatures::known(), InitFeatures::known());
-       nodes[1].node.force_close_channel(&chan.2).unwrap();
+       nodes[1].node.force_close_channel(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
        check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
@@ -4793,7 +4805,7 @@ fn test_claim_on_remote_sizeable_push_msat() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 98_000_000, InitFeatures::known(), InitFeatures::known());
-       nodes[0].node.force_close_channel(&chan.2).unwrap();
+       nodes[0].node.force_close_channel(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
        check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
@@ -4857,14 +4869,15 @@ fn test_static_spendable_outputs_preimage_tx() {
        // Create some initial channels
        let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 3000000).0;
+       let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 3_000_000);
 
        let commitment_tx = get_local_commitment_txn!(nodes[0], chan_1.2);
        assert_eq!(commitment_tx[0].input.len(), 1);
        assert_eq!(commitment_tx[0].input[0].previous_output.txid, chan_1.3.txid());
 
        // Settle A's commitment tx on B's chain
-       assert!(nodes[1].node.claim_funds(payment_preimage));
+       nodes[1].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[1], payment_hash, 3_000_000);
        check_added_monitors!(nodes[1], 1);
        mine_transaction(&nodes[1], &commitment_tx[0]);
        check_added_monitors!(nodes[1], 1);
@@ -5157,10 +5170,11 @@ fn test_onchain_to_onchain_claim() {
        send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 8000000);
        send_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 8000000);
 
-       let (payment_preimage, _payment_hash, _payment_secret) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), 3000000);
+       let (payment_preimage, payment_hash, _payment_secret) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 3_000_000);
        let commitment_tx = get_local_commitment_txn!(nodes[2], chan_2.2);
        check_spends!(commitment_tx[0], chan_2.3);
        nodes[2].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[2], payment_hash, 3_000_000);
        check_added_monitors!(nodes[2], 1);
        let updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
        assert!(updates.update_add_htlcs.is_empty());
@@ -5195,10 +5209,11 @@ fn test_onchain_to_onchain_claim() {
                _ => panic!("Unexpected event"),
        }
        match events[1] {
-               Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
                        assert_eq!(fee_earned_msat, Some(1000));
-                       assert_eq!(source_channel_id, Some(chan_1.2));
+                       assert_eq!(prev_channel_id, Some(chan_1.2));
                        assert_eq!(claim_from_onchain_tx, true);
+                       assert_eq!(next_channel_id, Some(chan_2.2));
                },
                _ => panic!("Unexpected event"),
        }
@@ -5274,7 +5289,7 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
        connect_blocks(&nodes[2], node_max_height - nodes[2].best_block_info().1);
        connect_blocks(&nodes[3], node_max_height - nodes[3].best_block_info().1);
 
-       let (our_payment_preimage, duplicate_payment_hash, _) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 900000);
+       let (our_payment_preimage, duplicate_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 900_000);
 
        let payment_secret = nodes[3].node.create_inbound_payment_for_hash(duplicate_payment_hash, None, 7200).unwrap();
        // We reduce the final CLTV here by a somewhat arbitrary constant to keep it under the one-byte
@@ -5326,6 +5341,8 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
        }
 
        nodes[2].node.claim_funds(our_payment_preimage);
+       expect_payment_claimed!(nodes[2], duplicate_payment_hash, 900_000);
+
        mine_transaction(&nodes[2], &commitment_txn[0]);
        check_added_monitors!(nodes[2], 2);
        check_closed_event!(nodes[2], 1, ClosureReason::CommitmentTxConfirmed);
@@ -5374,7 +5391,7 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
        // Note that the fee paid is effectively double as the HTLC value (including the nodes[1] fee
        // and nodes[2] fee) is rounded down and then claimed in full.
        mine_transaction(&nodes[1], &htlc_success_txn[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], Some(196*2), true);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(196*2), true, true);
        let updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        assert!(updates.update_add_htlcs.is_empty());
        assert!(updates.update_fail_htlcs.is_empty());
@@ -5406,7 +5423,7 @@ fn test_dynamic_spendable_outputs_local_htlc_success_tx() {
        // Create some initial channels
        let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 9000000).0;
+       let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 9_000_000);
        let local_txn = get_local_commitment_txn!(nodes[1], chan_1.2);
        assert_eq!(local_txn.len(), 1);
        assert_eq!(local_txn[0].input.len(), 1);
@@ -5414,7 +5431,9 @@ fn test_dynamic_spendable_outputs_local_htlc_success_tx() {
 
        // Give B knowledge of preimage to be able to generate a local HTLC-Success Tx
        nodes[1].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[1], payment_hash, 9_000_000);
        check_added_monitors!(nodes[1], 1);
+
        mine_transaction(&nodes[1], &local_txn[0]);
        check_added_monitors!(nodes[1], 1);
        check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
@@ -5523,10 +5542,10 @@ fn do_test_fail_backwards_unrevoked_remote_announce(deliver_last_raa: bool, anno
 
        // Now fail back three of the over-dust-limit and three of the under-dust-limit payments in one go.
        // Fail 0th below-dust, 4th above-dust, 8th above-dust, 10th below-dust HTLCs
-       assert!(nodes[4].node.fail_htlc_backwards(&payment_hash_1));
-       assert!(nodes[4].node.fail_htlc_backwards(&payment_hash_3));
-       assert!(nodes[4].node.fail_htlc_backwards(&payment_hash_5));
-       assert!(nodes[4].node.fail_htlc_backwards(&payment_hash_6));
+       nodes[4].node.fail_htlc_backwards(&payment_hash_1);
+       nodes[4].node.fail_htlc_backwards(&payment_hash_3);
+       nodes[4].node.fail_htlc_backwards(&payment_hash_5);
+       nodes[4].node.fail_htlc_backwards(&payment_hash_6);
        check_added_monitors!(nodes[4], 0);
        expect_pending_htlcs_forwardable!(nodes[4]);
        check_added_monitors!(nodes[4], 1);
@@ -5539,8 +5558,8 @@ fn do_test_fail_backwards_unrevoked_remote_announce(deliver_last_raa: bool, anno
        commitment_signed_dance!(nodes[3], nodes[4], four_removes.commitment_signed, false);
 
        // Fail 3rd below-dust and 7th above-dust HTLCs
-       assert!(nodes[5].node.fail_htlc_backwards(&payment_hash_2));
-       assert!(nodes[5].node.fail_htlc_backwards(&payment_hash_4));
+       nodes[5].node.fail_htlc_backwards(&payment_hash_2);
+       nodes[5].node.fail_htlc_backwards(&payment_hash_4);
        check_added_monitors!(nodes[5], 0);
        expect_pending_htlcs_forwardable!(nodes[5]);
        check_added_monitors!(nodes[5], 1);
@@ -5889,12 +5908,13 @@ fn do_htlc_claim_local_commitment_only(use_dust: bool) {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let (payment_preimage, _, _) = route_payment(&nodes[0], &[&nodes[1]], if use_dust { 50000 } else { 3000000 });
+       let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], if use_dust { 50000 } else { 3_000_000 });
 
        // Claim the payment, but don't deliver A's commitment_signed, resulting in the HTLC only being
        // present in B's local commitment transaction, but none of A's commitment transactions.
-       assert!(nodes[1].node.claim_funds(payment_preimage));
+       nodes[1].node.claim_funds(payment_preimage);
        check_added_monitors!(nodes[1], 1);
+       expect_payment_claimed!(nodes[1], payment_hash, if use_dust { 50000 } else { 3_000_000 });
 
        let bs_updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &bs_updates.update_fulfill_htlcs[0]);
@@ -5964,7 +5984,7 @@ fn do_htlc_claim_previous_remote_commitment_only(use_dust: bool, check_revoke_no
        // actually revoked.
        let htlc_value = if use_dust { 50000 } else { 3000000 };
        let (_, our_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], htlc_value);
-       assert!(nodes[1].node.fail_htlc_backwards(&our_payment_hash));
+       nodes[1].node.fail_htlc_backwards(&our_payment_hash);
        expect_pending_htlcs_forwardable!(nodes[1]);
        check_added_monitors!(nodes[1], 1);
 
@@ -6328,6 +6348,8 @@ fn test_free_and_fail_holding_cell_htlcs() {
        }
        nodes[1].node.claim_funds(payment_preimage_1);
        check_added_monitors!(nodes[1], 1);
+       expect_payment_claimed!(nodes[1], payment_hash_1, amt_1);
+
        let update_msgs = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &update_msgs.update_fulfill_htlcs[0]);
        commitment_signed_dance!(nodes[0], nodes[1], update_msgs.commitment_signed, false, true);
@@ -6916,10 +6938,11 @@ fn test_update_fulfill_htlc_bolt2_incorrect_htlc_id() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let our_payment_preimage = route_payment(&nodes[0], &[&nodes[1]], 100000).0;
+       let (our_payment_preimage, our_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 100_000);
 
        nodes[1].node.claim_funds(our_payment_preimage);
        check_added_monitors!(nodes[1], 1);
+       expect_payment_claimed!(nodes[1], our_payment_hash, 100_000);
 
        let events = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(events.len(), 1);
@@ -6958,10 +6981,11 @@ fn test_update_fulfill_htlc_bolt2_wrong_preimage() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let our_payment_preimage = route_payment(&nodes[0], &[&nodes[1]], 100000).0;
+       let (our_payment_preimage, our_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 100_000);
 
        nodes[1].node.claim_funds(our_payment_preimage);
        check_added_monitors!(nodes[1], 1);
+       expect_payment_claimed!(nodes[1], our_payment_hash, 100_000);
 
        let events = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(events.len(), 1);
@@ -7136,7 +7160,7 @@ fn do_test_failure_delay_dust_htlc_local_commitment(announce_latest: bool) {
        let as_prev_commitment_tx = get_local_commitment_txn!(nodes[0], chan.2);
 
        // Fail one HTLC to prune it in the will-be-latest-local commitment tx
-       assert!(nodes[1].node.fail_htlc_backwards(&payment_hash_2));
+       nodes[1].node.fail_htlc_backwards(&payment_hash_2);
        check_added_monitors!(nodes[1], 0);
        expect_pending_htlcs_forwardable!(nodes[1]);
        check_added_monitors!(nodes[1], 1);
@@ -7948,7 +7972,7 @@ fn test_bump_penalty_txn_on_remote_commitment() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1000000, 59000000, InitFeatures::known(), InitFeatures::known());
-       let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 3000000).0;
+       let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 3_000_000);
        route_payment(&nodes[1], &vec!(&nodes[0])[..], 3000000).0;
 
        // Remote commitment txn with 4 outputs : to_local, to_remote, 1 outgoing HTLC, 1 incoming HTLC
@@ -7959,6 +7983,7 @@ fn test_bump_penalty_txn_on_remote_commitment() {
 
        // Claim a HTLC without revocation (provide B monitor with preimage)
        nodes[1].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[1], payment_hash, 3_000_000);
        mine_transaction(&nodes[1], &remote_txn[0]);
        check_added_monitors!(nodes[1], 2);
        connect_blocks(&nodes[1], TEST_FINAL_CLTV - 1); // Confirm blocks until the HTLC expires
@@ -8145,8 +8170,9 @@ fn test_pending_claimed_htlc_no_balance_underflow() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 0, InitFeatures::known(), InitFeatures::known());
 
-       let payment_preimage = route_payment(&nodes[0], &[&nodes[1]], 1_010_000).0;
+       let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 1_010_000);
        nodes[1].node.claim_funds(payment_preimage);
+       expect_payment_claimed!(nodes[1], payment_hash, 1_010_000);
        check_added_monitors!(nodes[1], 1);
        let fulfill_ev = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
 
@@ -8318,7 +8344,7 @@ fn test_manually_accept_inbound_channel_request() {
        let events = nodes[1].node.get_and_clear_pending_events();
        match events[0] {
                Event::OpenChannelRequest { temporary_channel_id, .. } => {
-                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, 23).unwrap();
+                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 23).unwrap();
                }
                _ => panic!("Unexpected event"),
        }
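`accept_inbound_channel` likewise gains a counterparty node id parameter. A sketch of manually accepting an inbound channel from the `OpenChannelRequest` event, as in the hunk above (23 is just this test's user_channel_id):

    match events[0] {
        Event::OpenChannelRequest { temporary_channel_id, .. } => {
            nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 23).unwrap();
        }
        _ => panic!("Unexpected event"),
    }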
@@ -8333,7 +8359,7 @@ fn test_manually_accept_inbound_channel_request() {
                _ => panic!("Unexpected event"),
        }
 
-       nodes[1].node.force_close_channel(&temp_channel_id).unwrap();
+       nodes[1].node.force_close_channel(&temp_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
 
        let close_msg_ev = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(close_msg_ev.len(), 1);
@@ -8368,7 +8394,7 @@ fn test_manually_reject_inbound_channel_request() {
        let events = nodes[1].node.get_and_clear_pending_events();
        match events[0] {
                Event::OpenChannelRequest { temporary_channel_id, .. } => {
-                       nodes[1].node.force_close_channel(&temporary_channel_id).unwrap();
+                       nodes[1].node.force_close_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
                }
                _ => panic!("Unexpected event"),
        }
@@ -8423,9 +8449,9 @@ fn test_reject_funding_before_inbound_channel_accepted() {
                nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_chan_msg);
        }
 
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        let funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
 
        // The `funding_created_msg` should be rejected by `nodes[1]` as it hasn't accepted the channel
@@ -8468,8 +8494,8 @@ fn test_can_not_accept_inbound_channel_twice() {
        let events = nodes[1].node.get_and_clear_pending_events();
        match events[0] {
                Event::OpenChannelRequest { temporary_channel_id, .. } => {
-                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, 0).unwrap();
-                       let api_res = nodes[1].node.accept_inbound_channel(&temporary_channel_id, 0);
+                       nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 0).unwrap();
+                       let api_res = nodes[1].node.accept_inbound_channel(&temporary_channel_id, &nodes[0].node.get_our_node_id(), 0);
                        match api_res {
                                Err(APIError::APIMisuseError { err }) => {
                                        assert_eq!(err, "The channel isn't currently awaiting to be accepted.");
@@ -8495,13 +8521,13 @@ fn test_can_not_accept_inbound_channel_twice() {
 
 #[test]
 fn test_can_not_accept_unknown_inbound_channel() {
-       let chanmon_cfg = create_chanmon_cfgs(1);
-       let node_cfg = create_node_cfgs(1, &chanmon_cfg);
-       let node_chanmgr = create_node_chanmgrs(1, &node_cfg, &[None]);
-       let node = create_network(1, &node_cfg, &node_chanmgr)[0].node;
+       let chanmon_cfg = create_chanmon_cfgs(2);
+       let node_cfg = create_node_cfgs(2, &chanmon_cfg);
+       let node_chanmgr = create_node_chanmgrs(2, &node_cfg, &[None, None]);
+       let nodes = create_network(2, &node_cfg, &node_chanmgr);
 
        let unknown_channel_id = [0; 32];
-       let api_res = node.accept_inbound_channel(&unknown_channel_id, 0);
+       let api_res = nodes[0].node.accept_inbound_channel(&unknown_channel_id, &nodes[1].node.get_our_node_id(), 0);
        match api_res {
                Err(APIError::ChannelUnavailable { err }) => {
                        assert_eq!(err, "Can't accept a channel that doesn't exist");
@@ -8728,7 +8754,7 @@ fn test_update_err_monitor_lockdown() {
        send_payment(&nodes[0], &vec!(&nodes[1])[..], 10_000_000);
 
        // Route a HTLC from node 0 to node 1 (but don't settle)
-       let preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 9_000_000).0;
+       let (preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 9_000_000);
 
        // Copy ChainMonitor to simulate a watchtower and update block height of node 0 until its ChannelMonitor timeout HTLC onchain
        let chain_source = test_utils::TestChainSource::new(Network::Testnet);
@@ -8753,8 +8779,10 @@ fn test_update_err_monitor_lockdown() {
        watchtower.chain_monitor.block_connected(&block, 200);
 
        // Try to update ChannelMonitor
-       assert!(nodes[1].node.claim_funds(preimage));
+       nodes[1].node.claim_funds(preimage);
        check_added_monitors!(nodes[1], 1);
+       expect_payment_claimed!(nodes[1], payment_hash, 9_000_000);
+
        let updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        assert_eq!(updates.update_fulfill_htlcs.len(), 1);
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
@@ -8912,9 +8940,9 @@ fn test_pre_lockin_no_chan_closed_update() {
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_chan_msg);
 
        // Move the first channel through the funding flow...
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
 
        let funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
@@ -8998,7 +9026,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
 
        // Steps (1) and (2):
        // Send an HTLC Alice --> Bob --> Carol, but Carol doesn't settle the HTLC back.
-       let (payment_preimage, _payment_hash, _payment_secret) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), 3_000_000);
+       let (payment_preimage, payment_hash, _payment_secret) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 3_000_000);
 
        // Check that Alice's commitment transaction now contains an output for this HTLC.
        let alice_txn = get_local_commitment_txn!(nodes[0], chan_ab.2);
@@ -9012,8 +9040,14 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
        // If `go_onchain_before_fulfill`, broadcast the relevant commitment transaction and check that Bob
        // responds by (1) broadcasting a channel update and (2) adding a new ChannelMonitor.
        let mut force_closing_node = 0; // Alice force-closes
-       if !broadcast_alice { force_closing_node = 1; } // Bob force-closes
-       nodes[force_closing_node].node.force_close_channel(&chan_ab.2).unwrap();
+       let mut counterparty_node = 1; // Bob if Alice force-closes
+
+       // Bob force-closes
+       if !broadcast_alice {
+               force_closing_node = 1;
+               counterparty_node = 0;
+       }
+       nodes[force_closing_node].node.force_close_channel(&chan_ab.2, &nodes[counterparty_node].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[force_closing_node], true);
        check_added_monitors!(nodes[force_closing_node], 1);
        check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed);
@@ -9037,8 +9071,10 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
        // Step (5):
        // Carol then claims the funds and sends an update_fulfill message to Bob, and they go through the
        // process of removing the HTLC from their commitment transactions.
-       assert!(nodes[2].node.claim_funds(payment_preimage));
+       nodes[2].node.claim_funds(payment_preimage);
        check_added_monitors!(nodes[2], 1);
+       expect_payment_claimed!(nodes[2], payment_hash, 3_000_000);
+
        let carol_updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
        assert!(carol_updates.update_add_htlcs.is_empty());
        assert!(carol_updates.update_fail_htlcs.is_empty());
@@ -9047,7 +9083,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
        assert_eq!(carol_updates.update_fulfill_htlcs.len(), 1);
 
        nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &carol_updates.update_fulfill_htlcs[0]);
-       expect_payment_forwarded!(nodes[1], nodes[0], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false);
+       expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], if go_onchain_before_fulfill || force_closing_node == 1 { None } else { Some(1000) }, false, false);
        // If Alice broadcasted but Bob doesn't know yet, here he prepares to tell her about the preimage.
        if !go_onchain_before_fulfill && broadcast_alice {
                let events = nodes[1].node.get_and_clear_pending_msg_events();
@@ -9201,9 +9237,9 @@ fn test_duplicate_chan_id() {
        }
 
        // Move the first channel through the funding flow...
-       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], 100000, 42);
+       let (temporary_channel_id, tx, funding_output) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42);
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        check_added_monitors!(nodes[0], 0);
 
        let mut funding_created_msg = get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
@@ -9246,11 +9282,15 @@ fn test_duplicate_chan_id() {
        let open_chan_2_msg = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
        nodes[1].node.handle_open_channel(&nodes[0].node.get_our_node_id(), InitFeatures::known(), &open_chan_2_msg);
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id()));
-       create_funding_transaction(&nodes[0], 100000, 42); // Get and check the FundingGenerationReady event
+       create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100000, 42); // Get and check the FundingGenerationReady event
 
        let funding_created = {
                let mut a_channel_lock = nodes[0].node.channel_state.lock().unwrap();
-               let mut as_chan = a_channel_lock.by_id.get_mut(&open_chan_2_msg.temporary_channel_id).unwrap();
+               // Once we call `get_outbound_funding_created` the channel has a channel_id that duplicates
+               // another channel's in the ChannelManager - an invalid state. Thus, we'd panic later when we
+               // try to create another channel. Instead, we drop the channel entirely here, leaving the
+               // ChannelManager in a possibly-nonsense state.
+               let mut as_chan = a_channel_lock.by_id.remove(&open_chan_2_msg.temporary_channel_id).unwrap();
                let logger = test_utils::TestLogger::new();
                as_chan.get_outbound_funding_created(tx.clone(), funding_outpoint, &&logger).unwrap()
        };
@@ -9288,10 +9328,10 @@ fn test_duplicate_chan_id() {
        let events_4 = nodes[0].node.get_and_clear_pending_events();
        assert_eq!(events_4.len(), 0);
        assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().len(), 1);
-       assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap()[0].txid(), funding_output.txid);
+       assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap()[0], tx);
 
-       let (funding_locked, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
-       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &funding_locked);
+       let (channel_ready, _) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
+       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
        update_nodes_with_chan_announce(&nodes, 0, 1, &announcement, &as_update, &bs_update);
        send_payment(&nodes[0], &[&nodes[1]], 8000000);
 }
@@ -9379,13 +9419,13 @@ fn test_invalid_funding_tx() {
        nodes[1].node.handle_open_channel(&nodes[0].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id()));
        nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id()));
 
-       let (temporary_channel_id, mut tx, _) = create_funding_transaction(&nodes[0], 100_000, 42);
+       let (temporary_channel_id, mut tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 100_000, 42);
        for output in tx.output.iter_mut() {
                // Make the confirmed funding transaction have a bogus script_pubkey
                output.script_pubkey = bitcoin::Script::new();
        }
 
-       nodes[0].node.funding_transaction_generated_unchecked(&temporary_channel_id, tx.clone(), 0).unwrap();
+       nodes[0].node.funding_transaction_generated_unchecked(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone(), 0).unwrap();
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
 
@@ -9443,7 +9483,7 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
        nodes[1].node.peer_disconnected(&nodes[2].node.get_our_node_id(), false);
        nodes[2].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
 
-       nodes[1].node.force_close_channel(&channel_id).unwrap();
+       nodes[1].node.force_close_channel(&channel_id, &nodes[2].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[1], true);
        check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
        check_added_monitors!(nodes[1], 1);
@@ -9591,7 +9631,7 @@ fn test_forwardable_regen() {
        check_added_monitors!(nodes[1], 2);
 
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
-       // Note that nodes[1] and nodes[2] resend their funding_locked here since they haven't updated
+       // Note that nodes[1] and nodes[2] resend their channel_ready here since they haven't updated
        // the commitment state.
        reconnect_nodes(&nodes[1], &nodes[2], (true, true), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
 
@@ -9871,6 +9911,249 @@ fn test_keysend_payments_to_private_node() {
        claim_payment(&nodes[0], &path, test_preimage);
 }
 
+#[test]
+fn test_double_partial_claim() {
+       // Test what happens if a node receives a payment and generates a PaymentReceived event, the
+       // HTLCs then time out, and the sender resends only some of the MPP parts before the user
+       // finally processes the stale PaymentReceived event - the node must not inadvertently claim
+       // only part of the full payment amount.
+       let chanmon_cfgs = create_chanmon_cfgs(4);
+       let node_cfgs = create_node_cfgs(4, &chanmon_cfgs);
+       let node_chanmgrs = create_node_chanmgrs(4, &node_cfgs, &[None, None, None, None]);
+       let nodes = create_network(4, &node_cfgs, &node_chanmgrs);
+
+       create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 0, InitFeatures::known(), InitFeatures::known());
+       create_announced_chan_between_nodes_with_value(&nodes, 0, 2, 100_000, 0, InitFeatures::known(), InitFeatures::known());
+       create_announced_chan_between_nodes_with_value(&nodes, 1, 3, 100_000, 0, InitFeatures::known(), InitFeatures::known());
+       create_announced_chan_between_nodes_with_value(&nodes, 2, 3, 100_000, 0, InitFeatures::known(), InitFeatures::known());
+
+       let (mut route, payment_hash, payment_preimage, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[3], 15_000_000);
+       assert_eq!(route.paths.len(), 2);
+       route.paths.sort_by(|path_a, _| {
+               // Sort the path so that the path through nodes[1] comes first
+               if path_a[0].pubkey == nodes[1].node.get_our_node_id() {
+                       core::cmp::Ordering::Less } else { core::cmp::Ordering::Greater }
+       });
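+       // (Equivalent-ordering sketch, offered only as an aside and not used by the test: sorting
+       // on a boolean key puts the nodes[1] path first because `false` sorts before `true`:
+       //   route.paths.sort_by_key(|path| path[0].pubkey != nodes[1].node.get_our_node_id());
+       // )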
+
+       send_along_route_with_secret(&nodes[0], route.clone(), &[&[&nodes[1], &nodes[3]], &[&nodes[2], &nodes[3]]], 15_000_000, payment_hash, payment_secret);
+       // nodes[3] has now received a PaymentReceived event...which it will take some (exorbitant)
+       // amount of time to respond to.
+
+       // Connect some blocks to time out the payment
+       connect_blocks(&nodes[3], TEST_FINAL_CLTV);
+       connect_blocks(&nodes[0], TEST_FINAL_CLTV); // To get the same height for sending later
+
+       expect_pending_htlcs_forwardable!(nodes[3]);
+
+       pass_failed_payment_back(&nodes[0], &[&[&nodes[1], &nodes[3]], &[&nodes[2], &nodes[3]]], false, payment_hash);
+
+       // nodes[0] now retries the payment; only the part routed through nodes[1] is delivered...
+       nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)).unwrap();
+       check_added_monitors!(nodes[0], 2);
+
+       let mut events = nodes[0].node.get_and_clear_pending_msg_events();
+       assert_eq!(events.len(), 2);
+       pass_along_path(&nodes[0], &[&nodes[1], &nodes[3]], 15_000_000, payment_hash, Some(payment_secret), events.drain(..).next().unwrap(), false, None);
+
+       // At this point nodes[3] has received one half of the payment, and the user goes to handle
+       // that PaymentReceived event they got hours ago and never handled...we should refuse to claim.
+       nodes[3].node.claim_funds(payment_preimage);
+       check_added_monitors!(nodes[3], 0);
+       assert!(nodes[3].node.get_and_clear_pending_msg_events().is_empty());
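+       // (With only one of the two MPP parts present again, the claim above is effectively a
+       // no-op: no monitor update and no fulfill messages, so nodes[3] cannot lock in a partial
+       // amount.)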
+}
+
+fn do_test_partial_claim_before_restart(persist_both_monitors: bool) {
+       // Test what happens if a node receives an MPP payment, claims it, but crashes before
+       // persisting the ChannelManager. If `persist_both_monitors` is false, also crash after
+       // updating only one of the two channels' ChannelMonitors. As a result, on startup, we'll
+       // (a) still have the PaymentReceived event, (b) have one (or two) channel(s) that go on
+       // chain with the HTLC preimage in their monitors, and (c) optionally have one channel that
+       // is live off-chain but does not yet have the preimage for the still-pending HTLC.
+       //
+       // To get to the correct state, on startup we should propagate the preimage to the
+       // still-off-chain channel, claiming the HTLC as soon as the peer connects, with the monitor
+       // receiving the preimage without a state update.
+       //
+       // Further, we should generate a `PaymentClaimed` event to inform the user that the payment was
+       // definitely claimed.
+       let chanmon_cfgs = create_chanmon_cfgs(4);
+       let node_cfgs = create_node_cfgs(4, &chanmon_cfgs);
+       let node_chanmgrs = create_node_chanmgrs(4, &node_cfgs, &[None, None, None, None]);
+
+       let persister: test_utils::TestPersister;
+       let new_chain_monitor: test_utils::TestChainMonitor;
+       let nodes_3_deserialized: ChannelManager<EnforcingSigner, &test_utils::TestChainMonitor, &test_utils::TestBroadcaster, &test_utils::TestKeysInterface, &test_utils::TestFeeEstimator, &test_utils::TestLogger>;
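+       // (These bindings are declared before `nodes` so they are dropped after it: nodes[3] will
+       // later hold references to the new chain monitor and the deserialized ChannelManager,
+       // which therefore must outlive the node set.)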
+
+       let mut nodes = create_network(4, &node_cfgs, &node_chanmgrs);
+
+       create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 0, InitFeatures::known(), InitFeatures::known());
+       create_announced_chan_between_nodes_with_value(&nodes, 0, 2, 100_000, 0, InitFeatures::known(), InitFeatures::known());
+       let chan_id_persisted = create_announced_chan_between_nodes_with_value(&nodes, 1, 3, 100_000, 0, InitFeatures::known(), InitFeatures::known()).2;
+       let chan_id_not_persisted = create_announced_chan_between_nodes_with_value(&nodes, 2, 3, 100_000, 0, InitFeatures::known(), InitFeatures::known()).2;
+
+       // Create an MPP route for 15k sats, more than the default htlc-max of 10%
+       let (mut route, payment_hash, payment_preimage, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[3], 15_000_000);
+       assert_eq!(route.paths.len(), 2);
+       route.paths.sort_by(|path_a, _| {
+               // Sort the path so that the path through nodes[1] comes first
+               if path_a[0].pubkey == nodes[1].node.get_our_node_id() {
+                       core::cmp::Ordering::Less } else { core::cmp::Ordering::Greater }
+       });
+
+       nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)).unwrap();
+       check_added_monitors!(nodes[0], 2);
+
+       // Send the payment through to nodes[3] *without* clearing the PaymentReceived event
+       let mut send_events = nodes[0].node.get_and_clear_pending_msg_events();
+       assert_eq!(send_events.len(), 2);
+       do_pass_along_path(&nodes[0], &[&nodes[1], &nodes[3]], 15_000_000, payment_hash, Some(payment_secret), send_events[0].clone(), true, false, None);
+       do_pass_along_path(&nodes[0], &[&nodes[2], &nodes[3]], 15_000_000, payment_hash, Some(payment_secret), send_events[1].clone(), true, false, None);
+
+       // Now that we have an MPP payment pending, get the latest encoded copy of nodes[3]'s
+       // ChannelManager and, if we won't be persisting both monitors, of the not-yet-updated
+       // ChannelMonitor, for use later.
+       let mut original_monitor = test_utils::TestVecWriter(Vec::new());
+       if !persist_both_monitors {
+               for outpoint in nodes[3].chain_monitor.chain_monitor.list_monitors() {
+                       if outpoint.to_channel_id() == chan_id_not_persisted {
+                               assert!(original_monitor.0.is_empty());
+                               nodes[3].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap().write(&mut original_monitor).unwrap();
+                       }
+               }
+       }
+
+       let mut original_manager = test_utils::TestVecWriter(Vec::new());
+       nodes[3].node.write(&mut original_manager).unwrap();
+
+       expect_payment_received!(nodes[3], payment_hash, payment_secret, 15_000_000);
+
+       nodes[3].node.claim_funds(payment_preimage);
+       check_added_monitors!(nodes[3], 2);
+       expect_payment_claimed!(nodes[3], payment_hash, 15_000_000);
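+       // (Two monitor updates are expected above because the 15M msat payment arrived as two MPP
+       // parts, one on each of nodes[3]'s channels, so claiming touches both ChannelMonitors.)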
+
+       // Now fetch one of the two updated ChannelMonitors from nodes[3], and restart pretending we
+       // crashed in between the two persistence calls - using one old ChannelMonitor and one new one,
+       // with the old ChannelManager.
+       let mut updated_monitor = test_utils::TestVecWriter(Vec::new());
+       for outpoint in nodes[3].chain_monitor.chain_monitor.list_monitors() {
+               if outpoint.to_channel_id() == chan_id_persisted {
+                       assert!(updated_monitor.0.is_empty());
+                       nodes[3].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap().write(&mut updated_monitor).unwrap();
+               }
+       }
+       // If `persist_both_monitors` is set, get the second monitor here as well
+       if persist_both_monitors {
+               for outpoint in nodes[3].chain_monitor.chain_monitor.list_monitors() {
+                       if outpoint.to_channel_id() == chan_id_not_persisted {
+                               assert!(original_monitor.0.is_empty());
+                               nodes[3].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap().write(&mut original_monitor).unwrap();
+                       }
+               }
+       }
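+       // (At this point `updated_monitor` always holds the post-claim state of the channel we
+       // treat as persisted, while `original_monitor` holds either the pre-claim state of the
+       // other channel - simulating a crash between the two monitor persistence calls - or its
+       // post-claim state when `persist_both_monitors` is set.)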
+
+       // Now restart nodes[3].
+       persister = test_utils::TestPersister::new();
+       let keys_manager = &chanmon_cfgs[3].keys_manager;
+       new_chain_monitor = test_utils::TestChainMonitor::new(Some(nodes[3].chain_source), nodes[3].tx_broadcaster.clone(), nodes[3].logger, node_cfgs[3].fee_estimator, &persister, keys_manager);
+       nodes[3].chain_monitor = &new_chain_monitor;
+       let mut monitors = Vec::new();
+       for mut monitor_data in [original_monitor, updated_monitor].iter() {
+               let (_, mut deserialized_monitor) = <(BlockHash, ChannelMonitor<EnforcingSigner>)>::read(&mut &monitor_data.0[..], keys_manager).unwrap();
+               monitors.push(deserialized_monitor);
+       }
+
+       let config = UserConfig::default();
+       nodes_3_deserialized = {
+               let mut channel_monitors = HashMap::new();
+               for monitor in monitors.iter_mut() {
+                       channel_monitors.insert(monitor.get_funding_txo().0, monitor);
+               }
+               <(BlockHash, ChannelManager<EnforcingSigner, &test_utils::TestChainMonitor, &test_utils::TestBroadcaster, &test_utils::TestKeysInterface, &test_utils::TestFeeEstimator, &test_utils::TestLogger>)>::read(&mut &original_manager.0[..], ChannelManagerReadArgs {
+                       default_config: config,
+                       keys_manager,
+                       fee_estimator: node_cfgs[3].fee_estimator,
+                       chain_monitor: nodes[3].chain_monitor,
+                       tx_broadcaster: nodes[3].tx_broadcaster.clone(),
+                       logger: nodes[3].logger,
+                       channel_monitors,
+               }).unwrap().1
+       };
+       nodes[3].node = &nodes_3_deserialized;
+
+       for monitor in monitors {
+               // On startup the preimage should have been copied into the non-persisted monitor:
+               assert!(monitor.get_stored_preimages().contains_key(&payment_hash));
+               nodes[3].chain_monitor.watch_channel(monitor.get_funding_txo().0.clone(), monitor).unwrap();
+       }
+       check_added_monitors!(nodes[3], 2);
+
+       nodes[1].node.peer_disconnected(&nodes[3].node.get_our_node_id(), false);
+       nodes[2].node.peer_disconnected(&nodes[3].node.get_our_node_id(), false);
+
+       // During deserialization, we should have closed one channel (or both, if
+       // `persist_both_monitors` is set) and broadcast the latest commitment transaction(s). We
+       // should also still have the original PaymentReceived event we never finished processing.
+       let events = nodes[3].node.get_and_clear_pending_events();
+       assert_eq!(events.len(), if persist_both_monitors { 4 } else { 3 });
+       if let Event::PaymentReceived { amount_msat: 15_000_000, .. } = events[0] { } else { panic!(); }
+       if let Event::ChannelClosed { reason: ClosureReason::OutdatedChannelManager, .. } = events[1] { } else { panic!(); }
+       if persist_both_monitors {
+               if let Event::ChannelClosed { reason: ClosureReason::OutdatedChannelManager, .. } = events[2] { } else { panic!(); }
+       }
+
+       // On restart, we should also get a duplicate PaymentClaimed event as we persisted the
+       // ChannelManager prior to handling the original one.
+       if let Event::PaymentClaimed { payment_hash: our_payment_hash, amount_msat: 15_000_000, .. } =
+               events[if persist_both_monitors { 3 } else { 2 }]
+       {
+               assert_eq!(payment_hash, our_payment_hash);
+       } else { panic!(); }
+
+       assert_eq!(nodes[3].node.list_channels().len(), if persist_both_monitors { 0 } else { 1 });
+       if !persist_both_monitors {
+               // If one of the two channels is still live, reveal the payment preimage over it.
+
+               nodes[3].node.peer_connected(&nodes[2].node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
+               let reestablish_1 = get_chan_reestablish_msgs!(nodes[3], nodes[2]);
+               nodes[2].node.peer_connected(&nodes[3].node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
+               let reestablish_2 = get_chan_reestablish_msgs!(nodes[2], nodes[3]);
+
+               nodes[2].node.handle_channel_reestablish(&nodes[3].node.get_our_node_id(), &reestablish_1[0]);
+               get_event_msg!(nodes[2], MessageSendEvent::SendChannelUpdate, nodes[3].node.get_our_node_id());
+               assert!(nodes[2].node.get_and_clear_pending_msg_events().is_empty());
+
+               nodes[3].node.handle_channel_reestablish(&nodes[2].node.get_our_node_id(), &reestablish_2[0]);
+
+               // Once we call `get_and_clear_pending_msg_events` the holding cell is cleared and the HTLC
+               // claim should fly.
+               let ds_msgs = nodes[3].node.get_and_clear_pending_msg_events();
+               check_added_monitors!(nodes[3], 1);
+               assert_eq!(ds_msgs.len(), 2);
+               if let MessageSendEvent::SendChannelUpdate { .. } = ds_msgs[1] {} else { panic!(); }
+
+               let cs_updates = match ds_msgs[0] {
+                       MessageSendEvent::UpdateHTLCs { ref updates, .. } => {
+                               nodes[2].node.handle_update_fulfill_htlc(&nodes[3].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
+                               check_added_monitors!(nodes[2], 1);
+                               let cs_updates = get_htlc_update_msgs!(nodes[2], nodes[0].node.get_our_node_id());
+                               expect_payment_forwarded!(nodes[2], nodes[0], nodes[3], Some(1000), false, false);
+                               commitment_signed_dance!(nodes[2], nodes[3], updates.commitment_signed, false, true);
+                               cs_updates
+                       }
+                       _ => panic!(),
+               };
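+               // (The fulfill then walks the rest of the way back off-chain: nodes[2] received the
+               // preimage from nodes[3] above, and its resulting update_fulfill is handed to the
+               // original sender nodes[0] below.)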
+
+               nodes[0].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &cs_updates.update_fulfill_htlcs[0]);
+               commitment_signed_dance!(nodes[0], nodes[2], cs_updates.commitment_signed, false, true);
+               expect_payment_sent!(nodes[0], payment_preimage);
+       }
+}
+
+#[test]
+fn test_partial_claim_before_restart() {
+       do_test_partial_claim_before_restart(false);
+       do_test_partial_claim_before_restart(true);
+}
+
 /// The possible events which may trigger a `max_dust_htlc_exposure` breach
 #[derive(Clone, Copy, PartialEq)]
 enum ExposureEvent {
@@ -9915,7 +10198,7 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
 
        let opt_anchors = false;
 
-       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 1_000_000, 42);
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], &nodes[1].node.get_our_node_id(), 1_000_000, 42);
 
        if on_holder_tx {
                if let Some(mut chan) = nodes[0].node.channel_state.lock().unwrap().by_id.get_mut(&temporary_channel_id) {
@@ -9923,15 +10206,15 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
                }
        }
 
-       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
 
        nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id()));
        check_added_monitors!(nodes[0], 1);
 
-       let (funding_locked, channel_id) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
-       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &funding_locked);
+       let (channel_ready, channel_id) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
+       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
        update_nodes_with_chan_announce(&nodes, 0, 1, &announcement, &as_update, &bs_update);
 
        let dust_buffer_feerate = {