Add a `claim_deadline` field to `PaymentClaimable` with guarantees
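
The diff below covers only the test-suite fallout of this and the surrounding API changes visible in the hunks (event types moving to `crate::events`, the new `PathFailure` enum on `PaymentPathFailed`, the simplified `peer_disconnected` / extended `peer_connected` signatures, `ChannelPending` events, `UntrustedString` in `ClosureReason`, and so on); the `claim_deadline` field itself is added in the event definition, not in this file. As a rough consumer-side sketch — assuming the field surfaces on `Event::PaymentClaimable` as an `Option<u32>` block height, which this diff does not show — handling it might look like:

    // Hedged sketch, not part of this diff: react to PaymentClaimable while
    // respecting the assumed `claim_deadline: Option<u32>` block-height bound
    // described by the commit title.
    use lightning::events::Event;

    fn handle_event(event: Event, best_block_height: u32) {
        match event {
            Event::PaymentClaimable { payment_hash, amount_msat, claim_deadline, .. } => {
                // Only claim if there is still comfortable headroom before the
                // (assumed) deadline; otherwise let the HTLCs fail back rather
                // than racing an on-chain timeout.
                let have_time = claim_deadline.map_or(true, |deadline| best_block_height + 6 < deadline);
                if have_time {
                    // Look up the preimage for `payment_hash` and call
                    // `channel_manager.claim_funds(preimage)` here.
                    let _ = (payment_hash, amount_msat);
                }
            },
            _ => {},
        }
    }

The six-block margin is purely illustrative; how much headroom to keep before the deadline is an application choice.
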
diff --git a/lightning/src/ln/functional_tests.rs b/lightning/src/ln/functional_tests.rs
index 3da49ca7dc8d6db2a2a3942c42c1a43e2355351b..9c0a4b4b066ac1e50c5cacabf836979cd9dec792 100644
--- a/lightning/src/ln/functional_tests.rs
+++ b/lightning/src/ln/functional_tests.rs
@@ -18,6 +18,7 @@ use crate::chain::channelmonitor;
 use crate::chain::channelmonitor::{CLTV_CLAIM_BUFFER, LATENCY_GRACE_PERIOD_BLOCKS, ANTI_REORG_DELAY};
 use crate::chain::transaction::OutPoint;
 use crate::chain::keysinterface::{ChannelSigner, EcdsaChannelSigner, EntropySource};
+use crate::events::{Event, MessageSendEvent, MessageSendEventsProvider, PathFailure, PaymentPurpose, ClosureReason, HTLCDestination};
 use crate::ln::{PaymentPreimage, PaymentSecret, PaymentHash};
 use crate::ln::channel::{commitment_tx_base_weight, COMMITMENT_TX_WEIGHT_PER_HTLC, CONCURRENT_INBOUND_HTLC_FEE_BUFFER, FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE, MIN_AFFORDABLE_HTLC_COUNT};
 use crate::ln::channelmanager::{self, PaymentId, RAACommitmentOrder, PaymentSendFailure, BREAKDOWN_TIMEOUT, MIN_CLTV_EXPIRY_DELTA};
@@ -31,9 +32,9 @@ use crate::ln::msgs;
 use crate::ln::msgs::{ChannelMessageHandler, RoutingMessageHandler, ErrorAction};
 use crate::util::enforcing_trait_impls::EnforcingSigner;
 use crate::util::test_utils;
-use crate::util::events::{Event, MessageSendEvent, MessageSendEventsProvider, PaymentPurpose, ClosureReason, HTLCDestination};
 use crate::util::errors::APIError;
 use crate::util::ser::{Writeable, ReadableArgs};
+use crate::util::string::UntrustedString;
 use crate::util::config::UserConfig;
 
 use bitcoin::hash_types::BlockHash;
@@ -94,7 +95,7 @@ fn test_insane_channel_opens() {
                if let MessageSendEvent::HandleError { ref action, .. } = msg_events[0] {
                        match action {
                                &ErrorAction::SendErrorMessage { .. } => {
-                                       nodes[1].logger.assert_log_regex("lightning::ln::channelmanager".to_string(), expected_regex, 1);
+                                       nodes[1].logger.assert_log_regex("lightning::ln::channelmanager", expected_regex, 1);
                                },
                                _ => panic!("unexpected event!"),
                        }
@@ -540,6 +541,8 @@ fn do_test_sanity_on_in_flight_opens(steps: u8) {
                assert_eq!(added_monitors[0].0, funding_output);
                added_monitors.clear();
        }
+       expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
+
        let funding_signed = get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id());
 
        if steps & 0x0f == 5 { return; }
@@ -551,6 +554,7 @@ fn do_test_sanity_on_in_flight_opens(steps: u8) {
                added_monitors.clear();
        }
 
+       expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
        let events_4 = nodes[0].node.get_and_clear_pending_events();
        assert_eq!(events_4.len(), 0);
 
@@ -1118,7 +1122,7 @@ fn holding_cell_htlc_counting() {
                unwrap_send_err!(nodes[1].node.send_payment(&route, payment_hash_1, &Some(payment_secret_1), PaymentId(payment_hash_1.0)), true, APIError::ChannelUnavailable { ref err },
                        assert!(regex::Regex::new(r"Cannot push more than their max accepted HTLCs \(\d+\)").unwrap().is_match(err)));
                assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
-               nodes[1].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot push more than their max accepted HTLCs".to_string(), 1);
+               nodes[1].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot push more than their max accepted HTLCs", 1);
        }
 
        // This should also be true if we try to forward a payment.
@@ -1346,7 +1350,7 @@ fn test_basic_channel_reserve() {
                _ => panic!("Unexpected error variant"),
        }
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot send value that would put our balance under counterparty-announced channel reserve value".to_string(), 1);
+       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot send value that would put our balance under counterparty-announced channel reserve value", 1);
 
        send_payment(&nodes[0], &vec![&nodes[1]], max_can_send);
 }
@@ -1811,7 +1815,7 @@ fn test_channel_reserve_holding_cell_htlcs() {
                unwrap_send_err!(nodes[0].node.send_payment(&route, our_payment_hash, &Some(our_payment_secret), PaymentId(our_payment_hash.0)), true, APIError::ChannelUnavailable { ref err },
                        assert!(regex::Regex::new(r"Cannot send value that would put us over the max HTLC value in flight our peer will accept \(\d+\)").unwrap().is_match(err)));
                assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-               nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us over the max HTLC value in flight our peer will accept".to_string(), 1);
+               nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot send value that would put us over the max HTLC value in flight our peer will accept", 1);
        }
 
        // channel reserve is bigger than their_max_htlc_value_in_flight_msat so loop to deplete
@@ -1906,7 +1910,7 @@ fn test_channel_reserve_holding_cell_htlcs() {
                unwrap_send_err!(nodes[0].node.send_payment(&route, our_payment_hash, &Some(our_payment_secret), PaymentId(our_payment_hash.0)), true, APIError::ChannelUnavailable { ref err },
                        assert!(regex::Regex::new(r"Cannot send value that would put our balance under counterparty-announced channel reserve value \(\d+\)").unwrap().is_match(err)));
                assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-               nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot send value that would put our balance under counterparty-announced channel reserve value".to_string(), 2);
+               nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot send value that would put our balance under counterparty-announced channel reserve value", 2);
        }
 
        let (route_22, our_payment_hash_22, our_payment_preimage_22, our_payment_secret_22) = get_route_and_payment_hash!(nodes[0], nodes[2], recv_value_22);
@@ -1962,7 +1966,7 @@ fn test_channel_reserve_holding_cell_htlcs() {
        let events = nodes[2].node.get_and_clear_pending_events();
        assert_eq!(events.len(), 2);
        match events[0] {
-               Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat, receiver_node_id, via_channel_id, via_user_channel_id: _ } => {
+               Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat, receiver_node_id, via_channel_id, .. } => {
                        assert_eq!(our_payment_hash_21, *payment_hash);
                        assert_eq!(recv_value_21, amount_msat);
                        assert_eq!(nodes[2].node.get_our_node_id(), receiver_node_id.unwrap());
@@ -1978,7 +1982,7 @@ fn test_channel_reserve_holding_cell_htlcs() {
                _ => panic!("Unexpected event"),
        }
        match events[1] {
-               Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat, receiver_node_id, via_channel_id, via_user_channel_id: _ } => {
+               Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat, receiver_node_id, via_channel_id, .. } => {
                        assert_eq!(our_payment_hash_22, *payment_hash);
                        assert_eq!(recv_value_22, amount_msat);
                        assert_eq!(nodes[2].node.get_our_node_id(), receiver_node_id.unwrap());
@@ -2730,20 +2734,22 @@ fn test_htlc_on_chain_success() {
        }
        let chan_id = Some(chan_1.2);
        match forwarded_events[1] {
-               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id, outbound_amount_forwarded_msat } => {
                        assert_eq!(fee_earned_msat, Some(1000));
                        assert_eq!(prev_channel_id, chan_id);
                        assert_eq!(claim_from_onchain_tx, true);
                        assert_eq!(next_channel_id, Some(chan_2.2));
+                       assert_eq!(outbound_amount_forwarded_msat, Some(3000000));
                },
                _ => panic!()
        }
        match forwarded_events[2] {
-               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id, outbound_amount_forwarded_msat } => {
                        assert_eq!(fee_earned_msat, Some(1000));
                        assert_eq!(prev_channel_id, chan_id);
                        assert_eq!(claim_from_onchain_tx, true);
                        assert_eq!(next_channel_id, Some(chan_2.2));
+                       assert_eq!(outbound_amount_forwarded_msat, Some(3000000));
                },
                _ => panic!()
        }
@@ -2842,7 +2848,7 @@ fn test_htlc_on_chain_success() {
        assert_eq!(commitment_spend.input.len(), 2);
        assert_eq!(commitment_spend.input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
        assert_eq!(commitment_spend.input[1].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
-       assert_eq!(commitment_spend.lock_time.0, 0);
+       assert_eq!(commitment_spend.lock_time.0, nodes[1].best_block_info().1 + 1);
        assert!(commitment_spend.output[0].script_pubkey.is_v0_p2wpkh()); // direct payment
        // We don't bother to check that B can claim the HTLC output on its commitment tx here as
        // we already checked the same situation with A.
@@ -3235,12 +3241,12 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
                        let events = nodes[0].node.get_and_clear_pending_events();
                        assert_eq!(events.len(), 6);
                        match events[0] {
-                               Event::PaymentPathFailed { ref payment_hash, ref network_update, .. } => {
+                               Event::PaymentPathFailed { ref payment_hash, ref failure, .. } => {
                                        assert!(failed_htlcs.insert(payment_hash.0));
                                        // If we delivered B's RAA we got an unknown preimage error, not something
                                        // that we should update our routing table for.
                                        if !deliver_bs_raa {
-                                               assert!(network_update.is_some());
+                                               if let PathFailure::OnPath { network_update: Some(_) } = failure { } else { panic!("Unexpected path failure") }
                                        }
                                },
                                _ => panic!("Unexpected event"),
@@ -3252,9 +3258,8 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
                                _ => panic!("Unexpected event"),
                        }
                        match events[2] {
-                               Event::PaymentPathFailed { ref payment_hash, ref network_update, .. } => {
+                               Event::PaymentPathFailed { ref payment_hash, failure: PathFailure::OnPath { network_update: Some(_) }, .. } => {
                                        assert!(failed_htlcs.insert(payment_hash.0));
-                                       assert!(network_update.is_some());
                                },
                                _ => panic!("Unexpected event"),
                        }
@@ -3265,9 +3270,8 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
                                _ => panic!("Unexpected event"),
                        }
                        match events[4] {
-                               Event::PaymentPathFailed { ref payment_hash, ref network_update, .. } => {
+                               Event::PaymentPathFailed { ref payment_hash, failure: PathFailure::OnPath { network_update: Some(_) }, .. } => {
                                        assert!(failed_htlcs.insert(payment_hash.0));
-                                       assert!(network_update.is_some());
                                },
                                _ => panic!("Unexpected event"),
                        }
@@ -3524,8 +3528,8 @@ fn test_dup_events_on_peer_disconnect() {
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &claim_msgs.update_fulfill_htlcs[0]);
        expect_payment_sent_without_paths!(nodes[0], payment_preimage);
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
 
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (1, 0), (0, 0), (0, 0), (0, 0), (false, false));
        expect_payment_path_successful!(nodes[0]);
@@ -3565,8 +3569,8 @@ fn test_peer_disconnected_before_funding_broadcasted() {
 
        // Ensure that the channel is closed with `ClosureReason::DisconnectedPeer` when the peers are
        // disconnected before the funding transaction was broadcasted.
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
 
        check_closed_event!(nodes[0], 1, ClosureReason::DisconnectedPeer);
        check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer);
@@ -3582,8 +3586,8 @@ fn test_simple_peer_disconnect() {
        create_announced_chan_between_nodes(&nodes, 0, 1);
        create_announced_chan_between_nodes(&nodes, 1, 2);
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
        reconnect_nodes(&nodes[0], &nodes[1], (true, true), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
 
        let payment_preimage_1 = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 1000000).0;
@@ -3591,8 +3595,8 @@ fn test_simple_peer_disconnect() {
        fail_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), payment_hash_2);
        claim_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), payment_preimage_1);
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
 
        let (payment_preimage_3, payment_hash_3, _) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 1000000);
@@ -3600,8 +3604,8 @@ fn test_simple_peer_disconnect() {
        let payment_hash_5 = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 1000000).1;
        let payment_hash_6 = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 1000000).1;
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
 
        claim_payment_along_route(&nodes[0], &[&[&nodes[1], &nodes[2]]], true, payment_preimage_3);
        fail_payment_along_route(&nodes[0], &[&[&nodes[1], &nodes[2]]], true, payment_hash_5);
@@ -3618,22 +3622,22 @@ fn test_simple_peer_disconnect() {
                        _ => panic!("Unexpected event"),
                }
                match events[1] {
+                       Event::PaymentPathSuccessful { .. } => {},
+                       _ => panic!("Unexpected event"),
+               }
+               match events[2] {
                        Event::PaymentPathFailed { payment_hash, payment_failed_permanently, .. } => {
                                assert_eq!(payment_hash, payment_hash_5);
                                assert!(payment_failed_permanently);
                        },
                        _ => panic!("Unexpected event"),
                }
-               match events[2] {
+               match events[3] {
                        Event::PaymentFailed { payment_hash, .. } => {
                                assert_eq!(payment_hash, payment_hash_5);
                        },
                        _ => panic!("Unexpected event"),
                }
-               match events[3] {
-                       Event::PaymentPathSuccessful { .. } => {},
-                       _ => panic!("Unexpected event"),
-               }
        }
 
        claim_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), payment_preimage_4);
@@ -3701,8 +3705,8 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
                }
        }
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
        if messages_delivered < 3 {
                if simulate_broken_lnd {
                        // lnd has a long-standing bug where they send a channel_ready prior to a
@@ -3751,8 +3755,8 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
                };
        }
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
 
        nodes[1].node.process_pending_htlc_forwards();
@@ -3760,7 +3764,7 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
        let events_2 = nodes[1].node.get_and_clear_pending_events();
        assert_eq!(events_2.len(), 1);
        match events_2[0] {
-               Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat, receiver_node_id, via_channel_id, via_user_channel_id: _ } => {
+               Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat, receiver_node_id, via_channel_id, .. } => {
                        assert_eq!(payment_hash_1, *payment_hash);
                        assert_eq!(amount_msat, 1_000_000);
                        assert_eq!(receiver_node_id.unwrap(), nodes[1].node.get_our_node_id());
@@ -3834,8 +3838,8 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
                }
        }
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
        if messages_delivered < 2 {
                reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (1, 0), (0, 0), (0, 0), (0, 0), (false, false));
                if messages_delivered < 1 {
@@ -3861,8 +3865,8 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
                expect_payment_path_successful!(nodes[0]);
        }
        if messages_delivered <= 5 {
-               nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-               nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+               nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+               nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
        }
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
 
@@ -3976,13 +3980,13 @@ fn test_drop_messages_peer_disconnect_dual_htlc() {
                _ => panic!("Unexpected event"),
        }
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
 
-       nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: nodes[1].node.init_features(), remote_network_address: None }).unwrap();
+       nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: nodes[1].node.init_features(), remote_network_address: None }, true).unwrap();
        let reestablish_1 = get_chan_reestablish_msgs!(nodes[0], nodes[1]);
        assert_eq!(reestablish_1.len(), 1);
-       nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: nodes[0].node.init_features(), remote_network_address: None }).unwrap();
+       nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: nodes[0].node.init_features(), remote_network_address: None }, false).unwrap();
        let reestablish_2 = get_chan_reestablish_msgs!(nodes[1], nodes[0]);
        assert_eq!(reestablish_2.len(), 1);
 
@@ -4085,7 +4089,7 @@ fn do_test_htlc_timeout(send_partial_mpp: bool) {
                let cur_height = CHAN_CONFIRM_DEPTH + 1; // route_payment calls send_payment, which adds 1 to the current height. So we do the same here to match.
                let payment_id = PaymentId([42; 32]);
                let session_privs = nodes[0].node.test_add_new_pending_payment(our_payment_hash, Some(payment_secret), payment_id, &route).unwrap();
-               nodes[0].node.send_payment_along_path(&route.paths[0], &route.payment_params, &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
+               nodes[0].node.test_send_payment_along_path(&route.paths[0], &our_payment_hash, &Some(payment_secret), 200_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
                check_added_monitors!(nodes[0], 1);
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
@@ -4654,11 +4658,12 @@ fn test_onchain_to_onchain_claim() {
                _ => panic!("Unexpected event"),
        }
        match events[1] {
-               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id } => {
+               Event::PaymentForwarded { fee_earned_msat, prev_channel_id, claim_from_onchain_tx, next_channel_id, outbound_amount_forwarded_msat } => {
                        assert_eq!(fee_earned_msat, Some(1000));
                        assert_eq!(prev_channel_id, Some(chan_1.2));
                        assert_eq!(claim_from_onchain_tx, true);
                        assert_eq!(next_channel_id, Some(chan_2.2));
+                       assert_eq!(outbound_amount_forwarded_msat, Some(3000000));
                },
                _ => panic!("Unexpected event"),
        }
@@ -4700,7 +4705,7 @@ fn test_onchain_to_onchain_claim() {
        check_spends!(b_txn[0], commitment_tx[0]);
        assert_eq!(b_txn[0].input[0].witness.clone().last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
        assert!(b_txn[0].output[0].script_pubkey.is_v0_p2wpkh()); // direct payment
-       assert_eq!(b_txn[0].lock_time.0, 0); // Success tx
+       assert_eq!(b_txn[0].lock_time.0, nodes[1].best_block_info().1 + 1); // Success tx
 
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
@@ -5148,14 +5153,14 @@ fn do_test_fail_backwards_unrevoked_remote_announce(deliver_last_raa: bool, anno
        let mut as_failds = HashSet::new();
        let mut as_updates = 0;
        for event in as_events.iter() {
-               if let &Event::PaymentPathFailed { ref payment_hash, ref payment_failed_permanently, ref network_update, .. } = event {
+               if let &Event::PaymentPathFailed { ref payment_hash, ref payment_failed_permanently, ref failure, .. } = event {
                        assert!(as_failds.insert(*payment_hash));
                        if *payment_hash != payment_hash_2 {
                                assert_eq!(*payment_failed_permanently, deliver_last_raa);
                        } else {
                                assert!(!payment_failed_permanently);
                        }
-                       if network_update.is_some() {
+                       if let PathFailure::OnPath { network_update: Some(_) } = failure {
                                as_updates += 1;
                        }
                } else if let &Event::PaymentFailed { .. } = event {
@@ -5174,14 +5179,14 @@ fn do_test_fail_backwards_unrevoked_remote_announce(deliver_last_raa: bool, anno
        let mut bs_failds = HashSet::new();
        let mut bs_updates = 0;
        for event in bs_events.iter() {
-               if let &Event::PaymentPathFailed { ref payment_hash, ref payment_failed_permanently, ref network_update, .. } = event {
+               if let &Event::PaymentPathFailed { ref payment_hash, ref payment_failed_permanently, ref failure, .. } = event {
                        assert!(bs_failds.insert(*payment_hash));
                        if *payment_hash != payment_hash_1 && *payment_hash != payment_hash_5 {
                                assert_eq!(*payment_failed_permanently, deliver_last_raa);
                        } else {
                                assert!(!payment_failed_permanently);
                        }
-                       if network_update.is_some() {
+                       if let PathFailure::OnPath { network_update: Some(_) } = failure {
                                bs_updates += 1;
                        }
                } else if let &Event::PaymentFailed { .. } = event {
@@ -5280,7 +5285,7 @@ fn test_key_derivation_params() {
        let seed = [42; 32];
        let keys_manager = test_utils::TestKeysInterface::new(&seed, Network::Testnet);
        let chain_monitor = test_utils::TestChainMonitor::new(Some(&chanmon_cfgs[0].chain_source), &chanmon_cfgs[0].tx_broadcaster, &chanmon_cfgs[0].logger, &chanmon_cfgs[0].fee_estimator, &chanmon_cfgs[0].persister, &keys_manager);
-       let network_graph = Arc::new(NetworkGraph::new(chanmon_cfgs[0].chain_source.genesis_hash, &chanmon_cfgs[0].logger));
+       let network_graph = Arc::new(NetworkGraph::new(Network::Testnet, &chanmon_cfgs[0].logger));
        let scorer = Mutex::new(test_utils::TestScorer::new());
        let router = test_utils::TestRouter::new(network_graph.clone(), &scorer);
        let node = NodeCfg { chain_source: &chanmon_cfgs[0].chain_source, logger: &chanmon_cfgs[0].logger, tx_broadcaster: &chanmon_cfgs[0].tx_broadcaster, fee_estimator: &chanmon_cfgs[0].fee_estimator, router, chain_monitor, keys_manager: &keys_manager, network_graph, node_seed: seed, override_init_features: alloc::rc::Rc::new(core::cell::RefCell::new(None)) };
@@ -5695,12 +5700,10 @@ fn test_fail_holding_cell_htlc_upon_free() {
        let events = nodes[0].node.get_and_clear_pending_events();
        assert_eq!(events.len(), 2);
        match &events[0] {
-               &Event::PaymentPathFailed { ref payment_id, ref payment_hash, ref payment_failed_permanently, ref network_update, ref all_paths_failed, ref short_channel_id, .. } => {
+               &Event::PaymentPathFailed { ref payment_id, ref payment_hash, ref payment_failed_permanently, failure: PathFailure::OnPath { network_update: None }, ref short_channel_id, .. } => {
                        assert_eq!(PaymentId(our_payment_hash.0), *payment_id.as_ref().unwrap());
                        assert_eq!(our_payment_hash.clone(), *payment_hash);
                        assert_eq!(*payment_failed_permanently, false);
-                       assert_eq!(*all_paths_failed, true);
-                       assert_eq!(*network_update, None);
                        assert_eq!(*short_channel_id, Some(route.paths[0][0].short_channel_id));
                },
                _ => panic!("Unexpected event"),
@@ -5786,12 +5789,10 @@ fn test_free_and_fail_holding_cell_htlcs() {
        let events = nodes[0].node.get_and_clear_pending_events();
        assert_eq!(events.len(), 2);
        match &events[0] {
-               &Event::PaymentPathFailed { ref payment_id, ref payment_hash, ref payment_failed_permanently, ref network_update, ref all_paths_failed, ref short_channel_id, .. } => {
+               &Event::PaymentPathFailed { ref payment_id, ref payment_hash, ref payment_failed_permanently, failure: PathFailure::OnPath { network_update: None }, ref short_channel_id, .. } => {
                        assert_eq!(payment_id_2, *payment_id.as_ref().unwrap());
                        assert_eq!(payment_hash_2.clone(), *payment_hash);
                        assert_eq!(*payment_failed_permanently, false);
-                       assert_eq!(*all_paths_failed, true);
-                       assert_eq!(*network_update, None);
                        assert_eq!(*short_channel_id, Some(route_2.paths[0][0].short_channel_id));
                },
                _ => panic!("Unexpected event"),
@@ -5993,7 +5994,7 @@ fn test_update_add_htlc_bolt2_sender_value_below_minimum_msat() {
        unwrap_send_err!(nodes[0].node.send_payment(&route, our_payment_hash, &Some(our_payment_secret), PaymentId(our_payment_hash.0)), true, APIError::ChannelUnavailable { ref err },
                assert!(regex::Regex::new(r"Cannot send less than their minimum HTLC value \(\d+\)").unwrap().is_match(err)));
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot send less than their minimum HTLC value".to_string(), 1);
+       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot send less than their minimum HTLC value", 1);
 }
 
 #[test]
@@ -6011,7 +6012,7 @@ fn test_update_add_htlc_bolt2_sender_zero_value_msat() {
                assert_eq!(err, "Cannot send 0-msat HTLC"));
 
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot send 0-msat HTLC".to_string(), 1);
+       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot send 0-msat HTLC", 1);
 }
 
 #[test]
@@ -6094,7 +6095,7 @@ fn test_update_add_htlc_bolt2_sender_exceed_max_htlc_num_and_htlc_id_increment()
                assert!(regex::Regex::new(r"Cannot push more than their max accepted HTLCs \(\d+\)").unwrap().is_match(err)));
 
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot push more than their max accepted HTLCs".to_string(), 1);
+       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot push more than their max accepted HTLCs", 1);
 }
 
 #[test]
@@ -6118,7 +6119,7 @@ fn test_update_add_htlc_bolt2_sender_exceed_max_htlc_value_in_flight() {
                assert!(regex::Regex::new(r"Cannot send value that would put us over the max HTLC value in flight our peer will accept \(\d+\)").unwrap().is_match(err)));
 
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us over the max HTLC value in flight our peer will accept".to_string(), 1);
+       nodes[0].logger.assert_log_contains("lightning::ln::channelmanager", "Cannot send value that would put us over the max HTLC value in flight our peer will accept", 1);
 
        send_payment(&nodes[0], &[&nodes[1]], max_in_flight);
 }
@@ -6292,12 +6293,12 @@ fn test_update_add_htlc_bolt2_receiver_check_repeated_id_ignore() {
        nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &updates.update_add_htlcs[0]);
 
        //Disconnect and Reconnect
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
-       nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: nodes[1].node.init_features(), remote_network_address: None }).unwrap();
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
+       nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: nodes[1].node.init_features(), remote_network_address: None }, true).unwrap();
        let reestablish_1 = get_chan_reestablish_msgs!(nodes[0], nodes[1]);
        assert_eq!(reestablish_1.len(), 1);
-       nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: nodes[0].node.init_features(), remote_network_address: None }).unwrap();
+       nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: nodes[0].node.init_features(), remote_network_address: None }, false).unwrap();
        let reestablish_2 = get_chan_reestablish_msgs!(nodes[1], nodes[0]);
        assert_eq!(reestablish_2.len(), 1);
        nodes[0].node.handle_channel_reestablish(&nodes[1].node.get_our_node_id(), &reestablish_2[0]);
@@ -6689,8 +6690,7 @@ fn test_channel_failed_after_message_with_badonion_node_perm_bits_set() {
        // Expect a PaymentPathFailed event with a ChannelFailure network update for the channel between
        // the node originating the error to its next hop.
        match events_5[0] {
-               Event::PaymentPathFailed { network_update:
-                       Some(NetworkUpdate::ChannelFailure { short_channel_id, is_permanent }), error_code, ..
+               Event::PaymentPathFailed { error_code, failure: PathFailure::OnPath { network_update: Some(NetworkUpdate::ChannelFailure { short_channel_id, is_permanent }) }, ..
                } => {
                        assert_eq!(short_channel_id, chan_2.0.contents.short_channel_id);
                        assert!(is_permanent);
@@ -6866,7 +6866,7 @@ fn do_test_sweep_outbound_htlc_failure_update(revoked: bool, local: bool) {
                if !revoked {
                        assert_eq!(timeout_tx[0].input[0].witness.last().unwrap().len(), ACCEPTED_HTLC_SCRIPT_WEIGHT);
                } else {
-                       assert_eq!(timeout_tx[0].lock_time.0, 0);
+                       assert_eq!(timeout_tx[0].lock_time.0, 12);
                }
                // We fail non-dust-HTLC 2 by broadcast of local timeout/revocation-claim tx
                mine_transaction(&nodes[0], &timeout_tx[0]);
@@ -7032,8 +7032,8 @@ fn test_announce_disable_channels() {
        create_announced_chan_between_nodes(&nodes, 0, 1);
 
        // Disconnect peers
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
-       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
+       nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
 
        nodes[0].node.timer_tick_occurred(); // Enabled -> DisabledStaged
        nodes[0].node.timer_tick_occurred(); // DisabledStaged -> Disabled
@@ -7053,10 +7053,10 @@ fn test_announce_disable_channels() {
                }
        }
        // Reconnect peers
-       nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: nodes[1].node.init_features(), remote_network_address: None }).unwrap();
+       nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: nodes[1].node.init_features(), remote_network_address: None }, true).unwrap();
        let reestablish_1 = get_chan_reestablish_msgs!(nodes[0], nodes[1]);
        assert_eq!(reestablish_1.len(), 3);
-       nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: nodes[0].node.init_features(), remote_network_address: None }).unwrap();
+       nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: nodes[0].node.init_features(), remote_network_address: None }, false).unwrap();
        let reestablish_2 = get_chan_reestablish_msgs!(nodes[1], nodes[0]);
        assert_eq!(reestablish_2.len(), 3);
 
@@ -7932,6 +7932,171 @@ fn test_can_not_accept_unknown_inbound_channel() {
        }
 }
 
+#[test]
+fn test_onion_value_mpp_set_calculation() {
+       // Test that we use the onion value `amt_to_forward` when
+       // calculating whether we've reached the `total_msat` of an MPP
+       // by having a routing node forward more than `amt_to_forward`
+       // and checking that the receiving node doesn't generate
+       // a PaymentClaimable event too early
+       let node_count = 4;
+       let chanmon_cfgs = create_chanmon_cfgs(node_count);
+       let node_cfgs = create_node_cfgs(node_count, &chanmon_cfgs);
+       let node_chanmgrs = create_node_chanmgrs(node_count, &node_cfgs, &vec![None; node_count]);
+       let mut nodes = create_network(node_count, &node_cfgs, &node_chanmgrs);
+
+       let chan_1_id = create_announced_chan_between_nodes(&nodes, 0, 1).0.contents.short_channel_id;
+       let chan_2_id = create_announced_chan_between_nodes(&nodes, 0, 2).0.contents.short_channel_id;
+       let chan_3_id = create_announced_chan_between_nodes(&nodes, 1, 3).0.contents.short_channel_id;
+       let chan_4_id = create_announced_chan_between_nodes(&nodes, 2, 3).0.contents.short_channel_id;
+
+       let total_msat = 100_000;
+       let expected_paths: &[&[&Node]] = &[&[&nodes[1], &nodes[3]], &[&nodes[2], &nodes[3]]];
+       let (mut route, our_payment_hash, our_payment_preimage, our_payment_secret) = get_route_and_payment_hash!(&nodes[0], nodes[3], total_msat);
+       let sample_path = route.paths.pop().unwrap();
+
+       let mut path_1 = sample_path.clone();
+       path_1[0].pubkey = nodes[1].node.get_our_node_id();
+       path_1[0].short_channel_id = chan_1_id;
+       path_1[1].pubkey = nodes[3].node.get_our_node_id();
+       path_1[1].short_channel_id = chan_3_id;
+       path_1[1].fee_msat = 100_000;
+       route.paths.push(path_1);
+
+       let mut path_2 = sample_path.clone();
+       path_2[0].pubkey = nodes[2].node.get_our_node_id();
+       path_2[0].short_channel_id = chan_2_id;
+       path_2[1].pubkey = nodes[3].node.get_our_node_id();
+       path_2[1].short_channel_id = chan_4_id;
+       path_2[1].fee_msat = 1_000;
+       route.paths.push(path_2);
+
+       // Send payment
+       let payment_id = PaymentId(nodes[0].keys_manager.backing.get_secure_random_bytes());
+       let onion_session_privs = nodes[0].node.test_add_new_pending_payment(our_payment_hash, Some(our_payment_secret), payment_id, &route).unwrap();
+       nodes[0].node.test_send_payment_internal(&route, our_payment_hash, &Some(our_payment_secret), None, payment_id, Some(total_msat), onion_session_privs).unwrap();
+       check_added_monitors!(nodes[0], expected_paths.len());
+
+       let mut events = nodes[0].node.get_and_clear_pending_msg_events();
+       assert_eq!(events.len(), expected_paths.len());
+
+       // First path
+       let ev = remove_first_msg_event_to_node(&expected_paths[0][0].node.get_our_node_id(), &mut events);
+       let mut payment_event = SendEvent::from_event(ev);
+       let mut prev_node = &nodes[0];
+
+       for (idx, &node) in expected_paths[0].iter().enumerate() {
+               assert_eq!(node.node.get_our_node_id(), payment_event.node_id);
+
+               if idx == 0 { // routing node
+                       let session_priv = [3; 32];
+                       let height = nodes[0].best_block_info().1;
+                       let session_priv = SecretKey::from_slice(&session_priv).unwrap();
+                       let mut onion_keys = onion_utils::construct_onion_keys(&Secp256k1::new(), &route.paths[0], &session_priv).unwrap();
+                       let (mut onion_payloads, _, _) = onion_utils::build_onion_payloads(&route.paths[0], 100_000, &Some(our_payment_secret), height + 1, &None).unwrap();
+                       // Edit amt_to_forward to simulate the sender having set
+                       // the final amount and the routing node taking less fee
+                       onion_payloads[1].amt_to_forward = 99_000;
+                       let new_onion_packet = onion_utils::construct_onion_packet(onion_payloads, onion_keys, [0; 32], &our_payment_hash);
+                       payment_event.msgs[0].onion_routing_packet = new_onion_packet;
+               }
+
+               node.node.handle_update_add_htlc(&prev_node.node.get_our_node_id(), &payment_event.msgs[0]);
+               check_added_monitors!(node, 0);
+               commitment_signed_dance!(node, prev_node, payment_event.commitment_msg, false);
+               expect_pending_htlcs_forwardable!(node);
+
+               if idx == 0 {
+                       let mut events_2 = node.node.get_and_clear_pending_msg_events();
+                       assert_eq!(events_2.len(), 1);
+                       check_added_monitors!(node, 1);
+                       payment_event = SendEvent::from_event(events_2.remove(0));
+                       assert_eq!(payment_event.msgs.len(), 1);
+               } else {
+                       let events_2 = node.node.get_and_clear_pending_events();
+                       assert!(events_2.is_empty());
+               }
+
+               prev_node = node;
+       }
+
+       // Second path
+       let ev = remove_first_msg_event_to_node(&expected_paths[1][0].node.get_our_node_id(), &mut events);
+       pass_along_path(&nodes[0], expected_paths[1], 101_000, our_payment_hash.clone(), Some(our_payment_secret), ev, true, None);
+
+       claim_payment_along_route(&nodes[0], expected_paths, false, our_payment_preimage);
+}
+
+fn do_test_overshoot_mpp(msat_amounts: &[u64], total_msat: u64) {
+
+       let routing_node_count = msat_amounts.len();
+       let node_count = routing_node_count + 2;
+
+       let chanmon_cfgs = create_chanmon_cfgs(node_count);
+       let node_cfgs = create_node_cfgs(node_count, &chanmon_cfgs);
+       let node_chanmgrs = create_node_chanmgrs(node_count, &node_cfgs, &vec![None; node_count]);
+       let nodes = create_network(node_count, &node_cfgs, &node_chanmgrs);
+
+       let src_idx = 0;
+       let dst_idx = 1;
+
+       // Create channels for each amount
+       let mut expected_paths = Vec::with_capacity(routing_node_count);
+       let mut src_chan_ids = Vec::with_capacity(routing_node_count);
+       let mut dst_chan_ids = Vec::with_capacity(routing_node_count);
+       for i in 0..routing_node_count {
+               let routing_node = 2 + i;
+               let src_chan_id = create_announced_chan_between_nodes(&nodes, src_idx, routing_node).0.contents.short_channel_id;
+               src_chan_ids.push(src_chan_id);
+               let dst_chan_id = create_announced_chan_between_nodes(&nodes, routing_node, dst_idx).0.contents.short_channel_id;
+               dst_chan_ids.push(dst_chan_id);
+               let path = vec![&nodes[routing_node], &nodes[dst_idx]];
+               expected_paths.push(path);
+       }
+       let expected_paths: Vec<&[&Node]> = expected_paths.iter().map(|route| route.as_slice()).collect();
+
+       // Create a route for each amount
+       let example_amount = 100000;
+       let (mut route, our_payment_hash, our_payment_preimage, our_payment_secret) = get_route_and_payment_hash!(&nodes[src_idx], nodes[dst_idx], example_amount);
+       let sample_path = route.paths.pop().unwrap();
+       for i in 0..routing_node_count {
+               let routing_node = 2 + i;
+               let mut path = sample_path.clone();
+               path[0].pubkey = nodes[routing_node].node.get_our_node_id();
+               path[0].short_channel_id = src_chan_ids[i];
+               path[1].pubkey = nodes[dst_idx].node.get_our_node_id();
+               path[1].short_channel_id = dst_chan_ids[i];
+               path[1].fee_msat = msat_amounts[i];
+               route.paths.push(path);
+       }
+
+       // Send payment with manually set total_msat
+       let payment_id = PaymentId(nodes[src_idx].keys_manager.backing.get_secure_random_bytes());
+       let onion_session_privs = nodes[src_idx].node.test_add_new_pending_payment(our_payment_hash, Some(our_payment_secret), payment_id, &route).unwrap();
+       nodes[src_idx].node.test_send_payment_internal(&route, our_payment_hash, &Some(our_payment_secret), None, payment_id, Some(total_msat), onion_session_privs).unwrap();
+       check_added_monitors!(nodes[src_idx], expected_paths.len());
+
+       let mut events = nodes[src_idx].node.get_and_clear_pending_msg_events();
+       assert_eq!(events.len(), expected_paths.len());
+       let mut amount_received = 0;
+       for (path_idx, expected_path) in expected_paths.iter().enumerate() {
+               let ev = remove_first_msg_event_to_node(&expected_path[0].node.get_our_node_id(), &mut events);
+
+               let current_path_amount = msat_amounts[path_idx];
+               amount_received += current_path_amount;
+               let became_claimable_now = amount_received >= total_msat && amount_received - current_path_amount < total_msat;
+               pass_along_path(&nodes[src_idx], expected_path, amount_received, our_payment_hash.clone(), Some(our_payment_secret), ev, became_claimable_now, None);
+       }
+
+       claim_payment_along_route(&nodes[src_idx], &expected_paths, false, our_payment_preimage);
+}
+
+#[test]
+fn test_overshoot_mpp() {
+       do_test_overshoot_mpp(&[100_000, 101_000], 200_000);
+       do_test_overshoot_mpp(&[100_000, 10_000, 100_000], 200_000);
+}
+
 #[test]
 fn test_simple_mpp() {
        // Simple test of sending a multi-path payment.
@@ -8157,12 +8322,13 @@ fn test_update_err_monitor_lockdown() {
        let logger = test_utils::TestLogger::with_id(format!("node {}", 0));
        let persister = test_utils::TestPersister::new();
        let watchtower = {
-               let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap();
-               let mut w = test_utils::TestVecWriter(Vec::new());
-               monitor.write(&mut w).unwrap();
-               let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read(
-                               &mut io::Cursor::new(&w.0), (nodes[0].keys_manager, nodes[0].keys_manager)).unwrap().1;
-               assert!(new_monitor == *monitor);
+               let new_monitor = {
+                       let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap();
+                       let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read(
+                                       &mut io::Cursor::new(&monitor.encode()), (nodes[0].keys_manager, nodes[0].keys_manager)).unwrap().1;
+                       assert!(new_monitor == *monitor);
+                       new_monitor
+               };
                let watchtower = test_utils::TestChainMonitor::new(Some(&chain_source), &chanmon_cfgs[0].tx_broadcaster, &logger, &chanmon_cfgs[0].fee_estimator, &persister, &node_cfgs[0].keys_manager);
                assert_eq!(watchtower.watch_channel(outpoint, new_monitor), ChannelMonitorUpdateStatus::Completed);
                watchtower
@@ -8186,7 +8352,7 @@ fn test_update_err_monitor_lockdown() {
                let mut node_0_per_peer_lock;
                let mut node_0_peer_state_lock;
                let mut channel = get_channel_ref!(nodes[0], nodes[1], node_0_per_peer_lock, node_0_peer_state_lock, chan_1.2);
-               if let Ok((_, _, update)) = channel.commitment_signed(&updates.commitment_signed, &node_cfgs[0].logger) {
+               if let Ok(update) = channel.commitment_signed(&updates.commitment_signed, &node_cfgs[0].logger) {
                        assert_eq!(watchtower.chain_monitor.update_channel(outpoint, &update), ChannelMonitorUpdateStatus::PermanentFailure);
                        assert_eq!(nodes[0].chain_monitor.update_channel(outpoint, &update), ChannelMonitorUpdateStatus::Completed);
                } else { assert!(false); }
@@ -8224,12 +8390,13 @@ fn test_concurrent_monitor_claim() {
        let logger = test_utils::TestLogger::with_id(format!("node {}", "Alice"));
        let persister = test_utils::TestPersister::new();
        let watchtower_alice = {
-               let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap();
-               let mut w = test_utils::TestVecWriter(Vec::new());
-               monitor.write(&mut w).unwrap();
-               let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read(
-                               &mut io::Cursor::new(&w.0), (nodes[0].keys_manager, nodes[0].keys_manager)).unwrap().1;
-               assert!(new_monitor == *monitor);
+               let new_monitor = {
+                       let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap();
+                       let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read(
+                                       &mut io::Cursor::new(&monitor.encode()), (nodes[0].keys_manager, nodes[0].keys_manager)).unwrap().1;
+                       assert!(new_monitor == *monitor);
+                       new_monitor
+               };
                let watchtower = test_utils::TestChainMonitor::new(Some(&chain_source), &chanmon_cfgs[0].tx_broadcaster, &logger, &chanmon_cfgs[0].fee_estimator, &persister, &node_cfgs[0].keys_manager);
                assert_eq!(watchtower.watch_channel(outpoint, new_monitor), ChannelMonitorUpdateStatus::Completed);
                watchtower
@@ -8253,12 +8420,13 @@ fn test_concurrent_monitor_claim() {
        let logger = test_utils::TestLogger::with_id(format!("node {}", "Bob"));
        let persister = test_utils::TestPersister::new();
        let watchtower_bob = {
-               let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap();
-               let mut w = test_utils::TestVecWriter(Vec::new());
-               monitor.write(&mut w).unwrap();
-               let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read(
-                               &mut io::Cursor::new(&w.0), (nodes[0].keys_manager, nodes[0].keys_manager)).unwrap().1;
-               assert!(new_monitor == *monitor);
+               let new_monitor = {
+                       let monitor = nodes[0].chain_monitor.chain_monitor.get_monitor(outpoint).unwrap();
+                       let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<EnforcingSigner>)>::read(
+                                       &mut io::Cursor::new(&monitor.encode()), (nodes[0].keys_manager, nodes[0].keys_manager)).unwrap().1;
+                       assert!(new_monitor == *monitor);
+                       new_monitor
+               };
                let watchtower = test_utils::TestChainMonitor::new(Some(&chain_source), &chanmon_cfgs[0].tx_broadcaster, &logger, &chanmon_cfgs[0].fee_estimator, &persister, &node_cfgs[0].keys_manager);
                assert_eq!(watchtower.watch_channel(outpoint, new_monitor), ChannelMonitorUpdateStatus::Completed);
                watchtower
@@ -8280,7 +8448,7 @@ fn test_concurrent_monitor_claim() {
                let mut node_0_per_peer_lock;
                let mut node_0_peer_state_lock;
                let mut channel = get_channel_ref!(nodes[0], nodes[1], node_0_per_peer_lock, node_0_peer_state_lock, chan_1.2);
-               if let Ok((_, _, update)) = channel.commitment_signed(&updates.commitment_signed, &node_cfgs[0].logger) {
+               if let Ok(update) = channel.commitment_signed(&updates.commitment_signed, &node_cfgs[0].logger) {
                        // Watchtower Alice should already have seen the block and reject the update
                        assert_eq!(watchtower_alice.chain_monitor.update_channel(outpoint, &update), ChannelMonitorUpdateStatus::PermanentFailure);
                        assert_eq!(watchtower_bob.chain_monitor.update_channel(outpoint, &update), ChannelMonitorUpdateStatus::Completed);
@@ -8348,7 +8516,7 @@ fn test_pre_lockin_no_chan_closed_update() {
        let channel_id = crate::chain::transaction::OutPoint { txid: funding_created_msg.funding_txid, index: funding_created_msg.funding_output_index }.to_channel_id();
        nodes[0].node.handle_error(&nodes[1].node.get_our_node_id(), &msgs::ErrorMessage { channel_id, data: "Hi".to_owned() });
        assert!(nodes[0].chain_monitor.added_monitors.lock().unwrap().is_empty());
-       check_closed_event!(nodes[0], 2, ClosureReason::CounterpartyForceClosed { peer_msg: "Hi".to_string() }, true);
+       check_closed_event!(nodes[0], 2, ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString("Hi".to_string()) }, true);
 }
 
 #[test]
@@ -8689,6 +8857,8 @@ fn test_duplicate_chan_id() {
                assert_eq!(added_monitors[0].0, funding_output);
                added_monitors.clear();
        }
+       expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
+
        let funding_signed_msg = get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id());
 
        let funding_outpoint = crate::chain::transaction::OutPoint { txid: funding_created_msg.funding_txid, index: funding_created_msg.funding_output_index };
@@ -8736,9 +8906,9 @@ fn test_duplicate_chan_id() {
        };
        check_added_monitors!(nodes[0], 0);
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &funding_created);
-       // At this point we'll try to add a duplicate channel monitor, which will be rejected, but
-       // still needs to be cleared here.
-       check_added_monitors!(nodes[1], 1);
+       // At this point we'll look up if the channel_id is present and immediately fail the channel
+       // without trying to persist the `ChannelMonitor`.
+       check_added_monitors!(nodes[1], 0);
 
        // ...still, nodes[1] will reject the duplicate channel.
        {
@@ -8764,6 +8934,7 @@ fn test_duplicate_chan_id() {
                assert_eq!(added_monitors[0].0, funding_output);
                added_monitors.clear();
        }
+       expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
 
        let events_4 = nodes[0].node.get_and_clear_pending_events();
        assert_eq!(events_4.len(), 0);
@@ -8806,7 +8977,7 @@ fn test_error_chans_closed() {
        nodes[0].node.handle_error(&nodes[1].node.get_our_node_id(), &msgs::ErrorMessage { channel_id: chan_2.2, data: "ERR".to_owned() });
        check_added_monitors!(nodes[0], 1);
        check_closed_broadcast!(nodes[0], false);
-       check_closed_event!(nodes[0], 1, ClosureReason::CounterpartyForceClosed { peer_msg: "ERR".to_string() });
+       check_closed_event!(nodes[0], 1, ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString("ERR".to_string()) });
        assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0).len(), 1);
        assert_eq!(nodes[0].node.list_usable_channels().len(), 2);
        assert!(nodes[0].node.list_usable_channels()[0].channel_id == chan_1.2 || nodes[0].node.list_usable_channels()[1].channel_id == chan_1.2);
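(Aside, not from the patch: peer_msg is now wrapped in UntrustedString so counterparty-supplied error text is clearly flagged as unsanitized. A minimal sketch, assuming UntrustedString is a one-field tuple struct with a public inner String:)

use lightning::util::string::UntrustedString;

fn peer_msg_len(reason_text: UntrustedString) -> usize {
    // The inner String is still reachable, but callers should treat it as untrusted
    // (it may contain control characters or misleading content from the peer).
    reason_text.0.len()
}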
@@ -8816,7 +8987,7 @@ fn test_error_chans_closed() {
        let _chan_4 = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100000, 10001);
        nodes[0].node.handle_error(&nodes[1].node.get_our_node_id(), &msgs::ErrorMessage { channel_id: [0; 32], data: "ERR".to_owned() });
        check_added_monitors!(nodes[0], 2);
-       check_closed_event!(nodes[0], 2, ClosureReason::CounterpartyForceClosed { peer_msg: "ERR".to_string() });
+       check_closed_event!(nodes[0], 2, ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString("ERR".to_string()) });
        let events = nodes[0].node.get_and_clear_pending_msg_events();
        assert_eq!(events.len(), 2);
        match events[0] {
@@ -8832,13 +9003,11 @@ fn test_error_chans_closed() {
                _ => panic!("Unexpected event"),
        }
        // Note that at this point users of a standard PeerHandler will end up calling
-       // peer_disconnected with no_connection_possible set to false, duplicating the
-       // close-all-channels logic. That's OK, we don't want to end up not force-closing channels for
-       // users with their own peer handling logic. We duplicate the call here, however.
+       // peer_disconnected.
        assert_eq!(nodes[0].node.list_usable_channels().len(), 1);
        assert!(nodes[0].node.list_usable_channels()[0].channel_id == chan_3.2);
 
-       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), true);
+       nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
        assert_eq!(nodes[0].node.list_usable_channels().len(), 1);
        assert!(nodes[0].node.list_usable_channels()[0].channel_id == chan_3.2);
 }
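(Sketch, not from the patch: the call above reflects the new peer_disconnected signature, which drops the no_connection_possible flag. For users with their own peer handling, as the rewritten comment notes, the glue now looks roughly like this; names other than peer_disconnected are placeholders:)

use bitcoin::secp256k1::PublicKey;
use lightning::ln::msgs::ChannelMessageHandler;

// Hypothetical glue: tell the channel-message handler that a peer's connection dropped.
fn on_socket_closed<CMH: ChannelMessageHandler>(handler: &CMH, peer: &PublicKey) {
    handler.peer_disconnected(peer);
}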
@@ -8880,9 +9049,11 @@ fn test_invalid_funding_tx() {
        nodes[0].node.funding_transaction_generated_unchecked(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone(), 0).unwrap();
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
+       expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
 
        nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id()));
        check_added_monitors!(nodes[0], 1);
+       expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
 
        let events_1 = nodes[0].node.get_and_clear_pending_events();
        assert_eq!(events_1.len(), 0);
@@ -8954,8 +9125,8 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
        create_announced_chan_between_nodes(&nodes, 0, 1);
        let (chan_announce, _, channel_id, _) = create_announced_chan_between_nodes(&nodes, 1, 2);
        let (_, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1_000_000);
-       nodes[1].node.peer_disconnected(&nodes[2].node.get_our_node_id(), false);
-       nodes[2].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
+       nodes[1].node.peer_disconnected(&nodes[2].node.get_our_node_id());
+       nodes[2].node.peer_disconnected(&nodes[1].node.get_our_node_id());
 
        nodes[1].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[2].node.get_our_node_id()).unwrap();
        check_closed_broadcast!(nodes[1], true);
@@ -9136,7 +9307,6 @@ fn test_inconsistent_mpp_params() {
                if path_a[0].pubkey == nodes[1].node.get_our_node_id() {
                        core::cmp::Ordering::Less } else { core::cmp::Ordering::Greater }
        });
-       let payment_params_opt = Some(payment_params);
 
        let (our_payment_preimage, our_payment_hash, our_payment_secret) = get_payment_preimage_hash!(&nodes[3]);
 
@@ -9150,20 +9320,20 @@ fn test_inconsistent_mpp_params() {
                dup_route.paths.push(route.paths[1].clone());
                nodes[0].node.test_add_new_pending_payment(our_payment_hash, Some(our_payment_secret), payment_id, &dup_route).unwrap()
        };
-       {
-               nodes[0].node.send_payment_along_path(&route.paths[0], &payment_params_opt, &our_payment_hash, &Some(our_payment_secret), 15_000_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
-               check_added_monitors!(nodes[0], 1);
+       nodes[0].node.test_send_payment_along_path(&route.paths[0], &our_payment_hash, &Some(our_payment_secret), 15_000_000, cur_height, payment_id, &None, session_privs[0]).unwrap();
+       check_added_monitors!(nodes[0], 1);
 
+       {
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
                pass_along_path(&nodes[0], &[&nodes[1], &nodes[3]], 15_000_000, our_payment_hash, Some(our_payment_secret), events.pop().unwrap(), false, None);
        }
        assert!(nodes[3].node.get_and_clear_pending_events().is_empty());
 
-       {
-               nodes[0].node.send_payment_along_path(&route.paths[1], &payment_params_opt, &our_payment_hash, &Some(our_payment_secret), 14_000_000, cur_height, payment_id, &None, session_privs[1]).unwrap();
-               check_added_monitors!(nodes[0], 1);
+       nodes[0].node.test_send_payment_along_path(&route.paths[1], &our_payment_hash, &Some(our_payment_secret), 14_000_000, cur_height, payment_id, &None, session_privs[1]).unwrap();
+       check_added_monitors!(nodes[0], 1);
 
+       {
                let mut events = nodes[0].node.get_and_clear_pending_msg_events();
                assert_eq!(events.len(), 1);
                let payment_event = SendEvent::from_event(events.pop().unwrap());
@@ -9206,7 +9376,7 @@ fn test_inconsistent_mpp_params() {
 
        expect_payment_failed_conditions(&nodes[0], our_payment_hash, true, PaymentFailedConditions::new().mpp_parts_remain());
 
-       nodes[0].node.send_payment_along_path(&route.paths[1], &payment_params_opt, &our_payment_hash, &Some(our_payment_secret), 15_000_000, cur_height, payment_id, &None, session_privs[2]).unwrap();
+       nodes[0].node.test_send_payment_along_path(&route.paths[1], &our_payment_hash, &Some(our_payment_secret), 15_000_000, cur_height, payment_id, &None, session_privs[2]).unwrap();
        check_added_monitors!(nodes[0], 1);
 
        let mut events = nodes[0].node.get_and_clear_pending_msg_events();
@@ -9250,7 +9420,6 @@ fn test_keysend_payments_to_public_node() {
        let route_params = RouteParameters {
                payment_params: PaymentParameters::for_keysend(payee_pubkey, 40),
                final_value_msat: 10000,
-               final_cltv_expiry_delta: 40,
        };
        let scorer = test_utils::TestScorer::new();
        let random_seed_bytes = chanmon_cfgs[1].keys_manager.get_secure_random_bytes();
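(Aside, not from the patch: RouteParameters no longer carries final_cltv_expiry_delta; the 40 passed to PaymentParameters::for_keysend above is assumed to be where that delta now lives. A minimal construction sketch:)

use bitcoin::secp256k1::PublicKey;
use lightning::routing::router::{PaymentParameters, RouteParameters};

fn keysend_route_params(payee_pubkey: PublicKey) -> RouteParameters {
    RouteParameters {
        // The final CLTV expiry delta (40 here) now travels inside PaymentParameters.
        payment_params: PaymentParameters::for_keysend(payee_pubkey, 40),
        final_value_msat: 10_000,
    }
}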
@@ -9281,7 +9450,6 @@ fn test_keysend_payments_to_private_node() {
        let route_params = RouteParameters {
                payment_params: PaymentParameters::for_keysend(payee_pubkey, 40),
                final_value_msat: 10000,
-               final_cltv_expiry_delta: 40,
        };
        let network_graph = nodes[0].network_graph.clone();
        let first_hops = nodes[0].node.list_usable_channels();
@@ -9415,9 +9583,11 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
        nodes[0].node.funding_transaction_generated(&temporary_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).unwrap();
        nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
        check_added_monitors!(nodes[1], 1);
+       expect_channel_pending_event(&nodes[1], &nodes[0].node.get_our_node_id());
 
        nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id()));
        check_added_monitors!(nodes[0], 1);
+       expect_channel_pending_event(&nodes[0], &nodes[1].node.get_our_node_id());
 
        let (channel_ready, channel_id) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
        let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &channel_ready);
@@ -9511,7 +9681,7 @@ fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_e
                }
                nodes[0].node.timer_tick_occurred();
                check_added_monitors!(nodes[0], 1);
-               nodes[0].logger.assert_log_contains("lightning::ln::channel".to_string(), "Cannot afford to send new feerate at 2530 without infringing max dust htlc exposure".to_string(), 1);
+               nodes[0].logger.assert_log_contains("lightning::ln::channel", "Cannot afford to send new feerate at 2530 without infringing max dust htlc exposure", 1);
        }
 
        let _ = nodes[0].node.get_and_clear_pending_msg_events();
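(Illustrative, not part of the patch: the "max dust htlc exposure" log asserted above is driven by a per-channel config cap; the field name below is an assumption for this release series, not something shown in the diff:)

use lightning::util::config::UserConfig;

fn config_with_dust_cap(msat: u64) -> UserConfig {
    let mut config = UserConfig::default();
    // Cap the total value of dust HTLCs this channel will carry, in millisatoshis.
    config.channel_config.max_dust_htlc_exposure_msat = msat;
    config
}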