Merge pull request #1178 from jkczyz/2021-11-payment-path-successful
[rust-lightning] / lightning / src / ln / functional_tests.rs
index 8c0870afaa65e0212212342dce341cf6d95b5e91..bf3f8c1d56ab0cce2290e467cd08fef85a2a0ce8 100644 (file)
@@ -22,10 +22,9 @@ use ln::channel::{COMMITMENT_TX_BASE_WEIGHT, COMMITMENT_TX_WEIGHT_PER_HTLC};
 use ln::channelmanager::{ChannelManager, ChannelManagerReadArgs, PaymentId, RAACommitmentOrder, PaymentSendFailure, BREAKDOWN_TIMEOUT, MIN_CLTV_EXPIRY_DELTA};
 use ln::channel::{Channel, ChannelError};
 use ln::{chan_utils, onion_utils};
-use ln::chan_utils::HTLC_SUCCESS_TX_WEIGHT;
+use ln::chan_utils::{HTLC_SUCCESS_TX_WEIGHT, HTLC_TIMEOUT_TX_WEIGHT, HTLCOutputInCommitment};
 use routing::network_graph::{NetworkUpdate, RoutingFees};
 use routing::router::{Payee, Route, RouteHop, RouteHint, RouteHintHop, RouteParameters, find_route, get_route};
-use routing::scorer::Scorer;
 use ln::features::{ChannelFeatures, InitFeatures, InvoiceFeatures, NodeFeatures};
 use ln::msgs;
 use ln::msgs::{ChannelMessageHandler, RoutingMessageHandler, ErrorAction};
@@ -585,9 +584,10 @@ fn test_update_fee_that_funder_cannot_afford() {
        let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
        let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
-       let channel_value = 1888;
+       let channel_value = 1977;
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, channel_value, 700000, InitFeatures::known(), InitFeatures::known());
        let channel_id = chan.2;
+       let secp_ctx = Secp256k1::new();
 
        let feerate = 260;
        {
@@ -622,16 +622,70 @@ fn test_update_fee_that_funder_cannot_afford() {
                *feerate_lock = feerate + 2;
        }
        nodes[0].node.timer_tick_occurred();
-       check_added_monitors!(nodes[0], 1);
+       nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Cannot afford to send new feerate at {}", feerate + 2), 1);
+       check_added_monitors!(nodes[0], 0);
+
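+       // Note: 281474976710654 is 2^48 - 2; commitment numbers are 48-bit values which LDK
+       // counts downward from the initial commitment.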
+       const INITIAL_COMMITMENT_NUMBER: u64 = 281474976710654;
+
+       // Get the EnforcingSigner for each channel, which will be used to (1) get the keys
+       // needed to sign the new commitment tx and (2) sign the new commitment tx.
+       let (local_revocation_basepoint, local_htlc_basepoint, local_funding) = {
+               let chan_lock = nodes[0].node.channel_state.lock().unwrap();
+               let local_chan = chan_lock.by_id.get(&chan.2).unwrap();
+               let chan_signer = local_chan.get_signer();
+               let pubkeys = chan_signer.pubkeys();
+               (pubkeys.revocation_basepoint, pubkeys.htlc_basepoint,
+                pubkeys.funding_pubkey)
+       };
+       let (remote_delayed_payment_basepoint, remote_htlc_basepoint, remote_point, remote_funding) = {
+               let chan_lock = nodes[1].node.channel_state.lock().unwrap();
+               let remote_chan = chan_lock.by_id.get(&chan.2).unwrap();
+               let chan_signer = remote_chan.get_signer();
+               let pubkeys = chan_signer.pubkeys();
+               (pubkeys.delayed_payment_basepoint, pubkeys.htlc_basepoint,
+                chan_signer.get_per_commitment_point(INITIAL_COMMITMENT_NUMBER - 1, &secp_ctx),
+                pubkeys.funding_pubkey)
+       };
+
+       // Assemble the set of keys we can use for signatures for our commitment_signed message.
+       let commit_tx_keys = chan_utils::TxCreationKeys::derive_new(&secp_ctx, &remote_point, &remote_delayed_payment_basepoint,
+               &remote_htlc_basepoint, &local_revocation_basepoint, &local_htlc_basepoint).unwrap();
+
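+       // Manually build and sign (as nodes[0]) a counterparty commitment transaction at the higher
+       // feerate, since the ChannelManager above refused to send an update_fee it cannot afford.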
+       let res = {
+               let local_chan_lock = nodes[0].node.channel_state.lock().unwrap();
+               let local_chan = local_chan_lock.by_id.get(&chan.2).unwrap();
+               let local_chan_signer = local_chan.get_signer();
+               let mut htlcs: Vec<(HTLCOutputInCommitment, ())> = vec![];
+               let commitment_tx = CommitmentTransaction::new_with_auxiliary_htlc_data(
+                       INITIAL_COMMITMENT_NUMBER - 1,
+                       700,
+                       999,
+                       false, local_funding, remote_funding,
+                       commit_tx_keys.clone(),
+                       feerate + 124,
+                       &mut htlcs,
+                       &local_chan.channel_transaction_parameters.as_counterparty_broadcastable()
+               );
+               local_chan_signer.sign_counterparty_commitment(&commitment_tx, &secp_ctx).unwrap()
+       };
+
+       let commit_signed_msg = msgs::CommitmentSigned {
+               channel_id: chan.2,
+               signature: res.0,
+               htlc_signatures: res.1
+       };
 
-       let update2_msg = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
+       let update_fee = msgs::UpdateFee {
+               channel_id: chan.2,
+               feerate_per_kw: feerate + 124,
+       };
 
-       nodes[1].node.handle_update_fee(&nodes[0].node.get_our_node_id(), &update2_msg.update_fee.unwrap());
+       nodes[1].node.handle_update_fee(&nodes[0].node.get_our_node_id(), &update_fee);
 
        // While producing the commitment_signed response after handling a received update_fee request,
        // the check that the funder (who sent the update_fee request) can afford the new fee
        // (funder_balance >= fee + channel_reserve) should produce an error.
-       nodes[1].node.handle_commitment_signed(&nodes[0].node.get_our_node_id(), &update2_msg.commitment_signed);
+       nodes[1].node.handle_commitment_signed(&nodes[0].node.get_our_node_id(), &commit_signed_msg);
        nodes[1].logger.assert_log("lightning::ln::channelmanager".to_string(), "Funding remote cannot afford proposed new fee".to_string(), 1);
        check_added_monitors!(nodes[1], 1);
        check_closed_broadcast!(nodes[1], true);
@@ -1153,7 +1207,7 @@ fn test_duplicate_htlc_different_direction_onchain() {
        let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &vec!(&nodes[1])[..], 900_000);
 
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[1], nodes[0], 800_000);
-       let node_a_payment_secret = nodes[0].node.create_inbound_payment_for_hash(payment_hash, None, 7200, 0).unwrap();
+       let node_a_payment_secret = nodes[0].node.create_inbound_payment_for_hash(payment_hash, None, 7200).unwrap();
        send_along_route_with_secret(&nodes[1], route, &[&[&nodes[0]]], 800_000, payment_hash, node_a_payment_secret);
 
        // Provide preimage to node 0 by claiming payment
@@ -1208,7 +1262,7 @@ fn test_duplicate_htlc_different_direction_onchain() {
                        MessageSendEvent::BroadcastChannelUpdate { .. } => {},
                        MessageSendEvent::HandleError { node_id, action: msgs::ErrorAction::SendErrorMessage { ref msg } } => {
                                assert_eq!(node_id, nodes[1].node.get_our_node_id());
-                               assert_eq!(msg.data, "Commitment or closing transaction was confirmed on chain.");
+                               assert_eq!(msg.data, "Channel closed because commitment or closing transaction was confirmed on chain.");
                        },
                        MessageSendEvent::UpdateHTLCs { ref node_id, updates: msgs::CommitmentUpdate { ref update_add_htlcs, ref update_fulfill_htlcs, ref update_fail_htlcs, ref update_fail_malformed_htlcs, .. } } => {
                                assert!(update_add_htlcs.is_empty());
@@ -1905,7 +1959,7 @@ fn channel_reserve_in_flight_removes() {
        nodes[0].node.handle_commitment_signed(&nodes[1].node.get_our_node_id(), &bs_removes.commitment_signed);
        check_added_monitors!(nodes[0], 1);
        let as_raa = get_event_msg!(nodes[0], MessageSendEvent::SendRevokeAndACK, nodes[1].node.get_our_node_id());
-       expect_payment_sent!(nodes[0], payment_preimage_1);
+       expect_payment_sent_without_paths!(nodes[0], payment_preimage_1);
 
        nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &send_1.msgs[0]);
        nodes[1].node.handle_commitment_signed(&nodes[0].node.get_our_node_id(), &send_1.commitment_msg);
@@ -1934,7 +1988,7 @@ fn channel_reserve_in_flight_removes() {
        nodes[0].node.handle_commitment_signed(&nodes[1].node.get_our_node_id(), &bs_cs.commitment_signed);
        check_added_monitors!(nodes[0], 1);
        let as_raa = get_event_msg!(nodes[0], MessageSendEvent::SendRevokeAndACK, nodes[1].node.get_our_node_id());
-       expect_payment_sent!(nodes[0], payment_preimage_2);
+       expect_payment_sent_without_paths!(nodes[0], payment_preimage_2);
 
        nodes[1].node.handle_revoke_and_ack(&nodes[0].node.get_our_node_id(), &as_raa);
        check_added_monitors!(nodes[1], 1);
@@ -1947,6 +2001,7 @@ fn channel_reserve_in_flight_removes() {
        // resolve the second HTLC from A's point of view.
        nodes[0].node.handle_revoke_and_ack(&nodes[1].node.get_our_node_id(), &bs_raa);
        check_added_monitors!(nodes[0], 1);
+       expect_payment_path_successful!(nodes[0]);
        let as_cs = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
 
        // Now that B doesn't have the second RAA anymore, but A still does, send a payment from B back
@@ -1976,6 +2031,7 @@ fn channel_reserve_in_flight_removes() {
 
        nodes[0].node.handle_revoke_and_ack(&nodes[1].node.get_our_node_id(), &bs_raa);
        check_added_monitors!(nodes[0], 1);
+       expect_payment_path_successful!(nodes[0]);
        let as_cs = get_htlc_update_msgs!(nodes[0], nodes[1].node.get_our_node_id());
 
        nodes[1].node.handle_commitment_signed(&nodes[0].node.get_our_node_id(), &as_cs.commitment_signed);
@@ -2655,7 +2711,7 @@ fn test_htlc_on_chain_success() {
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
        let events = nodes[0].node.get_and_clear_pending_events();
-       assert_eq!(events.len(), 3);
+       assert_eq!(events.len(), 5);
        let mut first_claimed = false;
        for event in events {
                match event {
@@ -2668,6 +2724,7 @@ fn test_htlc_on_chain_success() {
                                        assert_eq!(payment_hash, payment_hash_2);
                                }
                        },
+                       Event::PaymentPathSuccessful { .. } => {},
                        Event::ChannelClosed { reason: ClosureReason::CommitmentTxConfirmed, .. } => {},
                        _ => panic!("Unexpected event"),
                }
@@ -3018,7 +3075,7 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
        match events[if deliver_bs_raa { 2 } else { 1 }] {
                MessageSendEvent::HandleError { action: ErrorAction::SendErrorMessage { msg: msgs::ErrorMessage { channel_id, ref data } }, node_id: _ } => {
                        assert_eq!(channel_id, chan_2.2);
-                       assert_eq!(data.as_str(), "Commitment or closing transaction was confirmed on chain.");
+                       assert_eq!(data.as_str(), "Channel closed because commitment or closing transaction was confirmed on chain.");
                },
                _ => panic!("Unexpected event"),
        }
@@ -3302,13 +3359,13 @@ fn test_dup_events_on_peer_disconnect() {
        check_added_monitors!(nodes[1], 1);
        let claim_msgs = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &claim_msgs.update_fulfill_htlcs[0]);
-       expect_payment_sent!(nodes[0], payment_preimage);
+       expect_payment_sent_without_paths!(nodes[0], payment_preimage);
 
        nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
        nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
 
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (1, 0), (0, 0), (0, 0), (0, 0), (false, false));
-       assert!(nodes[0].node.get_and_clear_pending_events().is_empty());
+       expect_payment_path_successful!(nodes[0]);
 }
 
 #[test]
@@ -3348,7 +3405,7 @@ fn test_simple_peer_disconnect() {
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (1, 0), (1, 0), (false, false));
        {
                let events = nodes[0].node.get_and_clear_pending_events();
-               assert_eq!(events.len(), 2);
+               assert_eq!(events.len(), 3);
                match events[0] {
                        Event::PaymentSent { payment_preimage, payment_hash, .. } => {
                                assert_eq!(payment_preimage, payment_preimage_3);
@@ -3363,6 +3420,10 @@ fn test_simple_peer_disconnect() {
                        },
                        _ => panic!("Unexpected event"),
                }
+               match events[2] {
+                       Event::PaymentPathSuccessful { .. } => {},
+                       _ => panic!("Unexpected event"),
+               }
        }
 
        claim_payment(&nodes[0], &vec!(&nodes[1], &nodes[2]), payment_preimage_4);
@@ -3552,15 +3613,7 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
        if messages_delivered < 2 {
                reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (1, 0), (0, 0), (0, 0), (0, 0), (false, false));
                if messages_delivered < 1 {
-                       let events_4 = nodes[0].node.get_and_clear_pending_events();
-                       assert_eq!(events_4.len(), 1);
-                       match events_4[0] {
-                               Event::PaymentSent { ref payment_preimage, ref payment_hash, .. } => {
-                                       assert_eq!(payment_preimage_1, *payment_preimage);
-                                       assert_eq!(payment_hash_1, *payment_hash);
-                               },
-                               _ => panic!("Unexpected event"),
-                       }
+                       expect_payment_sent!(nodes[0], payment_preimage_1);
                } else {
                        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
                }
@@ -3578,10 +3631,18 @@ fn do_test_drop_messages_peer_disconnect(messages_delivered: u8, simulate_broken
                reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
        }
 
+       if messages_delivered == 1 || messages_delivered == 2 {
+               expect_payment_path_successful!(nodes[0]);
+       }
+
        nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
        nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
        reconnect_nodes(&nodes[0], &nodes[1], (false, false), (0, 0), (0, 0), (0, 0), (0, 0), (0, 0), (false, false));
 
+       if messages_delivered > 2 {
+               expect_payment_path_successful!(nodes[0]);
+       }
+
        // Channel should still work fine...
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[0], nodes[1], 1000000);
        let payment_preimage_2 = send_along_route(&nodes[0], route, &[&nodes[1]], 1000000).0;
@@ -3893,6 +3954,7 @@ fn test_drop_messages_peer_disconnect_dual_htlc() {
        assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
        check_added_monitors!(nodes[0], 1);
 
+       expect_payment_path_successful!(nodes[0]);
        claim_payment(&nodes[0], &[&nodes[1]], payment_preimage_2);
 }
 
@@ -3995,7 +4057,7 @@ fn do_test_holding_cell_htlc_add_timeouts(forwarded_htlc: bool) {
        }
        check_added_monitors!(nodes[1], 0);
 
-       connect_blocks(&nodes[1], TEST_FINAL_CLTV - CLTV_CLAIM_BUFFER - LATENCY_GRACE_PERIOD_BLOCKS);
+       connect_blocks(&nodes[1], TEST_FINAL_CLTV - LATENCY_GRACE_PERIOD_BLOCKS);
        assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
        assert!(nodes[1].node.get_and_clear_pending_events().is_empty());
        connect_blocks(&nodes[1], 1);
@@ -4958,7 +5020,7 @@ fn test_duplicate_payment_hash_one_failure_one_success() {
 
        let (our_payment_preimage, duplicate_payment_hash, _) = route_payment(&nodes[0], &vec!(&nodes[1], &nodes[2])[..], 900000);
 
-       let payment_secret = nodes[3].node.create_inbound_payment_for_hash(duplicate_payment_hash, None, 7200, 0).unwrap();
+       let payment_secret = nodes[3].node.create_inbound_payment_for_hash(duplicate_payment_hash, None, 7200).unwrap();
        // We reduce the final CLTV here by a somewhat arbitrary constant to keep it under the one-byte
        // script push size limit so that the below script length checks match
        // ACCEPTED_HTLC_SCRIPT_WEIGHT.
@@ -5161,30 +5223,30 @@ fn do_test_fail_backwards_unrevoked_remote_announce(deliver_last_raa: bool, anno
        let (_, payment_hash_2, _) = route_payment(&nodes[0], &[&nodes[2], &nodes[3], &nodes[4]], ds_dust_limit*1000); // not added < dust limit + HTLC tx fee
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[1], nodes[5], ds_dust_limit*1000);
        // 2nd HTLC:
-       send_along_route_with_secret(&nodes[1], route.clone(), &[&[&nodes[2], &nodes[3], &nodes[5]]], ds_dust_limit*1000, payment_hash_1, nodes[5].node.create_inbound_payment_for_hash(payment_hash_1, None, 7200, 0).unwrap()); // not added < dust limit + HTLC tx fee
+       send_along_route_with_secret(&nodes[1], route.clone(), &[&[&nodes[2], &nodes[3], &nodes[5]]], ds_dust_limit*1000, payment_hash_1, nodes[5].node.create_inbound_payment_for_hash(payment_hash_1, None, 7200).unwrap()); // not added < dust limit + HTLC tx fee
        // 3rd HTLC:
-       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], ds_dust_limit*1000, payment_hash_2, nodes[5].node.create_inbound_payment_for_hash(payment_hash_2, None, 7200, 0).unwrap()); // not added < dust limit + HTLC tx fee
+       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], ds_dust_limit*1000, payment_hash_2, nodes[5].node.create_inbound_payment_for_hash(payment_hash_2, None, 7200).unwrap()); // not added < dust limit + HTLC tx fee
        // 4th HTLC:
        let (_, payment_hash_3, _) = route_payment(&nodes[0], &[&nodes[2], &nodes[3], &nodes[4]], 1000000);
        // 5th HTLC:
        let (_, payment_hash_4, _) = route_payment(&nodes[0], &[&nodes[2], &nodes[3], &nodes[4]], 1000000);
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[1], nodes[5], 1000000);
        // 6th HTLC:
-       send_along_route_with_secret(&nodes[1], route.clone(), &[&[&nodes[2], &nodes[3], &nodes[5]]], 1000000, payment_hash_3, nodes[5].node.create_inbound_payment_for_hash(payment_hash_3, None, 7200, 0).unwrap());
+       send_along_route_with_secret(&nodes[1], route.clone(), &[&[&nodes[2], &nodes[3], &nodes[5]]], 1000000, payment_hash_3, nodes[5].node.create_inbound_payment_for_hash(payment_hash_3, None, 7200).unwrap());
        // 7th HTLC:
-       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], 1000000, payment_hash_4, nodes[5].node.create_inbound_payment_for_hash(payment_hash_4, None, 7200, 0).unwrap());
+       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], 1000000, payment_hash_4, nodes[5].node.create_inbound_payment_for_hash(payment_hash_4, None, 7200).unwrap());
 
        // 8th HTLC:
        let (_, payment_hash_5, _) = route_payment(&nodes[0], &[&nodes[2], &nodes[3], &nodes[4]], 1000000);
        // 9th HTLC:
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[1], nodes[5], ds_dust_limit*1000);
-       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], ds_dust_limit*1000, payment_hash_5, nodes[5].node.create_inbound_payment_for_hash(payment_hash_5, None, 7200, 0).unwrap()); // not added < dust limit + HTLC tx fee
+       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], ds_dust_limit*1000, payment_hash_5, nodes[5].node.create_inbound_payment_for_hash(payment_hash_5, None, 7200).unwrap()); // not added < dust limit + HTLC tx fee
 
        // 10th HTLC:
        let (_, payment_hash_6, _) = route_payment(&nodes[0], &[&nodes[2], &nodes[3], &nodes[4]], ds_dust_limit*1000); // not added < dust limit + HTLC tx fee
        // 11th HTLC:
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[1], nodes[5], 1000000);
-       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], 1000000, payment_hash_6, nodes[5].node.create_inbound_payment_for_hash(payment_hash_6, None, 7200, 0).unwrap());
+       send_along_route_with_secret(&nodes[1], route, &[&[&nodes[2], &nodes[3], &nodes[5]]], 1000000, payment_hash_6, nodes[5].node.create_inbound_payment_for_hash(payment_hash_6, None, 7200).unwrap());
 
        // Double-check that six of the new HTLCs were added
        // We now have six HTLCs pending over the dust limit and six HTLCs under the dust limit (ie,
@@ -5560,24 +5622,16 @@ fn do_htlc_claim_local_commitment_only(use_dust: bool) {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
        let chan = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let (our_payment_preimage, our_payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], if use_dust { 50000 } else { 3000000 });
+       let (payment_preimage, _, _) = route_payment(&nodes[0], &[&nodes[1]], if use_dust { 50000 } else { 3000000 });
 
        // Claim the payment, but don't deliver A's commitment_signed, resulting in the HTLC only being
        // present in B's local commitment transaction, but none of A's commitment transactions.
-       assert!(nodes[1].node.claim_funds(our_payment_preimage));
+       assert!(nodes[1].node.claim_funds(payment_preimage));
        check_added_monitors!(nodes[1], 1);
 
        let bs_updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &bs_updates.update_fulfill_htlcs[0]);
-       let events = nodes[0].node.get_and_clear_pending_events();
-       assert_eq!(events.len(), 1);
-       match events[0] {
-               Event::PaymentSent { payment_preimage, payment_hash, .. } => {
-                       assert_eq!(payment_preimage, our_payment_preimage);
-                       assert_eq!(payment_hash, our_payment_hash);
-               },
-               _ => panic!("Unexpected event"),
-       }
+       expect_payment_sent_without_paths!(nodes[0], payment_preimage);
 
        nodes[0].node.handle_commitment_signed(&nodes[1].node.get_our_node_id(), &bs_updates.commitment_signed);
        check_added_monitors!(nodes[0], 1);
@@ -5999,15 +6053,7 @@ fn test_free_and_fail_holding_cell_htlcs() {
        let update_msgs = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
        nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &update_msgs.update_fulfill_htlcs[0]);
        commitment_signed_dance!(nodes[0], nodes[1], update_msgs.commitment_signed, false, true);
-       let events = nodes[0].node.get_and_clear_pending_events();
-       assert_eq!(events.len(), 1);
-       match events[0] {
-               Event::PaymentSent { ref payment_preimage, ref payment_hash, .. } => {
-                       assert_eq!(*payment_preimage, payment_preimage_1);
-                       assert_eq!(*payment_hash, payment_hash_1);
-               }
-               _ => panic!("Unexpected event"),
-       }
+       expect_payment_sent!(nodes[0], payment_preimage_1);
 }
 
 // Test that if we fail to forward an HTLC that is being freed from the holding cell that the
@@ -6983,7 +7029,7 @@ fn test_user_configurable_csv_delay() {
        let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 
        // We test config.our_to_self > BREAKDOWN_TIMEOUT is enforced in Channel::new_outbound()
-       if let Err(error) = Channel::new_outbound(&&test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) }, &nodes[0].keys_manager, nodes[1].node.get_our_node_id(), &InitFeatures::known(), 1000000, 1000000, 0, &low_our_to_self_config) {
+       if let Err(error) = Channel::new_outbound(&&test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) }, &nodes[0].keys_manager, nodes[1].node.get_our_node_id(), &InitFeatures::known(), 1000000, 1000000, 0, &low_our_to_self_config, 0) {
                match error {
                        APIError::APIMisuseError { err } => { assert!(regex::Regex::new(r"Configured with an unreasonable our_to_self_delay \(\d+\) putting user funds at risks").unwrap().is_match(err.as_str())); },
                        _ => panic!("Unexpected event"),
@@ -6994,7 +7040,7 @@ fn test_user_configurable_csv_delay() {
        nodes[1].node.create_channel(nodes[0].node.get_our_node_id(), 1000000, 1000000, 42, None).unwrap();
        let mut open_channel = get_event_msg!(nodes[1], MessageSendEvent::SendOpenChannel, nodes[0].node.get_our_node_id());
        open_channel.to_self_delay = 200;
-       if let Err(error) = Channel::new_from_req(&&test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) }, &nodes[0].keys_manager, nodes[1].node.get_our_node_id(), &InitFeatures::known(), &open_channel, 0, &low_our_to_self_config) {
+       if let Err(error) = Channel::new_from_req(&&test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) }, &nodes[0].keys_manager, nodes[1].node.get_our_node_id(), &InitFeatures::known(), &open_channel, 0, &low_our_to_self_config, 0) {
                match error {
                        ChannelError::Close(err) => { assert!(regex::Regex::new(r"Configured with an unreasonable our_to_self_delay \(\d+\) putting user funds at risks").unwrap().is_match(err.as_str()));  },
                        _ => panic!("Unexpected event"),
@@ -7023,7 +7069,7 @@ fn test_user_configurable_csv_delay() {
        nodes[1].node.create_channel(nodes[0].node.get_our_node_id(), 1000000, 1000000, 42, None).unwrap();
        let mut open_channel = get_event_msg!(nodes[1], MessageSendEvent::SendOpenChannel, nodes[0].node.get_our_node_id());
        open_channel.to_self_delay = 200;
-       if let Err(error) = Channel::new_from_req(&&test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) }, &nodes[0].keys_manager, nodes[1].node.get_our_node_id(), &InitFeatures::known(), &open_channel, 0, &high_their_to_self_config) {
+       if let Err(error) = Channel::new_from_req(&&test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) }, &nodes[0].keys_manager, nodes[1].node.get_our_node_id(), &InitFeatures::known(), &open_channel, 0, &high_their_to_self_config, 0) {
                match error {
                        ChannelError::Close(err) => { assert!(regex::Regex::new(r"They wanted our payments to be delayed by a needlessly long period\. Upper limit: \d+\. Actual: \d+").unwrap().is_match(err.as_str())); },
                        _ => panic!("Unexpected event"),
@@ -7161,11 +7207,11 @@ fn test_check_htlc_underpaying() {
        // Create some initial channels
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
 
-       let scorer = Scorer::with_fixed_penalty(0);
-       let payee = Payee::new(nodes[1].node.get_our_node_id()).with_features(InvoiceFeatures::known());
+       let scorer = test_utils::TestScorer::with_fixed_penalty(0);
+       let payee = Payee::from_node_id(nodes[1].node.get_our_node_id()).with_features(InvoiceFeatures::known());
        let route = get_route(&nodes[0].node.get_our_node_id(), &payee, nodes[0].network_graph, None, 10_000, TEST_FINAL_CLTV, nodes[0].logger, &scorer).unwrap();
        let (_, our_payment_hash, _) = get_payment_preimage_hash!(nodes[0]);
-       let our_payment_secret = nodes[1].node.create_inbound_payment_for_hash(our_payment_hash, Some(100_000), 7200, 0).unwrap();
+       let our_payment_secret = nodes[1].node.create_inbound_payment_for_hash(our_payment_hash, Some(100_000), 7200).unwrap();
        nodes[0].node.send_payment(&route, our_payment_hash, &Some(our_payment_secret)).unwrap();
        check_added_monitors!(nodes[0], 1);
 
@@ -7560,12 +7606,12 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
 
        let chan = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1000000, 59000000, InitFeatures::known(), InitFeatures::known());
        // Lock HTLC in both directions (using a slightly lower CLTV delay to provide timely RBF bumps)
-       let payee = Payee::new(nodes[1].node.get_our_node_id()).with_features(InvoiceFeatures::known());
-       let scorer = Scorer::with_fixed_penalty(0);
+       let payee = Payee::from_node_id(nodes[1].node.get_our_node_id()).with_features(InvoiceFeatures::known());
+       let scorer = test_utils::TestScorer::with_fixed_penalty(0);
        let route = get_route(&nodes[0].node.get_our_node_id(), &payee, &nodes[0].network_graph, None,
                3_000_000, 50, nodes[0].logger, &scorer).unwrap();
        let payment_preimage = send_along_route(&nodes[0], route, &[&nodes[1]], 3_000_000).0;
-       let payee = Payee::new(nodes[0].node.get_our_node_id()).with_features(InvoiceFeatures::known());
+       let payee = Payee::from_node_id(nodes[0].node.get_our_node_id()).with_features(InvoiceFeatures::known());
        let route = get_route(&nodes[1].node.get_our_node_id(), &payee, nodes[1].network_graph, None,
                3_000_000, 50, nodes[0].logger, &scorer).unwrap();
        send_along_route(&nodes[1], route, &[&nodes[0]], 3_000_000);
@@ -7909,6 +7955,42 @@ fn test_bump_txn_sanitize_tracking_maps() {
        }
 }
 
+#[test]
+fn test_channel_conf_timeout() {
+       // Tests that, for inbound channels, we give up on them if the funding transaction does not
+       // confirm within 2016 blocks, as recommended by BOLT 2.
+       let chanmon_cfgs = create_chanmon_cfgs(2);
+       let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
+       let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
+       let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
+
+       let _funding_tx = create_chan_between_nodes_with_value_init(&nodes[0], &nodes[1], 1_000_000, 100_000, InitFeatures::known(), InitFeatures::known());
+
+       // The outbound node should wait forever for confirmation:
+       // This matches `channel::FUNDING_CONF_DEADLINE_BLOCKS` and BOLT 2's suggested timeout, thus is
+       // copied here instead of directly referencing the constant.
+       connect_blocks(&nodes[0], 2016);
+       assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
+
+       // The inbound node should fail the channel after exactly 2016 blocks
+       connect_blocks(&nodes[1], 2015);
+       check_added_monitors!(nodes[1], 0);
+       assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
+
+       connect_blocks(&nodes[1], 1);
+       check_added_monitors!(nodes[1], 1);
+       check_closed_event!(nodes[1], 1, ClosureReason::FundingTimedOut);
+       let close_ev = nodes[1].node.get_and_clear_pending_msg_events();
+       assert_eq!(close_ev.len(), 1);
+       match close_ev[0] {
+               MessageSendEvent::HandleError { action: ErrorAction::SendErrorMessage { ref msg }, ref node_id } => {
+                       assert_eq!(*node_id, nodes[0].node.get_our_node_id());
+                       assert_eq!(msg.data, "Channel closed because funding transaction failed to confirm within 2016 blocks");
+               },
+               _ => panic!("Unexpected event"),
+       }
+}
+
 #[test]
 fn test_override_channel_config() {
        let chanmon_cfgs = create_chanmon_cfgs(2);
@@ -7983,7 +8065,7 @@ fn test_preimage_storage() {
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).0.contents.short_channel_id;
 
        {
-               let (payment_hash, payment_secret) = nodes[1].node.create_inbound_payment(Some(100_000), 7200, 42);
+               let (payment_hash, payment_secret) = nodes[1].node.create_inbound_payment(Some(100_000), 7200);
                let (route, _, _, _) = get_route_and_payment_hash!(nodes[0], nodes[1], 100_000);
                nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)).unwrap();
                check_added_monitors!(nodes[0], 1);
@@ -8000,8 +8082,7 @@ fn test_preimage_storage() {
        match events[0] {
                Event::PaymentReceived { ref purpose, .. } => {
                        match &purpose {
-                               PaymentPurpose::InvoicePayment { payment_preimage, user_payment_id, .. } => {
-                                       assert_eq!(*user_payment_id, 42);
+                               PaymentPurpose::InvoicePayment { payment_preimage, .. } => {
                                        claim_payment(&nodes[0], &[&nodes[1]], payment_preimage.unwrap());
                                },
                                _ => panic!("expected PaymentPurpose::InvoicePayment")
@@ -8021,11 +8102,11 @@ fn test_secret_timeout() {
 
        create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).0.contents.short_channel_id;
 
-       let (payment_hash, payment_secret_1) = nodes[1].node.create_inbound_payment(Some(100_000), 2, 0);
+       let (payment_hash, payment_secret_1) = nodes[1].node.create_inbound_payment(Some(100_000), 2);
 
        // We should fail to register the same payment hash twice, at least until we've connected a
        // block with time 7200 + CHAN_CONFIRM_DEPTH + 1.
-       if let Err(APIError::APIMisuseError { err }) = nodes[1].node.create_inbound_payment_for_hash(payment_hash, Some(100_000), 2, 0) {
+       if let Err(APIError::APIMisuseError { err }) = nodes[1].node.create_inbound_payment_for_hash(payment_hash, Some(100_000), 2) {
                assert_eq!(err, "Duplicate payment hash");
        } else { panic!(); }
        let mut block = {
@@ -8040,16 +8121,16 @@ fn test_secret_timeout() {
                }
        };
        connect_block(&nodes[1], &block);
-       if let Err(APIError::APIMisuseError { err }) = nodes[1].node.create_inbound_payment_for_hash(payment_hash, Some(100_000), 2, 0) {
+       if let Err(APIError::APIMisuseError { err }) = nodes[1].node.create_inbound_payment_for_hash(payment_hash, Some(100_000), 2) {
                assert_eq!(err, "Duplicate payment hash");
        } else { panic!(); }
 
        // If we then connect the second block, we should be able to register the same payment hash
-       // again with a different user_payment_id (this time getting a new payment secret).
+       // again (this time getting a new payment secret).
        block.header.prev_blockhash = block.header.block_hash();
        block.header.time += 1;
        connect_block(&nodes[1], &block);
-       let our_payment_secret = nodes[1].node.create_inbound_payment_for_hash(payment_hash, Some(100_000), 2, 42).unwrap();
+       let our_payment_secret = nodes[1].node.create_inbound_payment_for_hash(payment_hash, Some(100_000), 2).unwrap();
        assert_ne!(payment_secret_1, our_payment_secret);
 
        {
@@ -8067,9 +8148,8 @@ fn test_secret_timeout() {
        let events = nodes[1].node.get_and_clear_pending_events();
        assert_eq!(events.len(), 1);
        match events[0] {
-               Event::PaymentReceived { purpose: PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, user_payment_id }, .. } => {
+               Event::PaymentReceived { purpose: PaymentPurpose::InvoicePayment { payment_preimage, payment_secret }, .. } => {
                        assert!(payment_preimage.is_none());
-                       assert_eq!(user_payment_id, 42);
                        assert_eq!(payment_secret, our_payment_secret);
                        // We don't actually have the payment preimage with which to claim this payment!
                },
@@ -8089,7 +8169,7 @@ fn test_bad_secret_hash() {
 
        let random_payment_hash = PaymentHash([42; 32]);
        let random_payment_secret = PaymentSecret([43; 32]);
-       let (our_payment_hash, our_payment_secret) = nodes[1].node.create_inbound_payment(Some(100_000), 2, 0);
+       let (our_payment_hash, our_payment_secret) = nodes[1].node.create_inbound_payment(Some(100_000), 2);
        let (route, _, _, _) = get_route_and_payment_hash!(nodes[0], nodes[1], 100_000);
 
        // All the below cases should end up being handled exactly identically, so we macro the
@@ -8832,15 +8912,16 @@ fn test_invalid_funding_tx() {
        assert_eq!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap()[0], tx);
        nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().clear();
 
+       let expected_err = "funding tx had wrong script/value or output index";
        confirm_transaction_at(&nodes[1], &tx, 1);
-       check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
+       check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: expected_err.to_string() });
        check_added_monitors!(nodes[1], 1);
        let events_2 = nodes[1].node.get_and_clear_pending_msg_events();
        assert_eq!(events_2.len(), 1);
        if let MessageSendEvent::HandleError { node_id, action } = &events_2[0] {
                assert_eq!(*node_id, nodes[0].node.get_our_node_id());
                if let msgs::ErrorAction::SendErrorMessage { msg } = action {
-                       assert_eq!(msg.data, "funding tx had wrong script/value or output index");
+                       assert_eq!(msg.data, "Channel closed because of an exception: ".to_owned() + expected_err);
                } else { panic!(); }
        } else { panic!(); }
        assert_eq!(nodes[1].node.list_channels().len(), 0);
@@ -9061,7 +9142,7 @@ fn test_keysend_payments_to_public_node() {
                final_value_msat: 10000,
                final_cltv_expiry_delta: 40,
        };
-       let scorer = Scorer::with_fixed_penalty(0);
+       let scorer = test_utils::TestScorer::with_fixed_penalty(0);
        let route = find_route(&payer_pubkey, &params, network_graph, None, nodes[0].logger, &scorer).unwrap();
 
        let test_preimage = PaymentPreimage([42; 32]);
@@ -9095,7 +9176,7 @@ fn test_keysend_payments_to_private_node() {
        };
        let network_graph = nodes[0].network_graph;
        let first_hops = nodes[0].node.list_usable_channels();
-       let scorer = Scorer::with_fixed_penalty(0);
+       let scorer = test_utils::TestScorer::with_fixed_penalty(0);
        let route = find_route(
                &payer_pubkey, &params, network_graph, Some(&first_hops.iter().collect::<Vec<_>>()),
                nodes[0].logger, &scorer
@@ -9111,3 +9192,175 @@ fn test_keysend_payments_to_private_node() {
        pass_along_path(&nodes[0], &path, 10000, payment_hash, None, event, true, Some(test_preimage));
        claim_payment(&nodes[0], &path, test_preimage);
 }
+
+/// The possible events which may trigger a `max_dust_htlc_exposure` breach
+#[derive(Clone, Copy, PartialEq)]
+enum ExposureEvent {
+       /// Breach occurs at HTLC forwarding (see `send_htlc`)
+       AtHTLCForward,
+       /// Breach occurs at HTLC reception (see `update_add_htlc`)
+       AtHTLCReception,
+       /// Breach occurs at outbound update_fee (see `send_update_fee`)
+       AtUpdateFeeOutbound,
+}
+
+fn do_test_max_dust_htlc_exposure(dust_outbound_balance: bool, exposure_breach_event: ExposureEvent, on_holder_tx: bool) {
+       // Test that we properly reject dust HTLC violating our `max_dust_htlc_exposure_msat`
+       // policy.
+       //
+       // At HTLC forward (`send_payment()`), if the sum of the trimmed-to-dust HTLC inbound and
+       // trimmed-to-dust HTLC outbound balance and this new payment as included on next
+       // counterparty commitment are above our `max_dust_htlc_exposure_msat`, we'll reject the
+       // update. At HTLC reception (`update_add_htlc()`), if the sum of the trimmed-to-dust HTLC
+       // inbound and trimmed-to-dust HTLC outbound balance and this new received HTLC as included
+       // on next counterparty commitment are above our `max_dust_htlc_exposure_msat`, we'll fail
+       // the update. Note, we return a `temporary_channel_failure` (0x1000 | 7), as the channel
+       // might be available again for HTLC processing once the dust bandwidth has cleared up.
+
+       let chanmon_cfgs = create_chanmon_cfgs(2);
+       let mut config = test_default_channel_config();
+       config.channel_options.max_dust_htlc_exposure_msat = 5_000_000; // default setting value
+       let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
+       let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[Some(config), None]);
+       let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
+
+       nodes[0].node.create_channel(nodes[1].node.get_our_node_id(), 1_000_000, 500_000_000, 42, None).unwrap();
+       let mut open_channel = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
+       open_channel.max_htlc_value_in_flight_msat = 50_000_000;
+       open_channel.max_accepted_htlcs = 60;
+       if on_holder_tx {
+               open_channel.dust_limit_satoshis = 546;
+       }
+       nodes[1].node.handle_open_channel(&nodes[0].node.get_our_node_id(), InitFeatures::known(), &open_channel);
+       let mut accept_channel = get_event_msg!(nodes[1], MessageSendEvent::SendAcceptChannel, nodes[0].node.get_our_node_id());
+       nodes[0].node.handle_accept_channel(&nodes[1].node.get_our_node_id(), InitFeatures::known(), &accept_channel);
+
+       let (temporary_channel_id, tx, _) = create_funding_transaction(&nodes[0], 1_000_000, 42);
+
+       if on_holder_tx {
+               if let Some(mut chan) = nodes[0].node.channel_state.lock().unwrap().by_id.get_mut(&temporary_channel_id) {
+                       chan.holder_dust_limit_satoshis = 546;
+               }
+       }
+
+       nodes[0].node.funding_transaction_generated(&temporary_channel_id, tx.clone()).unwrap();
+       nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id()));
+       check_added_monitors!(nodes[1], 1);
+
+       nodes[0].node.handle_funding_signed(&nodes[1].node.get_our_node_id(), &get_event_msg!(nodes[1], MessageSendEvent::SendFundingSigned, nodes[0].node.get_our_node_id()));
+       check_added_monitors!(nodes[0], 1);
+
+       let (funding_locked, channel_id) = create_chan_between_nodes_with_value_confirm(&nodes[0], &nodes[1], &tx);
+       let (announcement, as_update, bs_update) = create_chan_between_nodes_with_value_b(&nodes[0], &nodes[1], &funding_locked);
+       update_nodes_with_chan_announce(&nodes, 0, 1, &announcement, &as_update, &bs_update);
+
+       let dust_buffer_feerate = {
+               let chan_lock = nodes[0].node.channel_state.lock().unwrap();
+               let chan = chan_lock.by_id.get(&channel_id).unwrap();
+               chan.get_dust_buffer_feerate(None) as u64
+       };
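+       // The holder-tx HTLC amounts below sit one satoshi under the relevant dust threshold
+       // (`dust_buffer_feerate` * HTLC-tx weight / 1000 + dust limit), so each such HTLC is trimmed
+       // to dust, and the counts are how many of them fit under `max_dust_htlc_exposure_msat`. The
+       // counterparty-tx case instead splits the exposure limit across 25 dust HTLCs.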
+       let dust_outbound_htlc_on_holder_tx_msat: u64 = (dust_buffer_feerate * HTLC_TIMEOUT_TX_WEIGHT / 1000 + open_channel.dust_limit_satoshis - 1) * 1000;
+       let dust_outbound_htlc_on_holder_tx: u64 = config.channel_options.max_dust_htlc_exposure_msat / dust_outbound_htlc_on_holder_tx_msat;
+
+       let dust_inbound_htlc_on_holder_tx_msat: u64 = (dust_buffer_feerate * HTLC_SUCCESS_TX_WEIGHT / 1000 + open_channel.dust_limit_satoshis - 1) * 1000;
+       let dust_inbound_htlc_on_holder_tx: u64 = config.channel_options.max_dust_htlc_exposure_msat / dust_inbound_htlc_on_holder_tx_msat;
+
+       let dust_htlc_on_counterparty_tx: u64 = 25;
+       let dust_htlc_on_counterparty_tx_msat: u64 = config.channel_options.max_dust_htlc_exposure_msat / dust_htlc_on_counterparty_tx;
+
+       if on_holder_tx {
+               if dust_outbound_balance {
+                       // Outbound dust threshold: 2223 sats (`dust_buffer_feerate` * HTLC_TIMEOUT_TX_WEIGHT / 1000 + holder's `dust_limit_satoshis`)
+                       // Outbound dust balance: 4372 sats
+                       // Note, we need the sent payment to be above the outbound dust threshold on counterparty_tx of 2132 sats
+                       for i in 0..dust_outbound_htlc_on_holder_tx {
+                               let (route, payment_hash, _, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[1], dust_outbound_htlc_on_holder_tx_msat);
+                               if let Err(_) = nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)) { panic!("Unexpected event at dust HTLC {}", i); }
+                       }
+               } else {
+                       // Inbound dust threshold: 2324 sats (`dust_buffer_feerate` * HTLC_SUCCESS_TX_WEIGHT / 1000 + holder's `dust_limit_satoshis`)
+                       // Inbound dust balance: 4372 sats
+                       // Note, we need the sent payment to be above the outbound dust threshold on counterparty_tx of 2031 sats
+                       for _ in 0..dust_inbound_htlc_on_holder_tx {
+                               route_payment(&nodes[1], &[&nodes[0]], dust_inbound_htlc_on_holder_tx_msat);
+                       }
+               }
+       } else {
+               if dust_outbound_balance {
+                       // Outbound dust threshold: 2132 sats (`dust_buffer_feerate` * HTLC_SUCCESS_TX_WEIGHT / 1000 + counterparty's `dust_limit_satoshis`)
+                       // Outbound dust balance: 5000 sats
+                       for i in 0..dust_htlc_on_counterparty_tx {
+                               let (route, payment_hash, _, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[1], dust_htlc_on_counterparty_tx_msat);
+                               if let Err(_) = nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)) { panic!("Unexpected event at dust HTLC {}", i); }
+                       }
+               } else {
+                       // Inbound dust threshold: 2031 sats (`dust_buffer_feerate` * HTLC_TIMEOUT_TX_WEIGHT / 1000 + counterparty's `dust_limit_satoshis`)
+                       // Inbound dust balance: 5000 sats
+                       for _ in 0..dust_htlc_on_counterparty_tx {
+                               route_payment(&nodes[1], &[&nodes[0]], dust_htlc_on_counterparty_tx_msat);
+                       }
+               }
+       }
+
+       let dust_overflow = dust_htlc_on_counterparty_tx_msat * (dust_htlc_on_counterparty_tx + 1);
+       if exposure_breach_event == ExposureEvent::AtHTLCForward {
+               let (route, payment_hash, _, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[1], if on_holder_tx { dust_outbound_htlc_on_holder_tx_msat } else { dust_htlc_on_counterparty_tx_msat });
+               let mut config = UserConfig::default();
+               // With default dust exposure: 5000 sats
+               if on_holder_tx {
+                       let dust_outbound_overflow = dust_outbound_htlc_on_holder_tx_msat * (dust_outbound_htlc_on_holder_tx + 1);
+                       let dust_inbound_overflow = dust_inbound_htlc_on_holder_tx_msat * dust_inbound_htlc_on_holder_tx + dust_outbound_htlc_on_holder_tx_msat;
+                       unwrap_send_err!(nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)), true, APIError::ChannelUnavailable { ref err }, assert_eq!(err, &format!("Cannot send value that would put our exposure to dust HTLCs at {} over the limit {} on holder commitment tx", if dust_outbound_balance { dust_outbound_overflow } else { dust_inbound_overflow }, config.channel_options.max_dust_htlc_exposure_msat)));
+               } else {
+                       unwrap_send_err!(nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)), true, APIError::ChannelUnavailable { ref err }, assert_eq!(err, &format!("Cannot send value that would put our exposure to dust HTLCs at {} over the limit {} on counterparty commitment tx", dust_overflow, config.channel_options.max_dust_htlc_exposure_msat)));
+               }
+       } else if exposure_breach_event == ExposureEvent::AtHTLCReception {
+               let (route, payment_hash, _, payment_secret) = get_route_and_payment_hash!(nodes[1], nodes[0], if on_holder_tx { dust_inbound_htlc_on_holder_tx_msat } else { dust_htlc_on_counterparty_tx_msat });
+               nodes[1].node.send_payment(&route, payment_hash, &Some(payment_secret)).unwrap();
+               check_added_monitors!(nodes[1], 1);
+               let mut events = nodes[1].node.get_and_clear_pending_msg_events();
+               assert_eq!(events.len(), 1);
+               let payment_event = SendEvent::from_event(events.remove(0));
+               nodes[0].node.handle_update_add_htlc(&nodes[1].node.get_our_node_id(), &payment_event.msgs[0]);
+               // With default dust exposure: 5000 sats
+               if on_holder_tx {
+                       // Outbound dust balance: 6399 sats
+                       let dust_inbound_overflow = dust_inbound_htlc_on_holder_tx_msat * (dust_inbound_htlc_on_holder_tx + 1);
+                       let dust_outbound_overflow = dust_outbound_htlc_on_holder_tx_msat * dust_outbound_htlc_on_holder_tx + dust_inbound_htlc_on_holder_tx_msat;
+                       nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Cannot accept value that would put our exposure to dust HTLCs at {} over the limit {} on holder commitment tx", if dust_outbound_balance { dust_outbound_overflow } else { dust_inbound_overflow }, config.channel_options.max_dust_htlc_exposure_msat), 1);
+               } else {
+                       // Outbound dust balance: 5200 sats
+                       nodes[0].logger.assert_log("lightning::ln::channel".to_string(), format!("Cannot accept value that would put our exposure to dust HTLCs at {} over the limit {} on counterparty commitment tx", dust_overflow, config.channel_options.max_dust_htlc_exposure_msat), 1);
+               }
+       } else if exposure_breach_event == ExposureEvent::AtUpdateFeeOutbound {
+               let (route, payment_hash, _, payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[1], 2_500_000);
+               if let Err(_) = nodes[0].node.send_payment(&route, payment_hash, &Some(payment_secret)) { panic!("Unexpected event at update_fee-swallowed HTLC"); }
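+               // Bumping the feerate would push dust exposure over `max_dust_htlc_exposure_msat`, so
+               // the channel declines to send the update_fee (see the log assertion below).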
+               {
+                       let mut feerate_lock = chanmon_cfgs[0].fee_estimator.sat_per_kw.lock().unwrap();
+                       *feerate_lock = *feerate_lock * 10;
+               }
+               nodes[0].node.timer_tick_occurred();
+               check_added_monitors!(nodes[0], 1);
+               nodes[0].logger.assert_log_contains("lightning::ln::channel".to_string(), "Cannot afford to send new feerate at 2530 without infringing max dust htlc exposure".to_string(), 1);
+       }
+
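+       // Drain any remaining messages and monitor updates so end-of-test sanity checks stay clean.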
+       let _ = nodes[0].node.get_and_clear_pending_msg_events();
+       let mut added_monitors = nodes[0].chain_monitor.added_monitors.lock().unwrap();
+       added_monitors.clear();
+}
+
+#[test]
+fn test_max_dust_htlc_exposure() {
+       do_test_max_dust_htlc_exposure(true, ExposureEvent::AtHTLCForward, true);
+       do_test_max_dust_htlc_exposure(false, ExposureEvent::AtHTLCForward, true);
+       do_test_max_dust_htlc_exposure(false, ExposureEvent::AtHTLCReception, true);
+       do_test_max_dust_htlc_exposure(false, ExposureEvent::AtHTLCReception, false);
+       do_test_max_dust_htlc_exposure(true, ExposureEvent::AtHTLCForward, false);
+       do_test_max_dust_htlc_exposure(true, ExposureEvent::AtHTLCReception, false);
+       do_test_max_dust_htlc_exposure(true, ExposureEvent::AtHTLCReception, true);
+       do_test_max_dust_htlc_exposure(false, ExposureEvent::AtHTLCForward, false);
+       do_test_max_dust_htlc_exposure(true, ExposureEvent::AtUpdateFeeOutbound, true);
+       do_test_max_dust_htlc_exposure(true, ExposureEvent::AtUpdateFeeOutbound, false);
+       do_test_max_dust_htlc_exposure(false, ExposureEvent::AtUpdateFeeOutbound, false);
+       do_test_max_dust_htlc_exposure(false, ExposureEvent::AtUpdateFeeOutbound, true);
+}