Merge pull request #3129 from optout21/splicing-msgs-update
[rust-lightning] lightning/src/ln/chanmon_update_fail_tests.rs
index 5b821bfb868df77523a3ba8ec32a55ab9db5a680..ce07cb73713aff10e222a4a55775b2edbcd7bcf6 100644
@@ -216,7 +216,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
        // PaymentPathFailed event
 
        assert_eq!(nodes[0].node.list_channels().len(), 0);
-       check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+       check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 }
 
 #[test]
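The recurring change in this diff is `ClosureReason::HolderForceClosed` going from a unit variant to a struct variant, so each `check_closed_event` call now spells out `broadcasted_latest_txn: Some(true)`. A minimal sketch of the shape implied by these assertions (abbreviated, not LDK's full `ClosureReason`):

```rust
// Shape implied by the updated assertions; all other variants elided.
enum ClosureReason {
    // `Some(true)` means the holder force-closed and broadcast their latest
    // commitment transaction, which is what every call site below asserts.
    HolderForceClosed { broadcasted_latest_txn: Option<bool> },
}
```
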
@@ -1612,7 +1612,10 @@ fn test_monitor_update_fail_claim() {
        let (payment_preimage_1, payment_hash_1, ..) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
 
        chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
+       // As long as the preimage isn't on-chain, we shouldn't expose the `PaymentClaimed` event to
+       // users nor send the preimage to peers in the new commitment update.
        nodes[1].node.claim_funds(payment_preimage_1);
+       assert!(nodes[1].node.get_and_clear_pending_events().is_empty());
        assert!(nodes[1].node.get_and_clear_pending_msg_events().is_empty());
        check_added_monitors!(nodes[1], 1);
 
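The two added lines pin down the async-persistence contract this test exercises: while the `ChannelMonitorUpdateStatus::InProgress` update is outstanding, the claim is recorded but nothing user-visible escapes, no `PaymentClaimed` event and no commitment update carrying the preimage. A self-contained toy model of that buffering (the types here are illustrative stand-ins, not LDK's):

```rust
// Toy model of the invariant: a claim made while monitor persistence is
// in flight stays buffered until the update completes.
#[derive(PartialEq)]
enum MonitorStatus { InProgress, Completed }

struct Node {
    monitor_status: MonitorStatus,
    pending_events: Vec<&'static str>,
}

impl Node {
    fn claim_funds(&mut self) {
        // Only a durably persisted monitor update may release the
        // `PaymentClaimed` event (and, in LDK, the preimage to the peer).
        if self.monitor_status == MonitorStatus::Completed {
            self.pending_events.push("PaymentClaimed");
        }
    }
    fn get_and_clear_pending_events(&mut self) -> Vec<&'static str> {
        std::mem::take(&mut self.pending_events)
    }
}

fn main() {
    let mut node = Node { monitor_status: MonitorStatus::InProgress, pending_events: vec![] };
    node.claim_funds();
    // Mirrors the new assertion in the diff: no events while in progress.
    assert!(node.get_and_clear_pending_events().is_empty());
}
```
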
@@ -3232,13 +3235,13 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
                        chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
                        nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
                        check_closed_broadcast(&nodes[1], 1, true);
-                       check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[2].node.get_our_node_id()], 100000);
+                       check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[2].node.get_our_node_id()], 100000);
                }
 
                chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
                nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
                check_closed_broadcast(&nodes[1], 1, true);
-               check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+               check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100000);
        }
 
        // Now reload node B
@@ -3260,7 +3263,7 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
        let error_message = "Channel force-closed";
 
        nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
-       check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+       check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
        let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
        assert_eq!(as_closing_tx.len(), 1);
 
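A side detail worth noting from this hunk: the test broadcaster exposes broadcast transactions as a `Mutex<Vec<_>>`, and `split_off(0)` drains the whole buffer so each assertion only sees transactions broadcast since the last check. The drain pattern in isolation:

```rust
use std::sync::Mutex;

fn main() {
    // Stand-in for `tx_broadcaster.txn_broadcasted`; the real buffer holds
    // bitcoin `Transaction`s rather than strings.
    let txn_broadcasted = Mutex::new(vec!["closing_tx"]);

    // `split_off(0)` moves the entire contents out, leaving the buffer
    // empty for the next observation window.
    let as_closing_tx = txn_broadcasted.lock().unwrap().split_off(0);
    assert_eq!(as_closing_tx.len(), 1);
    assert!(txn_broadcasted.lock().unwrap().is_empty());
}
```
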
@@ -3490,7 +3493,7 @@ fn do_test_reload_mon_update_completion_actions(close_during_reload: bool) {
                nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
                check_added_monitors!(nodes[0], 1);
                check_closed_broadcast!(nodes[0], true);
-               check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100_000);
+               check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100_000);
                let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
                mine_transaction_without_consistency_checks(&nodes[1], &as_closing_tx[0]);
        }
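
Taken together, the hunks migrate every `HolderForceClosed` assertion in this file to the struct form. For event consumers, the payoff is that a `ChannelClosed` handler can now tell whether a commitment transaction is expected on-chain; a hedged sketch with toy `Event`/`ClosureReason` stand-ins (LDK's real types live in `lightning::events`):

```rust
// Toy stand-ins mirroring the shape asserted throughout the diff.
enum ClosureReason {
    HolderForceClosed { broadcasted_latest_txn: Option<bool> },
}
enum Event {
    ChannelClosed { reason: ClosureReason },
}

fn handle_event(event: Event) {
    match event {
        Event::ChannelClosed {
            reason: ClosureReason::HolderForceClosed { broadcasted_latest_txn },
        } => match broadcasted_latest_txn {
            // Our latest commitment transaction went out: watch for it on-chain.
            Some(true) => println!("expect our commitment tx to confirm"),
            // Explicitly not broadcast, or unknown (e.g. an event read back
            // from storage written before the field existed).
            Some(false) | None => println!("no local broadcast recorded"),
        },
    }
}

fn main() {
    handle_event(Event::ChannelClosed {
        reason: ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) },
    });
}
```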