Add `source_channel_id` to `PaymentForwarded` event
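
The `PaymentForwarded` event gains a `source_channel_id` field identifying the channel the forwarded HTLC arrived over. The `expect_payment_forwarded!` test macro now takes the upstream node as an extra argument and, whenever a fee was earned, asserts that the reported `source_channel_id` belongs to one of the channels open with that counterparty. Because `do_claim_payment_along_route` walks the route in reverse, the node passed in as `$next_node` is actually the previous hop. The `peer_connected` calls in the test helpers are also updated for the `Init` message's new `remote_network_address` field, which is simply set to `None` here.

For illustration, a minimal sketch of how downstream event-handling code might read the new field. The field types assumed below (`Option<[u8; 32]>` for the channel id, `Option<u64>` for the fee) and the `lightning::util::events` module path are not spelled out in this diff, so treat it as a sketch rather than the exact API:

    // Sketch only: field types and module path are assumed, not confirmed by this diff.
    use lightning::util::events::Event;

    fn log_forward(event: &Event) {
        match event {
            Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
                if let Some(channel_id) = source_channel_id {
                    // Attribute the forwarding fee (if any) to the inbound channel.
                    println!(
                        "forwarded HTLC from channel {:02x?}: fee {:?} msat, claimed on chain: {}",
                        channel_id, fee_earned_msat, claim_from_onchain_tx
                    );
                }
            },
            _ => {},
        }
    }

Keeping `source_channel_id` an `Option` (as the `.unwrap()` in the macro suggests) presumably lets events serialized before the field existed still be deserialized.
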
diff --git a/lightning/src/ln/functional_test_utils.rs b/lightning/src/ln/functional_test_utils.rs
index d65bf2ee3445303d60e52c7f9bcfa7e23f7c92fd..1d033f32f764a9e2d2ae8a822611d3ebfc7ec33f 100644
--- a/lightning/src/ln/functional_test_utils.rs
+++ b/lightning/src/ln/functional_test_utils.rs
@@ -1328,12 +1328,16 @@ macro_rules! expect_payment_path_successful {
 }
 
 macro_rules! expect_payment_forwarded {
-       ($node: expr, $expected_fee: expr, $upstream_force_closed: expr) => {
+       ($node: expr, $source_node: expr, $expected_fee: expr, $upstream_force_closed: expr) => {
                let events = $node.node.get_and_clear_pending_events();
                assert_eq!(events.len(), 1);
                match events[0] {
-                       Event::PaymentForwarded { fee_earned_msat, claim_from_onchain_tx } => {
+                       Event::PaymentForwarded { fee_earned_msat, source_channel_id, claim_from_onchain_tx } => {
                                assert_eq!(fee_earned_msat, $expected_fee);
+                               if fee_earned_msat.is_some() {
+                                       // Is the event channel_id in one of the channels between the two nodes?
+                                       assert!($node.node.list_channels().iter().any(|x| x.counterparty.node_id == $source_node.node.get_our_node_id() && x.channel_id == source_channel_id.unwrap()));
+                               }
                                assert_eq!(claim_from_onchain_tx, $upstream_force_closed);
                        },
                        _ => panic!("Unexpected event"),
@@ -1572,11 +1576,11 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                        }
                }
                macro_rules! mid_update_fulfill_dance {
-                       ($node: expr, $prev_node: expr, $new_msgs: expr) => {
+                       ($node: expr, $prev_node: expr, $next_node: expr, $new_msgs: expr) => {
                                {
                                        $node.node.handle_update_fulfill_htlc(&$prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
                                        let fee = $node.node.channel_state.lock().unwrap().by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap().config.forwarding_fee_base_msat;
-                                       expect_payment_forwarded!($node, Some(fee as u64), false);
+                                       expect_payment_forwarded!($node, $next_node, Some(fee as u64), false);
                                        expected_total_fee_msat += fee as u64;
                                        check_added_monitors!($node, 1);
                                        let new_next_msgs = if $new_msgs {
@@ -1600,7 +1604,14 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                        assert_eq!(expected_next_node, node.node.get_our_node_id());
                        let update_next_msgs = !skip_last || idx != expected_route.len() - 1;
                        if next_msgs.is_some() {
-                               mid_update_fulfill_dance!(node, prev_node, update_next_msgs);
+                               // Since we are traversing in reverse, next_node is actually the previous node
+                               let next_node: &Node;
+                               if idx == expected_route.len() - 1 {
+                                       next_node = origin_node;
+                               } else {
+                                       next_node = expected_route[expected_route.len() - 1 - idx - 1];
+                               }
+                               mid_update_fulfill_dance!(node, prev_node, next_node, update_next_msgs);
                        } else {
                                assert!(!update_next_msgs);
                                assert!(node.node.get_and_clear_pending_msg_events().is_empty());
@@ -1897,8 +1908,8 @@ pub fn create_network<'a, 'b: 'a, 'c: 'b>(node_count: usize, cfgs: &'b Vec<NodeC
 
        for i in 0..node_count {
                for j in (i+1)..node_count {
-                       nodes[i].node.peer_connected(&nodes[j].node.get_our_node_id(), &msgs::Init { features: cfgs[j].features.clone() });
-                       nodes[j].node.peer_connected(&nodes[i].node.get_our_node_id(), &msgs::Init { features: cfgs[i].features.clone() });
+                       nodes[i].node.peer_connected(&nodes[j].node.get_our_node_id(), &msgs::Init { features: cfgs[j].features.clone(), remote_network_address: None });
+                       nodes[j].node.peer_connected(&nodes[i].node.get_our_node_id(), &msgs::Init { features: cfgs[i].features.clone(), remote_network_address: None });
                }
        }
 
@@ -2157,9 +2168,9 @@ macro_rules! handle_chan_reestablish_msgs {
 /// pending_htlc_adds includes both the holding cell and in-flight update_add_htlcs, whereas
 /// for claims/fails they are separated out.
 pub fn reconnect_nodes<'a, 'b, 'c>(node_a: &Node<'a, 'b, 'c>, node_b: &Node<'a, 'b, 'c>, send_funding_locked: (bool, bool), pending_htlc_adds: (i64, i64), pending_htlc_claims: (usize, usize), pending_htlc_fails: (usize, usize), pending_cell_htlc_claims: (usize, usize), pending_cell_htlc_fails: (usize, usize), pending_raa: (bool, bool))  {
-       node_a.node.peer_connected(&node_b.node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty() });
+       node_a.node.peer_connected(&node_b.node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
        let reestablish_1 = get_chan_reestablish_msgs!(node_a, node_b);
-       node_b.node.peer_connected(&node_a.node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty() });
+       node_b.node.peer_connected(&node_a.node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
        let reestablish_2 = get_chan_reestablish_msgs!(node_b, node_a);
 
        if send_funding_locked.0 {