Rename Channel's latest-monitor-update fetch method for clarity
diff --git a/lightning/src/ln/functional_test_utils.rs b/lightning/src/ln/functional_test_utils.rs
index 56f08c7656a8c071f0a4a4bc4633e970c4130a0f..dc5f2b41fe5055e96d5acf10637eac4d1d4c5109 100644
@@ -85,16 +85,14 @@ pub fn confirm_transactions_at<'a, 'b, 'c, 'd>(node: &'a Node<'b, 'c, 'd>, txn:
        if conf_height > first_connect_height {
                connect_blocks(node, conf_height - first_connect_height);
        }
-       let mut block = Block {
-               header: BlockHeader { version: 0x20000000, prev_blockhash: node.best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: conf_height, bits: 42, nonce: 42 },
-               txdata: Vec::new(),
-       };
+       let mut txdata = Vec::new();
        for _ in 0..*node.network_chan_count.borrow() { // Make sure we don't end up with channels at the same short id by offsetting by chan_count
-               block.txdata.push(Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() });
+               txdata.push(Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() });
        }
        for tx in txn {
-               block.txdata.push((*tx).clone());
+               txdata.push((*tx).clone());
        }
+       let block = create_dummy_block(node.best_block_hash(), conf_height, txdata);
        connect_block(node, &block);
        scid_utils::scid_from_parts(conf_height as u64, block.txdata.len() as u64 - 1, 0).unwrap()
 }
@@ -191,22 +189,31 @@ impl ConnectStyle {
        }
 }
 
+pub fn create_dummy_header(prev_blockhash: BlockHash, time: u32) -> BlockHeader {
+       BlockHeader {
+               version: 0x2000_0000,
+               prev_blockhash,
+               merkle_root: TxMerkleNode::all_zeros(),
+               time,
+               bits: 42,
+               nonce: 42,
+       }
+}
+
+pub fn create_dummy_block(prev_blockhash: BlockHash, time: u32, txdata: Vec<Transaction>) -> Block {
+       Block { header: create_dummy_header(prev_blockhash, time), txdata }
+}
+
 pub fn connect_blocks<'a, 'b, 'c, 'd>(node: &'a Node<'b, 'c, 'd>, depth: u32) -> BlockHash {
        let skip_intermediaries = node.connect_style.borrow().skips_blocks();
 
        let height = node.best_block_info().1 + 1;
-       let mut block = Block {
-               header: BlockHeader { version: 0x2000000, prev_blockhash: node.best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: height, bits: 42, nonce: 42 },
-               txdata: vec![],
-       };
+       let mut block = create_dummy_block(node.best_block_hash(), height, Vec::new());
        assert!(depth >= 1);
        for i in 1..depth {
                let prev_blockhash = block.header.block_hash();
                do_connect_block(node, block, skip_intermediaries);
-               block = Block {
-                       header: BlockHeader { version: 0x20000000, prev_blockhash, merkle_root: TxMerkleNode::all_zeros(), time: height + i, bits: 42, nonce: 42 },
-                       txdata: vec![],
-               };
+               block = create_dummy_block(prev_blockhash, height + i, Vec::new());
        }
        let hash = block.header.block_hash();
        do_connect_block(node, block, false);
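For orientation, a minimal sketch of how the new helpers compose in a test, assuming a `node: &Node` and a transaction `tx` to confirm are already in scope (both hypothetical here):

	// Mine one block containing `tx` on top of `node`'s current tip using the new helpers.
	let height = node.best_block_info().1 + 1;
	let block = create_dummy_block(node.best_block_hash(), height, vec![tx.clone()]);
	connect_block(node, &block);
	// The node's best block should now be the block just built.
	assert_eq!(node.best_block_hash(), block.header.block_hash());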
@@ -776,6 +783,28 @@ macro_rules! get_channel_ref {
        }
 }
 
+#[cfg(test)]
+macro_rules! get_outbound_v1_channel_ref {
+       ($node: expr, $counterparty_node: expr, $per_peer_state_lock: ident, $peer_state_lock: ident, $channel_id: expr) => {
+               {
+                       $per_peer_state_lock = $node.node.per_peer_state.read().unwrap();
+                       $peer_state_lock = $per_peer_state_lock.get(&$counterparty_node.node.get_our_node_id()).unwrap().lock().unwrap();
+                       $peer_state_lock.outbound_v1_channel_by_id.get_mut(&$channel_id).unwrap()
+               }
+       }
+}
+
+#[cfg(test)]
+macro_rules! get_inbound_v1_channel_ref {
+       ($node: expr, $counterparty_node: expr, $per_peer_state_lock: ident, $peer_state_lock: ident, $channel_id: expr) => {
+               {
+                       $per_peer_state_lock = $node.node.per_peer_state.read().unwrap();
+                       $peer_state_lock = $per_peer_state_lock.get(&$counterparty_node.node.get_our_node_id()).unwrap().lock().unwrap();
+                       $peer_state_lock.inbound_v1_channel_by_id.get_mut(&$channel_id).unwrap()
+               }
+       }
+}
+
 #[cfg(test)]
 macro_rules! get_feerate {
        ($node: expr, $counterparty_node: expr, $channel_id: expr) => {
@@ -783,7 +812,7 @@ macro_rules! get_feerate {
                        let mut per_peer_state_lock;
                        let mut peer_state_lock;
                        let chan = get_channel_ref!($node, $counterparty_node, per_peer_state_lock, peer_state_lock, $channel_id);
-                       chan.get_feerate_sat_per_1000_weight()
+                       chan.context.get_feerate_sat_per_1000_weight()
                }
        }
 }
@@ -795,7 +824,7 @@ macro_rules! get_opt_anchors {
                        let mut per_peer_state_lock;
                        let mut peer_state_lock;
                        let chan = get_channel_ref!($node, $counterparty_node, per_peer_state_lock, peer_state_lock, $channel_id);
-                       chan.opt_anchors()
+                       chan.context.opt_anchors()
                }
        }
 }
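Alongside the new per-variant ref macros above, these two tweaks track the move of channel accessors onto a shared ChannelContext. A rough illustration of the resulting call shape inside a test; the lock bindings and `chan_id` are illustrative only:

	// Fields are now reached through `chan.context` rather than on `chan` directly.
	let mut per_peer_state_lock;
	let mut peer_state_lock;
	let chan = get_channel_ref!(nodes[0], nodes[1], per_peer_state_lock, peer_state_lock, chan_id);
	let feerate = chan.context.get_feerate_sat_per_1000_weight();
	let anchors = chan.context.opt_anchors();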
@@ -2095,7 +2124,7 @@ pub fn do_pass_along_path<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>, expected_p
                                match &events_2[0] {
                                        Event::PaymentClaimable { ref payment_hash, ref purpose, amount_msat,
                                                receiver_node_id, ref via_channel_id, ref via_user_channel_id,
-                                               claim_deadline, onion_fields,
+                                               claim_deadline, onion_fields, ..
                                        } => {
                                                assert_eq!(our_payment_hash, *payment_hash);
                                                assert_eq!(node.node.get_our_node_id(), receiver_node_id.unwrap());
@@ -2108,7 +2137,7 @@ pub fn do_pass_along_path<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>, expected_p
                                                        },
                                                        PaymentPurpose::SpontaneousPayment(payment_preimage) => {
                                                                assert_eq!(expected_preimage.unwrap(), *payment_preimage);
-                                                               assert!(our_payment_secret.is_none());
+                                                               assert_eq!(our_payment_secret, onion_fields.as_ref().unwrap().payment_secret);
                                                        },
                                                }
                                                assert_eq!(*amount_msat, recv_value);
@@ -2157,7 +2186,20 @@ pub fn send_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>, route: Route
        (our_payment_preimage, our_payment_hash, our_payment_secret, payment_id)
 }
 
-pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>, expected_paths: &[&[&Node<'a, 'b, 'c>]], skip_last: bool, our_payment_preimage: PaymentPreimage) -> u64 {
+pub fn do_claim_payment_along_route<'a, 'b, 'c>(
+       origin_node: &Node<'a, 'b, 'c>, expected_paths: &[&[&Node<'a, 'b, 'c>]], skip_last: bool,
+       our_payment_preimage: PaymentPreimage
+) -> u64 {
+       let extra_fees = vec![0; expected_paths.len()];
+       do_claim_payment_along_route_with_extra_penultimate_hop_fees(origin_node, expected_paths,
+               &extra_fees[..], skip_last, our_payment_preimage)
+}
+
+pub fn do_claim_payment_along_route_with_extra_penultimate_hop_fees<'a, 'b, 'c>(
+       origin_node: &Node<'a, 'b, 'c>, expected_paths: &[&[&Node<'a, 'b, 'c>]], expected_extra_fees:
+       &[u32], skip_last: bool, our_payment_preimage: PaymentPreimage
+) -> u64 {
+       assert_eq!(expected_paths.len(), expected_extra_fees.len());
        for path in expected_paths.iter() {
                assert_eq!(path.last().unwrap().node.get_our_node_id(), expected_paths[0].last().unwrap().node.get_our_node_id());
        }
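A hedged usage sketch of the new wrapper: existing callers of do_claim_payment_along_route are unchanged (it forwards a zeroed fee slice), while a test that expects the penultimate hop to earn extra fee on a given path can call the new variant directly. The route, nodes, and 1_000 msat figure below are made up:

	// Hypothetical two-hop claim where nodes[1] (the penultimate hop) is expected to take an
	// extra 1_000 msat of forwarding fee on top of its configured base fee.
	do_claim_payment_along_route_with_extra_penultimate_hop_fees(
		&nodes[0], &[&[&nodes[1], &nodes[2]]], &[1_000], false, payment_preimage);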
@@ -2207,7 +2249,7 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                }
        }
 
-       for (expected_route, (path_msgs, next_hop)) in expected_paths.iter().zip(per_path_msgs.drain(..)) {
+       for (i, (expected_route, (path_msgs, next_hop))) in expected_paths.iter().zip(per_path_msgs.drain(..)).enumerate() {
                let mut next_msgs = Some(path_msgs);
                let mut expected_next_node = next_hop;
 
@@ -2222,20 +2264,21 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                        }
                }
                macro_rules! mid_update_fulfill_dance {
-                       ($node: expr, $prev_node: expr, $next_node: expr, $new_msgs: expr) => {
+                       ($idx: expr, $node: expr, $prev_node: expr, $next_node: expr, $new_msgs: expr) => {
                                {
                                        $node.node.handle_update_fulfill_htlc(&$prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
-                                       let fee = {
+                                       let mut fee = {
                                                let per_peer_state = $node.node.per_peer_state.read().unwrap();
                                                let peer_state = per_peer_state.get(&$prev_node.node.get_our_node_id())
                                                        .unwrap().lock().unwrap();
                                                let channel = peer_state.channel_by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap();
-                                               if let Some(prev_config) = channel.prev_config() {
+                                               if let Some(prev_config) = channel.context.prev_config() {
                                                        prev_config.forwarding_fee_base_msat
                                                } else {
-                                                       channel.config().forwarding_fee_base_msat
+                                                       channel.context.config().forwarding_fee_base_msat
                                                }
                                        };
+                                       if $idx == 1 { fee += expected_extra_fees[i]; }
                                        expect_payment_forwarded!($node, $next_node, $prev_node, Some(fee as u64), false, false);
                                        expected_total_fee_msat += fee as u64;
                                        check_added_monitors!($node, 1);
@@ -2267,7 +2310,7 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                                } else {
                                        next_node = expected_route[expected_route.len() - 1 - idx - 1];
                                }
-                               mid_update_fulfill_dance!(node, prev_node, next_node, update_next_msgs);
+                               mid_update_fulfill_dance!(idx, node, prev_node, next_node, update_next_msgs);
                        } else {
                                assert!(!update_next_msgs);
                                assert!(node.node.get_and_clear_pending_msg_events().is_empty());
@@ -2564,8 +2607,16 @@ pub fn create_network<'a, 'b: 'a, 'c: 'b>(node_count: usize, cfgs: &'b Vec<NodeC
 
        for i in 0..node_count {
                for j in (i+1)..node_count {
-                       nodes[i].node.peer_connected(&nodes[j].node.get_our_node_id(), &msgs::Init { features: nodes[j].override_init_features.borrow().clone().unwrap_or_else(|| nodes[j].node.init_features()), remote_network_address: None }, true).unwrap();
-                       nodes[j].node.peer_connected(&nodes[i].node.get_our_node_id(), &msgs::Init { features: nodes[i].override_init_features.borrow().clone().unwrap_or_else(|| nodes[i].node.init_features()), remote_network_address: None }, false).unwrap();
+                       nodes[i].node.peer_connected(&nodes[j].node.get_our_node_id(), &msgs::Init {
+                               features: nodes[j].override_init_features.borrow().clone().unwrap_or_else(|| nodes[j].node.init_features()),
+                               networks: None,
+                               remote_network_address: None,
+                       }, true).unwrap();
+                       nodes[j].node.peer_connected(&nodes[i].node.get_our_node_id(), &msgs::Init {
+                               features: nodes[i].override_init_features.borrow().clone().unwrap_or_else(|| nodes[i].node.init_features()),
+                               networks: None,
+                               remote_network_address: None,
+                       }, false).unwrap();
                }
        }
 
@@ -2849,9 +2900,13 @@ macro_rules! handle_chan_reestablish_msgs {
 /// pending_htlc_adds includes both the holding cell and in-flight update_add_htlcs, whereas
 /// for claims/fails they are separated out.
 pub fn reconnect_nodes<'a, 'b, 'c>(node_a: &Node<'a, 'b, 'c>, node_b: &Node<'a, 'b, 'c>, send_channel_ready: (bool, bool), pending_htlc_adds: (i64, i64), pending_htlc_claims: (usize, usize), pending_htlc_fails: (usize, usize), pending_cell_htlc_claims: (usize, usize), pending_cell_htlc_fails: (usize, usize), pending_raa: (bool, bool))  {
-       node_a.node.peer_connected(&node_b.node.get_our_node_id(), &msgs::Init { features: node_b.node.init_features(), remote_network_address: None }, true).unwrap();
+       node_a.node.peer_connected(&node_b.node.get_our_node_id(), &msgs::Init {
+               features: node_b.node.init_features(), networks: None, remote_network_address: None
+       }, true).unwrap();
        let reestablish_1 = get_chan_reestablish_msgs!(node_a, node_b);
-       node_b.node.peer_connected(&node_a.node.get_our_node_id(), &msgs::Init { features: node_a.node.init_features(), remote_network_address: None }, false).unwrap();
+       node_b.node.peer_connected(&node_a.node.get_our_node_id(), &msgs::Init {
+               features: node_a.node.init_features(), networks: None, remote_network_address: None
+       }, false).unwrap();
        let reestablish_2 = get_chan_reestablish_msgs!(node_b, node_a);
 
        if send_channel_ready.0 {
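Finally, the Init construction now carries the newly added networks field. A minimal sketch of the message the test utilities build when (re)connecting peers, with node_a/node_b as in reconnect_nodes above:

	// Sketch only: `networks` is the new field (left as None by the test helpers); the
	// feature and address fields are unchanged.
	let init_msg = msgs::Init {
		features: node_b.node.init_features(),
		networks: None,
		remote_network_address: None,
	};
	node_a.node.peer_connected(&node_b.node.get_our_node_id(), &init_msg, true).unwrap();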