Merge pull request #1626 from TheBlueMatt/2022-07-fix-pm-docs
diff --git a/lightning/src/ln/functional_test_utils.rs b/lightning/src/ln/functional_test_utils.rs
index 38c05998e6a2769db297179d4f0ce0dc76589b9e..cff632e4bc86b6a9e2a9f36b9b9787c0d3d6f8e7 100644
--- a/lightning/src/ln/functional_test_utils.rs
+++ b/lightning/src/ln/functional_test_utils.rs
@@ -352,6 +352,11 @@ impl<'a, 'b, 'c> Drop for Node<'a, 'b, 'c> {
                                }
                        }
 
+                       let broadcaster = test_utils::TestBroadcaster {
+                               txn_broadcasted: Mutex::new(self.tx_broadcaster.txn_broadcasted.lock().unwrap().clone()),
+                               blocks: Arc::new(Mutex::new(self.tx_broadcaster.blocks.lock().unwrap().clone())),
+                       };
+
                        // Before using all the new monitors to check the watch outpoints, use the full set of
                        // them to ensure we can write and reload our ChannelManager.
                        {
@@ -367,20 +372,13 @@ impl<'a, 'b, 'c> Drop for Node<'a, 'b, 'c> {
                                        keys_manager: self.keys_manager,
                                        fee_estimator: &test_utils::TestFeeEstimator { sat_per_kw: Mutex::new(253) },
                                        chain_monitor: self.chain_monitor,
-                                       tx_broadcaster: &test_utils::TestBroadcaster {
-                                               txn_broadcasted: Mutex::new(self.tx_broadcaster.txn_broadcasted.lock().unwrap().clone()),
-                                               blocks: Arc::new(Mutex::new(self.tx_broadcaster.blocks.lock().unwrap().clone())),
-                                       },
+                                       tx_broadcaster: &broadcaster,
                                        logger: &self.logger,
                                        channel_monitors,
                                }).unwrap();
                        }
 
                        let persister = test_utils::TestPersister::new();
-                       let broadcaster = test_utils::TestBroadcaster {
-                               txn_broadcasted: Mutex::new(self.tx_broadcaster.txn_broadcasted.lock().unwrap().clone()),
-                               blocks: Arc::new(Mutex::new(self.tx_broadcaster.blocks.lock().unwrap().clone())),
-                       };
                        let chain_source = test_utils::TestChainSource::new(Network::Testnet);
                        let chain_monitor = test_utils::TestChainMonitor::new(Some(&chain_source), &broadcaster, &self.logger, &feeest, &persister, &self.keys_manager);
                        for deserialized_monitor in deserialized_monitors.drain(..) {
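
Note on the two hunks above: the TestBroadcaster snapshot used to be built twice, once as a temporary inside the block that re-reads the ChannelManager and once again further down for the TestChainMonitor. Hoisting the construction above the block lets both consumers borrow the same instance. A minimal, self-contained sketch of that pattern, using stand-in names rather than the crate's types:

use std::sync::{Arc, Mutex};

// Stand-in for test_utils::TestBroadcaster; field names match the diff, the
// element types are simplified for the sketch.
struct Broadcaster {
    txn_broadcasted: Mutex<Vec<u32>>,
    blocks: Arc<Mutex<Vec<u32>>>,
}

fn main() {
    // Build the snapshot once, before the scoped block...
    let broadcaster = Broadcaster {
        txn_broadcasted: Mutex::new(vec![1, 2, 3]),
        blocks: Arc::new(Mutex::new(vec![10, 20])),
    };
    {
        // ...borrow it inside the "re-read the ChannelManager" block, where a
        // fresh temporary used to be constructed...
        let for_reload: &Broadcaster = &broadcaster;
        assert_eq!(for_reload.txn_broadcasted.lock().unwrap().len(), 3);
    }
    // ...and hand the very same instance to the later consumer instead of
    // cloning the broadcast/block state a second time.
    let for_chain_monitor: &Broadcaster = &broadcaster;
    assert_eq!(for_chain_monitor.blocks.lock().unwrap().len(), 2);
}
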
@@ -1492,7 +1490,7 @@ pub fn expect_payment_failed_conditions<'a, 'b, 'c, 'd, 'e>(
        let mut events = node.node.get_and_clear_pending_events();
        assert_eq!(events.len(), 1);
        let expected_payment_id = match events.pop().unwrap() {
-               Event::PaymentPathFailed { payment_hash, rejected_by_dest, path, retry, payment_id, network_update,
+               Event::PaymentPathFailed { payment_hash, rejected_by_dest, path, retry, payment_id, network_update, short_channel_id,
                        #[cfg(test)]
                        error_code,
                        #[cfg(test)]
@@ -1502,6 +1500,9 @@ pub fn expect_payment_failed_conditions<'a, 'b, 'c, 'd, 'e>(
                        assert!(retry.is_some(), "expected retry.is_some()");
                        assert_eq!(retry.as_ref().unwrap().final_value_msat, path.last().unwrap().fee_msat, "Retry amount should match last hop in path");
                        assert_eq!(retry.as_ref().unwrap().payment_params.payee_pubkey, path.last().unwrap().pubkey, "Retry payee node_id should match last hop in path");
+                       if let Some(scid) = short_channel_id {
+                               assert!(retry.as_ref().unwrap().payment_params.previously_failed_channels.contains(&scid));
+                       }
 
                        #[cfg(test)]
                        {
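
The two hunks above thread the new short_channel_id field through the PaymentPathFailed destructure and assert that, when it is present, it appears in the suggested retry's payment_params.previously_failed_channels, i.e. the retry parameters already exclude the channel that just failed. A runnable sketch of just that membership check, with a plain Vec<u64> standing in for the PaymentParameters field:

// Illustrative only: mirrors the membership check the test now performs. The
// plain u64 slice stands in for PaymentParameters::previously_failed_channels.
fn scid_marked_failed(previously_failed_channels: &[u64], failed_scid: Option<u64>) -> bool {
    match failed_scid {
        // If the failure event named a channel, the retry params must list it...
        Some(scid) => previously_failed_channels.contains(&scid),
        // ...otherwise there is nothing to check.
        None => true,
    }
}

fn main() {
    assert!(scid_marked_failed(&[42, 7], Some(42)));
    assert!(scid_marked_failed(&[42, 7], None));
    assert!(!scid_marked_failed(&[7], Some(42)));
}
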
@@ -1689,9 +1690,16 @@ pub fn do_claim_payment_along_route<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>,
                        ($node: expr, $prev_node: expr, $next_node: expr, $new_msgs: expr) => {
                                {
                                        $node.node.handle_update_fulfill_htlc(&$prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
-                                       let fee = $node.node.channel_state.lock().unwrap()
-                                               .by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap()
-                                               .config.options.forwarding_fee_base_msat;
+                                       let fee = {
+                                               let channel_state = $node.node.channel_state.lock().unwrap();
+                                               let channel = channel_state
+                                                       .by_id.get(&next_msgs.as_ref().unwrap().0.channel_id).unwrap();
+                                               if let Some(prev_config) = channel.prev_config() {
+                                                       prev_config.forwarding_fee_base_msat
+                                               } else {
+                                                       channel.config().forwarding_fee_base_msat
+                                               }
+                                       };
                                        expect_payment_forwarded!($node, $next_node, $prev_node, Some(fee as u64), false, false);
                                        expected_total_fee_msat += fee as u64;
                                        check_added_monitors!($node, 1);
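
The hunk above changes how the expected forwarding fee is looked up: if the channel still has a previous config (prev_config() returns Some), the test takes forwarding_fee_base_msat from that older config and only falls back to config() otherwise, presumably because a forward routed before a pending config update is still charged under the older fee. A small, self-contained sketch of that fallback, using stand-in types rather than the crate's Channel/ChannelConfig:

// Stand-in types, not the crate's Channel / ChannelConfig.
#[derive(Clone, Copy)]
struct Config { forwarding_fee_base_msat: u32 }

struct Channel { prev_config: Option<Config>, config: Config }

impl Channel {
    // Prefer the config that was in effect before the most recent update, if
    // one is still pending; otherwise use the current config.
    fn expected_forward_fee_base_msat(&self) -> u32 {
        self.prev_config.unwrap_or(self.config).forwarding_fee_base_msat
    }
}

fn main() {
    let updated = Channel {
        prev_config: Some(Config { forwarding_fee_base_msat: 1_000 }),
        config: Config { forwarding_fee_base_msat: 2_000 },
    };
    assert_eq!(updated.expected_forward_fee_base_msat(), 1_000);

    let stable = Channel { prev_config: None, config: Config { forwarding_fee_base_msat: 2_000 } };
    assert_eq!(stable.expected_forward_fee_base_msat(), 2_000);
}
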