Move `channel_state` types to their new public path
[rust-lightning] / fuzz / src / chanmon_consistency.rs
index 81bf9f4415423b4ba074fa796f3c8a17346e28d9..1bcba1fbf378db62bde396740006bf50b9ca0e4e 100644
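
For context: the headline change in this file is that `ChannelDetails` is now imported from the dedicated `lightning::ln::channel_state` module instead of via `lightning::ln::channelmanager`. Below is a minimal sketch of the same migration as a downstream crate would see it; the `usable_channel_count` helper is purely illustrative (not part of this diff) and assumes a `lightning` version that exposes `ln::channel_state`.

    // Old import path (pre-move):
    //     use lightning::ln::channelmanager::ChannelDetails;
    // New import path (what this diff switches the fuzz target to):
    use lightning::ln::channel_state::ChannelDetails;

    // Hypothetical helper, only to show the re-homed type in use: count the
    // channels that are currently usable for sending payments. `is_usable` is
    // a public field on `ChannelDetails`.
    fn usable_channel_count(channels: &[ChannelDetails]) -> usize {
        channels.iter().filter(|c| c.is_usable).count()
    }
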
 //! send-side handling is correct, other peers. We consider it a failure if any action results in a
 //! channel being force-closed.
 
+use bitcoin::amount::Amount;
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::blockdata::transaction::{Transaction, TxOut};
 use bitcoin::blockdata::script::{Builder, ScriptBuf};
 use bitcoin::blockdata::opcodes;
 use bitcoin::blockdata::locktime::absolute::LockTime;
-use bitcoin::network::constants::Network;
+use bitcoin::network::Network;
+use bitcoin::transaction::Version;
 
+use bitcoin::WPubkeyHash;
 use bitcoin::hashes::Hash as TraitImport;
 use bitcoin::hashes::sha256::Hash as Sha256;
 use bitcoin::hashes::sha256d::Hash as Sha256dHash;
-use bitcoin::hash_types::{BlockHash, WPubkeyHash};
+use bitcoin::hash_types::BlockHash;
 
 use lightning::blinded_path::BlindedPath;
+use lightning::blinded_path::message::ForwardNode;
 use lightning::blinded_path::payment::ReceiveTlvs;
 use lightning::chain;
 use lightning::chain::{BestBlock, ChannelMonitorUpdateStatus, chainmonitor, channelmonitor, Confirm, Watch};
@@ -40,8 +44,9 @@ use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget,
 use lightning::sign::{KeyMaterial, InMemorySigner, Recipient, EntropySource, NodeSigner, SignerProvider};
 use lightning::events;
 use lightning::events::MessageSendEventsProvider;
-use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret};
-use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId, RecipientOnionFields};
+use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret};
+use lightning::ln::channel_state::ChannelDetails;
+use lightning::ln::channelmanager::{ChainParameters, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId, RecipientOnionFields};
 use lightning::ln::channel::FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE;
 use lightning::ln::msgs::{self, CommitmentUpdate, ChannelMessageHandler, DecodeError, UpdateAddHTLC, Init};
 use lightning::ln::script::ShutdownScript;
@@ -51,6 +56,7 @@ use lightning::offers::invoice_request::UnsignedInvoiceRequest;
 use lightning::onion_message::messenger::{Destination, MessageRouter, OnionMessagePath};
 use lightning::util::test_channel_signer::{TestChannelSigner, EnforcementState};
 use lightning::util::errors::APIError;
+use lightning::util::hash_tables::*;
 use lightning::util::logger::Logger;
 use lightning::util::config::UserConfig;
 use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer};
@@ -66,11 +72,10 @@ use bitcoin::secp256k1::schnorr;
 
 use std::mem;
 use std::cmp::{self, Ordering};
-use hashbrown::{HashSet, hash_map, HashMap};
 use std::sync::{Arc,Mutex};
 use std::sync::atomic;
 use std::io::Cursor;
-use bitcoin::bech32::u5;
+use bech32::u5;
 
 const MAX_FEE: u32 = 10_000;
 struct FuzzEstimator {
@@ -84,7 +89,7 @@ impl FeeEstimator for FuzzEstimator {
                // Background feerate which is <= the minimum Normal feerate.
                match conf_target {
                        ConfirmationTarget::OnChainSweep => MAX_FEE,
-                       ConfirmationTarget::ChannelCloseMinimum|ConfirmationTarget::AnchorChannelFee|ConfirmationTarget::MinAllowedAnchorChannelRemoteFee|ConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee => 253,
+                       ConfirmationTarget::ChannelCloseMinimum|ConfirmationTarget::AnchorChannelFee|ConfirmationTarget::MinAllowedAnchorChannelRemoteFee|ConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee|ConfirmationTarget::OutputSpendingFee => 253,
                        ConfirmationTarget::NonAnchorChannelFee => cmp::min(self.ret_val.load(atomic::Ordering::Acquire), MAX_FEE),
                }
        }
@@ -103,11 +108,9 @@ impl Router for FuzzRouter {
                })
        }
 
-       fn create_blinded_payment_paths<
-               ES: EntropySource + ?Sized, T: secp256k1::Signing + secp256k1::Verification
-       >(
+       fn create_blinded_payment_paths<T: secp256k1::Signing + secp256k1::Verification>(
                &self, _recipient: PublicKey, _first_hops: Vec<ChannelDetails>, _tlvs: ReceiveTlvs,
-               _amount_msats: u64, _entropy_source: &ES, _secp_ctx: &Secp256k1<T>
+               _amount_msats: u64, _secp_ctx: &Secp256k1<T>,
        ) -> Result<Vec<(BlindedPayInfo, BlindedPath)>, ()> {
                unreachable!()
        }
@@ -120,11 +123,8 @@ impl MessageRouter for FuzzRouter {
                unreachable!()
        }
 
-       fn create_blinded_paths<
-               ES: EntropySource + ?Sized, T: secp256k1::Signing + secp256k1::Verification
-       >(
-               &self, _recipient: PublicKey, _peers: Vec<PublicKey>, _entropy_source: &ES,
-               _secp_ctx: &Secp256k1<T>
+       fn create_blinded_paths<T: secp256k1::Signing + secp256k1::Verification>(
+               &self, _recipient: PublicKey, _peers: Vec<ForwardNode>, _secp_ctx: &Secp256k1<T>,
        ) -> Result<Vec<BlindedPath>, ()> {
                unreachable!()
        }
@@ -162,7 +162,7 @@ impl TestChainMonitor {
                        logger,
                        keys,
                        persister,
-                       latest_monitors: Mutex::new(HashMap::new()),
+                       latest_monitors: Mutex::new(new_hash_map()),
                }
        }
 }
@@ -178,20 +178,17 @@ impl chain::Watch<TestChannelSigner> for TestChainMonitor {
 
        fn update_channel(&self, funding_txo: OutPoint, update: &channelmonitor::ChannelMonitorUpdate) -> chain::ChannelMonitorUpdateStatus {
                let mut map_lock = self.latest_monitors.lock().unwrap();
-               let mut map_entry = match map_lock.entry(funding_txo) {
-                       hash_map::Entry::Occupied(entry) => entry,
-                       hash_map::Entry::Vacant(_) => panic!("Didn't have monitor on update call"),
-               };
+               let map_entry = map_lock.get_mut(&funding_txo).expect("Didn't have monitor on update call");
                let deserialized_monitor = <(BlockHash, channelmonitor::ChannelMonitor<TestChannelSigner>)>::
-                       read(&mut Cursor::new(&map_entry.get().1), (&*self.keys, &*self.keys)).unwrap().1;
+                       read(&mut Cursor::new(&map_entry.1), (&*self.keys, &*self.keys)).unwrap().1;
                deserialized_monitor.update_monitor(update, &&TestBroadcaster{}, &&FuzzEstimator { ret_val: atomic::AtomicU32::new(253) }, &self.logger).unwrap();
                let mut ser = VecWriter(Vec::new());
                deserialized_monitor.write(&mut ser).unwrap();
-               map_entry.insert((update.update_id, ser.0));
+               *map_entry = (update.update_id, ser.0);
                self.chain_monitor.update_channel(funding_txo, update)
        }
 
-       fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>, Option<PublicKey>)> {
+       fn release_pending_monitor_events(&self) -> Vec<(OutPoint, ChannelId, Vec<MonitorEvent>, Option<PublicKey>)> {
                return self.chain_monitor.release_pending_monitor_events();
        }
 }
@@ -252,7 +249,7 @@ impl NodeSigner for KeyProvider {
        }
 
        fn sign_gossip_message(&self, msg: lightning::ln::msgs::UnsignedGossipMessage) -> Result<Signature, ()> {
-               let msg_hash = Message::from_slice(&Sha256dHash::hash(&msg.encode()[..])[..]).map_err(|_| ())?;
+               let msg_hash = Message::from_digest(Sha256dHash::hash(&msg.encode()[..]).to_byte_array());
                let secp_ctx = Secp256k1::signing_only();
                Ok(secp_ctx.sign_ecdsa(&msg_hash, &self.node_secret))
        }
@@ -472,7 +469,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
                ($node_id: expr, $fee_estimator: expr) => { {
                        let logger: Arc<dyn Logger> = Arc::new(test_logger::TestLogger::new($node_id.to_string(), out.clone()));
                        let node_secret = SecretKey::from_slice(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, $node_id]).unwrap();
-                       let keys_manager = Arc::new(KeyProvider { node_secret, rand_bytes_id: atomic::AtomicU32::new(0), enforcement_states: Mutex::new(HashMap::new()) });
+                       let keys_manager = Arc::new(KeyProvider { node_secret, rand_bytes_id: atomic::AtomicU32::new(0), enforcement_states: Mutex::new(new_hash_map()) });
                        let monitor = Arc::new(TestChainMonitor::new(broadcast.clone(), logger.clone(), $fee_estimator.clone(),
                                Arc::new(TestPersister {
                                        update_ret: Mutex::new(ChannelMonitorUpdateStatus::Completed)
@@ -513,13 +510,13 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
                                config.manually_accept_inbound_channels = true;
                        }
 
-                       let mut monitors = HashMap::new();
+                       let mut monitors = new_hash_map();
                        let mut old_monitors = $old_monitors.latest_monitors.lock().unwrap();
                        for (outpoint, (update_id, monitor_ser)) in old_monitors.drain() {
                                monitors.insert(outpoint, <(BlockHash, ChannelMonitor<TestChannelSigner>)>::read(&mut Cursor::new(&monitor_ser), (&*$keys_manager, &*$keys_manager)).expect("Failed to read monitor").1);
                                chain_monitor.latest_monitors.lock().unwrap().insert(outpoint, (update_id, monitor_ser));
                        }
-                       let mut monitor_refs = HashMap::new();
+                       let mut monitor_refs = new_hash_map();
                        for (outpoint, monitor) in monitors.iter_mut() {
                                monitor_refs.insert(*outpoint, monitor);
                        }
@@ -596,8 +593,8 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
                                let events = $source.get_and_clear_pending_events();
                                assert_eq!(events.len(), 1);
                                if let events::Event::FundingGenerationReady { ref temporary_channel_id, ref channel_value_satoshis, ref output_script, .. } = events[0] {
-                                       let tx = Transaction { version: $chan_id, lock_time: LockTime::ZERO, input: Vec::new(), output: vec![TxOut {
-                                               value: *channel_value_satoshis, script_pubkey: output_script.clone(),
+                                       let tx = Transaction { version: Version($chan_id), lock_time: LockTime::ZERO, input: Vec::new(), output: vec![TxOut {
+                                               value: Amount::from_sat(*channel_value_satoshis), script_pubkey: output_script.clone(),
                                        }]};
                                        funding_output = OutPoint { txid: tx.txid(), index: 0 };
                                        $source.funding_transaction_generated(&temporary_channel_id, &$dest.get_our_node_id(), tx.clone()).unwrap();
@@ -986,7 +983,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
                                // In case we get 256 payments we may have a hash collision, resulting in the
                                // second claim/fail call not finding the duplicate-hash HTLC, so we have to
                                // deduplicate the calls here.
-                               let mut claim_set = HashSet::new();
+                               let mut claim_set = new_hash_map();
                                let mut events = nodes[$node].get_and_clear_pending_events();
                                // Sort events so that PendingHTLCsForwardable get processed last. This avoids a
                                // case where we first process a PendingHTLCsForwardable, then claim/fail on a
@@ -1008,7 +1005,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
                                for event in events.drain(..) {
                                        match event {
                                                events::Event::PaymentClaimable { payment_hash, .. } => {
-                                                       if claim_set.insert(payment_hash.0) {
+                                                       if claim_set.insert(payment_hash.0, ()).is_none() {
                                                                if $fail {
                                                                        nodes[$node].fail_htlc_backwards(&payment_hash);
                                                                } else {
@@ -1289,6 +1286,94 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
                        },
                        0x89 => { fee_est_c.ret_val.store(253, atomic::Ordering::Release); nodes[2].maybe_update_chan_fees(); },
 
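+                       // 0xf0..=0xfe: mark a pending `ChannelMonitor` update (the first, second, or
+                       // last one queued) as completed for a given monitor/channel pair, then have
+                       // the owning node process the resulting monitor events.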
+                       0xf0 => {
+                               let pending_updates = monitor_a.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
+                               if let Some(id) = pending_updates.get(0) {
+                                       monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+                               }
+                               nodes[0].process_monitor_events();
+                       }
+                       0xf1 => {
+                               let pending_updates = monitor_a.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
+                               if let Some(id) = pending_updates.get(1) {
+                                       monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+                               }
+                               nodes[0].process_monitor_events();
+                       }
+                       0xf2 => {
+                               let pending_updates = monitor_a.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
+                               if let Some(id) = pending_updates.last() {
+                                       monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+                               }
+                               nodes[0].process_monitor_events();
+                       }
+
+                       0xf4 => {
+                               let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
+                               if let Some(id) = pending_updates.get(0) {
+                                       monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+                               }
+                               nodes[1].process_monitor_events();
+                       }
+                       0xf5 => {
+                               let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
+                               if let Some(id) = pending_updates.get(1) {
+                                       monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+                               }
+                               nodes[1].process_monitor_events();
+                       }
+                       0xf6 => {
+                               let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
+                               if let Some(id) = pending_updates.last() {
+                                       monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+                               }
+                               nodes[1].process_monitor_events();
+                       }
+
+                       0xf8 => {
+                               let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
+                               if let Some(id) = pending_updates.get(0) {
+                                       monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+                               }
+                               nodes[1].process_monitor_events();
+                       }
+                       0xf9 => {
+                               let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
+                               if let Some(id) = pending_updates.get(1) {
+                                       monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+                               }
+                               nodes[1].process_monitor_events();
+                       }
+                       0xfa => {
+                               let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
+                               if let Some(id) = pending_updates.last() {
+                                       monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+                               }
+                               nodes[1].process_monitor_events();
+                       }
+
+                       0xfc => {
+                               let pending_updates = monitor_c.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
+                               if let Some(id) = pending_updates.get(0) {
+                                       monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+                               }
+                               nodes[2].process_monitor_events();
+                       }
+                       0xfd => {
+                               let pending_updates = monitor_c.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
+                               if let Some(id) = pending_updates.get(1) {
+                                       monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+                               }
+                               nodes[2].process_monitor_events();
+                       }
+                       0xfe => {
+                               let pending_updates = monitor_c.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
+                               if let Some(id) = pending_updates.last() {
+                                       monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+                               }
+                               nodes[2].process_monitor_events();
+                       }
+
                        0xff => {
                                // Test that no channel is in a stuck state where neither party can send funds even
                                // after we resolve all pending events.