X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=fuzz%2Fsrc%2Fchanmon_consistency.rs;h=f8c6c08a2baca464273858365e770e35145655e4;hb=64c58a565b9a4165d795ae7db608e1dc149829ca;hp=0caee0801aa1d747ec6f564a2cc86f9658172f78;hpb=ca5b10884ef0bb754cc5f1c1c346eabad82e5296;p=rust-lightning

diff --git a/fuzz/src/chanmon_consistency.rs b/fuzz/src/chanmon_consistency.rs
index 0caee080..f8c6c08a 100644
--- a/fuzz/src/chanmon_consistency.rs
+++ b/fuzz/src/chanmon_consistency.rs
@@ -18,8 +18,6 @@
 //! send-side handling is correct, other peers. We consider it a failure if any action results in a
 //! channel being force-closed.
 
-use bitcoin::TxMerkleNode;
-use bitcoin::blockdata::block::BlockHeader;
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::blockdata::transaction::{Transaction, TxOut};
 use bitcoin::blockdata::script::{Builder, Script};
@@ -37,20 +35,21 @@ use lightning::chain::{BestBlock, ChannelMonitorUpdateStatus, chainmonitor, chan
 use lightning::chain::channelmonitor::{ChannelMonitor, MonitorEvent};
 use lightning::chain::transaction::OutPoint;
 use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator};
-use lightning::chain::keysinterface::{KeyMaterial, InMemorySigner, Recipient, EntropySource, NodeSigner, SignerProvider};
+use lightning::sign::{KeyMaterial, InMemorySigner, Recipient, EntropySource, NodeSigner, SignerProvider};
+use lightning::events;
+use lightning::events::MessageSendEventsProvider;
 use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret};
-use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId};
+use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId, RecipientOnionFields};
 use lightning::ln::channel::FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE;
 use lightning::ln::msgs::{self, CommitmentUpdate, ChannelMessageHandler, DecodeError, UpdateAddHTLC, Init};
 use lightning::ln::script::ShutdownScript;
+use lightning::ln::functional_test_utils::*;
 use lightning::util::enforcing_trait_impls::{EnforcingSigner, EnforcementState};
 use lightning::util::errors::APIError;
-use lightning::util::events;
 use lightning::util::logger::Logger;
 use lightning::util::config::UserConfig;
-use lightning::util::events::MessageSendEventsProvider;
 use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer};
-use lightning::routing::router::{InFlightHtlcs, Route, RouteHop, RouteParameters, Router};
+use lightning::routing::router::{InFlightHtlcs, Path, Route, RouteHop, RouteParameters, Router};
 
 use crate::utils::test_logger::{self, Output};
 use crate::utils::test_persister::TestPersister;
@@ -97,15 +96,11 @@ impl Router for FuzzRouter {
 			action: msgs::ErrorAction::IgnoreError
 		})
 	}
-	fn notify_payment_path_failed(&self, _path: &[&RouteHop], _short_channel_id: u64) {}
-	fn notify_payment_path_successful(&self, _path: &[&RouteHop]) {}
-	fn notify_payment_probe_successful(&self, _path: &[&RouteHop]) {}
-	fn notify_payment_probe_failed(&self, _path: &[&RouteHop], _short_channel_id: u64) {}
 }
 
 pub struct TestBroadcaster {}
 impl BroadcasterInterface for TestBroadcaster {
-	fn broadcast_transaction(&self, _tx: &Transaction) { }
+	fn broadcast_transactions(&self, _txs: &[&Transaction]) { }
 }
 
 pub struct VecWriter(pub Vec<u8>);
@@ -244,6 +239,7 @@ impl SignerProvider for KeyProvider {
 			[id, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, self.node_secret[31]],
 			channel_value_satoshis,
 			channel_keys_id,
+			channel_keys_id,
 		);
 		let revoked_commitment = self.make_enforcement_state_cell(keys.commitment_seed);
 		EnforcingSigner::new_with_revoked(keys, revoked_commitment, false)
@@ -252,7 +248,7 @@ impl SignerProvider for KeyProvider {
 	fn read_chan_signer(&self, buffer: &[u8]) -> Result<Self::Signer, DecodeError> {
 		let mut reader = std::io::Cursor::new(buffer);
 
-		let inner: InMemorySigner = Readable::read(&mut reader)?;
+		let inner: InMemorySigner = ReadableArgs::read(&mut reader, self)?;
 		let state = self.make_enforcement_state_cell(inner.commitment_seed);
 
 		Ok(EnforcingSigner {
@@ -262,18 +258,18 @@ impl SignerProvider for KeyProvider {
 		})
 	}
 
-	fn get_destination_script(&self) -> Script {
+	fn get_destination_script(&self) -> Result<Script, ()> {
 		let secp_ctx = Secp256k1::signing_only();
 		let channel_monitor_claim_key = SecretKey::from_slice(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, self.node_secret[31]]).unwrap();
 		let our_channel_monitor_claim_key_hash = WPubkeyHash::hash(&PublicKey::from_secret_key(&secp_ctx, &channel_monitor_claim_key).serialize());
-		Builder::new().push_opcode(opcodes::all::OP_PUSHBYTES_0).push_slice(&our_channel_monitor_claim_key_hash[..]).into_script()
+		Ok(Builder::new().push_opcode(opcodes::all::OP_PUSHBYTES_0).push_slice(&our_channel_monitor_claim_key_hash[..]).into_script())
 	}
 
-	fn get_shutdown_scriptpubkey(&self) -> ShutdownScript {
+	fn get_shutdown_scriptpubkey(&self) -> Result<ShutdownScript, ()> {
 		let secp_ctx = Secp256k1::signing_only();
 		let secret_key = SecretKey::from_slice(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, self.node_secret[31]]).unwrap();
 		let pubkey_hash = WPubkeyHash::hash(&PublicKey::from_secret_key(&secp_ctx, &secret_key).serialize());
-		ShutdownScript::new_p2wpkh(&pubkey_hash)
+		Ok(ShutdownScript::new_p2wpkh(&pubkey_hash))
 	}
 }
 
@@ -299,7 +295,7 @@ fn check_api_err(api_err: APIError) {
 			// all others. If you hit this panic, the list of acceptable errors
 			// is probably just stale and you should add new messages here.
 			match err.as_str() {
-				"Peer for first hop currently disconnected/pending monitor update!" => {},
+				"Peer for first hop currently disconnected" => {},
 				_ if err.starts_with("Cannot push more than their max accepted HTLCs ") => {},
 				_ if err.starts_with("Cannot send value that would put us over the max HTLC value in flight our peer will accept ") => {},
 				_ if err.starts_with("Cannot send value that would put our balance under counterparty-announced channel reserve value") => {},
@@ -355,17 +351,17 @@ fn send_payment(source: &ChanMan, dest: &ChanMan, dest_chan_id: u64, amt: u64, p
 	let mut payment_id = [0; 32];
 	payment_id[0..8].copy_from_slice(&payment_idx.to_ne_bytes());
 	*payment_idx += 1;
-	if let Err(err) = source.send_payment(&Route {
-		paths: vec![vec![RouteHop {
+	if let Err(err) = source.send_payment_with_route(&Route {
+		paths: vec![Path { hops: vec![RouteHop {
 			pubkey: dest.get_our_node_id(),
 			node_features: dest.node_features(),
 			short_channel_id: dest_chan_id,
 			channel_features: dest.channel_features(),
 			fee_msat: amt,
 			cltv_expiry_delta: 200,
-		}]],
+		}], blinded_tail: None }],
 		payment_params: None,
-	}, payment_hash, &Some(payment_secret), PaymentId(payment_id)) {
+	}, payment_hash, RecipientOnionFields::secret_only(payment_secret), PaymentId(payment_id)) {
 		check_payment_err(err);
 		false
 	} else { true }
@@ -377,8 +373,8 @@ fn send_hop_payment(source: &ChanMan, middle: &ChanMan, middle_chan_id: u64, des
 	let mut payment_id = [0; 32];
 	payment_id[0..8].copy_from_slice(&payment_idx.to_ne_bytes());
 	*payment_idx += 1;
-	if let Err(err) = source.send_payment(&Route {
-		paths: vec![vec![RouteHop {
+	if let Err(err) = source.send_payment_with_route(&Route {
+		paths: vec![Path { hops: vec![RouteHop {
 			pubkey: middle.get_our_node_id(),
 			node_features: middle.node_features(),
 			short_channel_id: middle_chan_id,
@@ -392,9 +388,9 @@ fn send_hop_payment(source: &ChanMan, middle: &ChanMan, middle_chan_id: u64, des
 			channel_features: dest.channel_features(),
 			fee_msat: amt,
 			cltv_expiry_delta: 200,
-		}]],
+		}], blinded_tail: None }],
 		payment_params: None,
-	}, payment_hash, &Some(payment_secret), PaymentId(payment_id)) {
+	}, payment_hash, RecipientOnionFields::secret_only(payment_secret), PaymentId(payment_id)) {
 		check_payment_err(err);
 		false
 	} else { true }
@@ -422,7 +418,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 			let network = Network::Bitcoin;
 			let params = ChainParameters {
 				network,
-				best_block: BestBlock::from_genesis(network),
+				best_block: BestBlock::from_network(network),
 			};
 			(ChannelManager::new($fee_estimator.clone(), monitor.clone(), broadcast.clone(), &router, Arc::clone(&logger), keys_manager.clone(), keys_manager.clone(), keys_manager.clone(), config, params),
 			monitor, keys_manager)
@@ -478,8 +474,8 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 	let mut channel_txn = Vec::new();
 	macro_rules! make_channel {
 		($source: expr, $dest: expr, $chan_id: expr) => { {
-			$source.peer_connected(&$dest.get_our_node_id(), &Init { features: $dest.init_features(), remote_network_address: None }).unwrap();
-			$dest.peer_connected(&$source.get_our_node_id(), &Init { features: $source.init_features(), remote_network_address: None }).unwrap();
+			$source.peer_connected(&$dest.get_our_node_id(), &Init { features: $dest.init_features(), remote_network_address: None }, true).unwrap();
+			$dest.peer_connected(&$source.get_our_node_id(), &Init { features: $source.init_features(), remote_network_address: None }, false).unwrap();
 			$source.create_channel($dest.get_our_node_id(), 100_000, 42, 0, None).unwrap();
 
 			let open_channel = {
@@ -530,7 +526,18 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 					msg.clone()
 				} else { panic!("Wrong event type"); }
 			};
+			let events = $dest.get_and_clear_pending_events();
+			assert_eq!(events.len(), 1);
+			if let events::Event::ChannelPending{ ref counterparty_node_id, .. } = events[0] {
+				assert_eq!(counterparty_node_id, &$source.get_our_node_id());
+			} else { panic!("Wrong event type"); }
+
 			$source.handle_funding_signed(&$dest.get_our_node_id(), &funding_signed);
+			let events = $source.get_and_clear_pending_events();
+			assert_eq!(events.len(), 1);
+			if let events::Event::ChannelPending{ ref counterparty_node_id, .. } = events[0] {
+				assert_eq!(counterparty_node_id, &$dest.get_our_node_id());
+			} else { panic!("Wrong event type"); }
 
 			funding_output
 		} }
@@ -539,11 +546,11 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 	macro_rules! confirm_txn {
 		($node: expr) => { {
 			let chain_hash = genesis_block(Network::Bitcoin).block_hash();
-			let mut header = BlockHeader { version: 0x20000000, prev_blockhash: chain_hash, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
+			let mut header = create_dummy_header(chain_hash, 42);
 			let txdata: Vec<_> = channel_txn.iter().enumerate().map(|(i, tx)| (i + 1, tx)).collect();
 			$node.transactions_confirmed(&header, &txdata, 1);
 			for _ in 2..100 {
-				header = BlockHeader { version: 0x20000000, prev_blockhash: header.block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
+				header = create_dummy_header(header.block_hash(), 42);
 			}
 			$node.best_block_updated(&header, 99);
 		} }
@@ -918,6 +925,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 					events::Event::PaymentClaimed { .. } => {},
 					events::Event::PaymentPathSuccessful { .. } => {},
 					events::Event::PaymentPathFailed { .. } => {},
+					events::Event::PaymentFailed { .. } => {},
 					events::Event::ProbeSuccessful { .. } | events::Event::ProbeFailed { .. } => {
 						// Even though we don't explicitly send probes, because probes are
 						// detected based on hashing the payment hash+preimage, its rather
@@ -982,31 +990,31 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 
 			0x0c => {
 				if !chan_a_disconnected {
-					nodes[0].peer_disconnected(&nodes[1].get_our_node_id(), false);
-					nodes[1].peer_disconnected(&nodes[0].get_our_node_id(), false);
+					nodes[0].peer_disconnected(&nodes[1].get_our_node_id());
+					nodes[1].peer_disconnected(&nodes[0].get_our_node_id());
 					chan_a_disconnected = true;
 					drain_msg_events_on_disconnect!(0);
 				}
 			},
 			0x0d => {
 				if !chan_b_disconnected {
-					nodes[1].peer_disconnected(&nodes[2].get_our_node_id(), false);
-					nodes[2].peer_disconnected(&nodes[1].get_our_node_id(), false);
+					nodes[1].peer_disconnected(&nodes[2].get_our_node_id());
+					nodes[2].peer_disconnected(&nodes[1].get_our_node_id());
					chan_b_disconnected = true;
					drain_msg_events_on_disconnect!(2);
 				}
 			},
 			0x0e => {
 				if chan_a_disconnected {
-					nodes[0].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }).unwrap();
-					nodes[1].peer_connected(&nodes[0].get_our_node_id(), &Init { features: nodes[0].init_features(), remote_network_address: None }).unwrap();
+					nodes[0].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }, true).unwrap();
+					nodes[1].peer_connected(&nodes[0].get_our_node_id(), &Init { features: nodes[0].init_features(), remote_network_address: None }, false).unwrap();
 					chan_a_disconnected = false;
 				}
 			},
 			0x0f => {
 				if chan_b_disconnected {
-					nodes[1].peer_connected(&nodes[2].get_our_node_id(), &Init { features: nodes[2].init_features(), remote_network_address: None }).unwrap();
-					nodes[2].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }).unwrap();
+					nodes[1].peer_connected(&nodes[2].get_our_node_id(), &Init { features: nodes[2].init_features(), remote_network_address: None }, true).unwrap();
+					nodes[2].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }, false).unwrap();
 					chan_b_disconnected = false;
 				}
 			},
@@ -1043,7 +1051,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 
 			0x2c => {
 				if !chan_a_disconnected {
-					nodes[1].peer_disconnected(&nodes[0].get_our_node_id(), false);
+					nodes[1].peer_disconnected(&nodes[0].get_our_node_id());
 					chan_a_disconnected = true;
 					drain_msg_events_on_disconnect!(0);
 				}
@@ -1057,14 +1065,14 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 			},
 			0x2d => {
 				if !chan_a_disconnected {
-					nodes[0].peer_disconnected(&nodes[1].get_our_node_id(), false);
+					nodes[0].peer_disconnected(&nodes[1].get_our_node_id());
 					chan_a_disconnected = true;
 					nodes[0].get_and_clear_pending_msg_events();
 					ab_events.clear();
 					ba_events.clear();
 				}
 				if !chan_b_disconnected {
-					nodes[2].peer_disconnected(&nodes[1].get_our_node_id(), false);
+					nodes[2].peer_disconnected(&nodes[1].get_our_node_id());
 					chan_b_disconnected = true;
 					nodes[2].get_and_clear_pending_msg_events();
 					bc_events.clear();
@@ -1076,7 +1084,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 			},
 			0x2e => {
 				if !chan_b_disconnected {
-					nodes[1].peer_disconnected(&nodes[2].get_our_node_id(), false);
+					nodes[1].peer_disconnected(&nodes[2].get_our_node_id());
 					chan_b_disconnected = true;
 					drain_msg_events_on_disconnect!(2);
 				}
@@ -1201,13 +1209,13 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
 
 				// Next, make sure peers are all connected to each other
 				if chan_a_disconnected {
-					nodes[0].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }).unwrap();
-					nodes[1].peer_connected(&nodes[0].get_our_node_id(), &Init { features: nodes[0].init_features(), remote_network_address: None }).unwrap();
+					nodes[0].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }, true).unwrap();
+					nodes[1].peer_connected(&nodes[0].get_our_node_id(), &Init { features: nodes[0].init_features(), remote_network_address: None }, false).unwrap();
 					chan_a_disconnected = false;
 				}
 				if chan_b_disconnected {
-					nodes[1].peer_connected(&nodes[2].get_our_node_id(), &Init { features: nodes[2].init_features(), remote_network_address: None }).unwrap();
-					nodes[2].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }).unwrap();
+					nodes[1].peer_connected(&nodes[2].get_our_node_id(), &Init { features: nodes[2].init_features(), remote_network_address: None }, true).unwrap();
+					nodes[2].peer_connected(&nodes[1].get_our_node_id(), &Init { features: nodes[1].init_features(), remote_network_address: None }, false).unwrap();
 					chan_b_disconnected = false;
 				}