X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=fuzz%2Fsrc%2Ffull_stack.rs;h=e3c8290b16ea820e4a26bf585a5684384f1f46c3;hb=ce7b0b4ca2fbcc4a9177189a09d031ee2caa8867;hp=19ec541b942592a00ba93fe1687eba2e95559952;hpb=4b77ce195944a87fa2f14735ccbd9f3f9ecac4d9;p=rust-lightning

diff --git a/fuzz/src/full_stack.rs b/fuzz/src/full_stack.rs
index 19ec541b..e3c8290b 100644
--- a/fuzz/src/full_stack.rs
+++ b/fuzz/src/full_stack.rs
@@ -50,8 +50,8 @@ use lightning::util::ser::ReadableArgs;
 use utils::test_logger;
 use utils::test_persister::TestPersister;
 
-use bitcoin::secp256k1::key::{PublicKey,SecretKey};
-use bitcoin::secp256k1::recovery::RecoverableSignature;
+use bitcoin::secp256k1::{PublicKey,SecretKey};
+use bitcoin::secp256k1::ecdsa::RecoverableSignature;
 use bitcoin::secp256k1::Secp256k1;
 
 use std::cell::RefCell;
@@ -390,6 +390,9 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 		best_block: BestBlock::from_genesis(network),
 	};
 	let channelmanager = Arc::new(ChannelManager::new(fee_est.clone(), monitor.clone(), broadcast.clone(), Arc::clone(&logger), keys_manager.clone(), config, params));
+	// Adding new calls to `KeysInterface::get_secure_random_bytes` during startup can change all the
+	// keys subsequently generated in this test. Rather than regenerating all the messages manually,
+	// it's easier to just increment the counter here so the keys don't change.
 	keys_manager.counter.fetch_sub(1, Ordering::AcqRel);
 	let our_id = PublicKey::from_secret_key(&Secp256k1::signing_only(), &keys_manager.get_node_secret(Recipient::Node).unwrap());
 	let network_graph = Arc::new(NetworkGraph::new(genesis_block(network).block_hash()));
@@ -405,7 +408,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 	let mut should_forward = false;
 	let mut payments_received: Vec<PaymentHash> = Vec::new();
 	let mut payments_sent = 0;
-	let mut pending_funding_generation: Vec<([u8; 32], u64, Script)> = Vec::new();
+	let mut pending_funding_generation: Vec<([u8; 32], PublicKey, u64, Script)> = Vec::new();
 	let mut pending_funding_signatures = HashMap::new();
 
 	loop {
@@ -419,7 +422,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 					}
 				}
 				if new_id == 0 { return; }
-				loss_detector.handler.new_outbound_connection(get_pubkey!(), Peer{id: (new_id - 1) as u8, peers_connected: &peers}).unwrap();
+				loss_detector.handler.new_outbound_connection(get_pubkey!(), Peer{id: (new_id - 1) as u8, peers_connected: &peers}, None).unwrap();
 				peers.borrow_mut()[new_id - 1] = true;
 			},
 			1 => {
@@ -431,7 +434,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 					}
 				}
 				if new_id == 0 { return; }
-				loss_detector.handler.new_inbound_connection(Peer{id: (new_id - 1) as u8, peers_connected: &peers}).unwrap();
+				loss_detector.handler.new_inbound_connection(Peer{id: (new_id - 1) as u8, peers_connected: &peers}, None).unwrap();
 				peers.borrow_mut()[new_id - 1] = true;
 			},
 			2 => {
@@ -456,7 +459,8 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 					final_value_msat,
 					final_cltv_expiry_delta: 42,
 				};
-				let route = match find_route(&our_id, &params, &network_graph, None, Arc::clone(&logger), &scorer) {
+				let random_seed_bytes: [u8; 32] = keys_manager.get_secure_random_bytes();
+				let route = match find_route(&our_id, &params, &network_graph, None, Arc::clone(&logger), &scorer, &random_seed_bytes) {
 					Ok(route) => route,
 					Err(_) => return,
 				};
@@ -479,7 +483,8 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 					final_value_msat,
 					final_cltv_expiry_delta: 42,
 				};
-				let mut route = match find_route(&our_id, &params, &network_graph, None, Arc::clone(&logger), &scorer) {
+				let random_seed_bytes: [u8; 32] = keys_manager.get_secure_random_bytes();
+				let mut route = match find_route(&our_id, &params, &network_graph, None, Arc::clone(&logger), &scorer, &random_seed_bytes) {
 					Ok(route) => route,
 					Err(_) => return,
 				};
@@ -511,7 +516,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 				let channel_id = get_slice!(1)[0] as usize;
 				if channel_id >= channels.len() { return; }
 				channels.sort_by(|a, b| { a.channel_id.cmp(&b.channel_id) });
-				if channelmanager.close_channel(&channels[channel_id].channel_id).is_err() { return; }
+				if channelmanager.close_channel(&channels[channel_id].channel_id, &channels[channel_id].counterparty.node_id).is_err() { return; }
 			},
 			7 => {
 				if should_forward {
@@ -551,7 +556,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 			10 => {
 				'outer_loop: for funding_generation in pending_funding_generation.drain(..) {
 					let mut tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: vec![TxOut {
-							value: funding_generation.1, script_pubkey: funding_generation.2,
+							value: funding_generation.2, script_pubkey: funding_generation.3,
 						}] };
 					let funding_output = 'search_loop: loop {
 						let funding_txid = tx.txid();
@@ -570,7 +575,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 							continue 'outer_loop;
 						}
 					};
-					if let Err(e) = channelmanager.funding_transaction_generated(&funding_generation.0, tx.clone()) {
+					if let Err(e) = channelmanager.funding_transaction_generated(&funding_generation.0, &funding_generation.1, tx.clone()) {
 						// It's possible the channel has been closed in the mean time, but any other
 						// failure may be a bug.
 						if let APIError::ChannelUnavailable { err } = e {
@@ -619,7 +624,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 				let channel_id = get_slice!(1)[0] as usize;
 				if channel_id >= channels.len() { return; }
 				channels.sort_by(|a, b| { a.channel_id.cmp(&b.channel_id) });
-				channelmanager.force_close_channel(&channels[channel_id].channel_id).unwrap();
+				channelmanager.force_close_channel(&channels[channel_id].channel_id, &channels[channel_id].counterparty.node_id).unwrap();
 			},
 			// 15 is above
 			_ => return,
@@ -627,8 +632,8 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
 		loss_detector.handler.process_events();
 		for event in loss_detector.manager.get_and_clear_pending_events() {
 			match event {
-				Event::FundingGenerationReady { temporary_channel_id, channel_value_satoshis, output_script, .. } => {
-					pending_funding_generation.push((temporary_channel_id, channel_value_satoshis, output_script));
+				Event::FundingGenerationReady { temporary_channel_id, counterparty_node_id, channel_value_satoshis, output_script, .. } => {
+					pending_funding_generation.push((temporary_channel_id, counterparty_node_id, channel_value_satoshis, output_script));
 				},
 				Event::PaymentReceived { payment_hash, .. } => {
 					//TODO: enhance by fetching random amounts from fuzz input?
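
Note: the comment added in the hunk at line 390 describes how the fuzz harness keeps its key material deterministic: the harness's key provider derives "random" bytes from an atomic counter, so one extra get_secure_random_bytes call during startup would shift every key generated afterwards, and a single counter decrement cancels that extra draw. The following is a minimal, self-contained sketch of that idea in plain Rust with a hypothetical CounterRng type; it is not rust-lightning code, just an illustration of the technique under those assumptions.

use std::sync::atomic::{AtomicUsize, Ordering};

/// Stand-in for the fuzz harness's key provider: "random" bytes are just the
/// next counter value, so everything derived from them is deterministic.
struct CounterRng {
	counter: AtomicUsize,
}

impl CounterRng {
	fn get_secure_random_bytes(&self) -> [u8; 32] {
		// Each call consumes exactly one counter value.
		let id = self.counter.fetch_add(1, Ordering::AcqRel) as u8;
		[id; 32]
	}
}

fn main() {
	let rng = CounterRng { counter: AtomicUsize::new(0) };

	// Suppose a newly added startup path makes one extra draw...
	let _extra_startup_draw = rng.get_secure_random_bytes();
	// ...then decrementing the counter once puts it back where it was, so every
	// later "key" still matches what previously generated fuzz inputs expect.
	rng.counter.fetch_sub(1, Ordering::AcqRel);

	assert_eq!(rng.get_secure_random_bytes(), [0u8; 32]);
	assert_eq!(rng.get_secure_random_bytes(), [1u8; 32]);
}

Run as an ordinary binary, the asserts pass because fetch_sub exactly undoes the startup draw; the same reasoning is why the diff compensates with keys_manager.counter.fetch_sub(1, Ordering::AcqRel) instead of regenerating every hard-coded message in the test.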