X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Futil%2Ftest_utils.rs;h=387f9df677e969def3eca8bde64186213854a3be;hb=cc4bc1df5a1ab6c363fac3c1b7eddce362e167c0;hp=d53cd39b119fac08d8ebb151204d9c4e53a2a4a3;hpb=ce7463486ee1ae61e9af439c3d34d00244248ee9;p=rust-lightning

diff --git a/lightning/src/util/test_utils.rs b/lightning/src/util/test_utils.rs
index d53cd39b..387f9df6 100644
--- a/lightning/src/util/test_utils.rs
+++ b/lightning/src/util/test_utils.rs
@@ -13,10 +13,11 @@ use crate::chain::chaininterface;
 use crate::chain::chaininterface::ConfirmationTarget;
 use crate::chain::chaininterface::FEERATE_FLOOR_SATS_PER_KW;
 use crate::chain::chainmonitor;
-use crate::chain::chainmonitor::MonitorUpdateId;
+use crate::chain::chainmonitor::{MonitorUpdateId, UpdateOrigin};
 use crate::chain::channelmonitor;
 use crate::chain::channelmonitor::MonitorEvent;
 use crate::chain::transaction::OutPoint;
+use crate::routing::router::CandidateRouteHop;
 use crate::sign;
 use crate::events;
 use crate::events::bump_transaction::{WalletSource, Utxo};
@@ -40,16 +41,15 @@ use crate::util::logger::{Logger, Level, Record};
 use crate::util::ser::{Readable, ReadableArgs, Writer, Writeable};
 use crate::util::persist::KVStore;
 
-use bitcoin::EcdsaSighashType;
 use bitcoin::blockdata::constants::ChainHash;
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::blockdata::transaction::{Transaction, TxOut};
-use bitcoin::blockdata::script::{Builder, Script};
+use bitcoin::blockdata::script::{Builder, Script, ScriptBuf};
 use bitcoin::blockdata::opcodes;
 use bitcoin::blockdata::block::Block;
 use bitcoin::network::constants::Network;
 use bitcoin::hash_types::{BlockHash, Txid};
-use bitcoin::util::sighash::SighashCache;
+use bitcoin::sighash::{SighashCache, EcdsaSighashType};
 
 use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey};
 use bitcoin::secp256k1::ecdh::SharedSecret;
@@ -124,6 +124,7 @@ impl<'a> Router for TestRouter<'a> {
 		if let Some((find_route_query, find_route_res)) = self.next_routes.lock().unwrap().pop_front() {
 			assert_eq!(find_route_query, *params);
 			if let Ok(ref route) = find_route_res {
+				assert_eq!(route.route_params, Some(find_route_query));
 				let scorer = self.scorer.read().unwrap();
 				let scorer = ScorerAccountingForInFlightHtlcs::new(scorer, &inflight_htlcs);
 				for path in &route.paths {
@@ -139,10 +140,35 @@ impl<'a> Router for TestRouter<'a> {
 						// Since the path is reversed, the last element in our iteration is the first
 						// hop.
 						if idx == path.hops.len() - 1 {
-							scorer.channel_penalty_msat(hop.short_channel_id, &NodeId::from_pubkey(payer), &NodeId::from_pubkey(&hop.pubkey), usage, &());
+							let first_hops = match first_hops {
+								Some(hops) => hops,
+								None => continue,
+							};
+							if first_hops.len() == 0 {
+								continue;
+							}
+							let idx = if first_hops.len() > 1 { route.paths.iter().position(|p| p == path).unwrap_or(0) } else { 0 };
+							let node_id = NodeId::from_pubkey(payer);
+							let candidate = CandidateRouteHop::FirstHop {
+								details: first_hops[idx],
+								payer_node_id: &node_id,
+							};
+							scorer.channel_penalty_msat(&candidate, usage, &());
 						} else {
-							let curr_hop_path_idx = path.hops.len() - 1 - idx;
-							scorer.channel_penalty_msat(hop.short_channel_id, &NodeId::from_pubkey(&path.hops[curr_hop_path_idx - 1].pubkey), &NodeId::from_pubkey(&hop.pubkey), usage, &());
+							let network_graph = self.network_graph.read_only();
+							let channel = match network_graph.channel(hop.short_channel_id) {
+								Some(channel) => channel,
+								None => continue,
+							};
+							let channel = match channel.as_directed_to(&NodeId::from_pubkey(&hop.pubkey)) {
+								Some(channel) => channel,
+								None => panic!("Channel directed to {} was not found", hop.pubkey),
+							};
+							let candidate = CandidateRouteHop::PublicHop {
+								info: channel.0,
+								short_channel_id: hop.short_channel_id,
+							};
+							scorer.channel_penalty_msat(&candidate, usage, &());
 						}
 					}
 				}
@@ -152,7 +178,7 @@ impl<'a> Router for TestRouter<'a> {
 		let logger = TestLogger::new();
 		find_route(
 			payer, params, &self.network_graph, first_hops, &logger,
-			&ScorerAccountingForInFlightHtlcs::new(self.scorer.read().unwrap(), &inflight_htlcs), &(),
+			&ScorerAccountingForInFlightHtlcs::new(self.scorer.read().unwrap(), &inflight_htlcs), &Default::default(),
 			&[42; 32]
 		)
 	}
@@ -175,13 +201,15 @@ impl EntropySource for OnlyReadsKeysInterface {
 	fn get_secure_random_bytes(&self) -> [u8; 32] { [0; 32] }}
 
 impl SignerProvider for OnlyReadsKeysInterface {
-	type Signer = TestChannelSigner;
+	type EcdsaSigner = TestChannelSigner;
+	#[cfg(taproot)]
+	type TaprootSigner = TestChannelSigner;
 
 	fn generate_channel_keys_id(&self, _inbound: bool, _channel_value_satoshis: u64, _user_channel_id: u128) -> [u8; 32] { unreachable!(); }
 
-	fn derive_channel_signer(&self, _channel_value_satoshis: u64, _channel_keys_id: [u8; 32]) -> Self::Signer { unreachable!(); }
+	fn derive_channel_signer(&self, _channel_value_satoshis: u64, _channel_keys_id: [u8; 32]) -> Self::EcdsaSigner { unreachable!(); }
 
-	fn read_chan_signer(&self, mut reader: &[u8]) -> Result<Self::Signer, msgs::DecodeError> {
+	fn read_chan_signer(&self, mut reader: &[u8]) -> Result<Self::EcdsaSigner, msgs::DecodeError> {
 		let inner: InMemorySigner = ReadableArgs::read(&mut reader, self)?;
 		let state = Arc::new(Mutex::new(EnforcementState::new()));
@@ -192,7 +220,7 @@ impl SignerProvider for OnlyReadsKeysInterface {
 		))
 	}
 
-	fn get_destination_script(&self) -> Result<Script, ()> { Err(()) }
+	fn get_destination_script(&self, _channel_keys_id: [u8; 32]) -> Result<ScriptBuf, ()> { Err(()) }
 	fn get_shutdown_scriptpubkey(&self) -> Result<ShutdownScript, ()> { Err(()) }
 }
@@ -206,6 +234,9 @@ pub struct TestChainMonitor<'a> {
 	/// ChannelForceClosed event for the given channel_id with should_broadcast set to the given
 	/// boolean.
 	pub expect_channel_force_closed: Mutex<Option<(ChannelId, bool)>>,
+	/// If this is set to Some(), the next round trip serialization check will not hold after an
+	/// update_channel call (not watch_channel) for the given channel_id.
+	pub expect_monitor_round_trip_fail: Mutex<Option<ChannelId>>,
 }
 impl<'a> TestChainMonitor<'a> {
 	pub fn new(chain_source: Option<&'a TestChainSource>, broadcaster: &'a chaininterface::BroadcasterInterface, logger: &'a TestLogger, fee_estimator: &'a TestFeeEstimator, persister: &'a chainmonitor::Persist, keys_manager: &'a TestKeysInterface) -> Self {
@@ -216,6 +247,7 @@ impl<'a> TestChainMonitor<'a> {
 			chain_monitor: chainmonitor::ChainMonitor::new(chain_source, broadcaster, logger, fee_estimator, persister),
 			keys_manager,
 			expect_channel_force_closed: Mutex::new(None),
+			expect_monitor_round_trip_fail: Mutex::new(None),
 		}
 	}
@@ -266,7 +298,12 @@ impl<'a> chain::Watch for TestChainMonitor<'a> {
 		monitor.write(&mut w).unwrap();
 		let new_monitor = <(BlockHash, channelmonitor::ChannelMonitor<TestChannelSigner>)>::read(
 			&mut io::Cursor::new(&w.0), (self.keys_manager, self.keys_manager)).unwrap().1;
-		assert!(new_monitor == *monitor);
+		if let Some(chan_id) = self.expect_monitor_round_trip_fail.lock().unwrap().take() {
+			assert_eq!(chan_id, funding_txo.to_channel_id());
+			assert!(new_monitor != *monitor);
+		} else {
+			assert!(new_monitor == *monitor);
+		}
 		self.added_monitors.lock().unwrap().push((funding_txo, new_monitor));
 		update_res
 	}
@@ -292,12 +329,12 @@ pub(crate) struct WatchtowerPersister {
 	/// After receiving a revoke_and_ack for a commitment number, we'll form and store the justice
 	/// tx which would be used to provide a watchtower with the data it needs.
 	watchtower_state: Mutex>>,
-	destination_script: Script,
+	destination_script: ScriptBuf,
 }
 
 impl WatchtowerPersister {
 	#[cfg(test)]
-	pub(crate) fn new(destination_script: Script) -> Self {
+	pub(crate) fn new(destination_script: ScriptBuf) -> Self {
 		WatchtowerPersister {
 			persister: TestPersister::new(),
 			unsigned_justice_tx_data: Mutex::new(HashMap::new()),
@@ -325,7 +362,7 @@ impl WatchtowerPersister {
 	}
 }
 
-impl chainmonitor::Persist for WatchtowerPersister {
+impl chainmonitor::Persist for WatchtowerPersister {
 	fn persist_new_channel(&self, funding_txo: OutPoint, data: &channelmonitor::ChannelMonitor, id: MonitorUpdateId
 	) -> chain::ChannelMonitorUpdateStatus {
@@ -405,7 +442,7 @@ impl TestPersister {
 		self.update_rets.lock().unwrap().push_back(next_ret);
 	}
 }
-impl chainmonitor::Persist for TestPersister {
+impl chainmonitor::Persist for TestPersister {
 	fn persist_new_channel(&self, _funding_txo: OutPoint, _data: &channelmonitor::ChannelMonitor, _id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
 		if let Some(update_ret) = self.update_rets.lock().unwrap().pop_front() {
 			return update_ret
@@ -413,12 +450,13 @@ impl chainmonitor::Persist fo
 		chain::ChannelMonitorUpdateStatus::Completed
 	}
 
-	fn update_persisted_channel(&self, funding_txo: OutPoint, update: Option<&channelmonitor::ChannelMonitorUpdate>, _data: &channelmonitor::ChannelMonitor, update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
+	fn update_persisted_channel(&self, funding_txo: OutPoint, _update: Option<&channelmonitor::ChannelMonitorUpdate>, _data: &channelmonitor::ChannelMonitor, update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
 		let mut ret = chain::ChannelMonitorUpdateStatus::Completed;
 		if let Some(update_ret) = self.update_rets.lock().unwrap().pop_front() {
 			ret = update_ret;
 		}
-		if update.is_none() {
+		let is_chain_sync = if let UpdateOrigin::ChainSync(_) = update_id.contents { true } else { false };
+		if is_chain_sync {
 			self.chain_sync_monitor_persistences.lock().unwrap().entry(funding_txo).or_insert(HashSet::new()).insert(update_id);
 		} else {
 			self.offchain_monitor_updates.lock().unwrap().entry(funding_txo).or_insert(HashSet::new()).insert(update_id);
@@ -440,12 +478,12 @@ impl TestStore {
 	}
 }
 
 impl KVStore for TestStore {
-	fn read(&self, namespace: &str, sub_namespace: &str, key: &str) -> io::Result<Vec<u8>> {
+	fn read(&self, primary_namespace: &str, secondary_namespace: &str, key: &str) -> io::Result<Vec<u8>> {
 		let persisted_lock = self.persisted_bytes.lock().unwrap();
-		let prefixed = if sub_namespace.is_empty() {
-			namespace.to_string()
+		let prefixed = if secondary_namespace.is_empty() {
+			primary_namespace.to_string()
 		} else {
-			format!("{}/{}", namespace, sub_namespace)
+			format!("{}/{}", primary_namespace, secondary_namespace)
 		};
 		if let Some(outer_ref) = persisted_lock.get(&prefixed) {
@@ -460,7 +498,7 @@ impl KVStore for TestStore {
 		}
 	}
 
-	fn write(&self, namespace: &str, sub_namespace: &str, key: &str, buf: &[u8]) -> io::Result<()> {
+	fn write(&self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8]) -> io::Result<()> {
 		if self.read_only {
 			return Err(io::Error::new(
 				io::ErrorKind::PermissionDenied,
@@ -469,10 +507,10 @@ impl KVStore for TestStore {
 		}
 
 		let mut persisted_lock = self.persisted_bytes.lock().unwrap();
-		let prefixed = if sub_namespace.is_empty() {
-			namespace.to_string()
+		let prefixed = if secondary_namespace.is_empty() {
+			primary_namespace.to_string()
 		} else {
-			format!("{}/{}", namespace, sub_namespace)
+			format!("{}/{}", primary_namespace, secondary_namespace)
 		};
 		let outer_e = persisted_lock.entry(prefixed).or_insert(HashMap::new());
 		let mut bytes = Vec::new();
@@ -481,7 +519,7 @@ impl KVStore for TestStore {
 		Ok(())
 	}
 
-	fn remove(&self, namespace: &str, sub_namespace: &str, key: &str, _lazy: bool) -> io::Result<()> {
+	fn remove(&self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool) -> io::Result<()> {
 		if self.read_only {
 			return Err(io::Error::new(
 				io::ErrorKind::PermissionDenied,
@@ -491,10 +529,10 @@ impl KVStore for TestStore {
 
 		let mut persisted_lock = self.persisted_bytes.lock().unwrap();
 
-		let prefixed = if sub_namespace.is_empty() {
-			namespace.to_string()
+		let prefixed = if secondary_namespace.is_empty() {
+			primary_namespace.to_string()
 		} else {
-			format!("{}/{}", namespace, sub_namespace)
+			format!("{}/{}", primary_namespace, secondary_namespace)
 		};
 		if let Some(outer_ref) = persisted_lock.get_mut(&prefixed) {
 			outer_ref.remove(&key.to_string());
@@ -503,13 +541,13 @@ impl KVStore for TestStore {
 		Ok(())
 	}
 
-	fn list(&self, namespace: &str, sub_namespace: &str) -> io::Result<Vec<String>> {
+	fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result<Vec<String>> {
 		let mut persisted_lock = self.persisted_bytes.lock().unwrap();
 
-		let prefixed = if sub_namespace.is_empty() {
-			namespace.to_string()
+		let prefixed = if secondary_namespace.is_empty() {
+			primary_namespace.to_string()
 		} else {
-			format!("{}/{}", namespace, sub_namespace)
+			format!("{}/{}", primary_namespace, secondary_namespace)
 		};
 		match persisted_lock.entry(prefixed) {
 			hash_map::Entry::Occupied(e) => Ok(e.get().keys().cloned().collect()),
@@ -550,9 +588,9 @@ impl TestBroadcaster {
 impl chaininterface::BroadcasterInterface for TestBroadcaster {
 	fn broadcast_transactions(&self, txs: &[&Transaction]) {
 		for tx in txs {
-			let lock_time = tx.lock_time.0;
+			let lock_time = tx.lock_time.to_consensus_u32();
 			assert!(lock_time < 1_500_000_000);
-			if bitcoin::LockTime::from(tx.lock_time).is_block_height() && lock_time > self.blocks.lock().unwrap().last().unwrap().1 {
+			if tx.lock_time.is_block_height() && lock_time > self.blocks.lock().unwrap().last().unwrap().1 {
 				for inp in tx.input.iter() {
 					if inp.sequence != Sequence::MAX {
 						panic!("We should never broadcast a transaction before its locktime ({})!", tx.lock_time);
@@ -570,17 +608,17 @@ pub struct TestChannelMessageHandler {
 	expected_recv_msgs: Mutex<Option<Vec<wire::Message<()>>>>,
 	connected_peers: Mutex<HashSet<PublicKey>>,
 	pub message_fetch_counter: AtomicUsize,
-	genesis_hash: ChainHash,
+	chain_hash: ChainHash,
 }
 
 impl TestChannelMessageHandler {
-	pub fn new(genesis_hash: ChainHash) -> Self {
+	pub fn new(chain_hash: ChainHash) -> Self {
 		TestChannelMessageHandler {
 			pending_events: Mutex::new(Vec::new()),
 			expected_recv_msgs: Mutex::new(None),
 			connected_peers: Mutex::new(HashSet::new()),
 			message_fetch_counter: AtomicUsize::new(0),
-			genesis_hash,
+			chain_hash,
 		}
 	}
@@ -635,6 +673,18 @@ impl msgs::ChannelMessageHandler for TestChannelMessageHandler {
 	fn handle_closing_signed(&self, _their_node_id: &PublicKey, msg: &msgs::ClosingSigned) {
 		self.received_msg(wire::Message::ClosingSigned(msg.clone()));
 	}
+	fn handle_stfu(&self, _their_node_id: &PublicKey, msg: &msgs::Stfu) {
+		self.received_msg(wire::Message::Stfu(msg.clone()));
+	}
+	fn handle_splice(&self, _their_node_id: &PublicKey, msg: &msgs::Splice) {
+		self.received_msg(wire::Message::Splice(msg.clone()));
+	}
+	fn handle_splice_ack(&self, _their_node_id: &PublicKey, msg: &msgs::SpliceAck) {
+		self.received_msg(wire::Message::SpliceAck(msg.clone()));
+	}
+	fn handle_splice_locked(&self, _their_node_id: &PublicKey, msg: &msgs::SpliceLocked) {
+		self.received_msg(wire::Message::SpliceLocked(msg.clone()));
+	}
 	fn handle_update_add_htlc(&self, _their_node_id: &PublicKey, msg: &msgs::UpdateAddHTLC) {
 		self.received_msg(wire::Message::UpdateAddHTLC(msg.clone()));
 	}
@@ -684,8 +734,8 @@ impl msgs::ChannelMessageHandler for TestChannelMessageHandler {
 		channelmanager::provided_init_features(&UserConfig::default())
 	}
 
-	fn get_genesis_hashes(&self) -> Option<Vec<ChainHash>> {
-		Some(vec![self.genesis_hash])
+	fn get_chain_hashes(&self) -> Option<Vec<ChainHash>> {
+		Some(vec![self.chain_hash])
 	}
 
 	fn handle_open_channel_v2(&self, _their_node_id: &PublicKey, msg: &msgs::OpenChannelV2) {
@@ -753,7 +803,7 @@ fn get_dummy_channel_announcement(short_chan_id: u64) -> msgs::ChannelAnnounceme
 	let node_2_btckey = SecretKey::from_slice(&[39; 32]).unwrap();
 	let unsigned_ann = msgs::UnsignedChannelAnnouncement {
 		features: ChannelFeatures::empty(),
-		chain_hash: genesis_block(network).header.block_hash(),
+		chain_hash: ChainHash::using_genesis_block(network),
 		short_channel_id: short_chan_id,
 		node_id_1: NodeId::from_pubkey(&PublicKey::from_secret_key(&secp_ctx, &node_1_privkey)),
 		node_id_2: NodeId::from_pubkey(&PublicKey::from_secret_key(&secp_ctx, &node_2_privkey)),
@@ -779,7 +829,7 @@ fn get_dummy_channel_update(short_chan_id: u64) -> msgs::ChannelUpdate {
 	msgs::ChannelUpdate {
 		signature: Signature::from(unsafe { FFISignature::new() }),
 		contents: msgs::UnsignedChannelUpdate {
-			chain_hash: genesis_block(network).header.block_hash(),
+			chain_hash: ChainHash::using_genesis_block(network),
 			short_channel_id: short_chan_id,
 			timestamp: 0,
 			flags: 0,
@@ -855,7 +905,7 @@ impl msgs::RoutingMessageHandler for TestRoutingMessageHandler {
 		pending_events.push(events::MessageSendEvent::SendGossipTimestampFilter {
 			node_id: their_node_id.clone(),
 			msg: msgs::GossipTimestampFilter {
-				chain_hash: genesis_block(Network::Testnet).header.block_hash(),
+				chain_hash: ChainHash::using_genesis_block(Network::Testnet),
 				first_timestamp: gossip_start_time as u32,
 				timestamp_range: u32::max_value(),
 			},
@@ -906,7 +956,8 @@ impl events::MessageSendEventsProvider for TestRoutingMessageHandler {
 pub struct TestLogger {
 	level: Level,
 	pub(crate) id: String,
-	pub lines: Mutex<HashMap<(String, String), usize>>,
+	pub lines: Mutex<HashMap<(&'static str, String), usize>>,
+	pub context: Mutex<HashMap<(&'static str, Option<PublicKey>, Option<ChannelId>), usize>>,
 }
 
 impl TestLogger {
@@ -917,13 +968,14 @@ impl TestLogger {
 		TestLogger {
 			level: Level::Trace,
 			id,
-			lines: Mutex::new(HashMap::new())
+			lines: Mutex::new(HashMap::new()),
+			context: Mutex::new(HashMap::new()),
 		}
 	}
 	pub fn enable(&mut self, level: Level) { self.level = level; }
-	pub fn assert_log(&self, module: String, line: String, count: usize) {
+	pub fn assert_log(&self, module: &str, line: String, count: usize) {
 		let log_entries = self.lines.lock().unwrap();
 		assert_eq!(log_entries.get(&(module, line)), Some(&count));
 	}
@@ -935,7 +987,7 @@ impl TestLogger {
 	pub fn assert_log_contains(&self, module: &str, line: &str, count: usize) {
 		let log_entries = self.lines.lock().unwrap();
 		let l: usize = log_entries.iter().filter(|&(&(ref m, ref l), _c)| {
-			m == module && l.contains(line)
+			*m == module && l.contains(line)
 		}).map(|(_, c) | { c }).sum();
 		assert_eq!(l, count)
 	}
@@ -948,18 +1000,29 @@ impl TestLogger {
 	pub fn assert_log_regex(&self, module: &str, pattern: regex::Regex, count: usize) {
 		let log_entries = self.lines.lock().unwrap();
 		let l: usize = log_entries.iter().filter(|&(&(ref m, ref l), _c)| {
-			m == module && pattern.is_match(&l)
+			*m == module && pattern.is_match(&l)
 		}).map(|(_, c) | { c }).sum();
 		assert_eq!(l, count)
 	}
+
+	pub fn assert_log_context_contains(
+		&self, module: &str, peer_id: Option<PublicKey>, channel_id: Option<ChannelId>, count: usize
+	) {
+		let context_entries = self.context.lock().unwrap();
+		let l = context_entries.get(&(module, peer_id, channel_id)).unwrap();
+		assert_eq!(*l, count)
+	}
 }
 
 impl Logger for TestLogger {
-	fn log(&self, record: &Record) {
-		*self.lines.lock().unwrap().entry((record.module_path.to_string(), format!("{}", record.args))).or_insert(0) += 1;
+	fn log(&self, record: Record) {
+		*self.lines.lock().unwrap().entry((record.module_path, format!("{}", record.args))).or_insert(0) += 1;
+		*self.context.lock().unwrap().entry((record.module_path, record.peer_id, record.channel_id)).or_insert(0) += 1;
 		if record.level >= self.level {
-			#[cfg(all(not(ldk_bench), feature = "std"))]
-			println!("{:<5} {} [{} : {}, {}] {}", record.level.to_string(), self.id, record.module_path, record.file, record.line, record.args);
+			#[cfg(all(not(ldk_bench), feature = "std"))] {
+				let pfx = format!("{} {} [{}:{}]", self.id, record.level.to_string(), record.module_path, record.line);
+				println!("{:<55}{}", pfx, record.args);
+			}
 		}
 	}
 }
@@ -1072,7 +1135,9 @@ impl NodeSigner for TestKeysInterface {
 }
 
 impl SignerProvider for TestKeysInterface {
-	type Signer = TestChannelSigner;
+	type EcdsaSigner = TestChannelSigner;
+	#[cfg(taproot)]
+	type TaprootSigner = TestChannelSigner;
 
 	fn generate_channel_keys_id(&self, inbound: bool, channel_value_satoshis: u64, user_channel_id: u128) -> [u8; 32] {
 		self.backing.generate_channel_keys_id(inbound, channel_value_satoshis, user_channel_id)
@@ -1084,7 +1149,7 @@ impl SignerProvider for TestKeysInterface {
 		TestChannelSigner::new_with_revoked(keys, state, self.disable_revocation_policy_check)
 	}
 
-	fn read_chan_signer(&self, buffer: &[u8]) -> Result<Self::Signer, msgs::DecodeError> {
+	fn read_chan_signer(&self, buffer: &[u8]) -> Result<Self::EcdsaSigner, msgs::DecodeError> {
 		let mut reader = io::Cursor::new(buffer);
 
 		let inner: InMemorySigner = ReadableArgs::read(&mut reader, self)?;
@@ -1097,7 +1162,7 @@ impl SignerProvider for TestKeysInterface {
 		))
 	}
 
-	fn get_destination_script(&self) -> Result<Script, ()> { self.backing.get_destination_script() }
+	fn get_destination_script(&self, channel_keys_id: [u8; 32]) -> Result<ScriptBuf, ()> { self.backing.get_destination_script(channel_keys_id) }
 
 	fn get_shutdown_scriptpubkey(&self) -> Result<ShutdownScript, ()> {
 		match &mut *self.expectations.lock().unwrap() {
@@ -1184,18 +1249,18 @@ impl core::fmt::Debug for OnGetShutdownScriptpubkey {
 }
 
 pub struct TestChainSource {
-	pub genesis_hash: BlockHash,
+	pub chain_hash: ChainHash,
 	pub utxo_ret: Mutex<UtxoResult>,
 	pub get_utxo_call_count: AtomicUsize,
-	pub watched_txn: Mutex>,
-	pub watched_outputs: Mutex>,
+	pub watched_txn: Mutex>,
+	pub watched_outputs: Mutex>,
 }
 
 impl TestChainSource {
 	pub fn new(network: Network) -> Self {
 		let script_pubkey = Builder::new().push_opcode(opcodes::OP_TRUE).into_script();
 		Self {
-			genesis_hash: genesis_block(network).block_hash(),
+			chain_hash: ChainHash::using_genesis_block(network),
 			utxo_ret: Mutex::new(UtxoResult::Sync(Ok(TxOut { value: u64::max_value(), script_pubkey }))),
 			get_utxo_call_count: AtomicUsize::new(0),
 			watched_txn: Mutex::new(HashSet::new()),
@@ -1205,9 +1270,9 @@ impl TestChainSource {
 }
 
 impl UtxoLookup for TestChainSource {
-	fn get_utxo(&self, genesis_hash: &BlockHash, _short_channel_id: u64) -> UtxoResult {
+	fn get_utxo(&self, chain_hash: &ChainHash, _short_channel_id: u64) -> UtxoResult {
 		self.get_utxo_call_count.fetch_add(1, Ordering::Relaxed);
-		if self.genesis_hash != *genesis_hash {
+		if self.chain_hash != *chain_hash {
 			return UtxoResult::Sync(Err(UtxoLookupError::UnknownChain));
 		}
@@ -1217,7 +1282,7 @@ impl UtxoLookup for TestChainSource {
 
 impl chain::Filter for TestChainSource {
 	fn register_tx(&self, txid: &Txid, script_pubkey: &Script) {
-		self.watched_txn.lock().unwrap().insert((*txid, script_pubkey.clone()));
+		self.watched_txn.lock().unwrap().insert((*txid, script_pubkey.into()));
 	}
 
 	fn register_output(&self, output: WatchedOutput) {
@@ -1258,8 +1323,12 @@ impl crate::util::ser::Writeable for TestScorer {
 impl ScoreLookUp for TestScorer {
 	type ScoreParams = ();
 	fn channel_penalty_msat(
-		&self, short_channel_id: u64, _source: &NodeId, _target: &NodeId, usage: ChannelUsage, _score_params: &Self::ScoreParams
+		&self, candidate: &CandidateRouteHop, usage: ChannelUsage, _score_params: &Self::ScoreParams
 	) -> u64 {
+		let short_channel_id = match candidate.globally_unique_short_channel_id() {
+			Some(scid) => scid,
+			None => return 0,
+		};
 		if let Some(scorer_expectations) = self.scorer_expectations.borrow_mut().as_mut() {
 			match scorer_expectations.pop_front() {
 				Some((scid, expectation)) => {
@@ -1337,9 +1406,9 @@ impl WalletSource for TestWalletSource {
 		Ok(self.utxos.borrow().clone())
 	}
 
-	fn get_change_script(&self) -> Result<Script, ()> {
+	fn get_change_script(&self) -> Result<ScriptBuf, ()> {
 		let public_key = bitcoin::PublicKey::new(self.secret_key.public_key(&self.secp));
-		Ok(Script::new_p2pkh(&public_key.pubkey_hash()))
+		Ok(ScriptBuf::new_p2pkh(&public_key.pubkey_hash()))
 	}
 
 	fn sign_tx(&self, mut tx: Transaction) -> Result<Transaction, ()> {
@@ -1349,10 +1418,10 @@ impl WalletSource for TestWalletSource {
 			let sighash = SighashCache::new(&tx)
 				.legacy_signature_hash(i, &utxo.output.script_pubkey, EcdsaSighashType::All as u32)
 				.map_err(|_| ())?;
-			let sig = self.secp.sign_ecdsa(&sighash.as_hash().into(), &self.secret_key);
-			let bitcoin_sig = bitcoin::EcdsaSig { sig, hash_ty: EcdsaSighashType::All }.to_vec();
+			let sig = self.secp.sign_ecdsa(&(*sighash.as_raw_hash()).into(), &self.secret_key);
+			let bitcoin_sig = bitcoin::ecdsa::Signature { sig, hash_ty: EcdsaSighashType::All };
 			tx.input[i].script_sig = Builder::new()
-				.push_slice(&bitcoin_sig)
+				.push_slice(&bitcoin_sig.serialize())
 				.push_slice(&self.secret_key.public_key(&self.secp).serialize())
 				.into_script();
 		}
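
The change to ScoreLookUp::channel_penalty_msat in this diff replaces the (short_channel_id, source, target) arguments with a single CandidateRouteHop. A minimal sketch of a custom scorer against the new signature, assuming the lightning::routing::scoring and lightning::routing::router paths of this release; the struct and its field are hypothetical:

// Hypothetical scorer: penalize one specific SCID, consulting the candidate
// hop the way the updated TestRouter/TestScorer do.
use lightning::routing::router::CandidateRouteHop;
use lightning::routing::scoring::{ChannelUsage, ScoreLookUp};

struct AvoidChannelScorer {
	avoid_scid: u64,
}

impl ScoreLookUp for AvoidChannelScorer {
	type ScoreParams = ();
	fn channel_penalty_msat(
		&self, candidate: &CandidateRouteHop, _usage: ChannelUsage, _score_params: &Self::ScoreParams
	) -> u64 {
		// As in the updated TestScorer, hops without a globally unique SCID
		// (e.g. first hops) are simply not penalized here.
		match candidate.globally_unique_short_channel_id() {
			Some(scid) if scid == self.avoid_scid => u64::max_value(),
			_ => 0,
		}
	}
}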
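The KVStore hunks above only rename namespace/sub_namespace to primary_namespace/secondary_namespace; the key layout TestStore uses is unchanged. A small self-contained sketch of that prefixing rule, with a helper name of our own:

use std::string::String;

// Entries live under `primary_namespace` alone when the secondary namespace is
// empty, otherwise under "primary/secondary", exactly as in TestStore.
fn storage_prefix(primary_namespace: &str, secondary_namespace: &str) -> String {
	if secondary_namespace.is_empty() {
		primary_namespace.to_string()
	} else {
		format!("{}/{}", primary_namespace, secondary_namespace)
	}
}

#[test]
fn prefix_matches_test_store_layout() {
	assert_eq!(storage_prefix("monitors", ""), "monitors");
	assert_eq!(storage_prefix("monitors", "archived"), "monitors/archived");
}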
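Logger::log now takes the Record by value, and TestLogger additionally counts entries per (module_path, peer_id, channel_id). A minimal sketch of a logger against the new signature; the lightning::util::logger::{Logger, Record} and lightning::ln::ChannelId paths are assumed, and the struct is hypothetical:

use std::collections::HashMap;
use std::sync::Mutex;

use bitcoin::secp256k1::PublicKey;
use lightning::ln::ChannelId;
use lightning::util::logger::{Logger, Record};

#[derive(Default)]
struct ContextCountingLogger {
	// Same key shape as the new TestLogger::context map.
	context: Mutex<HashMap<(&'static str, Option<PublicKey>, Option<ChannelId>), usize>>,
}

impl Logger for ContextCountingLogger {
	fn log(&self, record: Record) {
		*self.context.lock().unwrap()
			.entry((record.module_path, record.peer_id, record.channel_id))
			.or_insert(0) += 1;
	}
}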
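TestBroadcaster::broadcast_transactions now reads the locktime via to_consensus_u32() and is_block_height() directly on Transaction::lock_time. A sketch of that check in isolation, assuming lock_time is a bitcoin::absolute::LockTime as in the rust-bitcoin version this diff targets; the helper name is ours:

use bitcoin::absolute::LockTime;

// Mirrors the TestBroadcaster assertion: a block-height locktime above the
// current tip means the transaction must not be broadcast yet.
fn locktime_allows_broadcast(lock_time: LockTime, best_height: u32) -> bool {
	!(lock_time.is_block_height() && lock_time.to_consensus_u32() > best_height)
}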