Merge pull request #1660 from TheBlueMatt/2022-08-cleanup-ratelimits
author Matt Corallo <649246+TheBlueMatt@users.noreply.github.com>
Tue, 16 Aug 2022 04:43:02 +0000 (04:43 +0000)
committer GitHub <noreply@github.com>
Tue, 16 Aug 2022 04:43:02 +0000 (04:43 +0000)
Backfill gossip without buffering directly in LDK

47 files changed:
fuzz/Cargo.toml
fuzz/src/chanmon_consistency.rs
fuzz/src/full_stack.rs
fuzz/src/process_network_graph.rs
lightning-background-processor/Cargo.toml
lightning-background-processor/src/lib.rs
lightning-block-sync/Cargo.toml
lightning-block-sync/src/convert.rs
lightning-block-sync/src/lib.rs
lightning-block-sync/src/test_utils.rs
lightning-invoice/Cargo.toml
lightning-invoice/fuzz/Cargo.toml
lightning-invoice/src/lib.rs
lightning-net-tokio/Cargo.toml
lightning-net-tokio/src/lib.rs
lightning-persister/Cargo.toml
lightning-persister/src/lib.rs
lightning-rapid-gossip-sync/Cargo.toml
lightning-rapid-gossip-sync/src/lib.rs
lightning/Cargo.toml
lightning/src/chain/chainmonitor.rs
lightning/src/chain/channelmonitor.rs
lightning/src/chain/keysinterface.rs
lightning/src/chain/mod.rs
lightning/src/chain/onchaintx.rs
lightning/src/chain/package.rs
lightning/src/lib.rs
lightning/src/ln/chan_utils.rs
lightning/src/ln/chanmon_update_fail_tests.rs
lightning/src/ln/channel.rs
lightning/src/ln/channelmanager.rs
lightning/src/ln/functional_test_utils.rs
lightning/src/ln/functional_tests.rs
lightning/src/ln/monitor_tests.rs
lightning/src/ln/msgs.rs
lightning/src/ln/onion_utils.rs
lightning/src/ln/payment_tests.rs
lightning/src/ln/reorg_tests.rs
lightning/src/ln/script.rs
lightning/src/onion_message/messenger.rs
lightning/src/onion_message/utils.rs
lightning/src/util/events.rs
lightning/src/util/macro_logger.rs
lightning/src/util/ser.rs
lightning/src/util/ser_macros.rs
lightning/src/util/test_utils.rs
lightning/src/util/transaction_utils.rs

index 5569e96cab09b6a4c25c58f0ccfb533e19e63589..be37fb83cdddbbe48034c6bfb990762c299aa9b3 100644 (file)
@@ -20,7 +20,7 @@ stdin_fuzz = []
 afl = { version = "0.4", optional = true }
 lightning = { path = "../lightning", features = ["regex"] }
 lightning-rapid-gossip-sync = { path = "../lightning-rapid-gossip-sync" }
-bitcoin = { version = "0.28.1", features = ["secp-lowmemory"] }
+bitcoin = { version = "0.29.0", features = ["secp-lowmemory"] }
 hex = "0.3"
 honggfuzz = { version = "0.5", optional = true }
 libfuzzer-sys = { version = "0.4", optional = true }
index c11e1baf61712a89ec91a3fbdb0b993b75c8f109..372bed6049370c065c0d7a660f61a466ec715bd4 100644 (file)
 //! send-side handling is correct, other peers. We consider it a failure if any action results in a
 //! channel being force-closed.
 
+use bitcoin::TxMerkleNode;
 use bitcoin::blockdata::block::BlockHeader;
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::blockdata::transaction::{Transaction, TxOut};
 use bitcoin::blockdata::script::{Builder, Script};
 use bitcoin::blockdata::opcodes;
+use bitcoin::blockdata::locktime::PackedLockTime;
 use bitcoin::network::constants::Network;
 
 use bitcoin::hashes::Hash as TraitImport;
@@ -53,7 +55,7 @@ use lightning::routing::router::{Route, RouteHop};
 use utils::test_logger::{self, Output};
 use utils::test_persister::TestPersister;
 
-use bitcoin::secp256k1::{PublicKey,SecretKey};
+use bitcoin::secp256k1::{PublicKey, SecretKey, Scalar};
 use bitcoin::secp256k1::ecdh::SharedSecret;
 use bitcoin::secp256k1::ecdsa::RecoverableSignature;
 use bitcoin::secp256k1::Secp256k1;
@@ -166,10 +168,10 @@ impl KeysInterface for KeyProvider {
                Ok(SecretKey::from_slice(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, self.node_id]).unwrap())
        }
 
-       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> {
+       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&Scalar>) -> Result<SharedSecret, ()> {
                let mut node_secret = self.get_node_secret(recipient)?;
                if let Some(tweak) = tweak {
-                       node_secret.mul_assign(tweak).map_err(|_| ())?;
+                       node_secret = node_secret.mul_tweak(tweak).unwrap();
                }
                Ok(SharedSecret::new(other_key, &node_secret))
        }
@@ -447,7 +449,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
                                let events = $source.get_and_clear_pending_events();
                                assert_eq!(events.len(), 1);
                                if let events::Event::FundingGenerationReady { ref temporary_channel_id, ref channel_value_satoshis, ref output_script, .. } = events[0] {
-                                       let tx = Transaction { version: $chan_id, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+                                       let tx = Transaction { version: $chan_id, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: vec![TxOut {
                                                value: *channel_value_satoshis, script_pubkey: output_script.clone(),
                                        }]};
                                        funding_output = OutPoint { txid: tx.txid(), index: 0 };
@@ -481,11 +483,11 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
        macro_rules! confirm_txn {
                ($node: expr) => { {
                        let chain_hash = genesis_block(Network::Bitcoin).block_hash();
-                       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: chain_hash, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+                       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: chain_hash, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
                        let txdata: Vec<_> = channel_txn.iter().enumerate().map(|(i, tx)| (i + 1, tx)).collect();
                        $node.transactions_confirmed(&header, &txdata, 1);
                        for _ in 2..100 {
-                               header = BlockHeader { version: 0x20000000, prev_blockhash: header.block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+                               header = BlockHeader { version: 0x20000000, prev_blockhash: header.block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
                        }
                        $node.best_block_updated(&header, 99);
                } }
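
The hunks above follow the rust-bitcoin 0.29 / secp256k1 0.24 migration pattern that recurs throughout this diff: `lock_time` becomes a `PackedLockTime` newtype, hash types lose their `Default` impls in favour of `all_zeros()`, and secret-key tweaking moves from `mul_assign` to `mul_tweak`. A minimal standalone sketch of the new API shapes (illustrative values only, not LDK code):

use bitcoin::blockdata::block::BlockHeader;
use bitcoin::blockdata::locktime::PackedLockTime;
use bitcoin::blockdata::transaction::Transaction;
use bitcoin::hashes::Hash;
use bitcoin::secp256k1::{Scalar, SecretKey};
use bitcoin::{BlockHash, TxMerkleNode};

fn migration_examples(node_secret: SecretKey, tweak: &Scalar) {
    // `Transaction::lock_time` is now a newtype (`PackedLockTime`) rather than a bare u32.
    let tx = Transaction { version: 2, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
    assert_eq!(tx.lock_time.0, 0);

    // Hash types no longer implement `Default`; construct zero hashes via `all_zeros()`.
    let header = BlockHeader {
        version: 0x20000000, prev_blockhash: BlockHash::all_zeros(),
        merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42,
    };
    let _ = header.block_hash();

    // Secret-key tweaking moved from the in-place `mul_assign(&[u8; 32])` to the
    // by-value `mul_tweak(&Scalar)`, which returns the tweaked key.
    let _tweaked = node_secret.mul_tweak(tweak).expect("tweak must be a valid scalar");
}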
index f0f8054624d5084fca0fb69f4d3e46596aee5b52..c1d797ea57914cc4e40fb184ccc0a97568bd99df 100644 (file)
 //! or payments to send/ways to handle events generated.
 //! This test has been very useful, though due to its complexity good starting inputs are critical.
 
+use bitcoin::TxMerkleNode;
 use bitcoin::blockdata::block::BlockHeader;
+use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::blockdata::transaction::{Transaction, TxOut};
 use bitcoin::blockdata::script::{Builder, Script};
 use bitcoin::blockdata::opcodes;
+use bitcoin::blockdata::locktime::PackedLockTime;
 use bitcoin::consensus::encode::deserialize;
 use bitcoin::network::constants::Network;
-use bitcoin::blockdata::constants::genesis_block;
 
 use bitcoin::hashes::Hash as TraitImport;
 use bitcoin::hashes::HashEngine as TraitImportEngine;
@@ -50,7 +52,7 @@ use lightning::util::ser::ReadableArgs;
 use utils::test_logger;
 use utils::test_persister::TestPersister;
 
-use bitcoin::secp256k1::{PublicKey,SecretKey};
+use bitcoin::secp256k1::{PublicKey, SecretKey, Scalar};
 use bitcoin::secp256k1::ecdh::SharedSecret;
 use bitcoin::secp256k1::ecdsa::RecoverableSignature;
 use bitcoin::secp256k1::Secp256k1;
@@ -213,7 +215,7 @@ impl<'a> MoneyLossDetector<'a> {
                }
 
                self.blocks_connected += 1;
-               let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height].0, merkle_root: Default::default(), time: self.blocks_connected, bits: 42, nonce: 42 };
+               let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height].0, merkle_root: TxMerkleNode::all_zeros(), time: self.blocks_connected, bits: 42, nonce: 42 };
                self.height += 1;
                self.manager.transactions_confirmed(&header, &txdata, self.height as u32);
                self.manager.best_block_updated(&header, self.height as u32);
@@ -230,7 +232,7 @@ impl<'a> MoneyLossDetector<'a> {
 
        fn disconnect_block(&mut self) {
                if self.height > 0 && (self.max_height < 6 || self.height >= self.max_height - 6) {
-                       let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height - 1].0, merkle_root: Default::default(), time: self.header_hashes[self.height].1, bits: 42, nonce: 42 };
+                       let header = BlockHeader { version: 0x20000000, prev_blockhash: self.header_hashes[self.height - 1].0, merkle_root: TxMerkleNode::all_zeros(), time: self.header_hashes[self.height].1, bits: 42, nonce: 42 };
                        self.manager.block_disconnected(&header, self.height as u32);
                        self.monitor.block_disconnected(&header, self.height as u32);
                        self.height -= 1;
@@ -270,10 +272,10 @@ impl KeysInterface for KeyProvider {
                Ok(self.node_secret.clone())
        }
 
-       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> {
+       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&Scalar>) -> Result<SharedSecret, ()> {
                let mut node_secret = self.get_node_secret(recipient)?;
                if let Some(tweak) = tweak {
-                       node_secret.mul_assign(tweak).map_err(|_| ())?;
+                       node_secret = node_secret.mul_tweak(tweak).unwrap();
                }
                Ok(SharedSecret::new(other_key, &node_secret))
        }
@@ -564,7 +566,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
                        },
                        10 => {
                                'outer_loop: for funding_generation in pending_funding_generation.drain(..) {
-                                       let mut tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+                                       let mut tx = Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: vec![TxOut {
                                                        value: funding_generation.2, script_pubkey: funding_generation.3,
                                                }] };
                                        let funding_output = 'search_loop: loop {
index d649710526ad77f43e8c5dc969a6cdbdca7dd382..3c8e37175d302df47bb35e1991eb8095386d937e 100644 (file)
@@ -1,11 +1,12 @@
 // Imports that need to be added manually
 use lightning_rapid_gossip_sync::RapidGossipSync;
+use bitcoin::hashes::Hash as TraitImport;
 
 use utils::test_logger;
 
 /// Actual fuzz test, method signature and name are fixed
 fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
-       let block_hash = bitcoin::BlockHash::default();
+       let block_hash = bitcoin::BlockHash::all_zeros();
        let logger = test_logger::TestLogger::new("".to_owned(), out);
        let network_graph = lightning::routing::gossip::NetworkGraph::new(block_hash, &logger);
        let rapid_sync = RapidGossipSync::new(&network_graph);
index 2df83f2b75a1e2f646ffdf3abf62b60cac6304b3..ef07a3c9df5fe9dfa0dd9752da03c674d8c1271a 100644 (file)
@@ -14,7 +14,7 @@ all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
 
 [dependencies]
-bitcoin = "0.28.1"
+bitcoin = "0.29.0"
 lightning = { version = "0.0.110", path = "../lightning", features = ["std"] }
 lightning-rapid-gossip-sync = { version = "0.0.110", path = "../lightning-rapid-gossip-sync" }
 
index 484439b3907b364dddc3dc2c13bb6c078be2ad0c..8f6f0c49c1dcf0fdb3d5a6cb3468aeaafa38ef20 100644 (file)
@@ -2,7 +2,10 @@
 //! running properly, and (2) either can or should be run in the background. See docs for
 //! [`BackgroundProcessor`] for more details on the nitty-gritty.
 
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
 #![deny(broken_intra_doc_links)]
+#![deny(private_intra_doc_links)]
+
 #![deny(missing_docs)]
 #![deny(unsafe_code)]
 
@@ -488,6 +491,7 @@ impl Drop for BackgroundProcessor {
 mod tests {
        use bitcoin::blockdata::block::BlockHeader;
        use bitcoin::blockdata::constants::genesis_block;
+       use bitcoin::blockdata::locktime::PackedLockTime;
        use bitcoin::blockdata::transaction::{Transaction, TxOut};
        use bitcoin::network::constants::Network;
        use lightning::chain::{BestBlock, Confirm, chainmonitor};
@@ -513,6 +517,8 @@ mod tests {
        use std::sync::{Arc, Mutex};
        use std::sync::mpsc::SyncSender;
        use std::time::Duration;
+       use bitcoin::hashes::Hash;
+       use bitcoin::TxMerkleNode;
        use lightning::routing::scoring::{FixedPenaltyScorer};
        use lightning_rapid_gossip_sync::RapidGossipSync;
        use super::{BackgroundProcessor, GossipSync, FRESHNESS_TIMER};
@@ -700,7 +706,7 @@ mod tests {
                                        assert_eq!(channel_value_satoshis, $channel_value);
                                        assert_eq!(user_channel_id, 42);
 
-                                       let tx = Transaction { version: 1 as i32, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+                                       let tx = Transaction { version: 1 as i32, lock_time: PackedLockTime(0), input: Vec::new(), output: vec![TxOut {
                                                value: channel_value_satoshis, script_pubkey: output_script.clone(),
                                        }]};
                                        (temporary_channel_id, tx)
@@ -722,7 +728,7 @@ mod tests {
                for i in 1..=depth {
                        let prev_blockhash = node.best_block.block_hash();
                        let height = node.best_block.height() + 1;
-                       let header = BlockHeader { version: 0x20000000, prev_blockhash, merkle_root: Default::default(), time: height, bits: 42, nonce: 42 };
+                       let header = BlockHeader { version: 0x20000000, prev_blockhash, merkle_root: TxMerkleNode::all_zeros(), time: height, bits: 42, nonce: 42 };
                        let txdata = vec![(0, tx)];
                        node.best_block = BestBlock::new(header.block_hash(), height);
                        match i {
index c6650208e26023f7c693bb5aa723439dacb91cb3..27fa32149f1c4cadb3864f9214767778ebd75832 100644 (file)
@@ -18,7 +18,7 @@ rest-client = [ "serde", "serde_json", "chunked_transfer" ]
 rpc-client = [ "serde", "serde_json", "chunked_transfer" ]
 
 [dependencies]
-bitcoin = "0.28.1"
+bitcoin = "0.29.0"
 lightning = { version = "0.0.110", path = "../lightning" }
 futures = { version = "0.3" }
 tokio = { version = "1.0", features = [ "io-util", "net", "time" ], optional = true }
index 8023c83751920647caecb9ecb67288cc0cfc6506..ed28833b7b30d4986fe53e2801d2ddaf43dcf765 100644 (file)
@@ -15,6 +15,7 @@ use serde_json;
 use std::convert::From;
 use std::convert::TryFrom;
 use std::convert::TryInto;
+use bitcoin::hashes::Hash;
 
 /// Conversion from `std::io::Error` into `BlockSourceError`.
 impl From<std::io::Error> for BlockSourceError {
@@ -57,7 +58,7 @@ impl TryInto<BlockHeaderData> for JsonResponse {
 
                // Add an empty previousblockhash for the genesis block.
                if let None = header.get("previousblockhash") {
-                       let hash: BlockHash = Default::default();
+                       let hash: BlockHash = BlockHash::all_zeros();
                        header.as_object_mut().unwrap().insert("previousblockhash".to_string(), serde_json::json!(hash.to_hex()));
                }
 
index 321dd57e4713a638c86ba4fc033a89473d0129af..823cb5eb554e2e9a0525004f4c84550adb9ab2a7 100644 (file)
 //! Both features support either blocking I/O using `std::net::TcpStream` or, with feature `tokio`,
 //! non-blocking I/O using `tokio::net::TcpStream` from inside a Tokio runtime.
 
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
 #![deny(broken_intra_doc_links)]
+#![deny(private_intra_doc_links)]
+
 #![deny(missing_docs)]
 #![deny(unsafe_code)]
 
index baaab456b5adeb8fb25df64d6d306c011a0062dd..0c402deb3294663527afaab0011cea1f627569ef 100644 (file)
@@ -7,7 +7,7 @@ use bitcoin::hash_types::BlockHash;
 use bitcoin::network::constants::Network;
 use bitcoin::util::uint::Uint256;
 use bitcoin::util::hash::bitcoin_merkle_root;
-use bitcoin::Transaction;
+use bitcoin::{PackedLockTime, Transaction};
 
 use lightning::chain;
 
@@ -45,7 +45,7 @@ impl Blockchain {
                        // but that's OK because those tests don't trigger the check.
                        let coinbase = Transaction {
                                version: 0,
-                               lock_time: 0,
+                               lock_time: PackedLockTime::ZERO,
                                input: vec![],
                                output: vec![]
                        };
index cc0e68cfc422b1bd098b2b985509b6b8cd1bb710..b7b8f7f014654979c6eec21058828351a2f70938 100644 (file)
@@ -19,9 +19,9 @@ no-std = ["hashbrown", "lightning/no-std", "core2/alloc"]
 std = ["bitcoin_hashes/std", "num-traits/std", "lightning/std", "bech32/std"]
 
 [dependencies]
-bech32 = { version = "0.8", default-features = false }
+bech32 = { version = "0.9.0", default-features = false }
 lightning = { version = "0.0.110", path = "../lightning", default-features = false }
-secp256k1 = { version = "0.22", default-features = false, features = ["recovery", "alloc"] }
+secp256k1 = { version = "0.24.0", default-features = false, features = ["recovery", "alloc"] }
 num-traits = { version = "0.2.8", default-features = false }
 bitcoin_hashes = { version = "0.10", default-features = false }
 hashbrown = { version = "0.11", optional = true }
index d741864aea2ec8d55a824f725d8e875e9a9d6c0c..833606c1ad83eea553af28d9d780639093e249a9 100644 (file)
@@ -16,7 +16,7 @@ honggfuzz = { version = "0.5", optional = true }
 afl = { version = "0.4", optional = true }
 lightning-invoice = { path = ".." }
 lightning = { path = "../../lightning", features = ["regex"] }
-bech32 = "0.8"
+bech32 = "0.9.0"
 
 # Prevent this from interfering with workspaces
 [workspace]
index bad024c66c5a37114d144c146014294e0f747501..c7d7a4042f7aa8a583acc8654d92610684d2746f 100644 (file)
@@ -1,9 +1,12 @@
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
+#![deny(broken_intra_doc_links)]
+#![deny(private_intra_doc_links)]
+
 #![deny(missing_docs)]
 #![deny(non_upper_case_globals)]
 #![deny(non_camel_case_types)]
 #![deny(non_snake_case)]
 #![deny(unused_mut)]
-#![deny(broken_intra_doc_links)]
 
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 
index dd7770cbe8cdd65145e5ce663c45f2143754835e..a3e0cbb45860a31ab91d942cd7a8af9e858bc238 100644 (file)
@@ -15,7 +15,7 @@ all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
 
 [dependencies]
-bitcoin = "0.28.1"
+bitcoin = "0.29.0"
 lightning = { version = "0.0.110", path = "../lightning" }
 tokio = { version = "1.0", features = [ "io-util", "macros", "rt", "sync", "net", "time" ] }
 
index 1f00fcb3ca0e76984b6deca2bf4e26fc8c096661..ac9d4bb3bd5899a08e0dfaadebd811fb779e2c04 100644 (file)
 //! }
 //! ```
 
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
 #![deny(broken_intra_doc_links)]
-#![deny(missing_docs)]
+#![deny(private_intra_doc_links)]
 
+#![deny(missing_docs)]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
 
 use bitcoin::secp256k1::PublicKey;
index 7de0ddc153c807c9a97016dfb6f1feaa4b1a5fdb..728743c8a4da2b35629c17ef93ebf33b156cd4df 100644 (file)
@@ -16,7 +16,7 @@ rustdoc-args = ["--cfg", "docsrs"]
 _bench_unstable = ["lightning/_bench_unstable"]
 
 [dependencies]
-bitcoin = "0.28.1"
+bitcoin = "0.29.0"
 lightning = { version = "0.0.110", path = "../lightning" }
 libc = "0.2"
 
index 3e32791711e1428746eaadef6f26f11248370439..b277c30ca8cfc9024a6de613f5ad7fea4b7d65c1 100644 (file)
@@ -1,6 +1,9 @@
 //! Utilities that handle persisting Rust-Lightning data to disk via standard filesystem APIs.
 
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
 #![deny(broken_intra_doc_links)]
+#![deny(private_intra_doc_links)]
+
 #![deny(missing_docs)]
 
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
@@ -134,7 +137,7 @@ mod tests {
        use crate::FilesystemPersister;
        use bitcoin::blockdata::block::{Block, BlockHeader};
        use bitcoin::hashes::hex::FromHex;
-       use bitcoin::Txid;
+       use bitcoin::{Txid, TxMerkleNode};
        use lightning::chain::ChannelMonitorUpdateErr;
        use lightning::chain::chainmonitor::Persist;
        use lightning::chain::transaction::OutPoint;
@@ -144,6 +147,7 @@ mod tests {
        use lightning::util::events::{ClosureReason, MessageSendEventsProvider};
        use lightning::util::test_utils;
        use std::fs;
+       use bitcoin::hashes::Hash;
        #[cfg(target_os = "windows")]
        use {
                lightning::get_event_msg,
@@ -221,7 +225,7 @@ mod tests {
                let node_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap();
                assert_eq!(node_txn.len(), 1);
 
-               let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+               let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
                connect_block(&nodes[1], &Block { header, txdata: vec![node_txn[0].clone(), node_txn[0].clone()]});
                check_closed_broadcast!(nodes[1], true);
                check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
index 39518bbbebd672ac6f3960288d48e8118be05ac2..b8bc8437bca04f2c96e08a12f5464635195c6993 100644 (file)
@@ -14,7 +14,7 @@ _bench_unstable = []
 
 [dependencies]
 lightning = { version = "0.0.110", path = "../lightning" }
-bitcoin = { version = "0.28.1", default-features = false }
+bitcoin = { version = "0.29.0", default-features = false }
 
 [dev-dependencies]
 lightning = { version = "0.0.110", path = "../lightning", features = ["_test_utils"] }
index 6e9280f86a3f85909c8c25e49fc254bcc3a06819..70758e1fe07e6a5ae0f5a430ac374ee3ae5708be 100644 (file)
@@ -1,6 +1,9 @@
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
+#![deny(broken_intra_doc_links)]
+#![deny(private_intra_doc_links)]
+
 #![deny(missing_docs)]
 #![deny(unsafe_code)]
-#![deny(broken_intra_doc_links)]
 #![deny(non_upper_case_globals)]
 #![deny(non_camel_case_types)]
 #![deny(non_snake_case)]
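
The `// Prefix these with rustdoc::` note repeated across these crate roots refers to the namespaced form the lints take once the MSRV reaches 1.52; at that point the attributes would become (sketch):

#![deny(rustdoc::broken_intra_doc_links)]
#![deny(rustdoc::private_intra_doc_links)]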
index 7980a9b1dc88182c1c7acb2ebd9df40a0afae1ab..fde54661420ec546db00e0ac6ef3d65ef3f19ba8 100644 (file)
@@ -38,7 +38,7 @@ grind_signatures = []
 default = ["std", "grind_signatures"]
 
 [dependencies]
-bitcoin = { version = "0.28.1", default-features = false, features = ["secp-recovery"] }
+bitcoin = { version = "0.29.0", default-features = false, features = ["secp-recovery"] }
 
 hashbrown = { version = "0.11", optional = true }
 hex = { version = "0.4", optional = true }
@@ -52,6 +52,6 @@ hex = "0.4"
 regex = "1.5.6"
 
 [dev-dependencies.bitcoin]
-version = "0.28.1"
+version = "0.29.0"
 default-features = false
 features = ["bitcoinconsensus", "secp-recovery"]
index 5c4ede0b16161819a8c7b54f4b8696cb5274912a..3d84fdf93a52f391200fde732b0954e77e01d724 100644 (file)
@@ -263,82 +263,67 @@ where C::Target: chain::Filter,
        where
                FN: Fn(&ChannelMonitor<ChannelSigner>, &TransactionData) -> Vec<TransactionOutputs>
        {
-               let mut dependent_txdata = Vec::new();
-               {
-                       let monitor_states = self.monitors.write().unwrap();
-                       if let Some(height) = best_height {
-                               // If the best block height is being updated, update highest_chain_height under the
-                               // monitors write lock.
-                               let old_height = self.highest_chain_height.load(Ordering::Acquire);
-                               let new_height = height as usize;
-                               if new_height > old_height {
-                                       self.highest_chain_height.store(new_height, Ordering::Release);
-                               }
+               let monitor_states = self.monitors.write().unwrap();
+               if let Some(height) = best_height {
+                       // If the best block height is being updated, update highest_chain_height under the
+                       // monitors write lock.
+                       let old_height = self.highest_chain_height.load(Ordering::Acquire);
+                       let new_height = height as usize;
+                       if new_height > old_height {
+                               self.highest_chain_height.store(new_height, Ordering::Release);
                        }
+               }
 
-                       for (funding_outpoint, monitor_state) in monitor_states.iter() {
-                               let monitor = &monitor_state.monitor;
-                               let mut txn_outputs;
-                               {
-                                       txn_outputs = process(monitor, txdata);
-                                       let update_id = MonitorUpdateId {
-                                               contents: UpdateOrigin::ChainSync(self.sync_persistence_id.get_increment()),
-                                       };
-                                       let mut pending_monitor_updates = monitor_state.pending_monitor_updates.lock().unwrap();
-                                       if let Some(height) = best_height {
-                                               if !monitor_state.has_pending_chainsync_updates(&pending_monitor_updates) {
-                                                       // If there are not ChainSync persists awaiting completion, go ahead and
-                                                       // set last_chain_persist_height here - we wouldn't want the first
-                                                       // TemporaryFailure to always immediately be considered "overly delayed".
-                                                       monitor_state.last_chain_persist_height.store(height as usize, Ordering::Release);
-                                               }
+               for (funding_outpoint, monitor_state) in monitor_states.iter() {
+                       let monitor = &monitor_state.monitor;
+                       let mut txn_outputs;
+                       {
+                               txn_outputs = process(monitor, txdata);
+                               let update_id = MonitorUpdateId {
+                                       contents: UpdateOrigin::ChainSync(self.sync_persistence_id.get_increment()),
+                               };
+                               let mut pending_monitor_updates = monitor_state.pending_monitor_updates.lock().unwrap();
+                               if let Some(height) = best_height {
+                                       if !monitor_state.has_pending_chainsync_updates(&pending_monitor_updates) {
+                                               // If there are not ChainSync persists awaiting completion, go ahead and
+                                               // set last_chain_persist_height here - we wouldn't want the first
+                                               // TemporaryFailure to always immediately be considered "overly delayed".
+                                               monitor_state.last_chain_persist_height.store(height as usize, Ordering::Release);
                                        }
+                               }
 
-                                       log_trace!(self.logger, "Syncing Channel Monitor for channel {}", log_funding_info!(monitor));
-                                       match self.persister.update_persisted_channel(*funding_outpoint, &None, monitor, update_id) {
-                                               Ok(()) =>
-                                                       log_trace!(self.logger, "Finished syncing Channel Monitor for channel {}", log_funding_info!(monitor)),
-                                               Err(ChannelMonitorUpdateErr::PermanentFailure) => {
-                                                       monitor_state.channel_perm_failed.store(true, Ordering::Release);
-                                                       self.pending_monitor_events.lock().unwrap().push((*funding_outpoint, vec![MonitorEvent::UpdateFailed(*funding_outpoint)], monitor.get_counterparty_node_id()));
-                                               },
-                                               Err(ChannelMonitorUpdateErr::TemporaryFailure) => {
-                                                       log_debug!(self.logger, "Channel Monitor sync for channel {} in progress, holding events until completion!", log_funding_info!(monitor));
-                                                       pending_monitor_updates.push(update_id);
-                                               },
-                                       }
+                               log_trace!(self.logger, "Syncing Channel Monitor for channel {}", log_funding_info!(monitor));
+                               match self.persister.update_persisted_channel(*funding_outpoint, &None, monitor, update_id) {
+                                       Ok(()) =>
+                                               log_trace!(self.logger, "Finished syncing Channel Monitor for channel {}", log_funding_info!(monitor)),
+                                       Err(ChannelMonitorUpdateErr::PermanentFailure) => {
+                                               monitor_state.channel_perm_failed.store(true, Ordering::Release);
+                                               self.pending_monitor_events.lock().unwrap().push((*funding_outpoint, vec![MonitorEvent::UpdateFailed(*funding_outpoint)], monitor.get_counterparty_node_id()));
+                                       },
+                                       Err(ChannelMonitorUpdateErr::TemporaryFailure) => {
+                                               log_debug!(self.logger, "Channel Monitor sync for channel {} in progress, holding events until completion!", log_funding_info!(monitor));
+                                               pending_monitor_updates.push(update_id);
+                                       },
                                }
+                       }
 
-                               // Register any new outputs with the chain source for filtering, storing any dependent
-                               // transactions from within the block that previously had not been included in txdata.
-                               if let Some(ref chain_source) = self.chain_source {
-                                       let block_hash = header.block_hash();
-                                       for (txid, mut outputs) in txn_outputs.drain(..) {
-                                               for (idx, output) in outputs.drain(..) {
-                                                       // Register any new outputs with the chain source for filtering and recurse
-                                                       // if it indicates that there are dependent transactions within the block
-                                                       // that had not been previously included in txdata.
-                                                       let output = WatchedOutput {
-                                                               block_hash: Some(block_hash),
-                                                               outpoint: OutPoint { txid, index: idx as u16 },
-                                                               script_pubkey: output.script_pubkey,
-                                                       };
-                                                       if let Some(tx) = chain_source.register_output(output) {
-                                                               dependent_txdata.push(tx);
-                                                       }
-                                               }
+                       // Register any new outputs with the chain source for filtering, storing any dependent
+                       // transactions from within the block that previously had not been included in txdata.
+                       if let Some(ref chain_source) = self.chain_source {
+                               let block_hash = header.block_hash();
+                               for (txid, mut outputs) in txn_outputs.drain(..) {
+                                       for (idx, output) in outputs.drain(..) {
+                                               // Register any new outputs with the chain source for filtering
+                                               let output = WatchedOutput {
+                                                       block_hash: Some(block_hash),
+                                                       outpoint: OutPoint { txid, index: idx as u16 },
+                                                       script_pubkey: output.script_pubkey,
+                                               };
+                                               chain_source.register_output(output)
                                        }
                                }
                        }
                }
-
-               // Recursively call for any dependent transactions that were identified by the chain source.
-               if !dependent_txdata.is_empty() {
-                       dependent_txdata.sort_unstable_by_key(|(index, _tx)| *index);
-                       dependent_txdata.dedup_by_key(|(index, _tx)| *index);
-                       let txdata: Vec<_> = dependent_txdata.iter().map(|(index, tx)| (*index, tx)).collect();
-                       self.process_chain_data(header, None, &txdata, process); // We skip the best height the second go-around
-               }
        }
 
        /// Creates a new `ChainMonitor` used to watch on-chain activity pertaining to channels.
@@ -733,7 +718,8 @@ impl<ChannelSigner: Sign, C: Deref, T: Deref, F: Deref, L: Deref, P: Deref> even
 
 #[cfg(test)]
 mod tests {
-       use bitcoin::BlockHeader;
+       use bitcoin::{BlockHeader, TxMerkleNode};
+       use bitcoin::hashes::Hash;
        use ::{check_added_monitors, check_closed_broadcast, check_closed_event};
        use ::{expect_payment_sent, expect_payment_claimed, expect_payment_sent_without_paths, expect_payment_path_successful, get_event_msg};
        use ::{get_htlc_update_msgs, get_local_commitment_txn, get_revoke_commit_msgs, get_route_and_payment_hash, unwrap_send_err};
@@ -745,50 +731,6 @@ mod tests {
        use ln::msgs::ChannelMessageHandler;
        use util::errors::APIError;
        use util::events::{ClosureReason, MessageSendEvent, MessageSendEventsProvider};
-       use util::test_utils::{OnRegisterOutput, TxOutReference};
-
-       /// Tests that in-block dependent transactions are processed by `block_connected` when not
-       /// included in `txdata` but returned by [`chain::Filter::register_output`]. For instance,
-       /// a (non-anchor) commitment transaction's HTLC output may be spent in the same block as the
-       /// commitment transaction itself. An Electrum client may filter the commitment transaction but
-       /// needs to return the HTLC transaction so it can be processed.
-       #[test]
-       fn connect_block_checks_dependent_transactions() {
-               let chanmon_cfgs = create_chanmon_cfgs(2);
-               let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
-               let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
-               let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
-               let channel = create_announced_chan_between_nodes(
-                       &nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
-
-               // Send a payment, saving nodes[0]'s revoked commitment and HTLC-Timeout transactions.
-               let (commitment_tx, htlc_tx) = {
-                       let payment_preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 5_000_000).0;
-                       let mut txn = get_local_commitment_txn!(nodes[0], channel.2);
-                       claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
-
-                       assert_eq!(txn.len(), 2);
-                       (txn.remove(0), txn.remove(0))
-               };
-
-               // Set expectations on nodes[1]'s chain source to return dependent transactions.
-               let htlc_output = TxOutReference(commitment_tx.clone(), 0);
-               let to_local_output = TxOutReference(commitment_tx.clone(), 1);
-               let htlc_timeout_output = TxOutReference(htlc_tx.clone(), 0);
-               nodes[1].chain_source
-                       .expect(OnRegisterOutput { with: htlc_output, returns: Some((1, htlc_tx)) })
-                       .expect(OnRegisterOutput { with: to_local_output, returns: None })
-                       .expect(OnRegisterOutput { with: htlc_timeout_output, returns: None });
-
-               // Notify nodes[1] that nodes[0]'s revoked commitment transaction was mined. The chain
-               // source should return the dependent HTLC transaction when the HTLC output is registered.
-               mine_transaction(&nodes[1], &commitment_tx);
-
-               // Clean up so uninteresting assertions don't fail.
-               check_added_monitors!(nodes[1], 1);
-               nodes[1].node.get_and_clear_pending_msg_events();
-               nodes[1].node.get_and_clear_pending_events();
-       }
 
        #[test]
        fn test_async_ooo_offchain_updates() {
@@ -900,7 +842,7 @@ mod tests {
                let new_header = BlockHeader {
                        version: 2, time: 0, bits: 0, nonce: 0,
                        prev_blockhash: nodes[0].best_block_info().0,
-                       merkle_root: Default::default() };
+                       merkle_root: TxMerkleNode::all_zeros() };
                nodes[0].chain_monitor.chain_monitor.transactions_confirmed(&new_header,
                        &[(0, &remote_txn[0]), (1, &remote_txn[1])], nodes[0].best_block_info().1 + 1);
                assert!(nodes[0].chain_monitor.release_pending_monitor_events().is_empty());
@@ -926,7 +868,7 @@ mod tests {
                        let latest_header = BlockHeader {
                                version: 2, time: 0, bits: 0, nonce: 0,
                                prev_blockhash: nodes[0].best_block_info().0,
-                               merkle_root: Default::default() };
+                               merkle_root: TxMerkleNode::all_zeros() };
                        nodes[0].chain_monitor.chain_monitor.best_block_updated(&latest_header, nodes[0].best_block_info().1 + LATENCY_GRACE_PERIOD_BLOCKS);
                } else {
                        let persistences = chanmon_cfgs[0].persister.chain_sync_monitor_persistences.lock().unwrap().clone();
index 855263fe53b9bd09ce0f6bbbd64beabc34db64d5..4bb08724c1700068c9bce24e97ffe297197cc916 100644 (file)
@@ -404,7 +404,7 @@ impl Writeable for OnchainEventEntry {
 
 impl MaybeReadable for OnchainEventEntry {
        fn read<R: io::Read>(reader: &mut R) -> Result<Option<Self>, DecodeError> {
-               let mut txid = Default::default();
+               let mut txid = Txid::all_zeros();
                let mut height = 0;
                let mut event = None;
                read_tlv_fields!(reader, {
@@ -1756,12 +1756,12 @@ macro_rules! fail_unbroadcast_htlcs {
 
 #[cfg(test)]
 pub fn deliberately_bogus_accepted_htlc_witness_program() -> Vec<u8> {
-       let mut ret = [opcodes::all::OP_NOP.into_u8(); 136];
-       ret[131] = opcodes::all::OP_DROP.into_u8();
-       ret[132] = opcodes::all::OP_DROP.into_u8();
-       ret[133] = opcodes::all::OP_DROP.into_u8();
-       ret[134] = opcodes::all::OP_DROP.into_u8();
-       ret[135] = opcodes::OP_TRUE.into_u8();
+       let mut ret = [opcodes::all::OP_NOP.to_u8(); 136];
+       ret[131] = opcodes::all::OP_DROP.to_u8();
+       ret[132] = opcodes::all::OP_DROP.to_u8();
+       ret[133] = opcodes::all::OP_DROP.to_u8();
+       ret[134] = opcodes::all::OP_DROP.to_u8();
+       ret[135] = opcodes::OP_TRUE.to_u8();
        Vec::from(&ret[..])
 }
 
@@ -2110,7 +2110,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
                        };
                }
 
-               let commitment_number = 0xffffffffffff - ((((tx.input[0].sequence as u64 & 0xffffff) << 3*8) | (tx.lock_time as u64 & 0xffffff)) ^ self.commitment_transaction_number_obscure_factor);
+               let commitment_number = 0xffffffffffff - ((((tx.input[0].sequence.0 as u64 & 0xffffff) << 3*8) | (tx.lock_time.0 as u64 & 0xffffff)) ^ self.commitment_transaction_number_obscure_factor);
                if commitment_number >= self.get_min_seen_secret() {
                        let secret = self.get_secret(commitment_number).unwrap();
                        let per_commitment_key = ignore_error!(SecretKey::from_slice(&secret));
@@ -2495,7 +2495,7 @@ impl<Signer: Sign> ChannelMonitorImpl<Signer> {
                                        log_info!(logger, "Channel {} closed by funding output spend in txid {}.",
                                                log_bytes!(self.funding_info.0.to_channel_id()), tx.txid());
                                        self.funding_spend_seen = true;
-                                       if (tx.input[0].sequence >> 8*3) as u8 == 0x80 && (tx.lock_time >> 8*3) as u8 == 0x20 {
+                                       if (tx.input[0].sequence.0 >> 8*3) as u8 == 0x80 && (tx.lock_time.0 >> 8*3) as u8 == 0x20 {
                                                let (mut new_outpoints, new_outputs) = self.check_spend_counterparty_transaction(&tx, height, &logger);
                                                if !new_outputs.1.is_empty() {
                                                        watch_outputs.push(new_outputs);
@@ -3469,7 +3469,7 @@ mod tests {
        use util::ser::{ReadableArgs, Writeable};
        use sync::{Arc, Mutex};
        use io;
-       use bitcoin::Witness;
+       use bitcoin::{PackedLockTime, Sequence, TxMerkleNode, Witness};
        use prelude::*;
 
        fn do_test_funding_spend_refuses_updates(use_local_txn: bool) {
@@ -3513,7 +3513,7 @@ mod tests {
                let new_header = BlockHeader {
                        version: 2, time: 0, bits: 0, nonce: 0,
                        prev_blockhash: nodes[0].best_block_info().0,
-                       merkle_root: Default::default() };
+                       merkle_root: TxMerkleNode::all_zeros() };
                let conf_height = nodes[0].best_block_info().1 + 1;
                nodes[1].chain_monitor.chain_monitor.transactions_confirmed(&new_header,
                        &[(0, broadcast_tx)], conf_height);
@@ -3573,7 +3573,7 @@ mod tests {
                let fee_estimator = TestFeeEstimator { sat_per_kw: Mutex::new(253) };
 
                let dummy_key = PublicKey::from_secret_key(&secp_ctx, &SecretKey::from_slice(&[42; 32]).unwrap());
-               let dummy_tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: Vec::new() };
+               let dummy_tx = Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
 
                let mut preimages = Vec::new();
                {
@@ -3639,7 +3639,7 @@ mod tests {
                        delayed_payment_basepoint: PublicKey::from_secret_key(&secp_ctx, &SecretKey::from_slice(&[47; 32]).unwrap()),
                        htlc_basepoint: PublicKey::from_secret_key(&secp_ctx, &SecretKey::from_slice(&[48; 32]).unwrap())
                };
-               let funding_outpoint = OutPoint { txid: Default::default(), index: u16::max_value() };
+               let funding_outpoint = OutPoint { txid: Txid::all_zeros(), index: u16::max_value() };
                let channel_parameters = ChannelTransactionParameters {
                        holder_pubkeys: keys.holder_channel_pubkeys.clone(),
                        holder_selected_contest_delay: 66,
@@ -3753,7 +3753,7 @@ mod tests {
 
                // Justice tx with 1 to_holder, 2 revoked offered HTLCs, 1 revoked received HTLCs
                for &opt_anchors in [false, true].iter() {
-                       let mut claim_tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: Vec::new() };
+                       let mut claim_tx = Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
                        let mut sum_actual_sigs = 0;
                        for i in 0..4 {
                                claim_tx.input.push(TxIn {
@@ -3762,7 +3762,7 @@ mod tests {
                                                vout: i,
                                        },
                                        script_sig: Script::new(),
-                                       sequence: 0xfffffffd,
+                                       sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
                                        witness: Witness::new(),
                                });
                        }
@@ -3785,7 +3785,7 @@ mod tests {
 
                // Claim tx with 1 offered HTLCs, 3 received HTLCs
                for &opt_anchors in [false, true].iter() {
-                       let mut claim_tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: Vec::new() };
+                       let mut claim_tx = Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
                        let mut sum_actual_sigs = 0;
                        for i in 0..4 {
                                claim_tx.input.push(TxIn {
@@ -3794,7 +3794,7 @@ mod tests {
                                                vout: i,
                                        },
                                        script_sig: Script::new(),
-                                       sequence: 0xfffffffd,
+                                       sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
                                        witness: Witness::new(),
                                });
                        }
@@ -3817,7 +3817,7 @@ mod tests {
 
                // Justice tx with 1 revoked HTLC-Success tx output
                for &opt_anchors in [false, true].iter() {
-                       let mut claim_tx = Transaction { version: 0, lock_time: 0, input: Vec::new(), output: Vec::new() };
+                       let mut claim_tx = Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
                        let mut sum_actual_sigs = 0;
                        claim_tx.input.push(TxIn {
                                previous_output: BitcoinOutPoint {
@@ -3825,7 +3825,7 @@ mod tests {
                                        vout: 0,
                                },
                                script_sig: Script::new(),
-                               sequence: 0xfffffffd,
+                               sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
                                witness: Witness::new(),
                        });
                        claim_tx.output.push(TxOut {
index 9a3baea8bb442a3c6928cca83559606a66de95ff..73b8a1b98224ace7aef2f6db05a82ca2a020e476 100644 (file)
@@ -25,11 +25,11 @@ use bitcoin::hashes::sha256::Hash as Sha256;
 use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::WPubkeyHash;
 
-use bitcoin::secp256k1::{SecretKey, PublicKey};
+use bitcoin::secp256k1::{SecretKey, PublicKey, Scalar};
 use bitcoin::secp256k1::{Secp256k1, ecdsa::Signature, Signing};
 use bitcoin::secp256k1::ecdh::SharedSecret;
 use bitcoin::secp256k1::ecdsa::RecoverableSignature;
-use bitcoin::{secp256k1, Witness};
+use bitcoin::{PackedLockTime, secp256k1, Sequence, Witness};
 
 use util::{byte_utils, transaction_utils};
 use util::crypto::{hkdf_extract_expand_twice, sign};
@@ -410,7 +410,7 @@ pub trait KeysInterface {
        /// secret, though this is less efficient.
        ///
        /// [`node secret`]: Self::get_node_secret
-       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()>;
+       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&Scalar>) -> Result<SharedSecret, ()>;
        /// Get a script pubkey which we send funds to when claiming on-chain contestable outputs.
        ///
        /// This method should return a different value each time it is called, to avoid linking
@@ -626,7 +626,7 @@ impl InMemorySigner {
                if spend_tx.input.len() <= input_idx { return Err(()); }
                if !spend_tx.input[input_idx].script_sig.is_empty() { return Err(()); }
                if spend_tx.input[input_idx].previous_output != descriptor.outpoint.into_bitcoin_outpoint() { return Err(()); }
-               if spend_tx.input[input_idx].sequence != descriptor.to_self_delay as u32 { return Err(()); }
+               if spend_tx.input[input_idx].sequence.0 != descriptor.to_self_delay as u32 { return Err(()); }
 
                let delayed_payment_key = chan_utils::derive_private_key(&secp_ctx, &descriptor.per_commitment_point, &self.delayed_payment_base_key)
                        .expect("We constructed the payment_base_key, so we can only fail here if the RNG is busted.");
@@ -1022,7 +1022,7 @@ impl KeysManager {
                                        input.push(TxIn {
                                                previous_output: descriptor.outpoint.into_bitcoin_outpoint(),
                                                script_sig: Script::new(),
-                                               sequence: 0,
+                                               sequence: Sequence::ZERO,
                                                witness: Witness::new(),
                                        });
                                        witness_weight += StaticPaymentOutputDescriptor::MAX_WITNESS_LENGTH;
@@ -1033,7 +1033,7 @@ impl KeysManager {
                                        input.push(TxIn {
                                                previous_output: descriptor.outpoint.into_bitcoin_outpoint(),
                                                script_sig: Script::new(),
-                                               sequence: descriptor.to_self_delay as u32,
+                                               sequence: Sequence(descriptor.to_self_delay as u32),
                                                witness: Witness::new(),
                                        });
                                        witness_weight += DelayedPaymentOutputDescriptor::MAX_WITNESS_LENGTH;
@@ -1044,7 +1044,7 @@ impl KeysManager {
                                        input.push(TxIn {
                                                previous_output: outpoint.into_bitcoin_outpoint(),
                                                script_sig: Script::new(),
-                                               sequence: 0,
+                                               sequence: Sequence::ZERO,
                                                witness: Witness::new(),
                                        });
                                        witness_weight += 1 + 73 + 34;
@@ -1056,7 +1056,7 @@ impl KeysManager {
                }
                let mut spend_tx = Transaction {
                        version: 2,
-                       lock_time: 0,
+                       lock_time: PackedLockTime(0),
                        input,
                        output: outputs,
                };
@@ -1140,10 +1140,10 @@ impl KeysInterface for KeysManager {
                }
        }
 
-       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> {
+       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&Scalar>) -> Result<SharedSecret, ()> {
                let mut node_secret = self.get_node_secret(recipient)?;
                if let Some(tweak) = tweak {
-                       node_secret.mul_assign(tweak).map_err(|_| ())?;
+                       node_secret = node_secret.mul_tweak(tweak).map_err(|_| ())?;
                }
                Ok(SharedSecret::new(other_key, &node_secret))
        }
@@ -1232,10 +1232,10 @@ impl KeysInterface for PhantomKeysManager {
                }
        }
 
-       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> {
+       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&Scalar>) -> Result<SharedSecret, ()> {
                let mut node_secret = self.get_node_secret(recipient)?;
                if let Some(tweak) = tweak {
-                       node_secret.mul_assign(tweak).map_err(|_| ())?;
+                       node_secret = node_secret.mul_tweak(tweak).map_err(|_| ())?;
                }
                Ok(SharedSecret::new(other_key, &node_secret))
        }
index a0eb17c6be0051eef2d902cd09f4af4ee1ff89a7..42508569575f988bf508ae69ba64ab40abb453e7 100644 (file)
@@ -12,7 +12,7 @@
 use bitcoin::blockdata::block::{Block, BlockHeader};
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::blockdata::script::Script;
-use bitcoin::blockdata::transaction::{Transaction, TxOut};
+use bitcoin::blockdata::transaction::TxOut;
 use bitcoin::hash_types::{BlockHash, Txid};
 use bitcoin::network::constants::Network;
 use bitcoin::secp256k1::PublicKey;
@@ -151,15 +151,15 @@ pub trait Confirm {
        /// in the event of a chain reorganization, it must not be called with a `header` that is no
        /// longer in the chain as of the last call to [`best_block_updated`].
        ///
-       /// [chain order]: Confirm#Order
+       /// [chain order]: Confirm#order
        /// [`best_block_updated`]: Self::best_block_updated
        fn transactions_confirmed(&self, header: &BlockHeader, txdata: &TransactionData, height: u32);
 
        /// Processes a transaction that is no longer confirmed as result of a chain reorganization.
        ///
        /// Should be called for any transaction returned by [`get_relevant_txids`] if it has been
-       /// reorganized out of the best chain. Once called, the given transaction should not be returned
-       /// by [`get_relevant_txids`] unless it has been reconfirmed via [`transactions_confirmed`].
+       /// reorganized out of the best chain. Once called, the given transaction will not be returned
+       /// by [`get_relevant_txids`], unless it has been reconfirmed via [`transactions_confirmed`].
        ///
        /// [`get_relevant_txids`]: Self::get_relevant_txids
        /// [`transactions_confirmed`]: Self::transactions_confirmed
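
The doc updates above tighten the `Confirm` contract. For context, a minimal sketch of how a client might feed one confirmed block to a `Confirm` implementor such as `ChannelManager` or `ChainMonitor` (the `apply_block` helper is made up for illustration):

use bitcoin::blockdata::block::Block;
use lightning::chain::Confirm;

fn apply_block<C: Confirm>(listener: &C, block: &Block, height: u32) {
    // Pass transactions with their in-block index, in chain order (here, the whole block;
    // a filtered subset with original indices is also allowed per the docs above).
    let txdata: Vec<_> = block.txdata.iter().enumerate().collect();
    listener.transactions_confirmed(&block.header, &txdata, height);
    listener.best_block_updated(&block.header, height);
}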
@@ -173,9 +173,9 @@ pub trait Confirm {
 
        /// Returns transactions that should be monitored for reorganization out of the chain.
        ///
-       /// Should include any transactions passed to [`transactions_confirmed`] that have insufficient
-       /// confirmations to be safe from a chain reorganization. Should not include any transactions
-       /// passed to [`transaction_unconfirmed`] unless later reconfirmed.
+       /// Will include any transactions passed to [`transactions_confirmed`] that have insufficient
+       /// confirmations to be safe from a chain reorganization. Will not include any transactions
+       /// passed to [`transaction_unconfirmed`], unless later reconfirmed.
        ///
        /// May be called to determine the subset of transactions that must still be monitored for
        /// reorganization. Will be idempotent between calls but may change as a result of calls to the
@@ -333,21 +333,18 @@ pub trait Filter {
 
        /// Registers interest in spends of a transaction output.
        ///
-       /// Optionally, when `output.block_hash` is set, should return any transaction spending the
-       /// output that is found in the corresponding block along with its index.
-       ///
-       /// This return value is useful for Electrum clients in order to supply in-block descendant
-       /// transactions which otherwise were not included. This is not necessary for other clients if
-       /// such descendant transactions were already included (e.g., when a BIP 157 client provides the
-       /// full block).
-       fn register_output(&self, output: WatchedOutput) -> Option<(usize, Transaction)>;
+       /// Note that this method might be called during processing of a new block. You therefore need
+       /// to ensure that spends of the output within an already-connected block are also handled
+       /// correctly, e.g., by re-scanning the block in question whenever new outputs have been
+       /// registered mid-processing.
+       fn register_output(&self, output: WatchedOutput);
 }
 
 /// A transaction output watched by a [`ChannelMonitor`] for spends on-chain.
 ///
 /// Used to convey to a [`Filter`] such an output with a given spending condition. Any transaction
 /// spending the output must be given to [`ChannelMonitor::block_connected`] either directly or via
-/// the return value of [`Filter::register_output`].
+/// [`Confirm::transactions_confirmed`].
 ///
 /// If `block_hash` is `Some`, this indicates the output was created in the corresponding block and
 /// may have been spent there. See [`Filter::register_output`] for details.
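Since `register_output` no longer returns an in-block spend, an implementation has to cope with outputs registered while a block is still being processed. A hedged, purely illustrative sketch of that pattern (struct and method names are assumptions, not LDK API):

```rust
// Hedged sketch (illustrative only): a chain-filter helper that records
// outputs registered while a block is being processed, so the caller knows to
// re-scan that block for spends of the newly watched outputs.
use std::sync::Mutex;
use bitcoin::{Block, OutPoint};

#[derive(Default)]
struct WatchedOutpoints {
    outpoints: Mutex<Vec<OutPoint>>,
    registered_mid_block: Mutex<bool>,
}

impl WatchedOutpoints {
    // Analogous to `Filter::register_output`, which now returns nothing.
    fn register_output(&self, outpoint: OutPoint) {
        self.outpoints.lock().unwrap().push(outpoint);
        *self.registered_mid_block.lock().unwrap() = true;
    }

    // Called after handing a connected block to LDK: if anything was
    // registered mid-processing, scan the same block again for spends.
    fn rescan_if_needed(&self, block: &Block, mut scan: impl FnMut(&Block)) {
        let mut flag = self.registered_mid_block.lock().unwrap();
        if *flag {
            *flag = false;
            scan(block);
        }
    }
}
```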
index ac01cacaa8649d6676050e3b6eb4d173026250ed..8f62c43c44e5a41b7229e004c13a4d7840cdf1ff 100644 (file)
@@ -38,6 +38,7 @@ use alloc::collections::BTreeMap;
 use core::cmp;
 use core::ops::Deref;
 use core::mem::replace;
+use bitcoin::hashes::Hash;
 
 const MAX_ALLOC_SIZE: usize = 64*1024;
 
@@ -92,7 +93,7 @@ impl Writeable for OnchainEventEntry {
 
 impl MaybeReadable for OnchainEventEntry {
        fn read<R: io::Read>(reader: &mut R) -> Result<Option<Self>, DecodeError> {
-               let mut txid = Default::default();
+               let mut txid = Txid::all_zeros();
                let mut height = 0;
                let mut event = None;
                read_tlv_fields!(reader, {
@@ -389,7 +390,7 @@ impl<ChannelSigner: Sign> OnchainTxHandler<ChannelSigner> {
                if cached_request.is_malleable() {
                        let predicted_weight = cached_request.package_weight(&self.destination_script, self.channel_transaction_parameters.opt_anchors.is_some());
                        if let Some((output_value, new_feerate)) =
-                                       cached_request.compute_package_output(predicted_weight, self.destination_script.dust_value().as_sat(), fee_estimator, logger) {
+                                       cached_request.compute_package_output(predicted_weight, self.destination_script.dust_value().to_sat(), fee_estimator, logger) {
                                assert!(new_feerate != 0);
 
                                let transaction = cached_request.finalize_package(self, output_value, self.destination_script.clone(), logger).unwrap();
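A quick sketch (assuming bitcoin 0.29; the helper name is illustrative) of the two API shifts behind these hunks: hash newtypes no longer implement `Default`, and `Amount` accessors are renamed.

```rust
// Quick sketch of two rust-bitcoin 0.29 API shifts reflected above: hash
// newtypes no longer implement `Default` (use `all_zeros()`), and `Amount`
// accessors were renamed from `as_sat()` to `to_sat()`.
use bitcoin::hashes::Hash;
use bitcoin::{Script, Txid};

fn dust_floor_and_placeholder_txid(script: &Script) -> (u64, Txid) {
    let dust_sats = script.dust_value().to_sat(); // was `.as_sat()`
    let placeholder = Txid::all_zeros();          // was `Default::default()`
    (dust_sats, placeholder)
}
```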
index 30530303e59b366239e88c96d846f36da77a49ee..c945d8909da4a61cc656a634a75f0b130eb342d2 100644 (file)
@@ -36,7 +36,7 @@ use prelude::*;
 use core::cmp;
 use core::mem;
 use core::ops::Deref;
-use bitcoin::Witness;
+use bitcoin::{PackedLockTime, Sequence, Witness};
 
 use super::chaininterface::LowerBoundedFeeEstimator;
 
@@ -393,7 +393,7 @@ impl PackageSolvingData {
                                if let Ok(chan_keys) = TxCreationKeys::derive_new(&onchain_handler.secp_ctx, &outp.per_commitment_point, &outp.counterparty_delayed_payment_base_key, &outp.counterparty_htlc_base_key, &onchain_handler.signer.pubkeys().revocation_basepoint, &onchain_handler.signer.pubkeys().htlc_basepoint) {
                                        let witness_script = chan_utils::get_htlc_redeemscript_with_explicit_keys(&outp.htlc, onchain_handler.opt_anchors(), &chan_keys.broadcaster_htlc_key, &chan_keys.countersignatory_htlc_key, &chan_keys.revocation_key);
 
-                                       bumped_tx.lock_time = outp.htlc.cltv_expiry; // Right now we don't aggregate time-locked transaction, if we do we should set lock_time before to avoid breaking hash computation
+                                       bumped_tx.lock_time = PackedLockTime(outp.htlc.cltv_expiry); // Right now we don't aggregate time-locked transaction, if we do we should set lock_time before to avoid breaking hash computation
                                        if let Ok(sig) = onchain_handler.signer.sign_counterparty_htlc_transaction(&bumped_tx, i, &outp.htlc.amount_msat / 1000, &outp.per_commitment_point, &outp.htlc, &onchain_handler.secp_ctx) {
                                                let mut ser_sig = sig.serialize_der().to_vec();
                                                ser_sig.push(EcdsaSighashType::All as u8);
@@ -615,7 +615,7 @@ impl PackageTemplate {
                        PackageMalleability::Malleable => {
                                let mut bumped_tx = Transaction {
                                        version: 2,
-                                       lock_time: 0,
+                                       lock_time: PackedLockTime::ZERO,
                                        input: vec![],
                                        output: vec![TxOut {
                                                script_pubkey: destination_script,
@@ -626,7 +626,7 @@ impl PackageTemplate {
                                        bumped_tx.input.push(TxIn {
                                                previous_output: *outpoint,
                                                script_sig: Script::new(),
-                                               sequence: 0xfffffffd,
+                                               sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
                                                witness: Witness::new(),
                                        });
                                }
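For context, a minimal sketch (assuming bitcoin 0.29; the function is illustrative, not LDK code) of building a claim-transaction skeleton with the newtypes introduced above:

```rust
// Minimal sketch of a claim transaction skeleton using the rust-bitcoin 0.29
// newtypes: `PackedLockTime` for `lock_time` and
// `Sequence::ENABLE_RBF_NO_LOCKTIME` (0xfffffffd) for RBF-signaling inputs.
use bitcoin::{OutPoint, PackedLockTime, Script, Sequence, Transaction, TxIn, TxOut, Witness};

fn claim_tx_skeleton(outpoint: OutPoint, destination_script: Script, value: u64) -> Transaction {
    Transaction {
        version: 2,
        lock_time: PackedLockTime::ZERO, // was the bare integer `0`
        input: vec![TxIn {
            previous_output: outpoint,
            script_sig: Script::new(),
            sequence: Sequence::ENABLE_RBF_NO_LOCKTIME, // was `0xfffffffd`
            witness: Witness::new(),
        }],
        output: vec![TxOut { value, script_pubkey: destination_script }],
    }
}
```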
index 2e6b3ab3c0a99cb2d438b15982ed954d4bd35e94..450727107357b8fb8a6c4956303143f096fada44 100644 (file)
 
 #![cfg_attr(not(any(test, fuzzing, feature = "_test_utils")), deny(missing_docs))]
 #![cfg_attr(not(any(test, fuzzing, feature = "_test_utils")), forbid(unsafe_code))]
+
+// Prefix these with `rustdoc::` when we update our MSRV to be >= 1.52 to remove warnings.
 #![deny(broken_intra_doc_links)]
+#![deny(private_intra_doc_links)]
 
 // In general, rust is absolutely horrid at supporting users doing things like,
 // for example, compiling Rust code for real environments. Disable useless lints
index 41d1eff856a71ed88cf8f43c14d8ffe234d27447..d53863289bc5807464739f0707fbb390c25f9869 100644 (file)
@@ -26,10 +26,10 @@ use util::ser::{Readable, Writeable, Writer};
 use util::{byte_utils, transaction_utils};
 
 use bitcoin::hash_types::WPubkeyHash;
-use bitcoin::secp256k1::{SecretKey, PublicKey};
+use bitcoin::secp256k1::{SecretKey, PublicKey, Scalar};
 use bitcoin::secp256k1::{Secp256k1, ecdsa::Signature, Message};
 use bitcoin::secp256k1::Error as SecpError;
-use bitcoin::{secp256k1, Witness};
+use bitcoin::{PackedLockTime, secp256k1, Sequence, Witness};
 
 use io;
 use prelude::*;
@@ -101,7 +101,7 @@ pub fn build_closing_transaction(to_holder_value_sat: u64, to_counterparty_value
                ins.push(TxIn {
                        previous_output: funding_outpoint,
                        script_sig: Script::new(),
-                       sequence: 0xffffffff,
+                       sequence: Sequence::MAX,
                        witness: Witness::new(),
                });
                ins
@@ -132,7 +132,7 @@ pub fn build_closing_transaction(to_holder_value_sat: u64, to_counterparty_value
 
        Transaction {
                version: 2,
-               lock_time: 0,
+               lock_time: PackedLockTime::ZERO,
                input: txins,
                output: outputs,
        }
@@ -264,9 +264,7 @@ pub fn derive_private_key<T: secp256k1::Signing>(secp_ctx: &Secp256k1<T>, per_co
        sha.input(&PublicKey::from_secret_key(&secp_ctx, &base_secret).serialize());
        let res = Sha256::from_engine(sha).into_inner();
 
-       let mut key = base_secret.clone();
-       key.add_assign(&res)?;
-       Ok(key)
+       base_secret.clone().add_tweak(&Scalar::from_be_bytes(res).unwrap())
 }
 
 /// Derives a per-commitment-transaction public key (eg an htlc key or a delayed_payment key)
@@ -313,12 +311,9 @@ pub fn derive_private_revocation_key<T: secp256k1::Signing>(secp_ctx: &Secp256k1
                Sha256::from_engine(sha).into_inner()
        };
 
-       let mut countersignatory_contrib = countersignatory_revocation_base_secret.clone();
-       countersignatory_contrib.mul_assign(&rev_append_commit_hash_key)?;
-       let mut broadcaster_contrib = per_commitment_secret.clone();
-       broadcaster_contrib.mul_assign(&commit_append_rev_hash_key)?;
-       countersignatory_contrib.add_assign(&broadcaster_contrib[..])?;
-       Ok(countersignatory_contrib)
+       let countersignatory_contrib = countersignatory_revocation_base_secret.clone().mul_tweak(&Scalar::from_be_bytes(rev_append_commit_hash_key).unwrap())?;
+       let broadcaster_contrib = per_commitment_secret.clone().mul_tweak(&Scalar::from_be_bytes(commit_append_rev_hash_key).unwrap())?;
+       countersignatory_contrib.add_tweak(&Scalar::from_be_bytes(broadcaster_contrib.secret_bytes()).unwrap())
 }
 
 /// Derives a per-commitment-transaction revocation public key from its constituent parts. This is
@@ -348,10 +343,8 @@ pub fn derive_public_revocation_key<T: secp256k1::Verification>(secp_ctx: &Secp2
                Sha256::from_engine(sha).into_inner()
        };
 
-       let mut countersignatory_contrib = countersignatory_revocation_base_point.clone();
-       countersignatory_contrib.mul_assign(&secp_ctx, &rev_append_commit_hash_key)?;
-       let mut broadcaster_contrib = per_commitment_point.clone();
-       broadcaster_contrib.mul_assign(&secp_ctx, &commit_append_rev_hash_key)?;
+       let countersignatory_contrib = countersignatory_revocation_base_point.clone().mul_tweak(&secp_ctx, &Scalar::from_be_bytes(rev_append_commit_hash_key).unwrap())?;
+       let broadcaster_contrib = per_commitment_point.clone().mul_tweak(&secp_ctx, &Scalar::from_be_bytes(commit_append_rev_hash_key).unwrap())?;
        countersignatory_contrib.combine(&broadcaster_contrib)
 }
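A hedged sketch of the tweak pattern these derivation functions migrate to, with the hash inputs elided (`h1`/`h2` stand in for the two SHA256 outputs computed above; the function itself is illustrative):

```rust
// Hedged sketch: combining two secrets with multiplicative and additive
// tweaks via the secp256k1 0.24 `Scalar` API, as in
// `derive_private_revocation_key` above.
use bitcoin::secp256k1::{Scalar, SecretKey};
use bitcoin::secp256k1::Error as SecpError;

fn combine_revocation_secrets(
    countersignatory_base: SecretKey,
    per_commitment_secret: SecretKey,
    h1: [u8; 32],
    h2: [u8; 32],
) -> Result<SecretKey, SecpError> {
    let countersignatory_contrib =
        countersignatory_base.mul_tweak(&Scalar::from_be_bytes(h1).unwrap())?;
    let broadcaster_contrib =
        per_commitment_secret.mul_tweak(&Scalar::from_be_bytes(h2).unwrap())?;
    // secret = base * h1 + per_commitment * h2 (mod n)
    countersignatory_contrib
        .add_tweak(&Scalar::from_be_bytes(broadcaster_contrib.secret_bytes()).unwrap())
}
```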
 
@@ -614,7 +607,7 @@ pub fn build_htlc_transaction(commitment_txid: &Txid, feerate_per_kw: u32, conte
                        vout: htlc.transaction_output_index.expect("Can't build an HTLC transaction for a dust output"),
                },
                script_sig: Script::new(),
-               sequence: if opt_anchors { 1 } else { 0 },
+               sequence: Sequence(if opt_anchors { 1 } else { 0 }),
                witness: Witness::new(),
        });
 
@@ -633,7 +626,7 @@ pub fn build_htlc_transaction(commitment_txid: &Txid, feerate_per_kw: u32, conte
 
        Transaction {
                version: 2,
-               lock_time: if htlc.offered { htlc.cltv_expiry } else { 0 },
+               lock_time: PackedLockTime(if htlc.offered { htlc.cltv_expiry } else { 0 }),
                input: txins,
                output: txouts,
        }
@@ -863,7 +856,7 @@ impl HolderCommitmentTransaction {
                        holder_selected_contest_delay: 0,
                        is_outbound_from_holder: false,
                        counterparty_parameters: Some(CounterpartyChannelTransactionParameters { pubkeys: channel_pubkeys.clone(), selected_contest_delay: 0 }),
-                       funding_outpoint: Some(chain::transaction::OutPoint { txid: Default::default(), index: 0 }),
+                       funding_outpoint: Some(chain::transaction::OutPoint { txid: Txid::all_zeros(), index: 0 }),
                        opt_anchors: None
                };
                let mut htlcs_with_aux: Vec<(_, ())> = Vec::new();
@@ -1167,7 +1160,7 @@ impl CommitmentTransaction {
        fn make_transaction(obscured_commitment_transaction_number: u64, txins: Vec<TxIn>, outputs: Vec<TxOut>) -> Transaction {
                Transaction {
                        version: 2,
-                       lock_time: ((0x20 as u32) << 8 * 3) | ((obscured_commitment_transaction_number & 0xffffffu64) as u32),
+                       lock_time: PackedLockTime(((0x20 as u32) << 8 * 3) | ((obscured_commitment_transaction_number & 0xffffffu64) as u32)),
                        input: txins,
                        output: outputs,
                }
@@ -1291,8 +1284,8 @@ impl CommitmentTransaction {
                        ins.push(TxIn {
                                previous_output: channel_parameters.funding_outpoint(),
                                script_sig: Script::new(),
-                               sequence: ((0x80 as u32) << 8 * 3)
-                                       | ((obscured_commitment_transaction_number >> 3 * 8) as u32),
+                               sequence: Sequence(((0x80 as u32) << 8 * 3)
+                                       | ((obscured_commitment_transaction_number >> 3 * 8) as u32)),
                                witness: Witness::new(),
                        });
                        ins
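A worked sketch of the split the code above performs on the 48-bit obscured commitment number (per BOLT 3), now expressed through the 0.29 newtypes; the helper is illustrative:

```rust
// Worked sketch: the obscured commitment number's lower 24 bits go into the
// locktime behind the 0x20 marker byte, and its upper 24 bits go into the
// funding input's sequence behind the 0x80 marker byte.
use bitcoin::{PackedLockTime, Sequence};

fn commitment_locktime_and_sequence(obscured: u64) -> (PackedLockTime, Sequence) {
    let lock_time = PackedLockTime((0x20u32 << 24) | ((obscured & 0xff_ffff) as u32));
    let sequence = Sequence((0x80u32 << 24) | ((obscured >> 24) as u32));
    (lock_time, sequence)
}
```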
@@ -1508,7 +1501,8 @@ mod tests {
        use bitcoin::secp256k1::{PublicKey, SecretKey, Secp256k1};
        use util::test_utils;
        use chain::keysinterface::{KeysInterface, BaseSign};
-       use bitcoin::Network;
+       use bitcoin::{Network, Txid};
+       use bitcoin::hashes::Hash;
        use ln::PaymentHash;
        use bitcoin::hashes::hex::ToHex;
 
@@ -1533,7 +1527,7 @@ mod tests {
                        holder_selected_contest_delay: 0,
                        is_outbound_from_holder: false,
                        counterparty_parameters: Some(CounterpartyChannelTransactionParameters { pubkeys: counterparty_pubkeys.clone(), selected_contest_delay: 0 }),
-                       funding_outpoint: Some(chain::transaction::OutPoint { txid: Default::default(), index: 0 }),
+                       funding_outpoint: Some(chain::transaction::OutPoint { txid: Txid::all_zeros(), index: 0 }),
                        opt_anchors: None
                };
 
index 02ac7b48137fd6cad28c9e37d6b8a16421ea999f..f5977abd4fca7a4bae04814217348383f0c6b342 100644 (file)
@@ -36,6 +36,8 @@ use ln::functional_test_utils::*;
 use util::test_utils;
 
 use io;
+use bitcoin::hashes::Hash;
+use bitcoin::TxMerkleNode;
 use prelude::*;
 use sync::{Arc, Mutex};
 
@@ -116,7 +118,14 @@ fn test_monitor_and_persister_update_fail() {
                assert!(chain_mon.watch_channel(outpoint, new_monitor).is_ok());
                chain_mon
        };
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: Default::default(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader {
+               version: 0x20000000,
+               prev_blockhash: BlockHash::all_zeros(),
+               merkle_root: TxMerkleNode::all_zeros(),
+               time: 42,
+               bits: 42,
+               nonce: 42
+       };
        chain_mon.chain_monitor.block_connected(&Block { header, txdata: vec![] }, 200);
 
        // Set the persister's return value to be a TemporaryFailure.
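The same dummy-header construction recurs throughout the test changes below; a small sketch of the pattern (assuming bitcoin 0.29; the helper name is illustrative):

```rust
// Sketch of the test-header pattern used above: with `Default` gone from
// hash newtypes, dummy headers spell out `all_zeros()` explicitly.
use bitcoin::blockdata::block::BlockHeader;
use bitcoin::hashes::Hash;
use bitcoin::{BlockHash, TxMerkleNode};

fn dummy_header(prev_blockhash: BlockHash) -> BlockHeader {
    BlockHeader {
        version: 0x20000000,
        prev_blockhash,
        merkle_root: TxMerkleNode::all_zeros(),
        time: 42,
        bits: 42,
        nonce: 42,
    }
}
```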
index fdd073f632a8d4b978453a55dc6c98b7f277e141..624f4d6b688b0e2d81964d39e2deb77e1233af8a 100644 (file)
@@ -6603,7 +6603,7 @@ mod tests {
        use util::errors::APIError;
        use util::test_utils;
        use util::test_utils::OnGetShutdownScriptpubkey;
-       use bitcoin::secp256k1::{Secp256k1, ecdsa::Signature};
+       use bitcoin::secp256k1::{Secp256k1, ecdsa::Signature, Scalar};
        use bitcoin::secp256k1::ffi::Signature as FFISignature;
        use bitcoin::secp256k1::{SecretKey,PublicKey};
        use bitcoin::secp256k1::ecdh::SharedSecret;
@@ -6612,6 +6612,7 @@ mod tests {
        use bitcoin::hashes::Hash;
        use bitcoin::hash_types::WPubkeyHash;
        use bitcoin::bech32::u5;
+       use bitcoin::PackedLockTime;
        use bitcoin::util::address::WitnessVersion;
        use prelude::*;
 
@@ -6647,7 +6648,7 @@ mod tests {
                type Signer = InMemorySigner;
 
                fn get_node_secret(&self, _recipient: Recipient) -> Result<SecretKey, ()> { panic!(); }
-               fn ecdh(&self, _recipient: Recipient, _other_key: &PublicKey, _tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> { panic!(); }
+               fn ecdh(&self, _recipient: Recipient, _other_key: &PublicKey, _tweak: Option<&Scalar>) -> Result<SharedSecret, ()> { panic!(); }
                fn get_inbound_payment_key_material(&self) -> KeyMaterial { panic!(); }
                fn get_destination_script(&self) -> Script {
                        let secp_ctx = Secp256k1::signing_only();
@@ -6872,7 +6873,7 @@ mod tests {
 
                // Node A --> Node B: funding created
                let output_script = node_a_chan.get_funding_redeemscript();
-               let tx = Transaction { version: 1, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+               let tx = Transaction { version: 1, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: vec![TxOut {
                        value: 10000000, script_pubkey: output_script.clone(),
                }]};
                let funding_outpoint = OutPoint{ txid: tx.txid(), index: 0 };
index 692d46eda90e8937ec289eed73f47fff6c8d5c8c..e8830ab5ffebef46dfab26d0cc427277e7a56ead 100644 (file)
@@ -32,7 +32,7 @@ use bitcoin::hash_types::{BlockHash, Txid};
 use bitcoin::secp256k1::{SecretKey,PublicKey};
 use bitcoin::secp256k1::Secp256k1;
 use bitcoin::secp256k1::ecdh::SharedSecret;
-use bitcoin::secp256k1;
+use bitcoin::{LockTime, secp256k1, Sequence};
 
 use chain;
 use chain::{Confirm, ChannelMonitorUpdateErr, Watch, BestBlock};
@@ -2897,7 +2897,7 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
                        // constituting our Lightning node might not have perfect sync about their blockchain views. Thus, if
                        // the wallet module is in advance on the LDK view, allow one more block of headroom.
                        // TODO: update if/when https://github.com/rust-bitcoin/rust-bitcoin/pull/994 lands and rust-bitcoin is bumped.
-                       if !funding_transaction.input.iter().all(|input| input.sequence == 0xffffffff) && funding_transaction.lock_time < 500_000_000 && funding_transaction.lock_time > height + 2 {
+                       if !funding_transaction.input.iter().all(|input| input.sequence == Sequence::MAX) && LockTime::from(funding_transaction.lock_time).is_block_height() && funding_transaction.lock_time.0 > height + 2 {
                                return Err(APIError::APIMisuseError {
                                        err: "Funding transaction absolute timelock is non-final".to_owned()
                                });
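A hedged sketch (illustrative helper, mirroring the check rewritten above) of the funding-transaction finality condition in 0.29 types: the locktime only matters if some input leaves it enabled, and only block-height locktimes more than two blocks above the current tip are rejected.

```rust
// Sketch of the non-final funding transaction check, using the 0.29
// `Sequence`, `PackedLockTime`, and `LockTime` types.
use bitcoin::{LockTime, Sequence, Transaction};

fn funding_tx_locktime_is_non_final(funding_transaction: &Transaction, height: u32) -> bool {
    // Locktime is active unless every input opts out with a MAX sequence.
    let locktime_enabled = !funding_transaction
        .input
        .iter()
        .all(|input| input.sequence == Sequence::MAX);
    locktime_enabled
        && LockTime::from(funding_transaction.lock_time).is_block_height()
        && funding_transaction.lock_time.0 > height + 2
}
```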
@@ -7974,7 +7974,7 @@ pub mod bench {
 
        use bitcoin::hashes::Hash;
        use bitcoin::hashes::sha256::Hash as Sha256;
-       use bitcoin::{Block, BlockHeader, Transaction, TxOut};
+       use bitcoin::{Block, BlockHeader, PackedLockTime, Transaction, TxMerkleNode, TxOut};
 
        use sync::{Arc, Mutex};
 
@@ -8036,7 +8036,7 @@ pub mod bench {
 
                let tx;
                if let Event::FundingGenerationReady { temporary_channel_id, output_script, .. } = get_event!(node_a_holder, Event::FundingGenerationReady) {
-                       tx = Transaction { version: 2, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+                       tx = Transaction { version: 2, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: vec![TxOut {
                                value: 8_000_000, script_pubkey: output_script,
                        }]};
                        node_a.funding_transaction_generated(&temporary_channel_id, &node_b.get_our_node_id(), tx.clone()).unwrap();
@@ -8048,7 +8048,7 @@ pub mod bench {
                assert_eq!(&tx_broadcaster.txn_broadcasted.lock().unwrap()[..], &[tx.clone()]);
 
                let block = Block {
-                       header: BlockHeader { version: 0x20000000, prev_blockhash: genesis_hash, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+                       header: BlockHeader { version: 0x20000000, prev_blockhash: genesis_hash, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                        txdata: vec![tx],
                };
                Listen::block_connected(&node_a, &block, 1);
index aa2ad2177c4349dc64d6963cc247cf9140508ba1..ecd1d0ae22850e6e292b4f9b6fa75da388b68448 100644 (file)
@@ -47,6 +47,7 @@ use alloc::rc::Rc;
 use sync::{Arc, Mutex};
 use core::mem;
 use core::iter::repeat;
+use bitcoin::{PackedLockTime, TxMerkleNode};
 
 pub const CHAN_CONFIRM_DEPTH: u32 = 10;
 
@@ -77,11 +78,11 @@ pub fn confirm_transaction_at<'a, 'b, 'c, 'd>(node: &'a Node<'b, 'c, 'd>, tx: &T
                connect_blocks(node, conf_height - first_connect_height);
        }
        let mut block = Block {
-               header: BlockHeader { version: 0x20000000, prev_blockhash: node.best_block_hash(), merkle_root: Default::default(), time: conf_height, bits: 42, nonce: 42 },
+               header: BlockHeader { version: 0x20000000, prev_blockhash: node.best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: conf_height, bits: 42, nonce: 42 },
                txdata: Vec::new(),
        };
        for _ in 0..*node.network_chan_count.borrow() { // Make sure we don't end up with channels at the same short id by offsetting by chan_count
-               block.txdata.push(Transaction { version: 0, lock_time: 0, input: Vec::new(), output: Vec::new() });
+               block.txdata.push(Transaction { version: 0, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() });
        }
        block.txdata.push(tx.clone());
        connect_block(node, &block);
@@ -148,7 +149,7 @@ pub fn connect_blocks<'a, 'b, 'c, 'd>(node: &'a Node<'b, 'c, 'd>, depth: u32) ->
 
        let height = node.best_block_info().1 + 1;
        let mut block = Block {
-               header: BlockHeader { version: 0x2000000, prev_blockhash: node.best_block_hash(), merkle_root: Default::default(), time: height, bits: 42, nonce: 42 },
+               header: BlockHeader { version: 0x2000000, prev_blockhash: node.best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: height, bits: 42, nonce: 42 },
                txdata: vec![],
        };
        assert!(depth >= 1);
@@ -156,7 +157,7 @@ pub fn connect_blocks<'a, 'b, 'c, 'd>(node: &'a Node<'b, 'c, 'd>, depth: u32) ->
                let prev_blockhash = block.header.block_hash();
                do_connect_block(node, block, skip_intermediaries);
                block = Block {
-                       header: BlockHeader { version: 0x20000000, prev_blockhash, merkle_root: Default::default(), time: height + i, bits: 42, nonce: 42 },
+                       header: BlockHeader { version: 0x20000000, prev_blockhash, merkle_root: TxMerkleNode::all_zeros(), time: height + i, bits: 42, nonce: 42 },
                        txdata: vec![],
                };
        }
@@ -619,7 +620,7 @@ pub fn create_funding_transaction<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, expected_
                        assert_eq!(*channel_value_satoshis, expected_chan_value);
                        assert_eq!(user_channel_id, expected_user_chan_id);
 
-                       let tx = Transaction { version: chan_id as i32, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+                       let tx = Transaction { version: chan_id as i32, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: vec![TxOut {
                                value: *channel_value_satoshis, script_pubkey: output_script.clone(),
                        }]};
                        let funding_outpoint = OutPoint { txid: tx.txid(), index: 0 };
@@ -893,11 +894,11 @@ macro_rules! check_spends {
                {
                        $(
                        for outp in $spends_txn.output.iter() {
-                               assert!(outp.value >= outp.script_pubkey.dust_value().as_sat(), "Input tx output didn't meet dust limit");
+                               assert!(outp.value >= outp.script_pubkey.dust_value().to_sat(), "Input tx output didn't meet dust limit");
                        }
                        )*
                        for outp in $tx.output.iter() {
-                               assert!(outp.value >= outp.script_pubkey.dust_value().as_sat(), "Spending tx output didn't meet dust limit");
+                               assert!(outp.value >= outp.script_pubkey.dust_value().to_sat(), "Spending tx output didn't meet dust limit");
                        }
                        let get_output = |out_point: &bitcoin::blockdata::transaction::OutPoint| {
                                $(
@@ -2125,9 +2126,9 @@ pub fn test_txn_broadcast<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, chan: &(msgs::Cha
                        if tx.input.len() == 1 && tx.input[0].previous_output.txid == res[0].txid() {
                                check_spends!(tx, res[0]);
                                if has_htlc_tx == HTLCType::TIMEOUT {
-                                       assert!(tx.lock_time != 0);
+                                       assert!(tx.lock_time.0 != 0);
                                } else {
-                                       assert!(tx.lock_time == 0);
+                                       assert!(tx.lock_time.0 == 0);
                                }
                                res.push(tx.clone());
                                false
index d7be8966250b66341b2580d6656a0067e4678aa8..c41d5402ff3861e2dafaff10da98d9d4b49c25d6 100644 (file)
@@ -42,7 +42,7 @@ use bitcoin::blockdata::script::{Builder, Script};
 use bitcoin::blockdata::opcodes;
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::network::constants::Network;
-use bitcoin::{Transaction, TxIn, TxOut, Witness};
+use bitcoin::{PackedLockTime, Sequence, Transaction, TxIn, TxMerkleNode, TxOut, Witness};
 use bitcoin::OutPoint as BitcoinOutPoint;
 
 use bitcoin::secp256k1::Secp256k1;
@@ -55,6 +55,7 @@ use prelude::*;
 use alloc::collections::BTreeSet;
 use core::default::Default;
 use core::iter::repeat;
+use bitcoin::hashes::Hash;
 use sync::{Arc, Mutex};
 
 use ln::functional_test_utils::*;
@@ -503,7 +504,7 @@ fn do_test_sanity_on_in_flight_opens(steps: u8) {
 
        if steps & 0b1000_0000 != 0{
                let block = Block {
-                       header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+                       header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                        txdata: vec![],
                };
                connect_block(&nodes[0], &block);
@@ -2717,11 +2718,11 @@ fn test_htlc_on_chain_success() {
        assert_eq!(node_txn[1].input[0].witness.clone().last().unwrap().len(), ACCEPTED_HTLC_SCRIPT_WEIGHT);
        assert!(node_txn[0].output[0].script_pubkey.is_v0_p2wsh()); // revokeable output
        assert!(node_txn[1].output[0].script_pubkey.is_v0_p2wsh()); // revokeable output
-       assert_eq!(node_txn[0].lock_time, 0);
-       assert_eq!(node_txn[1].lock_time, 0);
+       assert_eq!(node_txn[0].lock_time.0, 0);
+       assert_eq!(node_txn[1].lock_time.0, 0);
 
        // Verify that B's ChannelManager is able to extract preimage from HTLC Success tx and pass it backward
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42};
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42};
        connect_block(&nodes[1], &Block { header, txdata: node_txn});
        connect_blocks(&nodes[1], TEST_FINAL_CLTV - 1); // Confirm blocks until the HTLC expires
        {
@@ -2791,8 +2792,8 @@ fn test_htlc_on_chain_success() {
                        // Node[0]: ChannelManager: 3 (commitment tx, 2*HTLC-Timeout tx), ChannelMonitor: 2 HTLC-timeout
                        check_spends!(node_txn[1], $commitment_tx);
                        check_spends!(node_txn[2], $commitment_tx);
-                       assert_ne!(node_txn[1].lock_time, 0);
-                       assert_ne!(node_txn[2].lock_time, 0);
+                       assert_ne!(node_txn[1].lock_time.0, 0);
+                       assert_ne!(node_txn[2].lock_time.0, 0);
                        if $htlc_offered {
                                assert_eq!(node_txn[1].input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
                                assert_eq!(node_txn[2].input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
@@ -2841,7 +2842,7 @@ fn test_htlc_on_chain_success() {
        assert_eq!(commitment_spend.input.len(), 2);
        assert_eq!(commitment_spend.input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
        assert_eq!(commitment_spend.input[1].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
-       assert_eq!(commitment_spend.lock_time, 0);
+       assert_eq!(commitment_spend.lock_time.0, 0);
        assert!(commitment_spend.output[0].script_pubkey.is_v0_p2wpkh()); // direct payment
        check_spends!(node_txn[3], chan_1.3);
        assert_eq!(node_txn[3].input[0].witness.clone().last().unwrap().len(), 71);
@@ -2851,7 +2852,7 @@ fn test_htlc_on_chain_success() {
        // we already checked the same situation with A.
 
        // Verify that A's ChannelManager is able to extract preimage from preimage tx and generate PaymentSent
-       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42};
+       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42};
        connect_block(&nodes[0], &Block { header, txdata: vec![node_a_commitment_tx[0].clone(), commitment_spend.clone()] });
        connect_blocks(&nodes[0], TEST_FINAL_CLTV + MIN_CLTV_EXPIRY_DELTA as u32 - 1); // Confirm blocks until the HTLC expires
        check_closed_broadcast!(nodes[0], true);
@@ -3408,7 +3409,7 @@ fn test_htlc_ignore_latest_remote_commitment() {
        assert_eq!(node_txn.len(), 3);
        assert_eq!(node_txn[0], node_txn[1]);
 
-       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[1], &Block { header, txdata: vec![node_txn[0].clone(), node_txn[1].clone()]});
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
@@ -3491,7 +3492,7 @@ fn test_force_close_fail_back() {
        assert_eq!(node_txn.len(), 1);
        assert_eq!(node_txn[0].input.len(), 1);
        assert_eq!(node_txn[0].input[0].previous_output.txid, tx.txid());
-       assert_eq!(node_txn[0].lock_time, 0); // Must be an HTLC-Success
+       assert_eq!(node_txn[0].lock_time.0, 0); // Must be an HTLC-Success
        assert_eq!(node_txn[0].input[0].witness.len(), 5); // Must be an HTLC-Success
 
        check_spends!(node_txn[0], tx);
@@ -4259,7 +4260,7 @@ fn do_test_htlc_timeout(send_partial_mpp: bool) {
        };
 
        let mut block = Block {
-               header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+               header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                txdata: vec![],
        };
        connect_block(&nodes[0], &block);
@@ -4804,7 +4805,7 @@ fn test_claim_sizeable_push_msat() {
        assert_eq!(spend_txn.len(), 1);
        assert_eq!(spend_txn[0].input.len(), 1);
        check_spends!(spend_txn[0], node_txn[0]);
-       assert_eq!(spend_txn[0].input[0].sequence, BREAKDOWN_TIMEOUT as u32);
+       assert_eq!(spend_txn[0].input[0].sequence.0, BREAKDOWN_TIMEOUT as u32);
 }
 
 #[test]
@@ -5034,10 +5035,10 @@ fn test_static_spendable_outputs_justice_tx_revoked_htlc_timeout_tx() {
        assert_eq!(revoked_htlc_txn[1].input.len(), 1);
        assert_eq!(revoked_htlc_txn[1].input[0].witness.last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
        check_spends!(revoked_htlc_txn[1], revoked_local_txn[0]);
-       assert_ne!(revoked_htlc_txn[1].lock_time, 0); // HTLC-Timeout
+       assert_ne!(revoked_htlc_txn[1].lock_time.0, 0); // HTLC-Timeout
 
        // B will generate justice tx from A's revoked commitment/HTLC tx
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[1], &Block { header, txdata: vec![revoked_local_txn[0].clone(), revoked_htlc_txn[1].clone()] });
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
@@ -5111,7 +5112,7 @@ fn test_static_spendable_outputs_justice_tx_revoked_htlc_success_tx() {
        assert_eq!(revoked_local_txn[0].output[unspent_local_txn_output].script_pubkey.len(), 2 + 20); // P2WPKH
 
        // A will generate justice tx from B's revoked commitment/HTLC tx
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header, txdata: vec![revoked_local_txn[0].clone(), revoked_htlc_txn[0].clone()] });
        check_closed_broadcast!(nodes[0], true);
        check_added_monitors!(nodes[0], 1);
@@ -5208,10 +5209,10 @@ fn test_onchain_to_onchain_claim() {
        assert_eq!(c_txn[1].input[0].witness.clone().last().unwrap().len(), 71);
        assert_eq!(c_txn[2].input[0].witness.clone().last().unwrap().len(), ACCEPTED_HTLC_SCRIPT_WEIGHT);
        assert!(c_txn[0].output[0].script_pubkey.is_v0_p2wsh()); // revokeable output
-       assert_eq!(c_txn[0].lock_time, 0); // Success tx
+       assert_eq!(c_txn[0].lock_time.0, 0); // Success tx
 
        // So we broadcast C's commitment tx and HTLC-Success on B's chain, we should successfully be able to extract preimage and update downstream monitor
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42};
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42};
        connect_block(&nodes[1], &Block { header, txdata: vec![c_txn[1].clone(), c_txn[2].clone()]});
        check_added_monitors!(nodes[1], 1);
        let events = nodes[1].node.get_and_clear_pending_events();
@@ -5269,7 +5270,7 @@ fn test_onchain_to_onchain_claim() {
        check_spends!(b_txn[0], commitment_tx[0]);
        assert_eq!(b_txn[0].input[0].witness.clone().last().unwrap().len(), OFFERED_HTLC_SCRIPT_WEIGHT);
        assert!(b_txn[0].output[0].script_pubkey.is_v0_p2wpkh()); // direct payment
-       assert_eq!(b_txn[0].lock_time, 0); // Success tx
+       assert_eq!(b_txn[0].lock_time.0, 0); // Success tx
 
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
@@ -5477,7 +5478,7 @@ fn test_dynamic_spendable_outputs_local_htlc_success_tx() {
        assert_eq!(spend_txn.len(), 1);
        assert_eq!(spend_txn[0].input.len(), 1);
        check_spends!(spend_txn[0], node_tx);
-       assert_eq!(spend_txn[0].input[0].sequence, BREAKDOWN_TIMEOUT as u32);
+       assert_eq!(spend_txn[0].input[0].sequence.0, BREAKDOWN_TIMEOUT as u32);
 }
 
 fn do_test_fail_backwards_unrevoked_remote_announce(deliver_last_raa: bool, announce_latest: bool) {
@@ -5825,11 +5826,11 @@ fn test_dynamic_spendable_outputs_local_htlc_timeout_tx() {
        check_spends!(spend_txn[0], local_txn[0]);
        assert_eq!(spend_txn[1].input.len(), 1);
        check_spends!(spend_txn[1], htlc_timeout);
-       assert_eq!(spend_txn[1].input[0].sequence, BREAKDOWN_TIMEOUT as u32);
+       assert_eq!(spend_txn[1].input[0].sequence.0, BREAKDOWN_TIMEOUT as u32);
        assert_eq!(spend_txn[2].input.len(), 2);
        check_spends!(spend_txn[2], local_txn[0], htlc_timeout);
-       assert!(spend_txn[2].input[0].sequence == BREAKDOWN_TIMEOUT as u32 ||
-               spend_txn[2].input[1].sequence == BREAKDOWN_TIMEOUT as u32);
+       assert!(spend_txn[2].input[0].sequence.0 == BREAKDOWN_TIMEOUT as u32 ||
+               spend_txn[2].input[1].sequence.0 == BREAKDOWN_TIMEOUT as u32);
 }
 
 #[test]
@@ -5908,11 +5909,11 @@ fn test_key_derivation_params() {
        check_spends!(spend_txn[0], local_txn_1[0]);
        assert_eq!(spend_txn[1].input.len(), 1);
        check_spends!(spend_txn[1], htlc_timeout);
-       assert_eq!(spend_txn[1].input[0].sequence, BREAKDOWN_TIMEOUT as u32);
+       assert_eq!(spend_txn[1].input[0].sequence.0, BREAKDOWN_TIMEOUT as u32);
        assert_eq!(spend_txn[2].input.len(), 2);
        check_spends!(spend_txn[2], local_txn_1[0], htlc_timeout);
-       assert!(spend_txn[2].input[0].sequence == BREAKDOWN_TIMEOUT as u32 ||
-               spend_txn[2].input[1].sequence == BREAKDOWN_TIMEOUT as u32);
+       assert!(spend_txn[2].input[0].sequence.0 == BREAKDOWN_TIMEOUT as u32 ||
+               spend_txn[2].input[1].sequence.0 == BREAKDOWN_TIMEOUT as u32);
 }
 
 #[test]
@@ -5971,7 +5972,7 @@ fn do_htlc_claim_local_commitment_only(use_dust: bool) {
 
        let starting_block = nodes[1].best_block_info();
        let mut block = Block {
-               header: BlockHeader { version: 0x20000000, prev_blockhash: starting_block.0, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+               header: BlockHeader { version: 0x20000000, prev_blockhash: starting_block.0, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                txdata: vec![],
        };
        for _ in starting_block.1 + 1..TEST_FINAL_CLTV - CLTV_CLAIM_BUFFER + starting_block.1 + 2 {
@@ -6002,7 +6003,7 @@ fn do_htlc_claim_current_remote_commitment_only(use_dust: bool) {
        // to "time out" the HTLC.
 
        let starting_block = nodes[1].best_block_info();
-       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: starting_block.0, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: starting_block.0, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
 
        for _ in starting_block.1 + 1..TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + starting_block.1 + 2 {
                connect_block(&nodes[0], &Block { header, txdata: Vec::new()});
@@ -6049,7 +6050,7 @@ fn do_htlc_claim_previous_remote_commitment_only(use_dust: bool, check_revoke_no
 
        let starting_block = nodes[1].best_block_info();
        let mut block = Block {
-               header: BlockHeader { version: 0x20000000, prev_blockhash: starting_block.0, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+               header: BlockHeader { version: 0x20000000, prev_blockhash: starting_block.0, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                txdata: vec![],
        };
        for _ in starting_block.1 + 1..TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + CHAN_CONFIRM_DEPTH + 2 {
@@ -7335,7 +7336,7 @@ fn do_test_sweep_outbound_htlc_failure_update(revoked: bool, local: bool) {
                if !revoked {
                        assert_eq!(timeout_tx[0].input[0].witness.last().unwrap().len(), ACCEPTED_HTLC_SCRIPT_WEIGHT);
                } else {
-                       assert_eq!(timeout_tx[0].lock_time, 0);
+                       assert_eq!(timeout_tx[0].lock_time.0, 0);
                }
                // We fail non-dust-HTLC 2 by broadcast of local timeout/revocation-claim tx
                mine_transaction(&nodes[0], &timeout_tx[0]);
@@ -7755,7 +7756,7 @@ fn test_bump_penalty_txn_on_revoked_commitment() {
 
        // Actually revoke tx by claiming a HTLC
        claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: header_114, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: header_114, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[1], &Block { header, txdata: vec![revoked_txn[0].clone()] });
        check_added_monitors!(nodes[1], 1);
 
@@ -7855,7 +7856,7 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
        // Revoke local commitment tx
        claim_payment(&nodes[0], &vec!(&nodes[1])[..], payment_preimage);
 
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        // B will generate both revoked HTLC-timeout/HTLC-preimage txn from revoked commitment tx
        connect_block(&nodes[1], &Block { header, txdata: vec![revoked_local_txn[0].clone()] });
        check_closed_broadcast!(nodes[1], true);
@@ -7878,9 +7879,9 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
 
        // Broadcast set of revoked txn on A
        let hash_128 = connect_blocks(&nodes[0], 40);
-       let header_11 = BlockHeader { version: 0x20000000, prev_blockhash: hash_128, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_11 = BlockHeader { version: 0x20000000, prev_blockhash: hash_128, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_11, txdata: vec![revoked_local_txn[0].clone()] });
-       let header_129 = BlockHeader { version: 0x20000000, prev_blockhash: header_11.block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_129 = BlockHeader { version: 0x20000000, prev_blockhash: header_11.block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_129, txdata: vec![revoked_htlc_txn[0].clone(), revoked_htlc_txn[2].clone()] });
        let events = nodes[0].node.get_and_clear_pending_events();
        expect_pending_htlcs_forwardable_from_events!(nodes[0], events[0..1], true);
@@ -7937,9 +7938,9 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
        }
 
        // Connect one more block to see if bumped penalty are issued for HTLC txn
-       let header_130 = BlockHeader { version: 0x20000000, prev_blockhash: header_129.block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_130 = BlockHeader { version: 0x20000000, prev_blockhash: header_129.block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_130, txdata: penalty_txn });
-       let header_131 = BlockHeader { version: 0x20000000, prev_blockhash: header_130.block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_131 = BlockHeader { version: 0x20000000, prev_blockhash: header_130.block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_131, txdata: Vec::new() });
        {
                let mut node_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap();
@@ -7978,7 +7979,7 @@ fn test_bump_penalty_txn_on_revoked_htlcs() {
                txn
        };
        // Broadcast claim txn and confirm blocks to avoid further bumps on this outputs
-       let header_145 = BlockHeader { version: 0x20000000, prev_blockhash: header_144, merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_145 = BlockHeader { version: 0x20000000, prev_blockhash: header_144, merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_145, txdata: node_txn });
        connect_blocks(&nodes[0], 20);
        {
@@ -8193,7 +8194,7 @@ fn test_bump_txn_sanitize_tracking_maps() {
                node_txn.clear();
                penalty_txn
        };
-       let header_130 = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_130 = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_130, txdata: penalty_txn });
        connect_blocks(&nodes[0], ANTI_REORG_DELAY - 1);
        {
@@ -8669,7 +8670,7 @@ fn test_secret_timeout() {
                        header: BlockHeader {
                                version: 0x2000000,
                                prev_blockhash: node_1_blocks.last().unwrap().0.block_hash(),
-                               merkle_root: Default::default(),
+                               merkle_root: TxMerkleNode::all_zeros(),
                                time: node_1_blocks.len() as u32 + 7200, bits: 42, nonce: 42 },
                        txdata: vec![],
                }
@@ -8814,7 +8815,7 @@ fn test_update_err_monitor_lockdown() {
                assert!(watchtower.watch_channel(outpoint, new_monitor).is_ok());
                watchtower
        };
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: Default::default(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: BlockHash::all_zeros(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        let block = Block { header, txdata: vec![] };
        // Make the tx_broadcaster aware of enough blocks that it doesn't think we're violating
        // transaction lock time requirements here.
@@ -8878,7 +8879,7 @@ fn test_concurrent_monitor_claim() {
                assert!(watchtower.watch_channel(outpoint, new_monitor).is_ok());
                watchtower
        };
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: Default::default(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: BlockHash::all_zeros(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        let block = Block { header, txdata: vec![] };
        // Make the tx_broadcaster aware of enough blocks that it doesn't think we're violating
        // transaction lock time requirements here.
@@ -8907,7 +8908,7 @@ fn test_concurrent_monitor_claim() {
                assert!(watchtower.watch_channel(outpoint, new_monitor).is_ok());
                watchtower
        };
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: Default::default(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: BlockHash::all_zeros(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        watchtower_bob.chain_monitor.block_connected(&Block { header, txdata: vec![] }, CHAN_CONFIRM_DEPTH + TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS);
 
        // Route another payment to generate another update with still previous HTLC pending
@@ -8932,7 +8933,7 @@ fn test_concurrent_monitor_claim() {
        check_added_monitors!(nodes[0], 1);
 
        //// Provide one more block to watchtower Bob, expect broadcast of commitment and HTLC-Timeout
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: Default::default(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: BlockHash::all_zeros(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        watchtower_bob.chain_monitor.block_connected(&Block { header, txdata: vec![] }, CHAN_CONFIRM_DEPTH + 1 + TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS);
 
        // Watchtower Bob should have broadcast a commitment/HTLC-timeout
@@ -8945,7 +8946,7 @@ fn test_concurrent_monitor_claim() {
        };
 
        // We confirm Bob's state Y on Alice, she should broadcast a HTLC-timeout
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: Default::default(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: BlockHash::all_zeros(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        watchtower_alice.chain_monitor.block_connected(&Block { header, txdata: vec![bob_state_y.clone()] }, CHAN_CONFIRM_DEPTH + 2 + TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS);
        {
                let htlc_txn = chanmon_cfgs[0].tx_broadcaster.txn_broadcasted.lock().unwrap();
@@ -9020,7 +9021,7 @@ fn test_htlc_no_detection() {
        check_spends!(local_txn[0], chan_1.3);
 
        // Timeout HTLC on A's chain and so it can generate a HTLC-Timeout tx
-       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header, txdata: vec![local_txn[0].clone()] });
        // We deliberately connect the local tx twice as this should provoke a failure calling
        // this test before #653 fix.
@@ -9038,7 +9039,7 @@ fn test_htlc_no_detection() {
                node_txn[1].clone()
        };
 
-       let header_201 = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let header_201 = BlockHeader { version: 0x20000000, prev_blockhash: nodes[0].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[0], &Block { header: header_201, txdata: vec![htlc_timeout.clone()] });
        connect_blocks(&nodes[0], ANTI_REORG_DELAY - 1);
        expect_payment_failed!(nodes[0], our_payment_hash, true);
@@ -9099,7 +9100,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
                        true => alice_txn.clone(),
                        false => get_local_commitment_txn!(nodes[1], chan_ab.2)
                };
-               let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42};
+               let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42};
                connect_block(&nodes[1], &Block { header, txdata: vec![txn_to_broadcast[0].clone()]});
                let mut bob_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().clone();
                if broadcast_alice {
@@ -9182,7 +9183,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
        let mut txn_to_broadcast = alice_txn.clone();
        if !broadcast_alice { txn_to_broadcast = get_local_commitment_txn!(nodes[1], chan_ab.2); }
        if !go_onchain_before_fulfill {
-               let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42};
+               let header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42};
                connect_block(&nodes[1], &Block { header, txdata: vec![txn_to_broadcast[0].clone()]});
                // If Bob was the one to force-close, he will have already passed these checks earlier.
                if broadcast_alice {
@@ -9514,14 +9515,14 @@ fn test_invalid_funding_tx() {
        // long the ChannelMonitor will try to read 32 bytes from the second-to-last element, panicking
        // as it's not 32 bytes long.
        let mut spend_tx = Transaction {
-               version: 2i32, lock_time: 0,
+               version: 2i32, lock_time: PackedLockTime::ZERO,
                input: tx.output.iter().enumerate().map(|(idx, _)| TxIn {
                        previous_output: BitcoinOutPoint {
                                txid: tx.txid(),
                                vout: idx as u32,
                        },
                        script_sig: Script::new(),
-                       sequence: 0xfffffffd,
+                       sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
                        witness: Witness::from_vec(channelmonitor::deliberately_bogus_accepted_htlc_witness())
                }).collect(),
                output: vec![TxOut {
@@ -10432,12 +10433,12 @@ fn test_non_final_funding_tx() {
 
        let chan_id = *nodes[0].network_chan_count.borrow();
        let events = nodes[0].node.get_and_clear_pending_events();
-       let input = TxIn { previous_output: BitcoinOutPoint::null(), script_sig: bitcoin::Script::new(), sequence: 0x1, witness: Witness::from_vec(vec!(vec!(1))) };
+       let input = TxIn { previous_output: BitcoinOutPoint::null(), script_sig: bitcoin::Script::new(), sequence: Sequence(1), witness: Witness::from_vec(vec!(vec!(1))) };
        assert_eq!(events.len(), 1);
        let mut tx = match events[0] {
                Event::FundingGenerationReady { ref channel_value_satoshis, ref output_script, .. } => {
                        // Timelock the transaction _beyond_ the best client height + 2.
-                       Transaction { version: chan_id as i32, lock_time: best_height + 3, input: vec![input], output: vec![TxOut {
+                       Transaction { version: chan_id as i32, lock_time: PackedLockTime(best_height + 3), input: vec![input], output: vec![TxOut {
                                value: *channel_value_satoshis, script_pubkey: output_script.clone(),
                        }]}
                },
@@ -10452,7 +10453,7 @@ fn test_non_final_funding_tx() {
        }
 
        // However, transaction should be accepted if it's in a +2 headroom from best block.
-       tx.lock_time -= 1;
+       tx.lock_time = PackedLockTime(tx.lock_time.0 - 1);
        assert!(nodes[0].node.funding_transaction_generated(&temp_channel_id, &nodes[1].node.get_our_node_id(), tx.clone()).is_ok());
        get_event_msg!(nodes[0], MessageSendEvent::SendFundingCreated, nodes[1].node.get_our_node_id());
 }
index 4f36b9a88810aa5122c3347953028c6b31943a87..9be2059a7059b71dcebda60257086a7209bd1d6e 100644 (file)
@@ -663,7 +663,7 @@ fn test_balances_on_local_commitment_htlcs() {
                        claimable_height: htlc_cltv_timeout,
                }]),
                sorted_vec(nodes[0].chain_monitor.chain_monitor.get_monitor(funding_outpoint).unwrap().get_claimable_balances()));
-       assert_eq!(as_txn[1].lock_time, nodes[0].best_block_info().1 + 1); // as_txn[1] can be included in the next block
+       assert_eq!(as_txn[1].lock_time.0, nodes[0].best_block_info().1 + 1); // as_txn[1] can be included in the next block
 
        // Now confirm nodes[0]'s HTLC-Timeout transaction, which changes the claimable balance to an
        // "awaiting confirmations" one.
index 975b40c9c21f070ad70adc7c9c907e101d285bd4..ffc595ccedab5bbf547f0f33d1426e0214e4e6bc 100644 (file)
@@ -42,7 +42,7 @@ use io_extras::read_to_end;
 
 use util::events::MessageSendEventsProvider;
 use util::logger;
-use util::ser::{LengthReadable, Readable, ReadableArgs, Writeable, Writer, FixedLengthReader, HighZeroBytesDroppedVarInt, Hostname};
+use util::ser::{BigSize, LengthReadable, Readable, ReadableArgs, Writeable, Writer, FixedLengthReader, HighZeroBytesDroppedBigSize, Hostname};
 
 use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
@@ -1375,14 +1375,14 @@ impl Writeable for OnionMessage {
 impl Writeable for FinalOnionHopData {
        fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
                self.payment_secret.0.write(w)?;
-               HighZeroBytesDroppedVarInt(self.total_msat).write(w)
+               HighZeroBytesDroppedBigSize(self.total_msat).write(w)
        }
 }
 
 impl Readable for FinalOnionHopData {
        fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
                let secret: [u8; 32] = Readable::read(r)?;
-               let amt: HighZeroBytesDroppedVarInt<u64> = Readable::read(r)?;
+               let amt: HighZeroBytesDroppedBigSize<u64> = Readable::read(r)?;
                Ok(Self { payment_secret: PaymentSecret(secret), total_msat: amt.0 })
        }
 }
@@ -1399,15 +1399,15 @@ impl Writeable for OnionHopData {
                        },
                        OnionHopDataFormat::NonFinalNode { short_channel_id } => {
                                encode_varint_length_prefixed_tlv!(w, {
-                                       (2, HighZeroBytesDroppedVarInt(self.amt_to_forward), required),
-                                       (4, HighZeroBytesDroppedVarInt(self.outgoing_cltv_value), required),
+                                       (2, HighZeroBytesDroppedBigSize(self.amt_to_forward), required),
+                                       (4, HighZeroBytesDroppedBigSize(self.outgoing_cltv_value), required),
                                        (6, short_channel_id, required)
                                });
                        },
                        OnionHopDataFormat::FinalNode { ref payment_data, ref keysend_preimage } => {
                                encode_varint_length_prefixed_tlv!(w, {
-                                       (2, HighZeroBytesDroppedVarInt(self.amt_to_forward), required),
-                                       (4, HighZeroBytesDroppedVarInt(self.outgoing_cltv_value), required),
+                                       (2, HighZeroBytesDroppedBigSize(self.amt_to_forward), required),
+                                       (4, HighZeroBytesDroppedBigSize(self.outgoing_cltv_value), required),
                                        (8, payment_data, option),
                                        (5482373484, keysend_preimage, option)
                                });
@@ -1417,36 +1417,23 @@ impl Writeable for OnionHopData {
        }
 }
 
-// ReadableArgs because we need onion_utils::decode_next_hop to accommodate payment packets and
-// onion message packets.
-impl ReadableArgs<()> for OnionHopData {
-       fn read<R: Read>(r: &mut R, _arg: ()) -> Result<Self, DecodeError> {
-               <Self as Readable>::read(r)
-       }
-}
-
 impl Readable for OnionHopData {
-       fn read<R: Read>(mut r: &mut R) -> Result<Self, DecodeError> {
-               use bitcoin::consensus::encode::{Decodable, Error, VarInt};
-               let v: VarInt = Decodable::consensus_decode(&mut r)
-                       .map_err(|e| match e {
-                               Error::Io(ioe) => DecodeError::from(ioe),
-                               _ => DecodeError::InvalidValue
-                       })?;
+       fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+               let b: BigSize = Readable::read(r)?;
                const LEGACY_ONION_HOP_FLAG: u64 = 0;
-               let (format, amt, cltv_value) = if v.0 != LEGACY_ONION_HOP_FLAG {
-                       let mut rd = FixedLengthReader::new(r, v.0);
-                       let mut amt = HighZeroBytesDroppedVarInt(0u64);
-                       let mut cltv_value = HighZeroBytesDroppedVarInt(0u32);
+               let (format, amt, cltv_value) = if b.0 != LEGACY_ONION_HOP_FLAG {
+                       let mut rd = FixedLengthReader::new(r, b.0);
+                       let mut amt = HighZeroBytesDroppedBigSize(0u64);
+                       let mut cltv_value = HighZeroBytesDroppedBigSize(0u32);
                        let mut short_id: Option<u64> = None;
                        let mut payment_data: Option<FinalOnionHopData> = None;
                        let mut keysend_preimage: Option<PaymentPreimage> = None;
-                       // The TLV type is chosen to be compatible with lnd and c-lightning.
                        decode_tlv_stream!(&mut rd, {
                                (2, amt, required),
                                (4, cltv_value, required),
                                (6, short_id, option),
                                (8, payment_data, option),
+                               // See https://github.com/lightning/blips/blob/master/blip-0003.md
                                (5482373484, keysend_preimage, option)
                        });
                        rd.eat_remaining().map_err(|_| DecodeError::ShortRead)?;
@@ -1488,6 +1475,14 @@ impl Readable for OnionHopData {
        }
 }
 
+// ReadableArgs because we need onion_utils::decode_next_hop to accommodate payment packets and
+// onion message packets.
+impl ReadableArgs<()> for OnionHopData {
+       fn read<R: Read>(r: &mut R, _arg: ()) -> Result<Self, DecodeError> {
+               <Self as Readable>::read(r)
+       }
+}
+
 impl Writeable for Ping {
        fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
                self.ponglen.write(w)?;
@@ -1913,7 +1908,7 @@ mod tests {
        use bitcoin::secp256k1::{PublicKey,SecretKey};
        use bitcoin::secp256k1::{Secp256k1, Message};
 
-       use io::Cursor;
+       use io::{self, Cursor};
        use prelude::*;
        use core::convert::TryFrom;
 
@@ -2824,4 +2819,40 @@ mod tests {
                assert_eq!(gossip_timestamp_filter.first_timestamp, 1590000000);
                assert_eq!(gossip_timestamp_filter.timestamp_range, 0xffff_ffff);
        }
+
+       #[test]
+       fn decode_onion_hop_data_len_as_bigsize() {
+               // Tests that we can decode an onion payload that is >253 bytes.
+               // Previously, receiving a payload of this size could've caused us to fail to decode a valid
+               // payload, because we were decoding the length (a BigSize, big-endian) as a VarInt
+               // (little-endian).
+
+               // Encode a test onion payload with a big custom TLV such that it's >253 bytes, forcing the
+               // payload length to be encoded over multiple bytes rather than a single u8.
+               let big_payload = encode_big_payload().unwrap();
+               let mut rd = Cursor::new(&big_payload[..]);
+               <msgs::OnionHopData as Readable>::read(&mut rd).unwrap();
+       }
+       // See the test above; this needs to be a separate method in order to use the serialization macros.
+       fn encode_big_payload() -> Result<Vec<u8>, io::Error> {
+               use util::ser::HighZeroBytesDroppedBigSize;
+               let payload = msgs::OnionHopData {
+                       format: OnionHopDataFormat::NonFinalNode {
+                               short_channel_id: 0xdeadbeef1bad1dea,
+                       },
+                       amt_to_forward: 1000,
+                       outgoing_cltv_value: 0xffffffff,
+               };
+               let mut encoded_payload = Vec::new();
+               let test_bytes = vec![42u8; 1000];
+               if let OnionHopDataFormat::NonFinalNode { short_channel_id } = payload.format {
+                       encode_varint_length_prefixed_tlv!(&mut encoded_payload, {
+                               (1, test_bytes, vec_type),
+                               (2, HighZeroBytesDroppedBigSize(payload.amt_to_forward), required),
+                               (4, HighZeroBytesDroppedBigSize(payload.outgoing_cltv_value), required),
+                               (6, short_channel_id, required)
+                       });
+               }
+               Ok(encoded_payload)
+       }
 }
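
The new test above exercises the switch from bitcoin's consensus VarInt to the BOLT BigSize when reading the onion payload length. As a standalone sketch (illustration only, not LDK code): both encodings use the same 0xfd/0xfe/0xff markers, but BigSize is big-endian while the consensus VarInt is little-endian, so any length above 252 decodes differently.

// Hypothetical helpers for comparison; neither is part of LDK.
fn encode_bigsize(n: u64) -> Vec<u8> {
    match n {
        0..=0xfc => vec![n as u8],
        0xfd..=0xffff => { let mut v = vec![0xfd]; v.extend_from_slice(&(n as u16).to_be_bytes()); v },
        0x1_0000..=0xffff_ffff => { let mut v = vec![0xfe]; v.extend_from_slice(&(n as u32).to_be_bytes()); v },
        _ => { let mut v = vec![0xff]; v.extend_from_slice(&n.to_be_bytes()); v },
    }
}

fn encode_consensus_varint(n: u64) -> Vec<u8> {
    match n {
        0..=0xfc => vec![n as u8],
        0xfd..=0xffff => { let mut v = vec![0xfd]; v.extend_from_slice(&(n as u16).to_le_bytes()); v },
        0x1_0000..=0xffff_ffff => { let mut v = vec![0xfe]; v.extend_from_slice(&(n as u32).to_le_bytes()); v },
        _ => { let mut v = vec![0xff]; v.extend_from_slice(&n.to_le_bytes()); v },
    }
}

// A payload length of 1000: BigSize gives [0xfd, 0x03, 0xe8], the consensus VarInt gives
// [0xfd, 0xe8, 0x03]. Reading one as the other yields a bogus length, which is what the
// test guards against for payloads longer than 252 bytes.
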
index f81c619d7353997c1019e60ac3b967da9b6ac1b0..3795ad5ee77d70a03e5124fc3800823f610e0634 100644 (file)
@@ -23,7 +23,7 @@ use bitcoin::hashes::cmp::fixed_time_eq;
 use bitcoin::hashes::hmac::{Hmac, HmacEngine};
 use bitcoin::hashes::sha256::Hash as Sha256;
 
-use bitcoin::secp256k1::{SecretKey,PublicKey};
+use bitcoin::secp256k1::{SecretKey, PublicKey, Scalar};
 use bitcoin::secp256k1::Secp256k1;
 use bitcoin::secp256k1::ecdh::SharedSecret;
 use bitcoin::secp256k1;
@@ -82,7 +82,7 @@ pub(super) fn gen_ammag_from_shared_secret(shared_secret: &[u8]) -> [u8; 32] {
        Hmac::from_engine(hmac).into_inner()
 }
 
-pub(crate) fn next_hop_packet_pubkey<T: secp256k1::Signing + secp256k1::Verification>(secp_ctx: &Secp256k1<T>, mut packet_pubkey: PublicKey, packet_shared_secret: &[u8; 32]) -> Result<PublicKey, secp256k1::Error> {
+pub(crate) fn next_hop_packet_pubkey<T: secp256k1::Signing + secp256k1::Verification>(secp_ctx: &Secp256k1<T>, packet_pubkey: PublicKey, packet_shared_secret: &[u8; 32]) -> Result<PublicKey, secp256k1::Error> {
        let blinding_factor = {
                let mut sha = Sha256::engine();
                sha.input(&packet_pubkey.serialize()[..]);
@@ -90,7 +90,7 @@ pub(crate) fn next_hop_packet_pubkey<T: secp256k1::Signing + secp256k1::Verifica
                Sha256::from_engine(sha).into_inner()
        };
 
-       packet_pubkey.mul_assign(secp_ctx, &blinding_factor[..]).map(|_| packet_pubkey)
+       packet_pubkey.mul_tweak(secp_ctx, &Scalar::from_be_bytes(blinding_factor).unwrap())
 }
 
 // can only fail if an intermediary hop has an invalid public key or session_priv is invalid
@@ -109,7 +109,7 @@ pub(super) fn construct_onion_keys_callback<T: secp256k1::Signing, FType: FnMut(
 
                let ephemeral_pubkey = blinded_pub;
 
-               blinded_priv.mul_assign(&blinding_factor)?;
+               blinded_priv = blinded_priv.mul_tweak(&Scalar::from_be_bytes(blinding_factor).unwrap())?;
                blinded_pub = PublicKey::from_secret_key(secp_ctx, &blinded_priv);
 
                callback(shared_secret, blinding_factor, ephemeral_pubkey, hop, idx);
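
These hunks port the key-tweaking helpers to secp256k1 0.24, where the in-place mul_assign methods are replaced by by-value mul_tweak calls taking a typed Scalar. A minimal standalone sketch of the new API shape (dummy key material, not LDK code):

use bitcoin::secp256k1::{Error, PublicKey, Scalar, Secp256k1, SecretKey};

fn tweak_both(blinding_factor: [u8; 32]) -> Result<(SecretKey, PublicKey), Error> {
    let secp_ctx = Secp256k1::new();
    let sk = SecretKey::from_slice(&[0x42; 32])?;
    let pk = PublicKey::from_secret_key(&secp_ctx, &sk);
    // Scalar::from_be_bytes only fails for values at or above the curve order, which a
    // hash output essentially never is; hence the unwrap()s in the diff above.
    let tweak = Scalar::from_be_bytes(blinding_factor).expect("hash output below curve order");
    let tweaked_sk = sk.mul_tweak(&tweak)?;               // was sk.mul_assign(&bytes[..])
    let tweaked_pk = pk.mul_tweak(&secp_ctx, &tweak)?;    // was pk.mul_assign(&ctx, &bytes[..])
    Ok((tweaked_sk, tweaked_pk))
}
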
index 785edecebbe3d0b7eddb0f0f6de2011f895fdde1..cc8f4ee27e8372922109e7e4268f244aba764a7d 100644 (file)
@@ -28,7 +28,8 @@ use util::enforcing_trait_impls::EnforcingSigner;
 use util::ser::{ReadableArgs, Writeable};
 use io;
 
-use bitcoin::{Block, BlockHeader, BlockHash};
+use bitcoin::{Block, BlockHeader, BlockHash, TxMerkleNode};
+use bitcoin::hashes::Hash;
 use bitcoin::network::constants::Network;
 
 use prelude::*;
@@ -605,7 +606,7 @@ fn do_test_dup_htlc_onchain_fails_on_reload(persist_manager_post_event: bool, co
        check_added_monitors!(nodes[1], 1);
        expect_payment_claimed!(nodes[1], payment_hash, 10_000_000);
 
-       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let mut header = BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        connect_block(&nodes[1], &Block { header, txdata: vec![node_txn[1].clone()]});
        check_closed_broadcast!(nodes[1], true);
        check_added_monitors!(nodes[1], 1);
index fab6962e514957209b832c786916575be95fed4c..cbf920f587db38187dc07a79c3be942c5226fdb0 100644 (file)
@@ -28,6 +28,8 @@ use bitcoin::secp256k1::Secp256k1;
 
 use prelude::*;
 use core::mem;
+use bitcoin::hashes::Hash;
+use bitcoin::TxMerkleNode;
 
 use ln::functional_test_utils::*;
 
@@ -68,7 +70,7 @@ fn do_test_onchain_htlc_reorg(local_commitment: bool, claim: bool) {
        check_added_monitors!(nodes[2], 1);
        get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
 
-       let mut header = BlockHeader { version: 0x2000_0000, prev_blockhash: nodes[2].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 };
+       let mut header = BlockHeader { version: 0x2000_0000, prev_blockhash: nodes[2].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 };
        let claim_txn = if local_commitment {
                // Broadcast node 1 commitment txn to broadcast the HTLC-Timeout
                let node_1_commitment_txn = get_local_commitment_txn!(nodes[1], chan_2.2);
@@ -131,7 +133,7 @@ fn do_test_onchain_htlc_reorg(local_commitment: bool, claim: bool) {
                disconnect_blocks(&nodes[1], ANTI_REORG_DELAY - 2);
 
                let block = Block {
-                       header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+                       header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                        txdata: claim_txn,
                };
                connect_block(&nodes[1], &block);
@@ -143,7 +145,7 @@ fn do_test_onchain_htlc_reorg(local_commitment: bool, claim: bool) {
        } else {
                // Confirm the timeout tx and check that we fail the HTLC backwards
                let block = Block {
-                       header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: Default::default(), time: 42, bits: 42, nonce: 42 },
+                       header: BlockHeader { version: 0x20000000, prev_blockhash: nodes[1].best_block_hash(), merkle_root: TxMerkleNode::all_zeros(), time: 42, bits: 42, nonce: 42 },
                        txdata: vec![],
                };
                connect_block(&nodes[1], &block);
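
The test hunks above replace Default::default() for dummy merkle roots: with bitcoin 0.29 the hash newtypes are constructed via the Hash trait's all_zeros() instead. A minimal sketch (illustration only) of a dummy header in the style these tests use:

use bitcoin::blockdata::block::BlockHeader;
use bitcoin::hashes::Hash;
use bitcoin::{BlockHash, TxMerkleNode};

// All-zero merkle root and arbitrary time/bits/nonce, matching the tests' throwaway headers.
fn dummy_header(prev_blockhash: BlockHash) -> BlockHeader {
    BlockHeader {
        version: 0x2000_0000,
        prev_blockhash,
        merkle_root: TxMerkleNode::all_zeros(),
        time: 42,
        bits: 42,
        nonce: 42,
    }
}
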
index 0e25f46d472141fb2f1e7a2f58ada2bbabc25e15..595085114b0cdec9ee5e2988fa869f5a264eef35 100644 (file)
@@ -122,7 +122,7 @@ pub(crate) fn is_bolt2_compliant(script: &Script, features: &InitFeatures) -> bo
        if script.is_p2pkh() || script.is_p2sh() || script.is_v0_p2wpkh() || script.is_v0_p2wsh() {
                true
        } else if features.supports_shutdown_anysegwit() {
-               script.is_witness_program() && script.as_bytes()[0] != SEGWIT_V0.into_u8()
+               script.is_witness_program() && script.as_bytes()[0] != SEGWIT_V0.to_u8()
        } else {
                false
        }
index 7eba3cdd254af6418132fc29bfac3cde59bd2292..044248961973099560fae9853e8a86cca3037a98 100644 (file)
@@ -13,7 +13,7 @@
 use bitcoin::hashes::{Hash, HashEngine};
 use bitcoin::hashes::hmac::{Hmac, HmacEngine};
 use bitcoin::hashes::sha256::Hash as Sha256;
-use bitcoin::secp256k1::{self, PublicKey, Secp256k1, SecretKey};
+use bitcoin::secp256k1::{self, PublicKey, Scalar, Secp256k1, SecretKey};
 
 use chain::keysinterface::{InMemorySigner, KeysInterface, KeysManager, Recipient, Sign};
 use ln::msgs;
@@ -196,7 +196,7 @@ impl<Signer: Sign, K: Deref, L: Deref> OnionMessenger<Signer, K, L>
                                Hmac::from_engine(hmac).into_inner()
                        };
                        match self.keys_manager.ecdh(Recipient::Node, &msg.onion_routing_packet.public_key,
-                               Some(&blinding_factor))
+                               Some(&Scalar::from_be_bytes(blinding_factor).unwrap()))
                        {
                                Ok(ss) => ss.secret_bytes(),
                                Err(()) => {
@@ -249,11 +249,13 @@ impl<Signer: Sign, K: Deref, L: Deref> OnionMessenger<Signer, K, L>
                                                                        Sha256::from_engine(sha).into_inner()
                                                                };
                                                                let mut next_blinding_point = msg.blinding_point;
-                                                               if let Err(e) = next_blinding_point.mul_assign(&self.secp_ctx, &blinding_factor[..]) {
-                                                                       log_trace!(self.logger, "Failed to compute next blinding point: {}", e);
-                                                                       return
+                                                               match next_blinding_point.mul_tweak(&self.secp_ctx, &Scalar::from_be_bytes(blinding_factor).unwrap()) {
+                                                                       Ok(bp) => bp,
+                                                                       Err(e) => {
+                                                                               log_trace!(self.logger, "Failed to compute next blinding point: {}", e);
+                                                                               return
+                                                                       }
                                                                }
-                                                               next_blinding_point
                                                        },
                                                },
                                                onion_routing_packet: outgoing_packet,
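
The messenger hunk above ports the per-hop blinding-point advance to the same mul_tweak API. Roughly, the next hop's blinding point is derived as B' = SHA256(B || ss) * B; a simplified standalone sketch of that step (LDK's real code also handles logging and early returns):

use bitcoin::hashes::{Hash, HashEngine};
use bitcoin::hashes::sha256::Hash as Sha256;
use bitcoin::secp256k1::{self, PublicKey, Scalar, Secp256k1};

fn advance_blinding_point<C: secp256k1::Verification>(
    secp_ctx: &Secp256k1<C>, blinding_point: PublicKey, shared_secret: &[u8; 32],
) -> Result<PublicKey, secp256k1::Error> {
    // blinding_factor = SHA256(current blinding point || shared secret with that hop)
    let mut sha = Sha256::engine();
    sha.input(&blinding_point.serialize()[..]);
    sha.input(shared_secret);
    let blinding_factor = Sha256::from_engine(sha).into_inner();
    // Tweak-multiply the current blinding point to obtain the next one.
    blinding_point.mul_tweak(secp_ctx, &Scalar::from_be_bytes(blinding_factor).expect("hash below curve order"))
}
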
index 9b95183e74bb8bc549ca0a6f252522ae80e050f7..52cadf6c9dbf5db5e3a5747729dde7fbeadf8d93 100644 (file)
@@ -12,7 +12,7 @@
 use bitcoin::hashes::{Hash, HashEngine};
 use bitcoin::hashes::hmac::{Hmac, HmacEngine};
 use bitcoin::hashes::sha256::Hash as Sha256;
-use bitcoin::secp256k1::{self, PublicKey, Secp256k1, SecretKey};
+use bitcoin::secp256k1::{self, PublicKey, Secp256k1, SecretKey, Scalar};
 use bitcoin::secp256k1::ecdh::SharedSecret;
 
 use ln::onion_utils;
@@ -43,9 +43,7 @@ pub(super) fn construct_keys_callback<T: secp256k1::Signing + secp256k1::Verific
                                        hmac.input(encrypted_data_ss.as_ref());
                                        Hmac::from_engine(hmac).into_inner()
                                };
-                               let mut unblinded_pk = $pk;
-                               unblinded_pk.mul_assign(secp_ctx, &hop_pk_blinding_factor)?;
-                               unblinded_pk
+                               $pk.mul_tweak(secp_ctx, &Scalar::from_be_bytes(hop_pk_blinding_factor).unwrap())?
                        };
                        let onion_packet_ss = SharedSecret::new(&blinded_hop_pk, &onion_packet_pubkey_priv);
 
@@ -67,7 +65,7 @@ pub(super) fn construct_keys_callback<T: secp256k1::Signing + secp256k1::Verific
                                Sha256::from_engine(sha).into_inner()
                        };
 
-                       msg_blinding_point_priv.mul_assign(&msg_blinding_point_blinding_factor)?;
+                       msg_blinding_point_priv = msg_blinding_point_priv.mul_tweak(&Scalar::from_be_bytes(msg_blinding_point_blinding_factor).unwrap())?;
                        msg_blinding_point = PublicKey::from_secret_key(secp_ctx, &msg_blinding_point_priv);
 
                        let onion_packet_pubkey_blinding_factor = {
@@ -76,7 +74,7 @@ pub(super) fn construct_keys_callback<T: secp256k1::Signing + secp256k1::Verific
                                sha.input(onion_packet_ss.as_ref());
                                Sha256::from_engine(sha).into_inner()
                        };
-                       onion_packet_pubkey_priv.mul_assign(&onion_packet_pubkey_blinding_factor)?;
+                       onion_packet_pubkey_priv = onion_packet_pubkey_priv.mul_tweak(&Scalar::from_be_bytes(onion_packet_pubkey_blinding_factor).unwrap())?;
                        onion_packet_pubkey = PublicKey::from_secret_key(secp_ctx, &onion_packet_pubkey_priv);
                };
        }
index 42b551633258448fc1a3117a0997913b30edffc4..d56747598563c381ce1e554c2cccda96d0f92be0 100644 (file)
@@ -25,7 +25,7 @@ use routing::gossip::NetworkUpdate;
 use util::ser::{BigSize, FixedLengthReader, Writeable, Writer, MaybeReadable, Readable, VecReadWrapper, VecWriteWrapper};
 use routing::router::{RouteHop, RouteParameters};
 
-use bitcoin::Transaction;
+use bitcoin::{PackedLockTime, Transaction};
 use bitcoin::blockdata::script::Script;
 use bitcoin::hashes::Hash;
 use bitcoin::hashes::sha256::Hash as Sha256;
@@ -907,7 +907,7 @@ impl MaybeReadable for Event {
                        11u8 => {
                                let f = || {
                                        let mut channel_id = [0; 32];
-                                       let mut transaction = Transaction{ version: 2, lock_time: 0, input: Vec::new(), output: Vec::new() };
+                                       let mut transaction = Transaction{ version: 2, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
                                        read_tlv_fields!(reader, {
                                                (0, channel_id, required),
                                                (2, transaction, required),
index eadfdbdfc67aaeea13477dc7b5315df4f4ab42cd..63496b28362c8c27b716b8547f24b3a6f9d32f97 100644 (file)
@@ -91,7 +91,7 @@ impl<'a> core::fmt::Display for DebugTx<'a> {
        fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {
                if self.0.input.len() >= 1 && self.0.input.iter().any(|i| !i.witness.is_empty()) {
                        if self.0.input.len() == 1 && self.0.input[0].witness.last().unwrap().len() == 71 &&
-                                       (self.0.input[0].sequence >> 8*3) as u8 == 0x80 {
+                                       (self.0.input[0].sequence.0 >> 8*3) as u8 == 0x80 {
                                write!(f, "commitment tx ")?;
                        } else if self.0.input.len() == 1 && self.0.input[0].witness.last().unwrap().len() == 71 {
                                write!(f, "closing tx ")?;
index ecf85839a5a8e19ebfed3f8e87b8b5832427e4a5..852aa8f15892e5bbc3d14dd265ecf7f168044653 100644 (file)
@@ -400,7 +400,7 @@ impl Readable for BigSize {
 /// variable-length integer which is simply truncated by skipping high zero bytes. This type
 /// encapsulates such integers implementing Readable/Writeable for them.
 #[cfg_attr(test, derive(PartialEq, Debug))]
-pub(crate) struct HighZeroBytesDroppedVarInt<T>(pub T);
+pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
 
 macro_rules! impl_writeable_primitive {
        ($val_type:ty, $len: expr) => {
@@ -410,7 +410,7 @@ macro_rules! impl_writeable_primitive {
                                writer.write_all(&self.to_be_bytes())
                        }
                }
-               impl Writeable for HighZeroBytesDroppedVarInt<$val_type> {
+               impl Writeable for HighZeroBytesDroppedBigSize<$val_type> {
                        #[inline]
                        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
                                // Skip any full leading 0 bytes when writing (in BE):
@@ -425,9 +425,9 @@ macro_rules! impl_writeable_primitive {
                                Ok(<$val_type>::from_be_bytes(buf))
                        }
                }
-               impl Readable for HighZeroBytesDroppedVarInt<$val_type> {
+               impl Readable for HighZeroBytesDroppedBigSize<$val_type> {
                        #[inline]
-                       fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedVarInt<$val_type>, DecodeError> {
+                       fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedBigSize<$val_type>, DecodeError> {
                                // We need to accept short reads (read_len == 0) as "EOF" and handle them as simply
                                // the high bytes being dropped. To do so, we start reading into the middle of buf
                                // and then convert the appropriate number of bytes with extra high bytes out of
@@ -443,7 +443,7 @@ macro_rules! impl_writeable_primitive {
                                        let first_byte = $len - ($len - total_read_len);
                                        let mut bytes = [0; $len];
                                        bytes.copy_from_slice(&buf[first_byte..first_byte + $len]);
-                                       Ok(HighZeroBytesDroppedVarInt(<$val_type>::from_be_bytes(bytes)))
+                                       Ok(HighZeroBytesDroppedBigSize(<$val_type>::from_be_bytes(bytes)))
                                } else {
                                        // If the encoding had extra zero bytes, return a failure even though we know
                                        // what they meant (as the TLV test vectors require this)
@@ -860,7 +860,7 @@ macro_rules! impl_consensus_ser {
        ($bitcoin_type: ty) => {
                impl Writeable for $bitcoin_type {
                        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
-                               match self.consensus_encode(WriterWriteAdaptor(writer)) {
+                               match self.consensus_encode(&mut WriterWriteAdaptor(writer)) {
                                        Ok(_) => Ok(()),
                                        Err(e) => Err(e),
                                }
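
The rename above matches what the type actually encodes: the integer is written big-endian with all leading zero bytes dropped (the truncation BOLT TLV fields such as amt_to_forward use), and a short read on the way back in is treated as dropped high bytes. A standalone sketch of the write side (not the LDK implementation):

fn write_high_zeros_dropped(v: u64) -> Vec<u8> {
    // Big-endian bytes with every leading 0x00 removed; zero encodes to an empty value.
    let be = v.to_be_bytes();
    let first_nonzero = be.iter().position(|b| *b != 0).unwrap_or(be.len());
    be[first_nonzero..].to_vec()
}

// 1000u64 -> [0x03, 0xe8]; 0u64 -> [], which is why the ser_macros tests below expect
// type 0xff (encoded fd00ff) to carry a zero-length value.
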
index 351c2a1f5e25e4d21ad0c999e2f064bf366a6f35..94990fcb8a17c8056dfd01d00b18db78d58e466f 100644 (file)
@@ -563,7 +563,7 @@ mod tests {
        use io::{self, Cursor};
        use prelude::*;
        use ln::msgs::DecodeError;
-       use util::ser::{Writeable, HighZeroBytesDroppedVarInt, VecWriter};
+       use util::ser::{Writeable, HighZeroBytesDroppedBigSize, VecWriter};
        use bitcoin::secp256k1::PublicKey;
 
        // The BOLT TLV test cases don't include any tests which use our "required-value" logic since
@@ -632,9 +632,9 @@ mod tests {
        }
 
        // BOLT TLV test cases
-       fn tlv_reader_n1(s: &[u8]) -> Result<(Option<HighZeroBytesDroppedVarInt<u64>>, Option<u64>, Option<(PublicKey, u64, u64)>, Option<u16>), DecodeError> {
+       fn tlv_reader_n1(s: &[u8]) -> Result<(Option<HighZeroBytesDroppedBigSize<u64>>, Option<u64>, Option<(PublicKey, u64, u64)>, Option<u16>), DecodeError> {
                let mut s = Cursor::new(s);
-               let mut tlv1: Option<HighZeroBytesDroppedVarInt<u64>> = None;
+               let mut tlv1: Option<HighZeroBytesDroppedBigSize<u64>> = None;
                let mut tlv2: Option<u64> = None;
                let mut tlv3: Option<(PublicKey, u64, u64)> = None;
                let mut tlv4: Option<u16> = None;
@@ -765,11 +765,11 @@ mod tests {
                assert_eq!(stream.0, ::hex::decode("06fd00ff02abcd").unwrap());
 
                stream.0.clear();
-               encode_varint_length_prefixed_tlv!(&mut stream, {(0, 1u64, required), (42, None::<u64>, option), (0xff, HighZeroBytesDroppedVarInt(0u64), required)});
+               encode_varint_length_prefixed_tlv!(&mut stream, {(0, 1u64, required), (42, None::<u64>, option), (0xff, HighZeroBytesDroppedBigSize(0u64), required)});
                assert_eq!(stream.0, ::hex::decode("0e00080000000000000001fd00ff00").unwrap());
 
                stream.0.clear();
-               encode_varint_length_prefixed_tlv!(&mut stream, {(0, Some(1u64), option), (0xff, HighZeroBytesDroppedVarInt(0u64), required)});
+               encode_varint_length_prefixed_tlv!(&mut stream, {(0, Some(1u64), option), (0xff, HighZeroBytesDroppedBigSize(0u64), required)});
                assert_eq!(stream.0, ::hex::decode("0e00080000000000000001fd00ff00").unwrap());
 
                Ok(())
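
As a reading aid (not part of the diff), the expected hex 0e00080000000000000001fd00ff00 in the two assertions above decodes as a length-prefixed TLV stream:

// 0e                   BigSize length prefix: 14 bytes of TLV data follow
//   00                 type 0
//   08                 length 8
//   0000000000000001   value 1u64, big-endian
//   fd00ff             type 255, in the two-byte BigSize form
//   00                 length 0 (HighZeroBytesDroppedBigSize(0) writes no value bytes)
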
index fe009cc16b8a14803cbdb05912b3038863f99bb8..b4d26904706240de647eec14ef97ef5b175ff8cf 100644 (file)
@@ -34,7 +34,7 @@ use bitcoin::blockdata::block::Block;
 use bitcoin::network::constants::Network;
 use bitcoin::hash_types::{BlockHash, Txid};
 
-use bitcoin::secp256k1::{SecretKey, PublicKey, Secp256k1, ecdsa::Signature};
+use bitcoin::secp256k1::{SecretKey, PublicKey, Secp256k1, ecdsa::Signature, Scalar};
 use bitcoin::secp256k1::ecdh::SharedSecret;
 use bitcoin::secp256k1::ecdsa::RecoverableSignature;
 
@@ -51,6 +51,7 @@ use chain::keysinterface::{InMemorySigner, Recipient, KeyMaterial};
 
 #[cfg(feature = "std")]
 use std::time::{SystemTime, UNIX_EPOCH};
+use bitcoin::Sequence;
 
 pub struct TestVecWriter(pub Vec<u8>);
 impl Writer for TestVecWriter {
@@ -74,7 +75,7 @@ impl keysinterface::KeysInterface for OnlyReadsKeysInterface {
        type Signer = EnforcingSigner;
 
        fn get_node_secret(&self, _recipient: Recipient) -> Result<SecretKey, ()> { unreachable!(); }
-       fn ecdh(&self, _recipient: Recipient, _other_key: &PublicKey, _tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> { unreachable!(); }
+       fn ecdh(&self, _recipient: Recipient, _other_key: &PublicKey, _tweak: Option<&Scalar>) -> Result<SharedSecret, ()> { unreachable!(); }
        fn get_inbound_payment_key_material(&self) -> KeyMaterial { unreachable!(); }
        fn get_destination_script(&self) -> Script { unreachable!(); }
        fn get_shutdown_scriptpubkey(&self) -> ShutdownScript { unreachable!(); }
@@ -241,10 +242,11 @@ impl TestBroadcaster {
 
 impl chaininterface::BroadcasterInterface for TestBroadcaster {
        fn broadcast_transaction(&self, tx: &Transaction) {
-               assert!(tx.lock_time < 1_500_000_000);
-               if tx.lock_time > self.blocks.lock().unwrap().len() as u32 + 1 && tx.lock_time < 500_000_000 {
+               let lock_time = tx.lock_time.0;
+               assert!(lock_time < 1_500_000_000);
+               if lock_time > self.blocks.lock().unwrap().len() as u32 + 1 && lock_time < 500_000_000 {
                        for inp in tx.input.iter() {
-                               if inp.sequence != 0xffffffff {
+                               if inp.sequence != Sequence::MAX {
                                        panic!("We should never broadcast a transaction before its locktime ({})!", tx.lock_time);
                                }
                        }
@@ -593,7 +595,7 @@ impl keysinterface::KeysInterface for TestKeysInterface {
        fn get_node_secret(&self, recipient: Recipient) -> Result<SecretKey, ()> {
                self.backing.get_node_secret(recipient)
        }
-       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&[u8; 32]>) -> Result<SharedSecret, ()> {
+       fn ecdh(&self, recipient: Recipient, other_key: &PublicKey, tweak: Option<&Scalar>) -> Result<SharedSecret, ()> {
                self.backing.ecdh(recipient, other_key, tweak)
        }
        fn get_inbound_payment_key_material(&self) -> keysinterface::KeyMaterial {
@@ -721,7 +723,6 @@ pub struct TestChainSource {
        pub utxo_ret: Mutex<Result<TxOut, chain::AccessError>>,
        pub watched_txn: Mutex<HashSet<(Txid, Script)>>,
        pub watched_outputs: Mutex<HashSet<(OutPoint, Script)>>,
-       expectations: Mutex<Option<VecDeque<OnRegisterOutput>>>,
 }
 
 impl TestChainSource {
@@ -732,17 +733,8 @@ impl TestChainSource {
                        utxo_ret: Mutex::new(Ok(TxOut { value: u64::max_value(), script_pubkey })),
                        watched_txn: Mutex::new(HashSet::new()),
                        watched_outputs: Mutex::new(HashSet::new()),
-                       expectations: Mutex::new(None),
                }
        }
-
-       /// Sets an expectation that [`chain::Filter::register_output`] is called.
-       pub fn expect(&self, expectation: OnRegisterOutput) -> &Self {
-               self.expectations.lock().unwrap()
-                       .get_or_insert_with(|| VecDeque::new())
-                       .push_back(expectation);
-               self
-       }
 }
 
 impl chain::Access for TestChainSource {
@@ -760,24 +752,8 @@ impl chain::Filter for TestChainSource {
                self.watched_txn.lock().unwrap().insert((*txid, script_pubkey.clone()));
        }
 
-       fn register_output(&self, output: WatchedOutput) -> Option<(usize, Transaction)> {
-               let dependent_tx = match &mut *self.expectations.lock().unwrap() {
-                       None => None,
-                       Some(expectations) => match expectations.pop_front() {
-                               None => {
-                                       panic!("Unexpected register_output: {:?}",
-                                               (output.outpoint, output.script_pubkey));
-                               },
-                               Some(expectation) => {
-                                       assert_eq!(output.outpoint, expectation.outpoint());
-                                       assert_eq!(&output.script_pubkey, expectation.script_pubkey());
-                                       expectation.returns
-                               },
-                       },
-               };
-
+       fn register_output(&self, output: WatchedOutput) {
                self.watched_outputs.lock().unwrap().insert((output.outpoint, output.script_pubkey));
-               dependent_tx
        }
 }
 
@@ -786,47 +762,6 @@ impl Drop for TestChainSource {
                if panicking() {
                        return;
                }
-
-               if let Some(expectations) = &*self.expectations.lock().unwrap() {
-                       if !expectations.is_empty() {
-                               panic!("Unsatisfied expectations: {:?}", expectations);
-                       }
-               }
-       }
-}
-
-/// An expectation that [`chain::Filter::register_output`] was called with a transaction output and
-/// returns an optional dependent transaction that spends the output in the same block.
-pub struct OnRegisterOutput {
-       /// The transaction output to register.
-       pub with: TxOutReference,
-
-       /// A dependent transaction spending the output along with its position in the block.
-       pub returns: Option<(usize, Transaction)>,
-}
-
-/// A transaction output as identified by an index into a transaction's output list.
-pub struct TxOutReference(pub Transaction, pub usize);
-
-impl OnRegisterOutput {
-       fn outpoint(&self) -> OutPoint {
-               let txid = self.with.0.txid();
-               let index = self.with.1 as u16;
-               OutPoint { txid, index }
-       }
-
-       fn script_pubkey(&self) -> &Script {
-               let index = self.with.1;
-               &self.with.0.output[index].script_pubkey
-       }
-}
-
-impl core::fmt::Debug for OnRegisterOutput {
-       fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-               f.debug_struct("OnRegisterOutput")
-                       .field("outpoint", &self.outpoint())
-                       .field("script_pubkey", self.script_pubkey())
-                       .finish()
        }
 }
 
index 12768543783cd148b7874a0009aa6b44650cd71a..028a08345edebaa867bead864cfda7198b218924 100644 (file)
@@ -56,7 +56,7 @@ pub(crate) fn maybe_add_change_output(tx: &mut Transaction, input_value: u64, wi
        weight_with_change += (VarInt(tx.output.len() as u64 + 1).len() - VarInt(tx.output.len() as u64).len()) as i64 * 4;
        // When calculating weight, add two for the flag bytes
        let change_value: i64 = (input_value - output_value) as i64 - weight_with_change * feerate_sat_per_1000_weight as i64 / 1000;
-       if change_value >= dust_value.as_sat() as i64 {
+       if change_value >= dust_value.to_sat() as i64 {
                change_output.value = change_value as u64;
                tx.output.push(change_output);
                Ok(weight_with_change as usize)
@@ -75,9 +75,8 @@ mod tests {
        use bitcoin::blockdata::script::{Script, Builder};
        use bitcoin::hash_types::{PubkeyHash, Txid};
 
-       use bitcoin::hashes::sha256d::Hash as Sha256dHash;
        use bitcoin::hashes::Hash;
-       use bitcoin::Witness;
+       use bitcoin::{PackedLockTime, Sequence, Witness};
 
        use hex::decode;
 
@@ -215,7 +214,7 @@ mod tests {
        #[test]
        fn test_tx_value_overrun() {
                // If we have a bogus input amount or outputs valued more than inputs, we should fail
-               let mut tx = Transaction { version: 2, lock_time: 0, input: Vec::new(), output: vec![TxOut {
+               let mut tx = Transaction { version: 2, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: vec![TxOut {
                        script_pubkey: Script::new(), value: 1000
                }] };
                assert!(maybe_add_change_output(&mut tx, 21_000_000_0000_0001, 0, 253, Script::new()).is_err());
@@ -226,10 +225,10 @@ mod tests {
        #[test]
        fn test_tx_change_edge() {
                // Check that we never add dust outputs
-               let mut tx = Transaction { version: 2, lock_time: 0, input: Vec::new(), output: Vec::new() };
+               let mut tx = Transaction { version: 2, lock_time: PackedLockTime::ZERO, input: Vec::new(), output: Vec::new() };
                let orig_wtxid = tx.wtxid();
                let output_spk = Script::new_p2pkh(&PubkeyHash::hash(&[0; 0]));
-               assert_eq!(output_spk.dust_value().as_sat(), 546);
+               assert_eq!(output_spk.dust_value().to_sat(), 546);
                // 9 sats isn't enough to pay fee on a dummy transaction...
                assert_eq!(tx.weight() as u64, 40); // ie 10 vbytes
                assert!(maybe_add_change_output(&mut tx, 9, 0, 250, output_spk.clone()).is_err());
@@ -260,8 +259,8 @@ mod tests {
        #[test]
        fn test_tx_extra_outputs() {
                // Check that we correctly handle existing outputs
-               let mut tx = Transaction { version: 2, lock_time: 0, input: vec![TxIn {
-                       previous_output: OutPoint::new(Txid::from_hash(Sha256dHash::default()), 0), script_sig: Script::new(), witness: Witness::new(), sequence: 0,
+               let mut tx = Transaction { version: 2, lock_time: PackedLockTime::ZERO, input: vec![TxIn {
+                       previous_output: OutPoint::new(Txid::all_zeros(), 0), script_sig: Script::new(), witness: Witness::new(), sequence: Sequence::ZERO,
                }], output: vec![TxOut {
                        script_pubkey: Builder::new().push_int(1).into_script(), value: 1000
                }] };
@@ -269,7 +268,7 @@ mod tests {
                let orig_weight = tx.weight();
                assert_eq!(orig_weight / 4, 61);
 
-               assert_eq!(Builder::new().push_int(2).into_script().dust_value().as_sat(), 474);
+               assert_eq!(Builder::new().push_int(2).into_script().dust_value().to_sat(), 474);
 
                // Input value of the output value + fee - 1 should fail:
                assert!(maybe_add_change_output(&mut tx, 1000 + 61 + 100 - 1, 400, 250, Builder::new().push_int(2).into_script()).is_err());