From: Matt Corallo
Date: Mon, 12 Aug 2024 19:35:48 +0000 (+0000)
Subject: dont use &
X-Git-Url: http://git.bitcoin.ninja/?a=commitdiff_plain;h=0ecebc4e15ab8a2fab69d8da1a137b2b8e84b4b7;p=rust-lightning

dont use &
---

diff --git a/Cargo.toml b/Cargo.toml
index 59b545992..0aa7f7624 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -43,11 +43,6 @@ panic = "abort"
 [patch.crates-io.possiblyrandom]
 path = "possiblyrandom"
 
-[patch.crates-io]
-bitcoin = { path = "../../rust-bitcoin/bitcoin" }
-bitcoin-io = {path = "../../rust-bitcoin/io" }
-bitcoin_hashes = {path = "../../rust-bitcoin/hashes"}
-
 [workspace.lints.rust.unexpected_cfgs]
 level = "forbid"
 # When adding a new cfg attribute, ensure that it is added to this list.
diff --git a/lightning/Cargo.toml b/lightning/Cargo.toml
index b0d68fc10..f0566433e 100644
--- a/lightning/Cargo.toml
+++ b/lightning/Cargo.toml
@@ -31,8 +31,8 @@ unsafe_revoked_tx_signing = []
 # Override signing to not include randomness when generating signatures for test vectors.
 _test_vectors = []
 
-no-std = ["hashbrown", "possiblyrandom", "core2/alloc", "libm"]
-std = ["bitcoin/std", "bech32/std", "core2/alloc"]
+no-std = ["hashbrown", "possiblyrandom", "libm"]
+std = ["bech32/std"]
 
 # Generates low-r bitcoin signatures, which saves 1 byte in 50% of the cases
 grind_signatures = []
@@ -49,7 +49,6 @@ hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
 regex = { version = "1.5.6", optional = true }
 backtrace = { version = "0.3", optional = true }
 
-core2 = { version = "0.3.0", optional = true, default-features = false }
 libm = { version = "0.2", optional = true, default-features = false }
 
 [dev-dependencies]
diff --git a/lightning/src/blinded_path/mod.rs b/lightning/src/blinded_path/mod.rs
index e34e64c7a..84d8b3ca7 100644
--- a/lightning/src/blinded_path/mod.rs
+++ b/lightning/src/blinded_path/mod.rs
@@ -281,8 +281,10 @@ impl Readable for BlindedPath {
 			1 => IntroductionNode::DirectedShortChannelId(Direction::NodeTwo, Readable::read(r)?),
 			2|3 => {
 				use io::Read;
-				let mut pubkey_read = core::slice::from_mut(&mut first_byte).chain(r.by_ref());
-				IntroductionNode::NodeId(Readable::read(&mut pubkey_read)?)
+				let mut bytes = [0; 33];
+				bytes[0] = first_byte;
+				r.read_exact(&mut bytes[1..])?;
+				IntroductionNode::NodeId(Readable::read(&mut &bytes[..])?)
 			},
 			_ => return Err(DecodeError::InvalidValue),
 		};
diff --git a/lightning/src/crypto/streams.rs b/lightning/src/crypto/streams.rs
index e8217b8d9..7a2ab86d1 100644
--- a/lightning/src/crypto/streams.rs
+++ b/lightning/src/crypto/streams.rs
@@ -62,7 +62,7 @@ impl LengthReadableArgs<[u8; 32]> for ChaChaPolyReadAdapter {
 		let mut chacha = ChaCha20Poly1305RFC::new(&secret, &[0; 12], &[]);
 		let decrypted_len = r.total_bytes() - 16;
-		let s = FixedLengthReader::new(&mut r, decrypted_len);
+		let s = FixedLengthReader::new(r, decrypted_len);
 		let mut chacha_stream = ChaChaPolyReader { chacha: &mut chacha, read: s };
 		let readable: T = Readable::read(&mut chacha_stream)?;
 		chacha_stream.read.eat_remaining()?;
@@ -194,7 +194,7 @@ mod tests {
 		// Now deserialize the object back and make sure it matches the original.
 		let mut read_adapter: Option<ChaChaPolyReadAdapter<TestWriteable>> = None;
-		decode_tlv_stream!(&writer.0[..], {
+		decode_tlv_stream!(&mut &writer.0[..], {
 			(1, read_adapter, (option: LengthReadableArgs, rho)),
 		});
 		assert_eq!(writeable, read_adapter.unwrap().readable);
diff --git a/lightning/src/lib.rs b/lightning/src/lib.rs
index 431fee5a0..749fd8685 100644
--- a/lightning/src/lib.rs
+++ b/lightning/src/lib.rs
@@ -68,7 +68,6 @@ extern crate core;
 extern crate hex;
 #[cfg(any(test, feature = "_test_utils"))] extern crate regex;
-#[cfg(not(feature = "std"))] extern crate core2;
 #[cfg(not(feature = "std"))] extern crate libm;
 
 #[cfg(ldk_bench)] extern crate criterion;
@@ -403,7 +402,7 @@ pub mod io_extras {
 		Ok(count)
 	}
 
-	pub fn read_to_end<D: io::Read>(mut d: D) -> Result<Vec<u8>, io::Error> {
+	pub fn read_to_end<D: io::Read>(mut d: &mut D) -> Result<Vec<u8>, io::Error> {
 		let mut result = vec![];
 		let mut buf = [0u8; 64];
 		loop {
diff --git a/lightning/src/ln/msgs.rs b/lightning/src/ln/msgs.rs
index 3d606f4b9..cca0b8cf6 100644
--- a/lightning/src/ln/msgs.rs
+++ b/lightning/src/ln/msgs.rs
@@ -2721,8 +2721,8 @@ impl ReadableArgs<(Option, &NS)> for InboundOnionPayload w
 		let mut custom_tlvs = Vec::new();
 
 		let tlv_len = BigSize::read(r)?;
-		let rd = FixedLengthReader::new(r, tlv_len.0);
-		decode_tlv_stream_with_custom_tlv_decode!(rd, {
+		let mut rd = FixedLengthReader::new(r, tlv_len.0);
+		decode_tlv_stream_with_custom_tlv_decode!(&mut rd, {
 			(2, amt, (option, encoding: (u64, HighZeroBytesDroppedBigSize))),
 			(4, cltv_value, (option, encoding: (u32, HighZeroBytesDroppedBigSize))),
 			(6, short_id, option),
diff --git a/lightning/src/routing/router.rs b/lightning/src/routing/router.rs
index 71fde86d0..c47a26ffb 100644
--- a/lightning/src/routing/router.rs
+++ b/lightning/src/routing/router.rs
@@ -8868,9 +8868,9 @@ pub(crate) mod bench_utils {
 	pub(crate) fn read_graph_scorer(logger: &TestLogger) -> Result<(Arc>, ProbabilisticScorer>, &TestLogger>), &'static str> {
 		let (mut graph_file, mut scorer_file) = get_graph_scorer_file()?;
-		let graph = Arc::new(NetworkGraph::read(&mut graph_file, logger).unwrap());
+		let graph = Arc::new(NetworkGraph::read(&mut std::io::BufReader::new(graph_file), logger).unwrap());
 		let scorer_args = (Default::default(), Arc::clone(&graph), logger);
-		let scorer = ProbabilisticScorer::read(&mut scorer_file, scorer_args).unwrap();
+		let scorer = ProbabilisticScorer::read(&mut std::io::BufReader::new(scorer_file), scorer_args).unwrap();
 		Ok((graph, scorer))
 	}
diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index 35baf71e5..69ed8f947 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -65,15 +65,15 @@ impl Writer for W {
 }
 
 // pub(crate) struct ReadBufReadAdapter(pub R);
-pub struct BufReader<R, const S: usize = 4096> {
-	inner: R,
+pub struct BufReader<'a, R, const S: usize = 4096> {
+	inner: &'a mut R,
 	buf: [u8; S],
 	pos: usize,
 	cap: usize,
 }
 
-impl<R: Read, const S: usize> BufReader<R, S> {
-	pub fn new(inner: R) -> BufReader<R, S> {
+impl<'a, R: Read, const S: usize> BufReader<'a, R, S> {
+	pub fn new(inner: &'a mut R) -> BufReader<'a, R, S> {
 		BufReader {
 			inner,
 			buf: [0; S],
@@ -83,13 +83,13 @@ impl BufReader {
 	}
 }
 
-impl<R: Read, const S: usize> Read for BufReader<R, S> {
+impl<'a, R: Read, const S: usize> Read for BufReader<'a, R, S> {
 	fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
 		// If we don't have any buffered data and we're doing a massive read
 		// (larger than our internal buffer), bypass our internal buffer
 		// entirely.
 		if self.pos == self.cap && buf.len() >= S {
-			self.discard_buffer();
+			//self.discard_buffer();
 			return self.inner.read(buf);
 		}
 		let nread = {
@@ -101,7 +101,7 @@ impl Read for BufReader {
 	}
 }
 
-impl<R: Read, const S: usize> BufRead for BufReader<R, S> {
+impl<'a, R: Read, const S: usize> BufRead for BufReader<'a, R, S> {
 	fn fill_buf(&mut self) -> io::Result<&[u8]> {
 		// If we've reached the end of our internal buffer then we need to fetch
 		// some more data from the underlying reader.
@@ -249,18 +249,18 @@ impl<'a, R: Read> LengthRead for FixedLengthReader<'a, R> {
 /// between "EOF reached before we started" and "EOF reached mid-read".
 ///
 /// This is not exported to bindings users as manual TLV building is not currently supported in bindings
-pub struct ReadTrackingReader<R: Read> {
-	read: R,
+pub struct ReadTrackingReader<'a, R: Read> {
+	read: &'a mut R,
 	/// Returns whether we have read from this reader or not yet.
 	pub have_read: bool,
 }
-impl<R: Read> ReadTrackingReader<R> {
+impl<'a, R: Read> ReadTrackingReader<'a, R> {
 	/// Returns a new [`ReadTrackingReader`].
-	pub fn new(read: R) -> Self {
+	pub fn new(read: &'a mut R) -> Self {
 		Self { read, have_read: false }
 	}
 }
-impl<R: Read> Read for ReadTrackingReader<R> {
+impl<'a, R: Read> Read for ReadTrackingReader<'a, R> {
 	#[inline]
 	fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
 		match self.read.read(dest) {
@@ -766,7 +766,7 @@ impl Readable for WithoutLength> {
 	fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
 		let mut values = Vec::new();
 		loop {
-			let mut track_read = ReadTrackingReader::new(&mut reader);
+			let mut track_read = ReadTrackingReader::new(reader);
 			match MaybeReadable::read(&mut track_read) {
 				Ok(Some(v)) => { values.push(v); },
 				Ok(None) => { },
@@ -1344,7 +1344,7 @@ macro_rules! impl_consensus_ser {
 		impl Readable for $bitcoin_type {
 			fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
-				match consensus::encode::Decodable::consensus_decode(&mut BufReader::new(r)) {
+				match consensus::encode::Decodable::consensus_decode(&mut BufReader::<_, 4096>::new(r)) {
 					Ok(t) => Ok(t),
 					Err(consensus::encode::Error::Io(ref e)) if e.kind() == io::ErrorKind::UnexpectedEof => Err(DecodeError::ShortRead),
 					Err(consensus::encode::Error::Io(e)) => Err(DecodeError::Io(e.kind().into())),
diff --git a/lightning/src/util/ser_macros.rs b/lightning/src/util/ser_macros.rs
index f7a299d9b..ce7f09872 100644
--- a/lightning/src/util/ser_macros.rs
+++ b/lightning/src/util/ser_macros.rs
@@ -519,7 +519,7 @@ macro_rules! _decode_tlv_stream_range {
 			// determine whether we should break or return ShortRead if we get an
 			// UnexpectedEof. This should in every case be largely cosmetic, but its nice to
 			// pass the TLV test vectors exactly, which require this distinction.
-			let mut tracking_reader = ser::ReadTrackingReader::new(&mut stream_ref);
+			let mut tracking_reader = ser::ReadTrackingReader::new(stream_ref);
 			match <$crate::util::ser::BigSize as $crate::util::ser::Readable>::read(&mut tracking_reader) {
 				Err(DecodeError::ShortRead) => {
 					if !tracking_reader.have_read {
@@ -555,8 +555,8 @@ macro_rules! _decode_tlv_stream_range {
 			last_seen_type = Some(typ.0);
 
 			// Finally, read the length and value itself:
-			let length: ser::BigSize = $crate::util::ser::Readable::read(&mut stream_ref)?;
-			let mut s = ser::FixedLengthReader::new(&mut stream_ref, length.0);
+			let length: ser::BigSize = $crate::util::ser::Readable::read(stream_ref)?;
+			let mut s = ser::FixedLengthReader::new(stream_ref, length.0);
 			match typ.0 {
 				$(_t if $crate::_decode_tlv_stream_match_check!(_t, $type, $fieldty) => {
 					$crate::_decode_tlv!($stream, s, $field, $fieldty);
@@ -1102,7 +1102,7 @@ macro_rules! impl_writeable_tlv_based_enum {
 				}),*
 				$($tuple_variant_id => {
 					let length: $crate::util::ser::BigSize = $crate::util::ser::Readable::read(reader)?;
-					let mut s = $crate::util::ser::FixedLengthReader::new(&mut reader, length.0);
+					let mut s = $crate::util::ser::FixedLengthReader::new(reader, length.0);
 					let res = $crate::util::ser::Readable::read(&mut s)?;
 					if s.bytes_remain() {
 						s.eat_remaining()?; // Return ShortRead if there's actually not enough bytes
@@ -1214,7 +1214,7 @@ macro_rules! impl_writeable_tlv_based_enum_upgradable {
 				}),*
 				$($tuple_variant_id => {
 					let length: $crate::util::ser::BigSize = $crate::util::ser::Readable::read(reader)?;
-					let mut s = $crate::util::ser::FixedLengthReader::new(&mut reader, length.0);
+					let mut s = $crate::util::ser::FixedLengthReader::new(reader, length.0);
 					let res = $crate::util::ser::Readable::read(&mut s)?;
 					if s.bytes_remain() {
 						s.eat_remaining()?; // Return ShortRead if there's actually not enough bytes
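
The wrapper-reader change above can be illustrated outside of LDK's own Read/Readable traits. The sketch below is a minimal stand-in using only std::io, not the patch itself: the type name TrackingReader is hypothetical, and LDK's real ReadTrackingReader/BufReader use the crate's internal io traits. It shows why the wrappers now store a borrowed `&'a mut R` rather than owning `R`, and why call sites can pass `reader` (or `&mut &bytes[..]`, as in the BlindedPath hunk) instead of stacking `&mut` borrows: the caller keeps the underlying reader and can continue reading from it after the wrapper is dropped.

use std::io::{self, Read};

/// Minimal stand-in for the borrowed-reader pattern in the diff:
/// the wrapper borrows the underlying reader instead of owning it.
struct TrackingReader<'a, R: Read> {
    read: &'a mut R,
    have_read: bool,
}

impl<'a, R: Read> TrackingReader<'a, R> {
    fn new(read: &'a mut R) -> Self {
        Self { read, have_read: false }
    }
}

impl<'a, R: Read> Read for TrackingReader<'a, R> {
    fn read(&mut self, dest: &mut [u8]) -> io::Result<usize> {
        // Forward to the borrowed reader and remember that we read something.
        let n = self.read.read(dest)?;
        if n > 0 {
            self.have_read = true;
        }
        Ok(n)
    }
}

fn main() -> io::Result<()> {
    // A `&mut &[u8]` is itself a `Read`, the same trick the BlindedPath hunk
    // uses with `&mut &bytes[..]`.
    let data = [1u8, 2, 3, 4];
    let mut slice: &[u8] = &data[..];

    let mut buf = [0u8; 2];
    {
        // Borrow the reader for the duration of the wrapper...
        let mut tracked = TrackingReader::new(&mut slice);
        tracked.read_exact(&mut buf)?;
        assert!(tracked.have_read);
    }
    // ...so the caller can keep using it once the wrapper is dropped.
    let mut rest = Vec::new();
    slice.read_to_end(&mut rest)?;
    assert_eq!(rest, vec![3, 4]);
    Ok(())
}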