X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Futil%2Fser.rs;h=484d603404297d6aaf4e6c59f8f46af2a86fb4dc;hb=e5c0c62c178a33957d9dac13c74a37bf97d618cd;hp=0f88ccbd544c604f84a4a460f4d30a12d51ab200;hpb=e8b91a478bd2fd37fd726901271a8299847def3d;p=rust-lightning diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs index 0f88ccbd..484d6034 100644 --- a/lightning/src/util/ser.rs +++ b/lightning/src/util/ser.rs @@ -8,42 +8,51 @@ // licenses. //! A very simple serialization framework which is used to serialize/deserialize messages as well -//! as ChannelsManagers and ChannelMonitors. +//! as [`ChannelManager`]s and [`ChannelMonitor`]s. +//! +//! [`ChannelManager`]: crate::ln::channelmanager::ChannelManager +//! [`ChannelMonitor`]: crate::chain::channelmonitor::ChannelMonitor use crate::prelude::*; use crate::io::{self, Read, Seek, Write}; use crate::io_extras::{copy, sink}; use core::hash::Hash; -use crate::sync::Mutex; +use crate::sync::{Mutex, RwLock}; use core::cmp; use core::convert::TryFrom; use core::ops::Deref; +use alloc::collections::BTreeMap; + use bitcoin::secp256k1::{PublicKey, SecretKey}; use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE, SCHNORR_SIGNATURE_SIZE}; use bitcoin::secp256k1::ecdsa; use bitcoin::secp256k1::schnorr; use bitcoin::blockdata::constants::ChainHash; -use bitcoin::blockdata::script::Script; +use bitcoin::blockdata::script::{self, ScriptBuf}; use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut}; -use bitcoin::consensus; +use bitcoin::{consensus, Witness}; use bitcoin::consensus::Encodable; use bitcoin::hashes::sha256d::Hash as Sha256dHash; use bitcoin::hash_types::{Txid, BlockHash}; use core::marker::Sized; use core::time::Duration; +use crate::chain::ClaimId; use crate::ln::msgs::DecodeError; +#[cfg(taproot)] +use crate::ln::msgs::PartialSignatureWithNonce; use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret}; use crate::util::byte_utils::{be48_to_array, slice_to_be48}; +use crate::util::string::UntrustedString; /// serialization buffer size pub const MAX_BUF_SIZE: usize = 64 * 1024; -/// A simplified version of std::io::Write that exists largely for backwards compatibility. -/// An impl is provided for any type that also impls std::io::Write. +/// A simplified version of [`std::io::Write`] that exists largely for backwards compatibility. +/// An impl is provided for any type that also impls [`std::io::Write`]. /// -/// (C-not exported) as we only export serialization to/from byte arrays instead +/// This is not exported to bindings users as we only export serialization to/from byte arrays instead pub trait Writer { /// Writes the given buf out. See std::io::Write::write_all for more fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error>; @@ -84,6 +93,8 @@ impl Writer for VecWriter { /// Writer that only tracks the amount of data written - useful if you need to calculate the length /// of some data when serialized but don't yet need the full data. +/// +/// This is not exported to bindings users as manual TLV building is not currently supported in bindings pub struct LengthCalculatingWriter(pub usize); impl Writer for LengthCalculatingWriter { #[inline] @@ -95,6 +106,8 @@ impl Writer for LengthCalculatingWriter { /// Essentially [`std::io::Take`] but a bit simpler and with a method to walk the underlying stream /// forward to ensure we always consume exactly the fixed length specified. 
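As a minimal sketch of how the `Writer` trait above can be used outside the crate (assuming the public `lightning::util::ser::Writer` and `lightning::io` paths and the `std` feature; `CountingHashWriter` is a hypothetical name, not an LDK type):

// Hypothetical example, not part of this diff: a Writer that hashes and counts the
// bytes fed to it instead of storing them.
use bitcoin::hashes::{sha256, Hash, HashEngine};
use lightning::io;
use lightning::util::ser::Writer;

struct CountingHashWriter {
    engine: sha256::HashEngine,
    written: usize,
}

impl Writer for CountingHashWriter {
    fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
        self.engine.input(buf);
        self.written += buf.len();
        Ok(())
    }
}

fn main() {
    let mut w = CountingHashWriter { engine: sha256::Hash::engine(), written: 0 };
    w.write_all(&[0u8; 32]).unwrap();
    assert_eq!(w.written, 32);
    let _digest = sha256::Hash::from_engine(w.engine);
}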
+/// +/// This is not exported to bindings users as manual TLV building is not currently supported in bindings pub struct FixedLengthReader { read: R, bytes_read: u64, @@ -150,6 +163,8 @@ impl LengthRead for FixedLengthReader { /// A [`Read`] implementation which tracks whether any bytes have been read at all. This allows us to distinguish /// between "EOF reached before we started" and "EOF reached mid-read". +/// +/// This is not exported to bindings users as manual TLV building is not currently supported in bindings pub struct ReadTrackingReader { read: R, /// Returns whether we have read from this reader or not yet. @@ -175,27 +190,34 @@ impl Read for ReadTrackingReader { } } -/// A trait that various rust-lightning types implement allowing them to be written out to a Writer +/// A trait that various LDK types implement allowing them to be written out to a [`Writer`]. /// -/// (C-not exported) as we only export serialization to/from byte arrays instead +/// This is not exported to bindings users as we only export serialization to/from byte arrays instead pub trait Writeable { - /// Writes self out to the given Writer + /// Writes `self` out to the given [`Writer`]. fn write(&self, writer: &mut W) -> Result<(), io::Error>; - /// Writes self out to a Vec + /// Writes `self` out to a `Vec`. fn encode(&self) -> Vec { - let mut msg = VecWriter(Vec::new()); + let len = self.serialized_length(); + let mut msg = VecWriter(Vec::with_capacity(len)); self.write(&mut msg).unwrap(); + // Note that objects with interior mutability may change size between when we called + // serialized_length and when we called write. That's okay, but shouldn't happen during + // testing as most of our tests are not threaded. + #[cfg(test)] + debug_assert_eq!(len, msg.0.len()); msg.0 } - /// Writes self out to a Vec + /// Writes `self` out to a `Vec`. #[cfg(test)] fn encode_with_len(&self) -> Vec { let mut msg = VecWriter(Vec::new()); 0u16.write(&mut msg).unwrap(); self.write(&mut msg).unwrap(); let len = msg.0.len(); + debug_assert_eq!(len - 2, self.serialized_length()); msg.0[..2].copy_from_slice(&(len as u16 - 2).to_be_bytes()); msg.0 } @@ -215,64 +237,64 @@ impl<'a, T: Writeable> Writeable for &'a T { fn write(&self, writer: &mut W) -> Result<(), io::Error> { (*self).write(writer) } } -/// A trait that various rust-lightning types implement allowing them to be read in from a Read +/// A trait that various LDK types implement allowing them to be read in from a [`Read`]. /// -/// (C-not exported) as we only export serialization to/from byte arrays instead +/// This is not exported to bindings users as we only export serialization to/from byte arrays instead pub trait Readable where Self: Sized { - /// Reads a Self in from the given Read + /// Reads a `Self` in from the given [`Read`]. fn read(reader: &mut R) -> Result; } -/// A trait that various rust-lightning types implement allowing them to be read in from a -/// `Read + Seek`. +/// A trait that various LDK types implement allowing them to be read in from a +/// [`Read`]` + `[`Seek`]. pub(crate) trait SeekReadable where Self: Sized { - /// Reads a Self in from the given Read + /// Reads a `Self` in from the given [`Read`]. fn read(reader: &mut R) -> Result; } -/// A trait that various higher-level rust-lightning types implement allowing them to be read in -/// from a Read given some additional set of arguments which is required to deserialize. 
+/// A trait that various higher-level LDK types implement allowing them to be read in +/// from a [`Read`] given some additional set of arguments which is required to deserialize. /// -/// (C-not exported) as we only export serialization to/from byte arrays instead +/// This is not exported to bindings users as we only export serialization to/from byte arrays instead pub trait ReadableArgs
<P>
where Self: Sized { - /// Reads a Self in from the given Read + /// Reads a `Self` in from the given [`Read`]. fn read(reader: &mut R, params: P) -> Result; } -/// A std::io::Read that also provides the total bytes available to read. +/// A [`std::io::Read`] that also provides the total bytes available to be read. pub(crate) trait LengthRead: Read { - /// The total number of bytes available to read. + /// The total number of bytes available to be read. fn total_bytes(&self) -> u64; } -/// A trait that various higher-level rust-lightning types implement allowing them to be read in +/// A trait that various higher-level LDK types implement allowing them to be read in /// from a Read given some additional set of arguments which is required to deserialize, requiring /// the implementer to provide the total length of the read. pub(crate) trait LengthReadableArgs
<P>
where Self: Sized { - /// Reads a Self in from the given LengthRead + /// Reads a `Self` in from the given [`LengthRead`]. fn read(reader: &mut R, params: P) -> Result; } -/// A trait that various higher-level rust-lightning types implement allowing them to be read in -/// from a Read, requiring the implementer to provide the total length of the read. +/// A trait that various higher-level LDK types implement allowing them to be read in +/// from a [`Read`], requiring the implementer to provide the total length of the read. pub(crate) trait LengthReadable where Self: Sized { - /// Reads a Self in from the given LengthRead + /// Reads a `Self` in from the given [`LengthRead`]. fn read(reader: &mut R) -> Result; } -/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read +/// A trait that various LDK types implement allowing them to (maybe) be read in from a [`Read`]. /// -/// (C-not exported) as we only export serialization to/from byte arrays instead +/// This is not exported to bindings users as we only export serialization to/from byte arrays instead pub trait MaybeReadable where Self: Sized { - /// Reads a Self in from the given Read + /// Reads a `Self` in from the given [`Read`]. fn read(reader: &mut R) -> Result, DecodeError>; } @@ -284,18 +306,40 @@ impl MaybeReadable for T { } /// Wrapper to read a required (non-optional) TLV record. -pub struct OptionDeserWrapper(pub Option); -impl Readable for OptionDeserWrapper { +/// +/// This is not exported to bindings users as manual TLV building is not currently supported in bindings +pub struct RequiredWrapper(pub Option); +impl Readable for RequiredWrapper { #[inline] fn read(reader: &mut R) -> Result { Ok(Self(Some(Readable::read(reader)?))) } } -/// When handling default_values, we want to map the default-value T directly -/// to a OptionDeserWrapper in a way that works for `field: T = t;` as +impl> ReadableArgs for RequiredWrapper { + #[inline] + fn read(reader: &mut R, args: A) -> Result { + Ok(Self(Some(ReadableArgs::read(reader, args)?))) + } +} +/// When handling `default_values`, we want to map the default-value T directly +/// to a `RequiredWrapper` in a way that works for `field: T = t;` as /// well. Thus, we assume `Into for T` does nothing and use that. -impl From for OptionDeserWrapper { - fn from(t: T) -> OptionDeserWrapper { OptionDeserWrapper(Some(t)) } +impl From for RequiredWrapper { + fn from(t: T) -> RequiredWrapper { RequiredWrapper(Some(t)) } +} + +/// Wrapper to read a required (non-optional) TLV record that may have been upgraded without +/// backwards compat. +/// +/// This is not exported to bindings users as manual TLV building is not currently supported in bindings +pub struct UpgradableRequired(pub Option); +impl MaybeReadable for UpgradableRequired { + #[inline] + fn read(reader: &mut R) -> Result, DecodeError> { + let tlv = MaybeReadable::read(reader)?; + if let Some(tlv) = tlv { return Ok(Some(Self(Some(tlv)))) } + Ok(None) + } } pub(crate) struct U48(pub u64); @@ -314,26 +358,27 @@ impl Readable for U48 { } } -/// Lightning TLV uses a custom variable-length integer called BigSize. It is similar to Bitcoin's +/// Lightning TLV uses a custom variable-length integer called `BigSize`. It is similar to Bitcoin's /// variable-length integers except that it is serialized in big-endian instead of little-endian. 
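To make the `Writeable`/`Readable` traits defined earlier in this file concrete, here is a hedged sketch of a hand-rolled implementation for a hypothetical two-field struct, round-tripped through `encode()`; real LDK types normally use the TLV macros (e.g. `impl_writeable_tlv_based!`) rather than writing this by hand.

// Hypothetical example type, not part of this diff.
use lightning::io::{self, Read};
use lightning::ln::msgs::DecodeError;
use lightning::util::ser::{Readable, Writeable, Writer};

struct CounterState {
    id: [u8; 32],
    count: u64,
}

impl Writeable for CounterState {
    fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
        self.id.write(w)?;
        self.count.write(w)
    }
}

impl Readable for CounterState {
    fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
        Ok(CounterState { id: Readable::read(r)?, count: Readable::read(r)? })
    }
}

fn main() {
    let state = CounterState { id: [7; 32], count: 42 };
    let bytes = state.encode(); // pre-allocates using serialized_length(), as described above
    let read_back = CounterState::read(&mut &bytes[..]).unwrap();
    assert_eq!(read_back.id, [7; 32]);
    assert_eq!(read_back.count, 42);
}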
/// /// Like Bitcoin's variable-length integer, it exhibits ambiguity in that certain values can be /// encoded in several different ways, which we must check for at deserialization-time. Thus, if /// you're looking for an example of a variable-length integer to use for your own project, move /// along, this is a rather poor design. +#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)] pub struct BigSize(pub u64); impl Writeable for BigSize { #[inline] fn write(&self, writer: &mut W) -> Result<(), io::Error> { match self.0 { - 0...0xFC => { + 0..=0xFC => { (self.0 as u8).write(writer) }, - 0xFD...0xFFFF => { + 0xFD..=0xFFFF => { 0xFDu8.write(writer)?; (self.0 as u16).write(writer) }, - 0x10000...0xFFFFFFFF => { + 0x10000..=0xFFFFFFFF => { 0xFEu8.write(writer)?; (self.0 as u32).write(writer) }, @@ -378,9 +423,43 @@ impl Readable for BigSize { } } +/// The lightning protocol uses u16s for lengths in most cases. As our serialization framework +/// primarily targets that, we must as well. However, because we may serialize objects that have +/// more than 65K entries, we need to be able to store larger values. Thus, we define a variable +/// length integer here that is backwards-compatible for values < 0xffff. We treat 0xffff as +/// "read eight more bytes". +/// +/// To ensure we only have one valid encoding per value, we add 0xffff to values written as eight +/// bytes. Thus, 0xfffe is serialized as 0xfffe, whereas 0xffff is serialized as +/// 0xffff0000000000000000 (i.e. read-eight-bytes then zero). +struct CollectionLength(pub u64); +impl Writeable for CollectionLength { + #[inline] + fn write(&self, writer: &mut W) -> Result<(), io::Error> { + if self.0 < 0xffff { + (self.0 as u16).write(writer) + } else { + 0xffffu16.write(writer)?; + (self.0 - 0xffff).write(writer) + } + } +} + +impl Readable for CollectionLength { + #[inline] + fn read(r: &mut R) -> Result { + let mut val: u64 = ::read(r)? as u64; + if val == 0xffff { + val = ::read(r)? + .checked_add(0xffff).ok_or(DecodeError::InvalidValue)?; + } + Ok(CollectionLength(val)) + } +} + /// In TLV we occasionally send fields which only consist of, or potentially end with, a /// variable-length integer which is simply truncated by skipping high zero bytes. This type -/// encapsulates such integers implementing Readable/Writeable for them. +/// encapsulates such integers implementing [`Readable`]/[`Writeable`] for them. #[cfg_attr(test, derive(PartialEq, Eq, Debug))] pub(crate) struct HighZeroBytesDroppedBigSize(pub T); @@ -443,6 +522,10 @@ impl_writeable_primitive!(u128, 16); impl_writeable_primitive!(u64, 8); impl_writeable_primitive!(u32, 4); impl_writeable_primitive!(u16, 2); +impl_writeable_primitive!(i64, 8); +impl_writeable_primitive!(i32, 4); +impl_writeable_primitive!(i16, 2); +impl_writeable_primitive!(i8, 1); impl Writeable for u8 { #[inline] @@ -477,63 +560,55 @@ impl Readable for bool { } } -// u8 arrays macro_rules! 
impl_array { - ( $size:expr ) => ( - impl Writeable for [u8; $size] - { + ($size:expr, $ty: ty) => ( + impl Writeable for [$ty; $size] { #[inline] fn write(&self, w: &mut W) -> Result<(), io::Error> { - w.write_all(self) + let mut out = [0; $size * core::mem::size_of::<$ty>()]; + for (idx, v) in self.iter().enumerate() { + let startpos = idx * core::mem::size_of::<$ty>(); + out[startpos..startpos + core::mem::size_of::<$ty>()].copy_from_slice(&v.to_be_bytes()); + } + w.write_all(&out) } } - impl Readable for [u8; $size] - { + impl Readable for [$ty; $size] { #[inline] fn read(r: &mut R) -> Result { - let mut buf = [0u8; $size]; + let mut buf = [0u8; $size * core::mem::size_of::<$ty>()]; r.read_exact(&mut buf)?; - Ok(buf) + let mut res = [0; $size]; + for (idx, v) in res.iter_mut().enumerate() { + let startpos = idx * core::mem::size_of::<$ty>(); + let mut arr = [0; core::mem::size_of::<$ty>()]; + arr.copy_from_slice(&buf[startpos..startpos + core::mem::size_of::<$ty>()]); + *v = <$ty>::from_be_bytes(arr); + } + Ok(res) } } ); } -impl_array!(3); // for rgb, ISO 4712 code -impl_array!(4); // for IPv4 -impl_array!(12); // for OnionV2 -impl_array!(16); // for IPv6 -impl_array!(32); // for channel id & hmac -impl_array!(PUBLIC_KEY_SIZE); // for PublicKey -impl_array!(64); // for ecdsa::Signature and schnorr::Signature -impl_array!(1300); // for OnionPacket.hop_data - -impl Writeable for [u16; 8] { - #[inline] - fn write(&self, w: &mut W) -> Result<(), io::Error> { - for v in self.iter() { - w.write_all(&v.to_be_bytes())? - } - Ok(()) - } -} +impl_array!(3, u8); // for rgb, ISO 4712 code +impl_array!(4, u8); // for IPv4 +impl_array!(12, u8); // for OnionV2 +impl_array!(16, u8); // for IPv6 +impl_array!(32, u8); // for channel id & hmac +impl_array!(PUBLIC_KEY_SIZE, u8); // for PublicKey +impl_array!(64, u8); // for ecdsa::Signature and schnorr::Signature +impl_array!(66, u8); // for MuSig2 nonces +impl_array!(1300, u8); // for OnionPacket.hop_data -impl Readable for [u16; 8] { - #[inline] - fn read(r: &mut R) -> Result { - let mut buf = [0u8; 16]; - r.read_exact(&mut buf)?; - let mut res = [0u16; 8]; - for (idx, v) in res.iter_mut().enumerate() { - *v = (buf[idx] as u16) << 8 | (buf[idx + 1] as u16) - } - Ok(res) - } -} +impl_array!(8, u16); +impl_array!(32, u16); -/// For variable-length values within TLV record where the length is encoded as part of the record. +/// A type for variable-length values within TLV record where the length is encoded as part of the record. /// Used to prevent encoding the length twice. 
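The `CollectionLength` scheme added above can be illustrated with a free-standing sketch that re-implements the same rule (this is not the crate's private type, just the logic spelled out to show the bytes around the 0xffff boundary):

// Free-standing illustration of the CollectionLength encoding described above.
fn encode_collection_len(len: u64) -> Vec<u8> {
    let mut out = Vec::new();
    if len < 0xffff {
        out.extend_from_slice(&(len as u16).to_be_bytes());
    } else {
        // 0xffff acts as an escape marker: the real value follows as (len - 0xffff) in
        // eight big-endian bytes, so every value has exactly one encoding.
        out.extend_from_slice(&0xffffu16.to_be_bytes());
        out.extend_from_slice(&(len - 0xffff).to_be_bytes());
    }
    out
}

fn main() {
    assert_eq!(encode_collection_len(0xfffe), vec![0xff, 0xfe]);
    assert_eq!(encode_collection_len(0xffff), vec![0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0]);
    assert_eq!(encode_collection_len(0x1_0000), vec![0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 1]);
}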
+/// +/// This is not exported to bindings users as manual TLV building is not currently supported in bindings pub struct WithoutLength(pub T); impl Writeable for WithoutLength<&String> { @@ -553,6 +628,21 @@ impl<'a> From<&'a String> for WithoutLength<&'a String> { fn from(s: &'a String) -> Self { Self(s) } } + +impl Writeable for WithoutLength<&UntrustedString> { + #[inline] + fn write(&self, w: &mut W) -> Result<(), io::Error> { + WithoutLength(&self.0.0).write(w) + } +} +impl Readable for WithoutLength { + #[inline] + fn read(r: &mut R) -> Result { + let s: WithoutLength = Readable::read(r)?; + Ok(Self(UntrustedString(s.0))) + } +} + impl<'a, T: Writeable> Writeable for WithoutLength<&'a Vec> { #[inline] fn write(&self, writer: &mut W) -> Result<(), io::Error> { @@ -585,50 +675,89 @@ impl<'a, T> From<&'a Vec> for WithoutLength<&'a Vec> { fn from(v: &'a Vec) -> Self { Self(v) } } -// HashMap -impl Writeable for HashMap - where K: Writeable + Eq + Hash, - V: Writeable -{ +impl Writeable for WithoutLength<&ScriptBuf> { #[inline] - fn write(&self, w: &mut W) -> Result<(), io::Error> { - (self.len() as u16).write(w)?; - for (key, value) in self.iter() { - key.write(w)?; - value.write(w)?; - } - Ok(()) + fn write(&self, writer: &mut W) -> Result<(), io::Error> { + writer.write_all(self.0.as_bytes()) } } -impl Readable for HashMap - where K: Readable + Eq + Hash, - V: MaybeReadable -{ +impl Readable for WithoutLength { #[inline] fn read(r: &mut R) -> Result { - let len: u16 = Readable::read(r)?; - let mut ret = HashMap::with_capacity(len as usize); - for _ in 0..len { - let k = K::read(r)?; - let v_opt = V::read(r)?; - if let Some(v) = v_opt { - if ret.insert(k, v).is_some() { - return Err(DecodeError::InvalidValue); + let v: WithoutLength> = Readable::read(r)?; + Ok(WithoutLength(script::Builder::from(v.0).into_script())) + } +} + +#[derive(Debug)] +pub(crate) struct Iterable<'a, I: Iterator + Clone, T: 'a>(pub I); + +impl<'a, I: Iterator + Clone, T: 'a + Writeable> Writeable for Iterable<'a, I, T> { + #[inline] + fn write(&self, writer: &mut W) -> Result<(), io::Error> { + for ref v in self.0.clone() { + v.write(writer)?; + } + Ok(()) + } +} + +#[cfg(test)] +impl<'a, I: Iterator + Clone, T: 'a + PartialEq> PartialEq for Iterable<'a, I, T> { + fn eq(&self, other: &Self) -> bool { + self.0.clone().collect::>() == other.0.clone().collect::>() + } +} + +macro_rules! 
impl_for_map { + ($ty: ident, $keybound: ident, $constr: expr) => { + impl Writeable for $ty + where K: Writeable + Eq + $keybound, V: Writeable + { + #[inline] + fn write(&self, w: &mut W) -> Result<(), io::Error> { + CollectionLength(self.len() as u64).write(w)?; + for (key, value) in self.iter() { + key.write(w)?; + value.write(w)?; } + Ok(()) + } + } + + impl Readable for $ty + where K: Readable + Eq + $keybound, V: MaybeReadable + { + #[inline] + fn read(r: &mut R) -> Result { + let len: CollectionLength = Readable::read(r)?; + let mut ret = $constr(len.0 as usize); + for _ in 0..len.0 { + let k = K::read(r)?; + let v_opt = V::read(r)?; + if let Some(v) = v_opt { + if ret.insert(k, v).is_some() { + return Err(DecodeError::InvalidValue); + } + } + } + Ok(ret) } } - Ok(ret) } } +impl_for_map!(BTreeMap, Ord, |_| BTreeMap::new()); +impl_for_map!(HashMap, Hash, |len| HashMap::with_capacity(len)); + // HashSet impl Writeable for HashSet where T: Writeable + Eq + Hash { #[inline] fn write(&self, w: &mut W) -> Result<(), io::Error> { - (self.len() as u16).write(w)?; + CollectionLength(self.len() as u64).write(w)?; for item in self.iter() { item.write(w)?; } @@ -641,9 +770,9 @@ where T: Readable + Eq + Hash { #[inline] fn read(r: &mut R) -> Result { - let len: u16 = Readable::read(r)?; - let mut ret = HashSet::with_capacity(len as usize); - for _ in 0..len { + let len: CollectionLength = Readable::read(r)?; + let mut ret = HashSet::with_capacity(cmp::min(len.0 as usize, MAX_BUF_SIZE / core::mem::size_of::())); + for _ in 0..len.0 { if !ret.insert(T::read(r)?) { return Err(DecodeError::InvalidValue) } @@ -653,10 +782,48 @@ where T: Readable + Eq + Hash } // Vectors +macro_rules! impl_writeable_for_vec { + ($ty: ty $(, $name: ident)*) => { + impl<$($name : Writeable),*> Writeable for Vec<$ty> { + #[inline] + fn write(&self, w: &mut W) -> Result<(), io::Error> { + CollectionLength(self.len() as u64).write(w)?; + for elem in self.iter() { + elem.write(w)?; + } + Ok(()) + } + } + } +} +macro_rules! impl_readable_for_vec { + ($ty: ty $(, $name: ident)*) => { + impl<$($name : Readable),*> Readable for Vec<$ty> { + #[inline] + fn read(r: &mut R) -> Result { + let len: CollectionLength = Readable::read(r)?; + let mut ret = Vec::with_capacity(cmp::min(len.0 as usize, MAX_BUF_SIZE / core::mem::size_of::<$ty>())); + for _ in 0..len.0 { + if let Some(val) = MaybeReadable::read(r)? { + ret.push(val); + } + } + Ok(ret) + } + } + } +} +macro_rules! 
impl_for_vec { + ($ty: ty $(, $name: ident)*) => { + impl_writeable_for_vec!($ty $(, $name)*); + impl_readable_for_vec!($ty $(, $name)*); + } +} + impl Writeable for Vec { #[inline] fn write(&self, w: &mut W) -> Result<(), io::Error> { - (self.len() as u16).write(w)?; + CollectionLength(self.len() as u64).write(w)?; w.write_all(&self) } } @@ -664,53 +831,73 @@ impl Writeable for Vec { impl Readable for Vec { #[inline] fn read(r: &mut R) -> Result { - let len: u16 = Readable::read(r)?; - let mut ret = Vec::with_capacity(len as usize); - ret.resize(len as usize, 0); - r.read_exact(&mut ret)?; + let mut len: CollectionLength = Readable::read(r)?; + let mut ret = Vec::new(); + while len.0 > 0 { + let readamt = cmp::min(len.0 as usize, MAX_BUF_SIZE); + let readstart = ret.len(); + ret.resize(readstart + readamt, 0); + r.read_exact(&mut ret[readstart..])?; + len.0 -= readamt as u64; + } Ok(ret) } } -impl Writeable for Vec { + +impl_for_vec!(ecdsa::Signature); +impl_for_vec!(crate::chain::channelmonitor::ChannelMonitorUpdate); +impl_for_vec!(crate::ln::channelmanager::MonitorUpdateCompletionAction); +impl_for_vec!((A, B), A, B); +impl_writeable_for_vec!(&crate::routing::router::BlindedTail); +impl_readable_for_vec!(crate::routing::router::BlindedTail); + +impl Writeable for Vec { #[inline] fn write(&self, w: &mut W) -> Result<(), io::Error> { (self.len() as u16).write(w)?; - for e in self.iter() { - e.write(w)?; + for witness in self { + (witness.serialized_len() as u16).write(w)?; + witness.write(w)?; } Ok(()) } } -impl Readable for Vec { +impl Readable for Vec { #[inline] fn read(r: &mut R) -> Result { - let len: u16 = Readable::read(r)?; - let byte_size = (len as usize) - .checked_mul(COMPACT_SIGNATURE_SIZE) - .ok_or(DecodeError::BadLengthDescriptor)?; - if byte_size > MAX_BUF_SIZE { - return Err(DecodeError::BadLengthDescriptor); + let num_witnesses = ::read(r)? as usize; + let mut witnesses = Vec::with_capacity(num_witnesses); + for _ in 0..num_witnesses { + // Even though the length of each witness can be inferred in its consensus-encoded form, + // the spec includes a length prefix so that implementations don't have to deserialize + // each initially. We do that here anyway as in general we'll need to be able to make + // assertions on some properties of the witnesses when receiving a message providing a list + // of witnesses. We'll just do a sanity check for the lengths and error if there is a mismatch. + let witness_len = ::read(r)? as usize; + let witness = ::read(r)?; + if witness.serialized_len() != witness_len { + return Err(DecodeError::BadLengthDescriptor); + } + witnesses.push(witness); } - let mut ret = Vec::with_capacity(len as usize); - for _ in 0..len { ret.push(Readable::read(r)?); } - Ok(ret) + Ok(witnesses) } } -impl Writeable for Script { +impl Writeable for ScriptBuf { fn write(&self, w: &mut W) -> Result<(), io::Error> { (self.len() as u16).write(w)?; w.write_all(self.as_bytes()) } } -impl Readable for Script { +impl Readable for ScriptBuf { fn read(r: &mut R) -> Result { let len = ::read(r)? 
as usize; let mut buf = vec![0; len]; r.read_exact(&mut buf)?; - Ok(Script::from(buf)) + Ok(ScriptBuf::from(buf)) } } @@ -756,6 +943,39 @@ impl Readable for SecretKey { } } +#[cfg(taproot)] +impl Writeable for musig2::types::PublicNonce { + fn write(&self, w: &mut W) -> Result<(), io::Error> { + self.serialize().write(w) + } +} + +#[cfg(taproot)] +impl Readable for musig2::types::PublicNonce { + fn read(r: &mut R) -> Result { + let buf: [u8; PUBLIC_KEY_SIZE * 2] = Readable::read(r)?; + musig2::types::PublicNonce::from_slice(&buf).map_err(|_| DecodeError::InvalidValue) + } +} + +#[cfg(taproot)] +impl Writeable for PartialSignatureWithNonce { + fn write(&self, w: &mut W) -> Result<(), io::Error> { + self.0.serialize().write(w)?; + self.1.write(w) + } +} + +#[cfg(taproot)] +impl Readable for PartialSignatureWithNonce { + fn read(r: &mut R) -> Result { + let partial_signature_buf: [u8; SECRET_KEY_SIZE] = Readable::read(r)?; + let partial_signature = musig2::types::PartialSignature::from_slice(&partial_signature_buf).map_err(|_| DecodeError::InvalidValue)?; + let public_nonce: musig2::types::PublicNonce = Readable::read(r)?; + Ok(PartialSignatureWithNonce(partial_signature, public_nonce)) + } +} + impl Writeable for Sha256dHash { fn write(&self, w: &mut W) -> Result<(), io::Error> { w.write_all(&self[..]) @@ -920,7 +1140,7 @@ impl Writeable for ChainHash { impl Readable for ChainHash { fn read(r: &mut R) -> Result { let buf: [u8; 32] = Readable::read(r)?; - Ok(ChainHash::from(&buf[..])) + Ok(ChainHash::from(buf)) } } @@ -968,6 +1188,7 @@ macro_rules! impl_consensus_ser { } impl_consensus_ser!(Transaction); impl_consensus_ser!(TxOut); +impl_consensus_ser!(Witness); impl Readable for Mutex { fn read(r: &mut R) -> Result { @@ -981,6 +1202,18 @@ impl Writeable for Mutex { } } +impl Readable for RwLock { + fn read(r: &mut R) -> Result { + let t: T = Readable::read(r)?; + Ok(RwLock::new(t)) + } +} +impl Writeable for RwLock { + fn write(&self, w: &mut W) -> Result<(), io::Error> { + self.read().unwrap().write(w) + } +} + impl Readable for (A, B) { fn read(r: &mut R) -> Result { let a: A = Readable::read(r)?; @@ -1011,6 +1244,24 @@ impl Writeable for (A, B, C) { } } +impl Readable for (A, B, C, D) { + fn read(r: &mut R) -> Result { + let a: A = Readable::read(r)?; + let b: B = Readable::read(r)?; + let c: C = Readable::read(r)?; + let d: D = Readable::read(r)?; + Ok((a, b, c, d)) + } +} +impl Writeable for (A, B, C, D) { + fn write(&self, w: &mut W) -> Result<(), io::Error> { + self.0.write(w)?; + self.1.write(w)?; + self.2.write(w)?; + self.3.write(w) + } +} + impl Writeable for () { fn write(&self, _: &mut W) -> Result<(), io::Error> { Ok(()) @@ -1025,7 +1276,7 @@ impl Readable for () { impl Writeable for String { #[inline] fn write(&self, w: &mut W) -> Result<(), io::Error> { - (self.len() as u16).write(w)?; + CollectionLength(self.len() as u64).write(w)?; w.write_all(self.as_bytes()) } } @@ -1042,8 +1293,10 @@ impl Readable for String { /// Only the character set and length will be validated. /// The character set consists of ASCII alphanumeric characters, hyphens, and periods. /// Its length is guaranteed to be representable by a single byte. -/// This serialization is used by BOLT 7 hostnames. -#[derive(Clone, Debug, PartialEq, Eq)] +/// This serialization is used by [`BOLT 7`] hostnames. 
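As a usage sketch for the map encodings added above (assuming the `std` feature, so the blanket impls land on `std::collections::HashMap`, and a hypothetical `u32` to `Vec<u8>` map):

use std::collections::HashMap;
use lightning::util::ser::{Readable, Writeable};

fn main() {
    // Keys need Writeable + Eq + Hash and values need Writeable/MaybeReadable, per the
    // impl_for_map! bounds above; u32 and Vec<u8> satisfy them.
    let mut map: HashMap<u32, Vec<u8>> = HashMap::new();
    map.insert(42, vec![1, 2, 3]);
    let bytes = map.encode(); // CollectionLength-prefixed (key, value) pairs
    let decoded: HashMap<u32, Vec<u8>> = Readable::read(&mut &bytes[..]).unwrap();
    assert_eq!(map, decoded);
}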
+/// +/// [`BOLT 7`]: https://github.com/lightning/bolts/blob/master/07-routing-gossip.md +#[derive(Clone, Debug, Hash, PartialEq, Eq)] pub struct Hostname(String); impl Hostname { /// Returns the length of the hostname. @@ -1051,6 +1304,13 @@ impl Hostname { (&self.0).len() as u8 } } + +impl core::fmt::Display for Hostname { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.0)?; + Ok(()) + } +} impl Deref for Hostname { type Target = String; @@ -1107,6 +1367,7 @@ impl Readable for Hostname { } } +/// This is not exported to bindings users as `Duration`s are simply mapped as ints. impl Writeable for Duration { #[inline] fn write(&self, w: &mut W) -> Result<(), io::Error> { @@ -1114,6 +1375,7 @@ impl Writeable for Duration { self.subsec_nanos().write(w) } } +/// This is not exported to bindings users as `Duration`s are simply mapped as ints. impl Readable for Duration { #[inline] fn read(r: &mut R) -> Result { @@ -1123,9 +1385,67 @@ impl Readable for Duration { } } +/// A wrapper for a `Transaction` which can only be constructed with [`TransactionU16LenLimited::new`] +/// if the `Transaction`'s consensus-serialized length is <= u16::MAX. +/// +/// Use [`TransactionU16LenLimited::into_transaction`] to convert into the contained `Transaction`. +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub struct TransactionU16LenLimited(Transaction); + +impl TransactionU16LenLimited { + /// Constructs a new `TransactionU16LenLimited` from a `Transaction` only if it's consensus- + /// serialized length is <= u16::MAX. + pub fn new(transaction: Transaction) -> Result { + if transaction.serialized_length() > (u16::MAX as usize) { + Err(()) + } else { + Ok(Self(transaction)) + } + } + + /// Consumes this `TransactionU16LenLimited` and returns its contained `Transaction`. + pub fn into_transaction(self) -> Transaction { + self.0 + } +} + +impl Writeable for TransactionU16LenLimited { + fn write(&self, w: &mut W) -> Result<(), io::Error> { + (self.0.serialized_length() as u16).write(w)?; + self.0.write(w) + } +} + +impl Readable for TransactionU16LenLimited { + fn read(r: &mut R) -> Result { + let len = ::read(r)?; + let mut tx_reader = FixedLengthReader::new(r, len as u64); + let tx: Transaction = Readable::read(&mut tx_reader)?; + if tx_reader.bytes_remain() { + Err(DecodeError::BadLengthDescriptor) + } else { + Ok(Self(tx)) + } + } +} + +impl Writeable for ClaimId { + fn write(&self, writer: &mut W) -> Result<(), io::Error> { + self.0.write(writer) + } +} + +impl Readable for ClaimId { + fn read(reader: &mut R) -> Result { + Ok(Self(Readable::read(reader)?)) + } +} + #[cfg(test)] mod tests { use core::convert::TryFrom; + use bitcoin::hashes::hex::FromHex; + use bitcoin::secp256k1::ecdsa; use crate::util::ser::{Readable, Hostname, Writeable}; #[test] @@ -1147,4 +1467,57 @@ mod tests { hostname.write(&mut buf).unwrap(); assert_eq!(Hostname::read(&mut buf.as_slice()).unwrap().as_str(), "test"); } + + #[test] + /// Taproot will likely fill legacy signature fields with all 0s. + /// This test ensures that doing so won't break serialization. 
+ fn null_signature_codec() { + let buffer = vec![0u8; 64]; + let mut cursor = crate::io::Cursor::new(buffer.clone()); + let signature = ecdsa::Signature::read(&mut cursor).unwrap(); + let serialization = signature.serialize_compact(); + assert_eq!(buffer, serialization.to_vec()) + } + + #[test] + fn bigsize_encoding_decoding() { + let values = vec![0, 252, 253, 65535, 65536, 4294967295, 4294967296, 18446744073709551615]; + let bytes = vec![ + "00", + "fc", + "fd00fd", + "fdffff", + "fe00010000", + "feffffffff", + "ff0000000100000000", + "ffffffffffffffffff" + ]; + for i in 0..=7 { + let mut stream = crate::io::Cursor::new(>::from_hex(bytes[i]).unwrap()); + assert_eq!(super::BigSize::read(&mut stream).unwrap().0, values[i]); + let mut stream = super::VecWriter(Vec::new()); + super::BigSize(values[i]).write(&mut stream).unwrap(); + assert_eq!(stream.0, >::from_hex(bytes[i]).unwrap()); + } + let err_bytes = vec![ + "fd00fc", + "fe0000ffff", + "ff00000000ffffffff", + "fd00", + "feffff", + "ffffffffff", + "fd", + "fe", + "ff", + "" + ]; + for i in 0..=9 { + let mut stream = crate::io::Cursor::new(>::from_hex(err_bytes[i]).unwrap()); + if i < 3 { + assert_eq!(super::BigSize::read(&mut stream).err(), Some(crate::ln::msgs::DecodeError::InvalidValue)); + } else { + assert_eq!(super::BigSize::read(&mut stream).err(), Some(crate::ln::msgs::DecodeError::ShortRead)); + } + } + } }
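Closing with a small usage sketch for the `Hostname` serialization above (assuming the `TryFrom<String>` conversion exercised by the hostname test in this file; the host strings are illustrative):

use core::convert::TryFrom;
use lightning::util::ser::{Hostname, Readable, Writeable};

fn main() {
    // Only ASCII alphanumerics, hyphens and periods are accepted, at most 255 bytes.
    let host = Hostname::try_from("node.example.com".to_string()).unwrap();
    assert_eq!(host.len(), 16);
    assert!(Hostname::try_from("not valid!".to_string()).is_err());

    // On the wire a Hostname is a one-byte length followed by the raw bytes.
    let bytes = host.encode();
    assert_eq!(bytes[0], 16);
    let read_back = Hostname::read(&mut &bytes[..]).unwrap();
    assert_eq!(read_back.as_str(), "node.example.com");
}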