X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Futil%2Fser.rs;h=366e6c8cb1efaef3767b0e8d4232c3763915eb20;hb=a5ecb851716bc0819586894232653ec4b5e2c67f;hp=84d1a2e084feb65e1472969875a586c81ccd08e5;hpb=50d12600b43c0c4a65547c6d37fca3efce0a12f2;p=rust-lightning

diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index 84d1a2e0..366e6c8c 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -38,6 +38,8 @@ use bitcoin::hash_types::{Txid, BlockHash};
 use core::marker::Sized;
 use core::time::Duration;
 use crate::ln::msgs::DecodeError;
+#[cfg(taproot)]
+use crate::ln::msgs::PartialSignatureWithNonce;
 use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
 use crate::util::byte_utils::{be48_to_array, slice_to_be48};
@@ -48,7 +50,7 @@ pub const MAX_BUF_SIZE: usize = 64 * 1024;
 /// A simplified version of [`std::io::Write`] that exists largely for backwards compatibility.
 /// An impl is provided for any type that also impls [`std::io::Write`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait Writer {
 	/// Writes the given buf out. See std::io::Write::write_all for more
 	fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error>;
@@ -89,6 +91,8 @@ impl Writer for VecWriter {
 
 /// Writer that only tracks the amount of data written - useful if you need to calculate the length
 /// of some data when serialized but don't yet need the full data.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
 pub struct LengthCalculatingWriter(pub usize);
 impl Writer for LengthCalculatingWriter {
 	#[inline]
@@ -100,6 +104,8 @@ impl Writer for LengthCalculatingWriter {
 
 /// Essentially [`std::io::Take`] but a bit simpler and with a method to walk the underlying stream
 /// forward to ensure we always consume exactly the fixed length specified.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
 pub struct FixedLengthReader<R: Read> {
 	read: R,
 	bytes_read: u64,
@@ -155,6 +161,8 @@ impl<R: LengthRead> LengthRead for FixedLengthReader<R> {
 
 /// A [`Read`] implementation which tracks whether any bytes have been read at all. This allows us to distinguish
 /// between "EOF reached before we started" and "EOF reached mid-read".
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
 pub struct ReadTrackingReader<R: Read> {
 	read: R,
 	/// Returns whether we have read from this reader or not yet.
@@ -182,7 +190,7 @@ impl<R: Read> Read for ReadTrackingReader<R> {
 
 /// A trait that various LDK types implement allowing them to be written out to a [`Writer`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait Writeable {
 	/// Writes `self` out to the given [`Writer`].
 	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error>;
@@ -222,7 +230,7 @@ impl<'a, T: Writeable> Writeable for &'a T {
 
 /// A trait that various LDK types implement allowing them to be read in from a [`Read`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait Readable
 	where Self: Sized
 {
@@ -240,7 +248,7 @@ pub(crate) trait SeekReadable where Self: Sized {
 /// A trait that various higher-level LDK types implement allowing them to be read in
 /// from a [`Read`] given some additional set of arguments which is required to deserialize.
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait ReadableArgs<P>
 	where Self: Sized
 {
@@ -273,7 +281,7 @@ pub(crate) trait LengthReadable where Self: Sized
 
 /// A trait that various LDK types implement allowing them to (maybe) be read in from a [`Read`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait MaybeReadable
 	where Self: Sized
 {
@@ -289,18 +297,40 @@ impl<T: Readable> MaybeReadable for T {
 }
 
 /// Wrapper to read a required (non-optional) TLV record.
-pub struct OptionDeserWrapper<T: Readable>(pub Option<T>);
-impl<T: Readable> Readable for OptionDeserWrapper<T> {
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct RequiredWrapper<T>(pub Option<T>);
+impl<T: Readable> Readable for RequiredWrapper<T> {
 	#[inline]
 	fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
 		Ok(Self(Some(Readable::read(reader)?)))
 	}
 }
+impl<A, T: ReadableArgs<A>> ReadableArgs<A> for RequiredWrapper<T> {
+	#[inline]
+	fn read<R: Read>(reader: &mut R, args: A) -> Result<Self, DecodeError> {
+		Ok(Self(Some(ReadableArgs::read(reader, args)?)))
+	}
+}
 /// When handling `default_values`, we want to map the default-value T directly
-/// to a `OptionDeserWrapper<T>` in a way that works for `field: T = t;` as
+/// to a `RequiredWrapper<T>` in a way that works for `field: T = t;` as
 /// well. Thus, we assume `Into<T> for T` does nothing and use that.
-impl<T: Readable> From<T> for OptionDeserWrapper<T> {
-	fn from(t: T) -> OptionDeserWrapper<T> { OptionDeserWrapper(Some(t)) }
+impl<T> From<T> for RequiredWrapper<T> {
+	fn from(t: T) -> RequiredWrapper<T> { RequiredWrapper(Some(t)) }
+}
+
+/// Wrapper to read a required (non-optional) TLV record that may have been upgraded without
+/// backwards compat.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct UpgradableRequired<T: MaybeReadable>(pub Option<T>);
+impl<T: MaybeReadable> MaybeReadable for UpgradableRequired<T> {
+	#[inline]
+	fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError> {
+		let tlv = MaybeReadable::read(reader)?;
+		if let Some(tlv) = tlv { return Ok(Some(Self(Some(tlv)))) }
+		Ok(None)
+	}
 }
 
 pub(crate) struct U48(pub u64);
@@ -546,6 +576,7 @@ impl_array!(16); // for IPv6
 impl_array!(32); // for channel id & hmac
 impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
 impl_array!(64); // for ecdsa::Signature and schnorr::Signature
+impl_array!(66); // for MuSig2 nonces
 impl_array!(1300); // for OnionPacket.hop_data
 
 impl Writeable for [u16; 8] {
@@ -573,6 +604,8 @@ impl Readable for [u16; 8] {
 
 /// A type for variable-length values within TLV record where the length is encoded as part of the record.
 /// Used to prevent encoding the length twice.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
 pub struct WithoutLength<T>(pub T);
 
 impl Writeable for WithoutLength<&String> {
@@ -624,6 +657,26 @@ impl<'a, T> From<&'a Vec<T>> for WithoutLength<&'a Vec<T>> {
 	fn from(v: &'a Vec<T>) -> Self { Self(v) }
 }
 
+#[derive(Debug)]
+pub(crate) struct Iterable<'a, I: Iterator<Item = T> + Clone, T: 'a>(pub I);
+
+impl<'a, I: Iterator<Item = T> + Clone, T: 'a + Writeable> Writeable for Iterable<'a, I, T> {
+	#[inline]
+	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+		for ref v in self.0.clone() {
+			v.write(writer)?;
+		}
+		Ok(())
+	}
+}
+
+#[cfg(test)]
+impl<'a, I: Iterator<Item = T> + Clone, T: 'a + PartialEq> PartialEq for Iterable<'a, I, T> {
+	fn eq(&self, other: &Self) -> bool {
+		self.0.clone().collect::<Vec<T>>() == other.0.clone().collect::<Vec<T>>()
+	}
+}
+
 macro_rules! impl_for_map {
 	($ty: ident, $keybound: ident, $constr: expr) => {
 		impl<K, V> Writeable for $ty<K, V>
@@ -750,6 +803,7 @@ impl<T: Readable> Readable for Vec<T> {
 }
 
 impl_for_vec!(ecdsa::Signature);
+impl_for_vec!(crate::ln::channelmanager::MonitorUpdateCompletionAction);
 impl_for_vec!((A, B), A, B);
 
 impl Writeable for Script {
@@ -810,6 +864,39 @@ impl Readable for SecretKey {
 	}
 }
 
+#[cfg(taproot)]
+impl Writeable for musig2::types::PublicNonce {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		self.serialize().write(w)
+	}
+}
+
+#[cfg(taproot)]
+impl Readable for musig2::types::PublicNonce {
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let buf: [u8; PUBLIC_KEY_SIZE * 2] = Readable::read(r)?;
+		musig2::types::PublicNonce::from_slice(&buf).map_err(|_| DecodeError::InvalidValue)
+	}
+}
+
+#[cfg(taproot)]
+impl Writeable for PartialSignatureWithNonce {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		self.0.serialize().write(w)?;
+		self.1.write(w)
+	}
+}
+
+#[cfg(taproot)]
+impl Readable for PartialSignatureWithNonce {
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let partial_signature_buf: [u8; SECRET_KEY_SIZE] = Readable::read(r)?;
+		let partial_signature = musig2::types::PartialSignature::from_slice(&partial_signature_buf).map_err(|_| DecodeError::InvalidValue)?;
+		let public_nonce: musig2::types::PublicNonce = Readable::read(r)?;
+		Ok(PartialSignatureWithNonce(partial_signature, public_nonce))
+	}
+}
+
 impl Writeable for Sha256dHash {
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
 		w.write_all(&self[..])
@@ -1065,6 +1152,24 @@ impl<A: Writeable, B: Writeable, C: Writeable> Writeable for (A, B, C) {
 	}
 }
 
+impl<A: Readable, B: Readable, C: Readable, D: Readable> Readable for (A, B, C, D) {
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let a: A = Readable::read(r)?;
+		let b: B = Readable::read(r)?;
+		let c: C = Readable::read(r)?;
+		let d: D = Readable::read(r)?;
+		Ok((a, b, c, d))
+	}
+}
+impl<A: Writeable, B: Writeable, C: Writeable, D: Writeable> Writeable for (A, B, C, D) {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		self.0.write(w)?;
+		self.1.write(w)?;
+		self.2.write(w)?;
+		self.3.write(w)
+	}
+}
+
 impl Writeable for () {
 	fn write<W: Writer>(&self, _: &mut W) -> Result<(), io::Error> {
 		Ok(())
@@ -1182,6 +1287,7 @@ impl Readable for Duration {
 #[cfg(test)]
 mod tests {
 	use core::convert::TryFrom;
+	use bitcoin::secp256k1::ecdsa;
 	use crate::util::ser::{Readable, Hostname, Writeable};
 
 	#[test]
@@ -1203,4 +1309,57 @@ mod tests {
 		hostname.write(&mut buf).unwrap();
 		assert_eq!(Hostname::read(&mut buf.as_slice()).unwrap().as_str(), "test");
 	}
+
+	#[test]
+	/// Taproot will likely fill legacy signature fields with all 0s.
+	/// This test ensures that doing so won't break serialization.
+	fn null_signature_codec() {
+		let buffer = vec![0u8; 64];
+		let mut cursor = crate::io::Cursor::new(buffer.clone());
+		let signature = ecdsa::Signature::read(&mut cursor).unwrap();
+		let serialization = signature.serialize_compact();
+		assert_eq!(buffer, serialization.to_vec())
+	}
+
+	#[test]
+	fn bigsize_encoding_decoding() {
+		let values = vec![0, 252, 253, 65535, 65536, 4294967295, 4294967296, 18446744073709551615];
+		let bytes = vec![
+			"00",
+			"fc",
+			"fd00fd",
+			"fdffff",
+			"fe00010000",
+			"feffffffff",
+			"ff0000000100000000",
+			"ffffffffffffffffff"
+		];
+		for i in 0..=7 {
+			let mut stream = crate::io::Cursor::new(::hex::decode(bytes[i]).unwrap());
+			assert_eq!(super::BigSize::read(&mut stream).unwrap().0, values[i]);
+			let mut stream = super::VecWriter(Vec::new());
+			super::BigSize(values[i]).write(&mut stream).unwrap();
+			assert_eq!(stream.0, ::hex::decode(bytes[i]).unwrap());
+		}
+		let err_bytes = vec![
+			"fd00fc",
+			"fe0000ffff",
+			"ff00000000ffffffff",
+			"fd00",
+			"feffff",
+			"ffffffffff",
+			"fd",
+			"fe",
+			"ff",
+			""
+		];
+		for i in 0..=9 {
+			let mut stream = crate::io::Cursor::new(::hex::decode(err_bytes[i]).unwrap());
+			if i < 3 {
+				assert_eq!(super::BigSize::read(&mut stream).err(), Some(crate::ln::msgs::DecodeError::InvalidValue));
+			} else {
+				assert_eq!(super::BigSize::read(&mut stream).err(), Some(crate::ln::msgs::DecodeError::ShortRead));
+			}
+		}
+	}
 }
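Editor's note on the MuSig2 nonce additions: `impl_array!(66)` and the `[u8; PUBLIC_KEY_SIZE * 2]` read in the taproot block both reflect the same wire layout, a public nonce serialized as two 33-byte compressed secp256k1 points concatenated. The helper below is an illustrative sketch only (`split_public_nonce` is not part of the patch or of the musig2 crate); it just shows the byte layout the `impl_array!(66)` registration is sized for.

// Illustrative layout helper: a 66-byte MuSig2 public nonce is R1 || R2,
// two 33-byte compressed points. Hypothetical function, not from the patch.
fn split_public_nonce(buf: &[u8; 66]) -> ([u8; 33], [u8; 33]) {
	let mut r1 = [0u8; 33];
	let mut r2 = [0u8; 33];
	r1.copy_from_slice(&buf[..33]);
	r2.copy_from_slice(&buf[33..]);
	(r1, r2)
}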
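The new `(A, B, C, D)` impls follow the existing two- and three-tuple pattern: each field is written back-to-back with its own fixed-width encoding and no length prefix. A hedged round-trip sketch, assuming the public `lightning::util::ser` API and its `Writeable::encode()` helper (this snippet is not code from the patch):

use lightning::util::ser::{Readable, Writeable};

fn four_tuple_roundtrip() {
	// Fields serialize back-to-back, big-endian: 1 + 2 + 4 + 8 = 15 bytes.
	let tup: (u8, u16, u32, u64) = (1, 2, 3, 4);
	let bytes = tup.encode();
	assert_eq!(bytes.len(), 15);

	let mut cursor = std::io::Cursor::new(bytes);
	let back: (u8, u16, u32, u64) = Readable::read(&mut cursor).unwrap();
	assert_eq!(back, tup);
}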
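The vectors in `bigsize_encoding_decoding` exercise the BOLT BigSize varint: values up to 0xfc are one byte, larger values get a 0xfd/0xfe/0xff prefix followed by a big-endian u16/u32/u64, and non-minimal encodings must be rejected. A minimal standalone encoder sketch that reproduces the test vectors, independent of LDK's `Writer` plumbing (illustration only, not from the patch):

// Standalone sketch of BOLT BigSize encoding.
fn bigsize_encode(value: u64) -> Vec<u8> {
	match value {
		0..=0xfc => vec![value as u8],
		0xfd..=0xffff => {
			let mut v = vec![0xfd];
			v.extend_from_slice(&(value as u16).to_be_bytes());
			v
		}
		0x1_0000..=0xffff_ffff => {
			let mut v = vec![0xfe];
			v.extend_from_slice(&(value as u32).to_be_bytes());
			v
		}
		_ => {
			let mut v = vec![0xff];
			v.extend_from_slice(&value.to_be_bytes());
			v
		}
	}
}

fn main() {
	// Matches the "fc", "fd00fd" and "fe00010000" vectors from the test above.
	assert_eq!(bigsize_encode(252), vec![0xfc]);
	assert_eq!(bigsize_encode(253), vec![0xfd, 0x00, 0xfd]);
	assert_eq!(bigsize_encode(65536), vec![0xfe, 0x00, 0x01, 0x00, 0x00]);
}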