X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Futil%2Fser.rs;h=04dc04e55b025f937f542b27779feae467e064fb;hb=94a07d9caee6d38d42954ac783c49afe1cf89697;hp=fc795f9dd6713b84be341af6525a7ab29299e2c1;hpb=f1b9bd34b83f9e8161b6d7bbc56a420cc572502a;p=rust-lightning

diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index fc795f9d..04dc04e5 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -10,16 +10,19 @@
 //! A very simple serialization framework which is used to serialize/deserialize messages as well
 //! as ChannelsManagers and ChannelMonitors.
 
-use prelude::*;
-use io::{self, Read, Write};
-use io_extras::{copy, sink};
+use crate::prelude::*;
+use crate::io::{self, Read, Seek, Write};
+use crate::io_extras::{copy, sink};
 use core::hash::Hash;
-use sync::Mutex;
+use crate::sync::Mutex;
 use core::cmp;
+use core::convert::TryFrom;
+use core::ops::Deref;
 
 use bitcoin::secp256k1::{PublicKey, SecretKey};
 use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE};
 use bitcoin::secp256k1::ecdsa::Signature;
+use bitcoin::blockdata::constants::ChainHash;
 use bitcoin::blockdata::script::Script;
 use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut};
 use bitcoin::consensus;
@@ -28,10 +31,10 @@ use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::{Txid, BlockHash};
 use core::marker::Sized;
 use core::time::Duration;
-use ln::msgs::DecodeError;
-use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
+use crate::ln::msgs::DecodeError;
+use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
-use util::byte_utils::{be48_to_array, slice_to_be48};
+use crate::util::byte_utils::{be48_to_array, slice_to_be48};
 
 /// serialization buffer size
 pub const MAX_BUF_SIZE: usize = 64 * 1024;
@@ -216,6 +219,13 @@ pub trait Readable
 	fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError>;
 }
 
+/// A trait that various rust-lightning types implement allowing them to be read in from a
+/// `Read + Seek`.
+pub(crate) trait SeekReadable where Self: Sized {
+	/// Reads a Self in from the given Read
+	fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
 /// A trait that various higher-level rust-lightning types implement allowing them to be read in
 /// from a Read given some additional set of arguments which is required to deserialize.
 ///
@@ -242,6 +252,14 @@ pub(crate) trait LengthReadableArgs<P> where Self: Sized
 	fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
+/// A trait that various higher-level rust-lightning types implement allowing them to be read in
+/// from a Read, requiring the implementer to provide the total length of the read.
+pub(crate) trait LengthReadable where Self: Sized
+{
+	/// Reads a Self in from the given LengthRead
+	fn read<R: LengthRead>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
 /// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
@@ -273,39 +291,6 @@ impl<T: Readable> From<T> for OptionDeserWrapper<T> {
 	fn from(t: T) -> OptionDeserWrapper<T> { OptionDeserWrapper(Some(t)) }
 }
 
-/// Wrapper to write each element of a Vec with no length prefix
-pub(crate) struct VecWriteWrapper<'a, T: Writeable>(pub &'a Vec<T>);
-impl<'a, T: Writeable> Writeable for VecWriteWrapper<'a, T> {
-	#[inline]
-	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
-		for ref v in self.0.iter() {
-			v.write(writer)?;
-		}
-		Ok(())
-	}
-}
-
-/// Wrapper to read elements from a given stream until it reaches the end of the stream.
-pub(crate) struct VecReadWrapper<T>(pub Vec<T>);
-impl<T: MaybeReadable> Readable for VecReadWrapper<T> {
-	#[inline]
-	fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
-		let mut values = Vec::new();
-		loop {
-			let mut track_read = ReadTrackingReader::new(&mut reader);
-			match MaybeReadable::read(&mut track_read) {
-				Ok(Some(v)) => { values.push(v); },
-				Ok(None) => { },
-				// If we failed to read any bytes at all, we reached the end of our TLV
-				// stream and have simply exhausted all entries.
-				Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
-				Err(e) => return Err(e),
-			}
-		}
-		Ok(Self(values))
-	}
-}
-
 pub(crate) struct U48(pub u64);
 impl Writeable for U48 {
 	#[inline]
@@ -389,8 +374,8 @@ impl Readable for BigSize {
 /// In TLV we occasionally send fields which only consist of, or potentially end with, a
 /// variable-length integer which is simply truncated by skipping high zero bytes. This type
 /// encapsulates such integers implementing Readable/Writeable for them.
-#[cfg_attr(test, derive(PartialEq, Debug))]
-pub(crate) struct HighZeroBytesDroppedVarInt<T>(pub T);
+#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
+pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
 
 macro_rules! impl_writeable_primitive {
 	($val_type:ty, $len: expr) => {
@@ -400,7 +385,7 @@ macro_rules! impl_writeable_primitive {
 				writer.write_all(&self.to_be_bytes())
 			}
 		}
-		impl Writeable for HighZeroBytesDroppedVarInt<$val_type> {
+		impl Writeable for HighZeroBytesDroppedBigSize<$val_type> {
 			#[inline]
 			fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
 				// Skip any full leading 0 bytes when writing (in BE):
@@ -415,9 +400,9 @@ macro_rules! impl_writeable_primitive {
 				Ok(<$val_type>::from_be_bytes(buf))
 			}
 		}
-		impl Readable for HighZeroBytesDroppedVarInt<$val_type> {
+		impl Readable for HighZeroBytesDroppedBigSize<$val_type> {
 			#[inline]
-			fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedVarInt<$val_type>, DecodeError> {
+			fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedBigSize<$val_type>, DecodeError> {
 				// We need to accept short reads (read_len == 0) as "EOF" and handle them as simply
 				// the high bytes being dropped. To do so, we start reading into the middle of buf
 				// and then convert the appropriate number of bytes with extra high bytes out of
@@ -433,7 +418,7 @@ macro_rules! impl_writeable_primitive {
 					let first_byte = $len - ($len - total_read_len);
 					let mut bytes = [0; $len];
 					bytes.copy_from_slice(&buf[first_byte..first_byte + $len]);
-					Ok(HighZeroBytesDroppedVarInt(<$val_type>::from_be_bytes(bytes)))
+					Ok(HighZeroBytesDroppedBigSize(<$val_type>::from_be_bytes(bytes)))
 				} else {
 					// If the encoding had extra zero bytes, return a failure even though we know
 					// what they meant (as the TLV test vectors require this)
@@ -441,6 +426,9 @@ macro_rules! impl_writeable_primitive {
 				}
 			}
 		}
+		impl From<$val_type> for HighZeroBytesDroppedBigSize<$val_type> {
+			fn from(val: $val_type) -> Self { Self(val) }
+		}
 	}
 }
 
@@ -504,7 +492,7 @@ macro_rules! impl_array {
 	);
 }
 
-impl_array!(3); // for rgb
+impl_array!(3); // for rgb, ISO 4712 code
 impl_array!(4); // for IPv4
 impl_array!(12); // for OnionV2
 impl_array!(16); // for IPv6
@@ -513,6 +501,82 @@ impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
 impl_array!(COMPACT_SIGNATURE_SIZE); // for Signature
 impl_array!(1300); // for OnionPacket.hop_data
 
+impl Writeable for [u16; 8] {
+	#[inline]
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		for v in self.iter() {
+			w.write_all(&v.to_be_bytes())?
+		}
+		Ok(())
+	}
+}
+
+impl Readable for [u16; 8] {
+	#[inline]
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let mut buf = [0u8; 16];
+		r.read_exact(&mut buf)?;
+		let mut res = [0u16; 8];
+		for (idx, v) in res.iter_mut().enumerate() {
+			*v = (buf[idx] as u16) << 8 | (buf[idx + 1] as u16)
+		}
+		Ok(res)
+	}
+}
+
+/// For variable-length values within TLV record where the length is encoded as part of the record.
+/// Used to prevent encoding the length twice.
+pub(crate) struct WithoutLength<T>(pub T);
+
+impl Writeable for WithoutLength<&String> {
+	#[inline]
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		w.write_all(self.0.as_bytes())
+	}
+}
+impl Readable for WithoutLength<String> {
+	#[inline]
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let v: WithoutLength<Vec<u8>> = Readable::read(r)?;
+		Ok(Self(String::from_utf8(v.0).map_err(|_| DecodeError::InvalidValue)?))
+	}
+}
+impl<'a> From<&'a String> for WithoutLength<&'a String> {
+	fn from(s: &'a String) -> Self { Self(s) }
+}
+
+impl<'a, T: Writeable> Writeable for WithoutLength<&'a Vec<T>> {
+	#[inline]
+	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+		for ref v in self.0.iter() {
+			v.write(writer)?;
+		}
+		Ok(())
+	}
+}
+
+impl<T: MaybeReadable> Readable for WithoutLength<Vec<T>> {
+	#[inline]
+	fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
+		let mut values = Vec::new();
+		loop {
+			let mut track_read = ReadTrackingReader::new(&mut reader);
+			match MaybeReadable::read(&mut track_read) {
+				Ok(Some(v)) => { values.push(v); },
+				Ok(None) => { },
+				// If we failed to read any bytes at all, we reached the end of our TLV
+				// stream and have simply exhausted all entries.
+				Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
+				Err(e) => return Err(e),
+			}
+		}
+		Ok(Self(values))
+	}
+}
+impl<'a, T> From<&'a Vec<T>> for WithoutLength<&'a Vec<T>> {
+	fn from(v: &'a Vec<T>) -> Self { Self(v) }
+}
+
 // HashMap
 impl<K, V> Writeable for HashMap<K, V>
 	where K: Writeable + Eq + Hash,
@@ -827,6 +891,19 @@ impl Readable for BlockHash {
 	}
 }
 
+impl Writeable for ChainHash {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		w.write_all(self.as_bytes())
+	}
+}
+
+impl Readable for ChainHash {
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let buf: [u8; 32] = Readable::read(r)?;
+		Ok(ChainHash::from(&buf[..]))
+	}
+}
+
 impl Writeable for OutPoint {
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
 		self.txid.write(w)?;
@@ -850,7 +927,7 @@ macro_rules! impl_consensus_ser {
 	($bitcoin_type: ty) => {
 		impl Writeable for $bitcoin_type {
 			fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
-				match self.consensus_encode(WriterWriteAdaptor(writer)) {
+				match self.consensus_encode(&mut WriterWriteAdaptor(writer)) {
 					Ok(_) => Ok(()),
 					Err(e) => Err(e),
 				}
@@ -941,6 +1018,75 @@ impl Readable for String {
 	}
 }
 
+/// Represents a hostname for serialization purposes.
+/// Only the character set and length will be validated.
+/// The character set consists of ASCII alphanumeric characters, hyphens, and periods.
+/// Its length is guaranteed to be representable by a single byte.
+/// This serialization is used by BOLT 7 hostnames.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Hostname(String);
+impl Hostname {
+	/// Returns the length of the hostname.
+	pub fn len(&self) -> u8 {
+		(&self.0).len() as u8
+	}
+}
+impl Deref for Hostname {
+	type Target = String;
+
+	fn deref(&self) -> &Self::Target {
+		&self.0
+	}
+}
+impl From<Hostname> for String {
+	fn from(hostname: Hostname) -> Self {
+		hostname.0
+	}
+}
+impl TryFrom<Vec<u8>> for Hostname {
+	type Error = ();
+
+	fn try_from(bytes: Vec<u8>) -> Result<Self, Self::Error> {
+		if let Ok(s) = String::from_utf8(bytes) {
+			Hostname::try_from(s)
+		} else {
+			Err(())
+		}
+	}
+}
+impl TryFrom<String> for Hostname {
+	type Error = ();
+
+	fn try_from(s: String) -> Result<Self, Self::Error> {
+		if s.len() <= 255 && s.chars().all(|c|
+			c.is_ascii_alphanumeric() ||
+			c == '.' ||
+			c == '-'
+		) {
+			Ok(Hostname(s))
+		} else {
+			Err(())
+		}
+	}
+}
+impl Writeable for Hostname {
+	#[inline]
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		self.len().write(w)?;
+		w.write_all(self.as_bytes())
+	}
+}
+impl Readable for Hostname {
+	#[inline]
+	fn read<R: Read>(r: &mut R) -> Result<Hostname, DecodeError> {
+		let len: u8 = Readable::read(r)?;
+		let mut vec = Vec::with_capacity(len.into());
+		vec.resize(len.into(), 0);
+		r.read_exact(&mut vec)?;
+		Hostname::try_from(vec).map_err(|_| DecodeError::InvalidValue)
+	}
+}
+
 impl Writeable for Duration {
 	#[inline]
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
@@ -956,3 +1102,29 @@ impl Readable for Duration {
 		Ok(Duration::new(secs, nanos))
 	}
 }
+
+#[cfg(test)]
+mod tests {
+	use core::convert::TryFrom;
+	use crate::util::ser::{Readable, Hostname, Writeable};
+
+	#[test]
+	fn hostname_conversion() {
+		assert_eq!(Hostname::try_from(String::from("a-test.com")).unwrap().as_str(), "a-test.com");
+
+		assert!(Hostname::try_from(String::from("\"")).is_err());
+		assert!(Hostname::try_from(String::from("$")).is_err());
+		assert!(Hostname::try_from(String::from("⚡")).is_err());
+		let mut large_vec = Vec::with_capacity(256);
+		large_vec.resize(256, b'A');
+		assert!(Hostname::try_from(String::from_utf8(large_vec).unwrap()).is_err());
+	}
+
+	#[test]
+	fn hostname_serialization() {
+		let hostname = Hostname::try_from(String::from("test")).unwrap();
+		let mut buf: Vec<u8> = Vec::new();
+		hostname.write(&mut buf).unwrap();
+		assert_eq!(Hostname::read(&mut buf.as_slice()).unwrap().as_str(), "test");
+	}
+}
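
Not part of the patch above: a minimal, self-contained sketch of the truncated big-endian integer encoding that the renamed HighZeroBytesDroppedBigSize type implements for TLV fields. The helper names encode_truncated_u64 and decode_truncated_u64 are hypothetical, for illustration only; rust-lightning itself expresses this logic through its Writeable/Readable traits as shown in the hunks above.

// Standalone sketch (not rust-lightning's API) of the "high zero bytes dropped"
// encoding: write a big-endian integer with all leading zero bytes stripped, and
// read it back by right-aligning whatever bytes are present.

/// Encodes `val` big-endian with leading zero bytes dropped (zero encodes to no bytes).
fn encode_truncated_u64(val: u64) -> Vec<u8> {
    let bytes = val.to_be_bytes();
    let skip = (val.leading_zeros() / 8) as usize;
    bytes[skip..].to_vec()
}

/// Decodes a truncated big-endian u64, rejecting encodings that are too long or that
/// carry a redundant leading zero byte (i.e. a non-minimal encoding).
fn decode_truncated_u64(enc: &[u8]) -> Result<u64, ()> {
    if enc.len() > 8 { return Err(()); }
    if enc.first() == Some(&0) { return Err(()); }
    let mut buf = [0u8; 8];
    buf[8 - enc.len()..].copy_from_slice(enc);
    Ok(u64::from_be_bytes(buf))
}

fn main() {
    // 256 (0x0100) needs two bytes; zero needs none.
    assert_eq!(encode_truncated_u64(256), vec![1, 0]);
    assert_eq!(encode_truncated_u64(0), Vec::<u8>::new());
    // Round trips.
    assert_eq!(decode_truncated_u64(&[1, 0]), Ok(256));
    assert_eq!(decode_truncated_u64(&[]), Ok(0));
    // A leading zero byte is redundant and therefore rejected.
    assert_eq!(decode_truncated_u64(&[0, 1]), Err(()));
    println!("truncated integer round trips OK");
}

Dropping the high zero bytes keeps frequently small TLV integers at their minimal length, and refusing a redundant leading zero byte keeps the encoding canonical, which is the same rule the patch enforces with DecodeError::InvalidValue in the comment about the TLV test vectors.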