X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Futil%2Fser.rs;h=1eb5e7424c82343d0bb88aef7df632dc408563f0;hb=643623229311f461abf1dc64890f78378d0d4a83;hp=da66741dbf43137966c750554fd034e913d9db5d;hpb=12461fcba179a1b6726f5c2557d12cb1f1b33b25;p=rust-lightning

diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index da66741d..1eb5e742 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -8,113 +8,127 @@
 // licenses.
 
 //! A very simple serialization framework which is used to serialize/deserialize messages as well
-//! as ChannelsManagers and ChannelMonitors.
-
-use prelude::*;
-use std::io::{Read, Write};
-use std::collections::HashMap;
+//! as [`ChannelManager`]s and [`ChannelMonitor`]s.
+//!
+//! [`ChannelManager`]: crate::ln::channelmanager::ChannelManager
+//! [`ChannelMonitor`]: crate::chain::channelmonitor::ChannelMonitor
+
+use crate::prelude::*;
+use crate::io::{self, Read, Seek, Write};
+use crate::io_extras::{copy, sink};
 use core::hash::Hash;
-use std::sync::Mutex;
+use crate::sync::Mutex;
 use core::cmp;
+use core::convert::TryFrom;
+use core::ops::Deref;
+
+use alloc::collections::BTreeMap;
 
-use bitcoin::secp256k1::Signature;
-use bitcoin::secp256k1::key::{PublicKey, SecretKey};
-use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, COMPACT_SIGNATURE_SIZE};
-use bitcoin::blockdata::script::Script;
+use bitcoin::secp256k1::{PublicKey, SecretKey};
+use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE, SCHNORR_SIGNATURE_SIZE};
+use bitcoin::secp256k1::ecdsa;
+use bitcoin::secp256k1::schnorr;
+use bitcoin::blockdata::constants::ChainHash;
+use bitcoin::blockdata::script::{self, Script};
 use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut};
-use bitcoin::consensus;
+use bitcoin::{consensus, Witness};
 use bitcoin::consensus::Encodable;
 use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::{Txid, BlockHash};
 use core::marker::Sized;
-use ln::msgs::DecodeError;
-use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
-use util::byte_utils;
+use core::time::Duration;
+use crate::chain::ClaimId;
+use crate::ln::msgs::DecodeError;
+#[cfg(taproot)]
+use crate::ln::msgs::PartialSignatureWithNonce;
+use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
-use util::byte_utils::{be64_to_array, be48_to_array, be32_to_array, be16_to_array, slice_to_be16, slice_to_be32, slice_to_be48, slice_to_be64};
+use crate::util::byte_utils::{be48_to_array, slice_to_be48};
+use crate::util::string::UntrustedString;
 
 /// serialization buffer size
 pub const MAX_BUF_SIZE: usize = 64 * 1024;
 
-/// A trait that is similar to std::io::Write but has one extra function which can be used to size
-/// buffers being written into.
-/// An impl is provided for any type that also impls std::io::Write which simply ignores size
-/// hints.
+/// A simplified version of [`std::io::Write`] that exists largely for backwards compatibility.
+/// An impl is provided for any type that also impls [`std::io::Write`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait Writer {
 /// Writes the given buf out. See std::io::Write::write_all for more
- fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error>;
- /// Hints that data of the given size is about the be written. This may not always be called
- /// prior to data being written and may be safely ignored.
- fn size_hint(&mut self, size: usize);
+ fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error>;
 }
 
 impl<W: Write> Writer for W {
 #[inline]
- fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
- <Self as ::std::io::Write>::write_all(self, buf)
+ fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
+ <Self as io::Write>::write_all(self, buf)
 }
- #[inline]
- fn size_hint(&mut self, _size: usize) { }
 }
 
 pub(crate) struct WriterWriteAdaptor<'a, W: Writer + 'a>(pub &'a mut W);
 impl<'a, W: Writer + 'a> Write for WriterWriteAdaptor<'a, W> {
- fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
+ #[inline]
+ fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
 self.0.write_all(buf)
 }
- fn write(&mut self, buf: &[u8]) -> Result<usize, ::std::io::Error> {
+ #[inline]
+ fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
 self.0.write_all(buf)?;
 Ok(buf.len())
 }
- fn flush(&mut self) -> Result<(), ::std::io::Error> {
+ #[inline]
+ fn flush(&mut self) -> Result<(), io::Error> {
 Ok(())
 }
 }
 
 pub(crate) struct VecWriter(pub Vec<u8>);
 impl Writer for VecWriter {
- fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
+ #[inline]
+ fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
 self.0.extend_from_slice(buf);
 Ok(())
 }
- fn size_hint(&mut self, size: usize) {
- self.0.reserve_exact(size);
- }
 }
 
 /// Writer that only tracks the amount of data written - useful if you need to calculate the length
 /// of some data when serialized but don't yet need the full data.
-pub(crate) struct LengthCalculatingWriter(pub usize);
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct LengthCalculatingWriter(pub usize);
 impl Writer for LengthCalculatingWriter {
 #[inline]
- fn write_all(&mut self, buf: &[u8]) -> Result<(), ::std::io::Error> {
+ fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
 self.0 += buf.len();
 Ok(())
 }
- #[inline]
- fn size_hint(&mut self, _size: usize) {}
 }
 
-/// Essentially std::io::Take but a bit simpler and with a method to walk the underlying stream
+/// Essentially [`std::io::Take`] but a bit simpler and with a method to walk the underlying stream
 /// forward to ensure we always consume exactly the fixed length specified.
-pub(crate) struct FixedLengthReader<R: Read> {
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct FixedLengthReader<R: Read> {
 read: R,
 bytes_read: u64,
 total_bytes: u64,
 }
 impl<R: Read> FixedLengthReader<R> {
+ /// Returns a new [`FixedLengthReader`].
 pub fn new(read: R, total_bytes: u64) -> Self {
 Self { read, bytes_read: 0, total_bytes }
 }
+ /// Returns whether some bytes are remaining or not.
+ #[inline]
 pub fn bytes_remain(&mut self) -> bool {
 self.bytes_read != self.total_bytes
 }
+ /// Consumes the remaining bytes.
+ #[inline]
 pub fn eat_remaining(&mut self) -> Result<(), DecodeError> {
- ::std::io::copy(self, &mut ::std::io::sink()).unwrap();
+ copy(self, &mut sink()).unwrap();
 if self.bytes_read != self.total_bytes {
 Err(DecodeError::ShortRead)
 } else {
@@ -123,7 +137,8 @@ impl<R: Read> FixedLengthReader<R> {
 }
 }
 impl<R: Read> Read for FixedLengthReader<R> {
- fn read(&mut self, dest: &mut [u8]) -> Result<usize, ::std::io::Error> {
+ #[inline]
+ fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
 if self.total_bytes == self.bytes_read {
 Ok(0)
 } else {
@@ -139,19 +154,31 @@ impl<R: Read> Read for FixedLengthReader<R> {
 }
 }
 
-/// A Read which tracks whether any bytes have been read at all. This allows us to distinguish
+impl<R: Read> LengthRead for FixedLengthReader<R> {
+ #[inline]
+ fn total_bytes(&self) -> u64 {
+ self.total_bytes
+ }
+}
+
+/// A [`Read`] implementation which tracks whether any bytes have been read at all. This allows us to distinguish
 /// between "EOF reached before we started" and "EOF reached mid-read".
-pub(crate) struct ReadTrackingReader<R: Read> {
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct ReadTrackingReader<R: Read> {
 read: R,
+ /// Returns whether we have read from this reader or not yet.
 pub have_read: bool,
 }
 impl<R: Read> ReadTrackingReader<R> {
+ /// Returns a new [`ReadTrackingReader`].
 pub fn new(read: R) -> Self {
 Self { read, have_read: false }
 }
 }
 impl<R: Read> Read for ReadTrackingReader<R> {
- fn read(&mut self, dest: &mut [u8]) -> Result<usize, ::std::io::Error> {
+ #[inline]
+ fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
 match self.read.read(dest) {
 Ok(0) => Ok(0),
 Ok(len) => {
@@ -163,70 +190,155 @@ impl<R: Read> Read for ReadTrackingReader<R> {
 }
 }
 
-/// A trait that various rust-lightning types implement allowing them to be written out to a Writer
+/// A trait that various LDK types implement allowing them to be written out to a [`Writer`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait Writeable {
- /// Writes self out to the given Writer
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error>;
+ /// Writes `self` out to the given [`Writer`].
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error>;
 
- /// Writes self out to a Vec<u8>
+ /// Writes `self` out to a `Vec<u8>`.
 fn encode(&self) -> Vec<u8> {
 let mut msg = VecWriter(Vec::new());
 self.write(&mut msg).unwrap();
 msg.0
 }
 
- /// Writes self out to a Vec<u8>
+ /// Writes `self` out to a `Vec<u8>`.
+ #[cfg(test)]
 fn encode_with_len(&self) -> Vec<u8> {
 let mut msg = VecWriter(Vec::new());
 0u16.write(&mut msg).unwrap();
 self.write(&mut msg).unwrap();
 let len = msg.0.len();
- msg.0[..2].copy_from_slice(&byte_utils::be16_to_array(len as u16 - 2));
+ msg.0[..2].copy_from_slice(&(len as u16 - 2).to_be_bytes());
 msg.0
 }
+
+ /// Gets the length of this object after it has been serialized. This can be overridden to
+ /// optimize cases where we prepend an object with its length.
+ // Note that LLVM optimizes this away in most cases! Check that it isn't before you override!
+ #[inline]
+ fn serialized_length(&self) -> usize {
+ let mut len_calc = LengthCalculatingWriter(0);
+ self.write(&mut len_calc).expect("No in-memory data may fail to serialize");
+ len_calc.0
+ }
 }
 
 impl<'a, T: Writeable> Writeable for &'a T {
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> { (*self).write(writer) }
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> { (*self).write(writer) }
 }
 
-/// A trait that various rust-lightning types implement allowing them to be read in from a Read
+/// A trait that various LDK types implement allowing them to be read in from a [`Read`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait Readable where Self: Sized {
- /// Reads a Self in from the given Read
+ /// Reads a `Self` in from the given [`Read`].
 fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError>;
 }
 
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
-/// from a Read given some additional set of arguments which is required to deserialize.
+/// A trait that various LDK types implement allowing them to be read in from a
+/// [`Read`]` + `[`Seek`].
+pub(crate) trait SeekReadable where Self: Sized {
+ /// Reads a `Self` in from the given [`Read`].
+ fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a [`Read`] given some additional set of arguments which is required to deserialize.
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait ReadableArgs<P> where Self: Sized {

- /// Reads a Self in from the given Read
+ /// Reads a `Self` in from the given [`Read`].
 fn read<R: Read>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
-/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
+/// A [`std::io::Read`] that also provides the total bytes available to be read.
+pub(crate) trait LengthRead: Read {
+ /// The total number of bytes available to be read.
+ fn total_bytes(&self) -> u64;
+}
+
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a Read given some additional set of arguments which is required to deserialize, requiring
+/// the implementer to provide the total length of the read.
+pub(crate) trait LengthReadableArgs<P> where Self: Sized

+{
+ /// Reads a `Self` in from the given [`LengthRead`].
+ fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
+}
+
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a [`Read`], requiring the implementer to provide the total length of the read.
+pub(crate) trait LengthReadable where Self: Sized
+{
+ /// Reads a `Self` in from the given [`LengthRead`].
+ fn read<R: LengthRead>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
+/// A trait that various LDK types implement allowing them to (maybe) be read in from a [`Read`].
 ///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
+/// This is not exported to bindings users as we only export serialization to/from byte arrays instead
 pub trait MaybeReadable where Self: Sized {
- /// Reads a Self in from the given Read
+ /// Reads a `Self` in from the given [`Read`].
 fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError>;
 }
 
+impl<T: Readable> MaybeReadable for T {
+ #[inline]
+ fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError> {
+ Ok(Some(Readable::read(reader)?))
+ }
+}
+
+/// Wrapper to read a required (non-optional) TLV record.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct RequiredWrapper<T>(pub Option<T>);
+impl<T: Readable> Readable for RequiredWrapper<T> {
+ #[inline]
+ fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
+ Ok(Self(Some(Readable::read(reader)?)))
+ }
+}
+impl<A, T: ReadableArgs<A>> ReadableArgs<A> for RequiredWrapper<T> {
+ #[inline]
+ fn read<R: Read>(reader: &mut R, args: A) -> Result<Self, DecodeError> {
+ Ok(Self(Some(ReadableArgs::read(reader, args)?)))
+ }
+}
+/// When handling `default_values`, we want to map the default-value T directly
+/// to a `RequiredWrapper<T>` in a way that works for `field: T = t;` as
+/// well. Thus, we assume `Into<T> for T` does nothing and use that.
+impl<T> From<T> for RequiredWrapper<T> {
+ fn from(t: T) -> RequiredWrapper<T> { RequiredWrapper(Some(t)) }
+}
+
+/// Wrapper to read a required (non-optional) TLV record that may have been upgraded without
+/// backwards compat.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct UpgradableRequired<T: MaybeReadable>(pub Option<T>);
+impl<T: MaybeReadable> MaybeReadable for UpgradableRequired<T> {
+ #[inline]
+ fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError> {
+ let tlv = MaybeReadable::read(reader)?;
+ if let Some(tlv) = tlv { return Ok(Some(Self(Some(tlv)))) }
+ Ok(None)
+ }
+}
+
 pub(crate) struct U48(pub u64);
 impl Writeable for U48 {
 #[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
 writer.write_all(&be48_to_array(self.0))
 }
 }
@@ -239,17 +351,18 @@ impl Readable for U48 {
 }
 }
 
-/// Lightning TLV uses a custom variable-length integer called BigSize. It is similar to Bitcoin's
+/// Lightning TLV uses a custom variable-length integer called `BigSize`. It is similar to Bitcoin's
 /// variable-length integers except that it is serialized in big-endian instead of little-endian.
 ///
 /// Like Bitcoin's variable-length integer, it exhibits ambiguity in that certain values can be
 /// encoded in several different ways, which we must check for at deserialization-time. Thus, if
 /// you're looking for an example of a variable-length integer to use for your own project, move
 /// along, this is a rather poor design.
-pub(crate) struct BigSize(pub u64);
+#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
+pub struct BigSize(pub u64);
 impl Writeable for BigSize {
 #[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
 match self.0 {
 0...0xFC => {
 (self.0 as u8).write(writer)
@@ -303,25 +416,59 @@ impl Readable for BigSize {
 }
 }
 
+/// The lightning protocol uses u16s for lengths in most cases. As our serialization framework
+/// primarily targets that, we must as well. However, because we may serialize objects that have
+/// more than 65K entries, we need to be able to store larger values. Thus, we define a variable
+/// length integer here that is backwards-compatible for values < 0xffff. We treat 0xffff as
+/// "read eight more bytes".
+///
+/// To ensure we only have one valid encoding per value, we add 0xffff to values written as eight
+/// bytes. Thus, 0xfffe is serialized as 0xfffe, whereas 0xffff is serialized as
+/// 0xffff0000000000000000 (i.e. read-eight-bytes then zero).
+struct CollectionLength(pub u64);
+impl Writeable for CollectionLength {
+ #[inline]
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ if self.0 < 0xffff {
+ (self.0 as u16).write(writer)
+ } else {
+ 0xffffu16.write(writer)?;
+ (self.0 - 0xffff).write(writer)
+ }
+ }
+}
+
+impl Readable for CollectionLength {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let mut val: u64 = <u16 as Readable>::read(r)? as u64;
+ if val == 0xffff {
+ val = <u64 as Readable>::read(r)?
+ .checked_add(0xffff).ok_or(DecodeError::InvalidValue)?;
+ }
+ Ok(CollectionLength(val))
+ }
+}
+
 /// In TLV we occasionally send fields which only consist of, or potentially end with, a
 /// variable-length integer which is simply truncated by skipping high zero bytes. This type
-/// encapsulates such integers implementing Readable/Writeable for them.
-#[cfg_attr(test, derive(PartialEq, Debug))]
-pub(crate) struct HighZeroBytesDroppedVarInt<T>(pub T);
+/// encapsulates such integers implementing [`Readable`]/[`Writeable`] for them.
+#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
+pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
 
 macro_rules! impl_writeable_primitive {
- ($val_type:ty, $meth_write:ident, $len: expr, $meth_read:ident) => {
+ ($val_type:ty, $len: expr) => {
 impl Writeable for $val_type {
 #[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
- writer.write_all(&$meth_write(*self))
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ writer.write_all(&self.to_be_bytes())
 }
 }
- impl Writeable for HighZeroBytesDroppedVarInt<$val_type> {
+ impl Writeable for HighZeroBytesDroppedBigSize<$val_type> {
 #[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
 // Skip any full leading 0 bytes when writing (in BE):
- writer.write_all(&$meth_write(self.0)[(self.0.leading_zeros()/8) as usize..$len])
+ writer.write_all(&self.0.to_be_bytes()[(self.0.leading_zeros()/8) as usize..$len])
 }
 }
 impl Readable for $val_type {
 #[inline]
@@ -329,12 +476,12 @@ macro_rules! impl_writeable_primitive {
 fn read<R: Read>(reader: &mut R) -> Result<$val_type, DecodeError> {
 let mut buf = [0; $len];
 reader.read_exact(&mut buf)?;
- Ok($meth_read(&buf))
+ Ok(<$val_type>::from_be_bytes(buf))
 }
 }
- impl Readable for HighZeroBytesDroppedVarInt<$val_type> {
+ impl Readable for HighZeroBytesDroppedBigSize<$val_type> {
 #[inline]
- fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedVarInt<$val_type>, DecodeError> {
+ fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedBigSize<$val_type>, DecodeError> {
 // We need to accept short reads (read_len == 0) as "EOF" and handle them as simply
 // the high bytes being dropped. To do so, we start reading into the middle of buf
 // and then convert the appropriate number of bytes with extra high bytes out of
@@ -348,7 +495,9 @@ macro_rules! impl_writeable_primitive {
 }
 if total_read_len == 0 || buf[$len] != 0 {
 let first_byte = $len - ($len - total_read_len);
- Ok(HighZeroBytesDroppedVarInt($meth_read(&buf[first_byte..first_byte + $len])))
+ let mut bytes = [0; $len];
+ bytes.copy_from_slice(&buf[first_byte..first_byte + $len]);
+ Ok(HighZeroBytesDroppedBigSize(<$val_type>::from_be_bytes(bytes)))
 } else {
 // If the encoding had extra zero bytes, return a failure even though we know
 // what they meant (as the TLV test vectors require this)
@@ -356,16 +505,24 @@ macro_rules! impl_writeable_primitive {
 }
 }
 }
+ impl From<$val_type> for HighZeroBytesDroppedBigSize<$val_type> {
+ fn from(val: $val_type) -> Self { Self(val) }
+ }
 }
 }
 
-impl_writeable_primitive!(u64, be64_to_array, 8, slice_to_be64);
-impl_writeable_primitive!(u32, be32_to_array, 4, slice_to_be32);
-impl_writeable_primitive!(u16, be16_to_array, 2, slice_to_be16);
+impl_writeable_primitive!(u128, 16);
+impl_writeable_primitive!(u64, 8);
+impl_writeable_primitive!(u32, 4);
+impl_writeable_primitive!(u16, 2);
+impl_writeable_primitive!(i64, 8);
+impl_writeable_primitive!(i32, 4);
+impl_writeable_primitive!(i16, 2);
+impl_writeable_primitive!(i8, 1);
 
 impl Writeable for u8 {
 #[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
 writer.write_all(&[*self])
 }
 }
@@ -380,7 +537,7 @@ impl Readable for u8 {
 
 impl Writeable for bool {
 #[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), ::std::io::Error> {
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
 writer.write_all(&[if *self {1} else {0}])
 }
 }
@@ -402,7 +559,7 @@ macro_rules! impl_array {
 
 impl Writeable for [u8; $size] {
 #[inline]
- fn write<W: Writer>(&self, w: &mut W) -> Result<(), ::std::io::Error> {
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
 w.write_all(self)
 }
 }
@@ -419,52 +576,258 @@ macro_rules! impl_array {
 );
 }
 
-//TODO: performance issue with [u8; size] with impl_array!()
-impl_array!(3); // for rgb
+impl_array!(3); // for rgb, ISO 4712 code
 impl_array!(4); // for IPv4
-impl_array!(10); // for OnionV2
+impl_array!(12); // for OnionV2
 impl_array!(16); // for IPv6
 impl_array!(32); // for channel id & hmac
 impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
-impl_array!(COMPACT_SIGNATURE_SIZE); // for Signature
+impl_array!(64); // for ecdsa::Signature and schnorr::Signature
+impl_array!(66); // for MuSig2 nonces
 impl_array!(1300); // for OnionPacket.hop_data
 
-// HashMap
-impl<K, V> Writeable for HashMap<K, V>
- where K: Writeable + Eq + Hash,
- V: Writeable
+impl Writeable for [u16; 8] {
+ #[inline]
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ for v in self.iter() {
+ w.write_all(&v.to_be_bytes())?
+ }
+ Ok(())
+ }
+}
+
+impl Readable for [u16; 8] {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let mut buf = [0u8; 16];
+ r.read_exact(&mut buf)?;
+ let mut res = [0u16; 8];
+ for (idx, v) in res.iter_mut().enumerate() {
+ *v = (buf[idx*2] as u16) << 8 | (buf[idx*2 + 1] as u16)
+ }
+ Ok(res)
+ }
+}
+
+/// A type for variable-length values within TLV record where the length is encoded as part of the record.
+/// Used to prevent encoding the length twice.
+///
+/// This is not exported to bindings users as manual TLV building is not currently supported in bindings
+pub struct WithoutLength<T>(pub T);
+
+impl Writeable for WithoutLength<&String> {
+ #[inline]
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ w.write_all(self.0.as_bytes())
+ }
+}
+impl Readable for WithoutLength<String> {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let v: WithoutLength<Vec<u8>> = Readable::read(r)?;
+ Ok(Self(String::from_utf8(v.0).map_err(|_| DecodeError::InvalidValue)?))
+ }
+}
+impl<'a> From<&'a String> for WithoutLength<&'a String> {
+ fn from(s: &'a String) -> Self { Self(s) }
+}
+
+
+impl Writeable for WithoutLength<&UntrustedString> {
+ #[inline]
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ WithoutLength(&self.0.0).write(w)
+ }
+}
+impl Readable for WithoutLength<UntrustedString> {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let s: WithoutLength<String> = Readable::read(r)?;
+ Ok(Self(UntrustedString(s.0)))
+ }
+}
+
+impl<'a, T: Writeable> Writeable for WithoutLength<&'a Vec<T>> {
+ #[inline]
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ for ref v in self.0.iter() {
+ v.write(writer)?;
+ }
+ Ok(())
+ }
+}
+
+impl<T: MaybeReadable> Readable for WithoutLength<Vec<T>> {
+ #[inline]
+ fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
+ let mut values = Vec::new();
+ loop {
+ let mut track_read = ReadTrackingReader::new(&mut reader);
+ match MaybeReadable::read(&mut track_read) {
+ Ok(Some(v)) => { values.push(v); },
+ Ok(None) => { },
+ // If we failed to read any bytes at all, we reached the end of our TLV
+ // stream and have simply exhausted all entries.
+ Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
+ Err(e) => return Err(e),
+ }
+ }
+ Ok(Self(values))
+ }
+}
+impl<'a, T> From<&'a Vec<T>> for WithoutLength<&'a Vec<T>> {
+ fn from(v: &'a Vec<T>) -> Self { Self(v) }
+}
+
+impl Writeable for WithoutLength<&Script> {
+ #[inline]
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ writer.write_all(self.0.as_bytes())
+ }
+}
+
+impl Readable for WithoutLength
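
The `BigSize` doc comment in the hunk above describes Lightning's big-endian variable-length integer and the need to reject non-minimal encodings at read time. As a standalone illustration of those wire rules only (this is not part of the diff, and `encode_bigsize`/`decode_bigsize` are hypothetical helper names, not LDK APIs), a minimal sketch in plain Rust might look like:

```rust
// Sketch of the BigSize wire rules: 1, 3, 5, or 9 bytes, big-endian,
// with non-minimal encodings rejected on decode. Rust 2021 assumed.
fn encode_bigsize(value: u64) -> Vec<u8> {
    match value {
        0..=0xFC => vec![value as u8],
        0xFD..=0xFFFF => {
            let mut v = vec![0xFDu8];
            v.extend_from_slice(&(value as u16).to_be_bytes());
            v
        },
        0x1_0000..=0xFFFF_FFFF => {
            let mut v = vec![0xFEu8];
            v.extend_from_slice(&(value as u32).to_be_bytes());
            v
        },
        _ => {
            let mut v = vec![0xFFu8];
            v.extend_from_slice(&value.to_be_bytes());
            v
        },
    }
}

// Decoding must reject values that could have used a shorter form, mirroring
// the deserialization-time ambiguity check mentioned in the doc comment.
fn decode_bigsize(bytes: &[u8]) -> Option<u64> {
    match bytes.split_first()? {
        (&0xFF, rest) => {
            let v = u64::from_be_bytes(rest.get(..8)?.try_into().ok()?);
            if v < 0x1_0000_0000 { None } else { Some(v) }
        },
        (&0xFE, rest) => {
            let v = u32::from_be_bytes(rest.get(..4)?.try_into().ok()?) as u64;
            if v < 0x1_0000 { None } else { Some(v) }
        },
        (&0xFD, rest) => {
            let v = u16::from_be_bytes(rest.get(..2)?.try_into().ok()?) as u64;
            if v < 0xFD { None } else { Some(v) }
        },
        (&b, _) => Some(b as u64),
    }
}

fn main() {
    assert_eq!(encode_bigsize(0xFC), vec![0xFC]);
    assert_eq!(encode_bigsize(0xFD), vec![0xFD, 0x00, 0xFD]);
    assert_eq!(decode_bigsize(&encode_bigsize(100_000)), Some(100_000));
    // Non-minimal: 0xFC written in the three-byte form must be rejected.
    assert_eq!(decode_bigsize(&[0xFD, 0x00, 0xFC]), None);
}
```

LDK's actual implementation is the `Writeable`/`Readable` impls for `BigSize` in the diff; the sketch only restates the encoding rules for readers unfamiliar with them.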
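
The `CollectionLength` doc comment in the diff defines a u16 length with an `0xffff` escape into a u64 that is stored minus `0xffff`, so each length has exactly one encoding. A minimal sketch of the writer side, again with an illustrative helper name rather than an LDK API:

```rust
// Sketch of the CollectionLength rule: lengths < 0xffff are a plain u16;
// larger lengths are the 0xffff marker followed by (len - 0xffff) as a u64.
fn write_collection_len(len: u64, out: &mut Vec<u8>) {
    if len < 0xffff {
        out.extend_from_slice(&(len as u16).to_be_bytes());
    } else {
        out.extend_from_slice(&0xffffu16.to_be_bytes());
        out.extend_from_slice(&(len - 0xffff).to_be_bytes());
    }
}

fn main() {
    let mut a = Vec::new();
    write_collection_len(0xfffe, &mut a);
    assert_eq!(a, vec![0xff, 0xfe]);

    let mut b = Vec::new();
    write_collection_len(0xffff, &mut b);
    // 0xffff marker followed by (0xffff - 0xffff) = 0 as a u64,
    // i.e. the 0xffff0000000000000000 example from the doc comment.
    assert_eq!(b, vec![0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0]);
}
```

The `- 0xffff` offset is what makes the encoding bijective: without it, a length such as 0x1234 could be written either directly or via the eight-byte escape.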