// licenses.
//! A very simple serialization framework which is used to serialize/deserialize messages as well
-//! as ChannelsManagers and ChannelMonitors.
-
-use prelude::*;
-use io::{self, Read, Write};
-use io_extras::{copy, sink};
+//! as [`ChannelManager`]s and [`ChannelMonitor`]s.
+//!
+//! [`ChannelManager`]: crate::ln::channelmanager::ChannelManager
+//! [`ChannelMonitor`]: crate::chain::channelmonitor::ChannelMonitor
+
+use crate::prelude::*;
+use crate::io::{self, Read, Seek, Write};
+use crate::io_extras::{copy, sink};
use core::hash::Hash;
-use sync::Mutex;
+use crate::sync::Mutex;
use core::cmp;
use core::convert::TryFrom;
use core::ops::Deref;
+use alloc::collections::BTreeMap;
+
use bitcoin::secp256k1::{PublicKey, SecretKey};
-use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE};
-use bitcoin::secp256k1::ecdsa::Signature;
+use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE, SCHNORR_SIGNATURE_SIZE};
+use bitcoin::secp256k1::ecdsa;
+use bitcoin::secp256k1::schnorr;
+use bitcoin::blockdata::constants::ChainHash;
use bitcoin::blockdata::script::Script;
use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut};
use bitcoin::consensus;
use bitcoin::hash_types::{Txid, BlockHash};
use core::marker::Sized;
use core::time::Duration;
-use ln::msgs::DecodeError;
-use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
+use crate::ln::msgs::DecodeError;
+use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
-use util::byte_utils::{be48_to_array, slice_to_be48};
+use crate::util::byte_utils::{be48_to_array, slice_to_be48};
/// serialization buffer size
pub const MAX_BUF_SIZE: usize = 64 * 1024;
-/// A simplified version of std::io::Write that exists largely for backwards compatibility.
-/// An impl is provided for any type that also impls std::io::Write.
+/// A simplified version of [`std::io::Write`] that exists largely for backwards compatibility.
+/// An impl is provided for any type that also impls [`std::io::Write`].
///
/// (C-not exported) as we only export serialization to/from byte arrays instead
pub trait Writer {
/// Writer that only tracks the amount of data written - useful if you need to calculate the length
/// of some data when serialized but don't yet need the full data.
+///
+/// The public `usize` is the running count of bytes written so far.
-pub(crate) struct LengthCalculatingWriter(pub usize);
+pub struct LengthCalculatingWriter(pub usize);
impl Writer for LengthCalculatingWriter {
#[inline]
fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
}
}
-/// Essentially std::io::Take but a bit simpler and with a method to walk the underlying stream
+/// Essentially [`std::io::Take`] but a bit simpler and with a method to walk the underlying stream
/// forward to ensure we always consume exactly the fixed length specified.
-pub(crate) struct FixedLengthReader<R: Read> {
+pub struct FixedLengthReader<R: Read> {
+ // The wrapped reader.
read: R,
+ // Number of bytes consumed from `read` so far.
bytes_read: u64,
+ // The fixed number of bytes this reader exposes in total.
total_bytes: u64,
}
impl<R: Read> FixedLengthReader<R> {
+ /// Returns a new [`FixedLengthReader`].
pub fn new(read: R, total_bytes: u64) -> Self {
Self { read, bytes_read: 0, total_bytes }
}
+ /// Returns whether some bytes are remaining or not.
#[inline]
pub fn bytes_remain(&mut self) -> bool {
self.bytes_read != self.total_bytes
}
+ /// Consumes the remaining bytes.
#[inline]
pub fn eat_remaining(&mut self) -> Result<(), DecodeError> {
copy(self, &mut sink()).unwrap();
}
}
-/// A Read which tracks whether any bytes have been read at all. This allows us to distinguish
+/// A [`Read`] implementation which tracks whether any bytes have been read at all. This allows us to distinguish
/// between "EOF reached before we started" and "EOF reached mid-read".
-pub(crate) struct ReadTrackingReader<R: Read> {
+pub struct ReadTrackingReader<R: Read> {
+ // The wrapped reader.
read: R,
+ /// Returns whether we have read from this reader or not yet.
pub have_read: bool,
}
impl<R: Read> ReadTrackingReader<R> {
+ /// Returns a new [`ReadTrackingReader`].
pub fn new(read: R) -> Self {
Self { read, have_read: false }
}
}
}
-/// A trait that various rust-lightning types implement allowing them to be written out to a Writer
+/// A trait that various LDK types implement allowing them to be written out to a [`Writer`].
///
/// (C-not exported) as we only export serialization to/from byte arrays instead
pub trait Writeable {
- /// Writes self out to the given Writer
+ /// Writes `self` out to the given [`Writer`].
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error>;
- /// Writes self out to a Vec<u8>
+ /// Writes `self` out to a `Vec<u8>`.
fn encode(&self) -> Vec<u8> {
let mut msg = VecWriter(Vec::new());
self.write(&mut msg).unwrap();
msg.0
}
- /// Writes self out to a Vec<u8>
+ /// Writes `self` out to a `Vec<u8>`.
#[cfg(test)]
fn encode_with_len(&self) -> Vec<u8> {
let mut msg = VecWriter(Vec::new());
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> { (*self).write(writer) }
}
-/// A trait that various rust-lightning types implement allowing them to be read in from a Read
+/// A trait that various LDK types implement allowing them to be read in from a [`Read`].
///
/// (C-not exported) as we only export serialization to/from byte arrays instead
pub trait Readable
where Self: Sized
{
- /// Reads a Self in from the given Read
+ /// Reads a `Self` in from the given [`Read`].
fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError>;
}
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
-/// from a Read given some additional set of arguments which is required to deserialize.
+/// A trait that various LDK types implement allowing them to be read in from a
+/// [`Read`]` + `[`Seek`].
+pub(crate) trait SeekReadable where Self: Sized {
+ /// Reads a `Self` in from the given [`Read`].
+ fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a [`Read`] given some additional set of arguments which is required to deserialize.
///
/// (C-not exported) as we only export serialization to/from byte arrays instead
pub trait ReadableArgs<P>
where Self: Sized
{
- /// Reads a Self in from the given Read
+ /// Reads a `Self` in from the given [`Read`].
fn read<R: Read>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
}
-/// A std::io::Read that also provides the total bytes available to read.
+/// A [`std::io::Read`] that also provides the total bytes available to be read.
pub(crate) trait LengthRead: Read {
- /// The total number of bytes available to read.
+ /// The total number of bytes available to be read.
fn total_bytes(&self) -> u64;
}
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
+/// A trait that various higher-level LDK types implement allowing them to be read in
/// from a Read given some additional set of arguments which is required to deserialize, requiring
/// the implementer to provide the total length of the read.
pub(crate) trait LengthReadableArgs<P> where Self: Sized
{
- /// Reads a Self in from the given LengthRead
+ /// Reads a `Self` in from the given [`LengthRead`].
fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
}
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
-/// from a Read, requiring the implementer to provide the total length of the read.
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a [`Read`], requiring the implementer to provide the total length of the read.
pub(crate) trait LengthReadable where Self: Sized
{
- /// Reads a Self in from the given LengthRead
+ /// Reads a `Self` in from the given [`LengthRead`].
fn read<R: LengthRead>(reader: &mut R) -> Result<Self, DecodeError>;
}
-/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
+/// A trait that various LDK types implement allowing them to (maybe) be read in from a [`Read`].
///
/// (C-not exported) as we only export serialization to/from byte arrays instead
pub trait MaybeReadable
where Self: Sized
{
- /// Reads a Self in from the given Read
+ /// Reads a `Self` in from the given [`Read`].
fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError>;
}
}
}
-pub(crate) struct OptionDeserWrapper<T: Readable>(pub Option<T>);
-impl<T: Readable> Readable for OptionDeserWrapper<T> {
+/// Wrapper to read a required (non-optional) TLV record.
+///
+/// A successful [`Readable::read`] always produces `Some`; the `Option` presumably exists only
+/// as a placeholder for the TLV decoding macros — TODO confirm against the macro definitions.
+pub struct RequiredWrapper<T>(pub Option<T>);
+impl<T: Readable> Readable for RequiredWrapper<T> {
#[inline]
fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
+ // A required record is simply the inner type's encoding, wrapped in `Some`.
Ok(Self(Some(Readable::read(reader)?)))
}
}
-/// When handling default_values, we want to map the default-value T directly
-/// to a OptionDeserWrapper<T> in a way that works for `field: T = t;` as
-/// well. Thus, we assume `Into<T> for T` does nothing and use that.
-impl<T: Readable> From<T> for OptionDeserWrapper<T> {
- fn from(t: T) -> OptionDeserWrapper<T> { OptionDeserWrapper(Some(t)) }
-}
-
-/// Wrapper to write each element of a Vec with no length prefix
-pub(crate) struct VecWriteWrapper<'a, T: Writeable>(pub &'a Vec<T>);
-impl<'a, T: Writeable> Writeable for VecWriteWrapper<'a, T> {
+// As for the `Readable` impl above, but for inner types that need extra arguments (`ReadableArgs`)
+// to deserialize.
+impl<A, T: ReadableArgs<A>> ReadableArgs<A> for RequiredWrapper<T> {
#[inline]
- fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
- for ref v in self.0.iter() {
- v.write(writer)?;
- }
- Ok(())
+ fn read<R: Read>(reader: &mut R, args: A) -> Result<Self, DecodeError> {
+ Ok(Self(Some(ReadableArgs::read(reader, args)?)))
}
}
+/// When handling `default_values`, we want to map the default-value T directly
+/// to a `RequiredWrapper<T>` in a way that works for `field: T = t;` as
+/// well. Thus, we assume `Into<T> for T` does nothing and use that.
+impl<T> From<T> for RequiredWrapper<T> {
+ // The supplied default is wrapped as-is; no conversion of `t` is performed.
+ fn from(t: T) -> RequiredWrapper<T> { RequiredWrapper(Some(t)) }
+}
-/// Wrapper to read elements from a given stream until it reaches the end of the stream.
-pub(crate) struct VecReadWrapper<T>(pub Vec<T>);
-impl<T: MaybeReadable> Readable for VecReadWrapper<T> {
+/// Wrapper to read a required (non-optional) TLV record that may have been upgraded without
+/// backwards compat.
+pub struct UpgradableRequired<T: MaybeReadable>(pub Option<T>);
+impl<T: MaybeReadable> MaybeReadable for UpgradableRequired<T> {
#[inline]
- fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
- let mut values = Vec::new();
- loop {
- let mut track_read = ReadTrackingReader::new(&mut reader);
- match MaybeReadable::read(&mut track_read) {
- Ok(Some(v)) => { values.push(v); },
- Ok(None) => { },
- // If we failed to read any bytes at all, we reached the end of our TLV
- // stream and have simply exhausted all entries.
- Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
- Err(e) => return Err(e),
- }
- }
- Ok(Self(values))
+ fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError> {
+ // Propagate read errors; wrap a successfully-read value, and map an inner `None`
+ // (a value we could not interpret, per the struct docs) to an outer `None`.
+ let tlv = MaybeReadable::read(reader)?;
+ if let Some(tlv) = tlv { return Ok(Some(Self(Some(tlv)))) }
+ Ok(None)
}
}
}
}
-/// Lightning TLV uses a custom variable-length integer called BigSize. It is similar to Bitcoin's
+/// Lightning TLV uses a custom variable-length integer called `BigSize`. It is similar to Bitcoin's
/// variable-length integers except that it is serialized in big-endian instead of little-endian.
///
/// Like Bitcoin's variable-length integer, it exhibits ambiguity in that certain values can be
}
}
+/// The lightning protocol uses u16s for lengths in most cases. As our serialization framework
+/// primarily targets that, we must as well. However, because we may serialize objects that have
+/// more than 65K entries, we need to be able to store larger values. Thus, we define a variable
+/// length integer here that is backwards-compatible for values < 0xffff. We treat 0xffff as
+/// "read eight more bytes".
+///
+/// To ensure we only have one valid encoding per value, we add 0xffff to values written as eight
+/// bytes. Thus, 0xfffe is serialized as 0xfffe, whereas 0xffff is serialized as
+/// 0xffff0000000000000000 (i.e. read-eight-bytes then zero).
+struct CollectionLength(pub u64);
+impl Writeable for CollectionLength {
+ #[inline]
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ if self.0 < 0xffff {
+ // Values below the escape marker fit in the legacy two-byte form.
+ (self.0 as u16).write(writer)
+ } else {
+ // Write the 0xffff escape marker, then the remainder as a u64. Subtracting
+ // 0xffff keeps the encoding canonical (exactly one encoding per value).
+ 0xffffu16.write(writer)?;
+ (self.0 - 0xffff).write(writer)
+ }
+ }
+}
+
+impl Readable for CollectionLength {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let mut val: u64 = <u16 as Readable>::read(r)? as u64;
+ // 0xffff escapes to an eight-byte value stored offset by 0xffff; checked_add
+ // rejects a u64-overflowing encoding as invalid.
+ if val == 0xffff {
+ val = <u64 as Readable>::read(r)?
+ .checked_add(0xffff).ok_or(DecodeError::InvalidValue)?;
+ }
+ Ok(CollectionLength(val))
+ }
+}
+
/// In TLV we occasionally send fields which only consist of, or potentially end with, a
/// variable-length integer which is simply truncated by skipping high zero bytes. This type
-/// encapsulates such integers implementing Readable/Writeable for them.
-#[cfg_attr(test, derive(PartialEq, Debug))]
+/// encapsulates such integers implementing [`Readable`]/[`Writeable`] for them.
+#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
macro_rules! impl_writeable_primitive {
}
}
}
+ impl From<$val_type> for HighZeroBytesDroppedBigSize<$val_type> {
+ fn from(val: $val_type) -> Self { Self(val) }
+ }
}
}
+impl_writeable_primitive!(u128, 16);
impl_writeable_primitive!(u64, 8);
impl_writeable_primitive!(u32, 4);
impl_writeable_primitive!(u16, 2);
);
}
-impl_array!(3); // for rgb
+impl_array!(3); // for rgb, ISO 4712 code
impl_array!(4); // for IPv4
impl_array!(12); // for OnionV2
impl_array!(16); // for IPv6
impl_array!(32); // for channel id & hmac
impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
-impl_array!(COMPACT_SIGNATURE_SIZE); // for Signature
+impl_array!(64); // for ecdsa::Signature and schnorr::Signature
impl_array!(1300); // for OnionPacket.hop_data
-// HashMap
-impl<K, V> Writeable for HashMap<K, V>
- where K: Writeable + Eq + Hash,
- V: Writeable
-{
+// Fixed-size [u16; 8] arrays are written as eight big-endian u16s (16 bytes), no length prefix.
+impl Writeable for [u16; 8] {
#[inline]
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
- (self.len() as u16).write(w)?;
- for (key, value) in self.iter() {
- key.write(w)?;
- value.write(w)?;
+ for v in self.iter() {
+ w.write_all(&v.to_be_bytes())?
}
Ok(())
}
}
-impl<K, V> Readable for HashMap<K, V>
- where K: Readable + Eq + Hash,
- V: MaybeReadable
-{
+impl Readable for [u16; 8] {
#[inline]
fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
- let len: u16 = Readable::read(r)?;
- let mut ret = HashMap::with_capacity(len as usize);
- for _ in 0..len {
- let k = K::read(r)?;
- let v_opt = V::read(r)?;
- if let Some(v) = v_opt {
- if ret.insert(k, v).is_some() {
- return Err(DecodeError::InvalidValue);
+ // Read the full 16 bytes up front, then rebuild each u16 from its big-endian byte pair,
+ // mirroring the `Writeable` impl above.
+ let mut buf = [0u8; 16];
+ r.read_exact(&mut buf)?;
+ let mut res = [0u16; 8];
+ for (idx, v) in res.iter_mut().enumerate() {
+ // Entry `idx` owns bytes 2*idx and 2*idx + 1. Indexing by `idx` alone would decode
+ // overlapping byte pairs and silently ignore the second half of the buffer.
+ *v = (buf[idx * 2] as u16) << 8 | (buf[idx * 2 + 1] as u16)
+ }
+ Ok(res)
+ }
+}
+
+/// A type for variable-length values within TLV record where the length is encoded as part of the record.
+/// Used to prevent encoding the length twice.
+pub struct WithoutLength<T>(pub T);
+
+impl Writeable for WithoutLength<&String> {
+ #[inline]
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ // Raw UTF-8 bytes only; the enclosing TLV record's length delimits the string.
+ w.write_all(self.0.as_bytes())
+ }
+}
+impl Readable for WithoutLength<String> {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ // Consume every remaining byte in the stream, then require valid UTF-8.
+ let v: WithoutLength<Vec<u8>> = Readable::read(r)?;
+ Ok(Self(String::from_utf8(v.0).map_err(|_| DecodeError::InvalidValue)?))
+ }
+}
+impl<'a> From<&'a String> for WithoutLength<&'a String> {
+ fn from(s: &'a String) -> Self { Self(s) }
+}
+
+impl<'a, T: Writeable> Writeable for WithoutLength<&'a Vec<T>> {
+ #[inline]
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ // Elements are written back-to-back with no count prefix; the enclosing record's
+ // length delimits the collection.
+ for ref v in self.0.iter() {
+ v.write(writer)?;
+ }
+ Ok(())
+ }
+}
+
+impl<T: MaybeReadable> Readable for WithoutLength<Vec<T>> {
+ #[inline]
+ fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
+ let mut values = Vec::new();
+ loop {
+ // Track whether this element's read consumed anything so that a clean EOF
+ // (no bytes read) can be told apart from truncation mid-element.
+ let mut track_read = ReadTrackingReader::new(&mut reader);
+ match MaybeReadable::read(&mut track_read) {
+ Ok(Some(v)) => { values.push(v); },
+ Ok(None) => { },
+ // If we failed to read any bytes at all, we reached the end of our TLV
+ // stream and have simply exhausted all entries.
+ Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
+ Err(e) => return Err(e),
+ }
+ }
+ Ok(Self(values))
+ }
+}
+impl<'a, T> From<&'a Vec<T>> for WithoutLength<&'a Vec<T>> {
+ fn from(v: &'a Vec<T>) -> Self { Self(v) }
+}
+
+/// Wraps a cloneable iterator so its items can be written out back-to-back, without a count
+/// prefix and without first collecting them into a `Vec`.
+#[derive(Debug)]
+pub(crate) struct Iterable<'a, I: Iterator<Item = &'a T> + Clone, T: 'a>(pub I);
+
+impl<'a, I: Iterator<Item = &'a T> + Clone, T: 'a + Writeable> Writeable for Iterable<'a, I, T> {
+ #[inline]
+ fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+ // Clone the iterator so writing does not consume the wrapper.
+ for ref v in self.0.clone() {
+ v.write(writer)?;
+ }
+ Ok(())
+ }
+}
+
+// Test-only equality: compare the fully-collected contents of both iterators.
+#[cfg(test)]
+impl<'a, I: Iterator<Item = &'a T> + Clone, T: 'a + PartialEq> PartialEq for Iterable<'a, I, T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.clone().collect::<Vec<_>>() == other.0.clone().collect::<Vec<_>>()
+ }
+}
+
+// Implements `Writeable`/`Readable` for a map type: a `CollectionLength` entry count followed
+// by the key/value pairs. `$constr` builds an empty map given a capacity hint.
+macro_rules! impl_for_map {
+ ($ty: ident, $keybound: ident, $constr: expr) => {
+ impl<K, V> Writeable for $ty<K, V>
+ where K: Writeable + Eq + $keybound, V: Writeable
+ {
+ #[inline]
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ CollectionLength(self.len() as u64).write(w)?;
+ for (key, value) in self.iter() {
+ key.write(w)?;
+ value.write(w)?;
}
+ Ok(())
+ }
+ }
+
+ impl<K, V> Readable for $ty<K, V>
+ where K: Readable + Eq + $keybound, V: MaybeReadable
+ {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let len: CollectionLength = Readable::read(r)?;
+ // NOTE(review): `$constr` preallocates from the untrusted length without the
+ // MAX_BUF_SIZE cap used by the Vec/HashSet impls — confirm this is intended.
+ let mut ret = $constr(len.0 as usize);
+ for _ in 0..len.0 {
+ let k = K::read(r)?;
+ let v_opt = V::read(r)?;
+ // A value that reads as `None` is skipped (its key is consumed but not
+ // stored); duplicate keys make the encoding invalid.
+ if let Some(v) = v_opt {
+ if ret.insert(k, v).is_some() {
+ return Err(DecodeError::InvalidValue);
+ }
+ }
+ }
+ Ok(ret)
}
}
- Ok(ret)
}
}
+impl_for_map!(BTreeMap, Ord, |_| BTreeMap::new());
+impl_for_map!(HashMap, Hash, |len| HashMap::with_capacity(len));
+
// HashSet
impl<T> Writeable for HashSet<T>
where T: Writeable + Eq + Hash
{
#[inline]
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
- (self.len() as u16).write(w)?;
+ CollectionLength(self.len() as u64).write(w)?;
for item in self.iter() {
item.write(w)?;
}
{
#[inline]
fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
- let len: u16 = Readable::read(r)?;
- let mut ret = HashSet::with_capacity(len as usize);
- for _ in 0..len {
+ let len: CollectionLength = Readable::read(r)?;
+ let mut ret = HashSet::with_capacity(cmp::min(len.0 as usize, MAX_BUF_SIZE / core::mem::size_of::<T>()));
+ for _ in 0..len.0 {
if !ret.insert(T::read(r)?) {
return Err(DecodeError::InvalidValue)
}
}
// Vectors
-impl Writeable for Vec<u8> {
- #[inline]
- fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
- (self.len() as u16).write(w)?;
- w.write_all(&self)
- }
-}
+// Implements `Writeable`/`Readable` for `Vec<$ty>`: a `CollectionLength` element count
+// followed by the elements themselves.
+macro_rules! impl_for_vec {
+ ($ty: ty $(, $name: ident)*) => {
+ impl<$($name : Writeable),*> Writeable for Vec<$ty> {
+ #[inline]
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ CollectionLength(self.len() as u64).write(w)?;
+ for elem in self.iter() {
+ elem.write(w)?;
+ }
+ Ok(())
+ }
+ }
-impl Readable for Vec<u8> {
- #[inline]
- fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
- let len: u16 = Readable::read(r)?;
- let mut ret = Vec::with_capacity(len as usize);
- ret.resize(len as usize, 0);
- r.read_exact(&mut ret)?;
- Ok(ret)
+ impl<$($name : Readable),*> Readable for Vec<$ty> {
+ #[inline]
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let len: CollectionLength = Readable::read(r)?;
+ // Cap the up-front allocation at MAX_BUF_SIZE so a bogus length cannot
+ // exhaust memory before the stream runs dry.
+ let mut ret = Vec::with_capacity(cmp::min(len.0 as usize, MAX_BUF_SIZE / core::mem::size_of::<$ty>()));
+ for _ in 0..len.0 {
+ // Elements that read as `None` are dropped but still count toward `len`.
+ if let Some(val) = MaybeReadable::read(r)? {
+ ret.push(val);
+ }
+ }
+ Ok(ret)
+ }
+ }
}
}
-impl Writeable for Vec<Signature> {
+
+// Byte vectors get a dedicated impl: length prefix, then one bulk write instead of the
+// per-element path used by `impl_for_vec`.
+impl Writeable for Vec<u8> {
#[inline]
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
- (self.len() as u16).write(w)?;
- for e in self.iter() {
- e.write(w)?;
- }
- Ok(())
+ CollectionLength(self.0 := ()).write(w)?;
+ w.write_all(&self)
}
}
-impl Readable for Vec<Signature> {
+impl Readable for Vec<u8> {
#[inline]
fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
- let len: u16 = Readable::read(r)?;
- let byte_size = (len as usize)
- .checked_mul(COMPACT_SIGNATURE_SIZE)
- .ok_or(DecodeError::BadLengthDescriptor)?;
- if byte_size > MAX_BUF_SIZE {
- return Err(DecodeError::BadLengthDescriptor);
+ let mut len: CollectionLength = Readable::read(r)?;
+ let mut ret = Vec::new();
+ // Grow and fill in MAX_BUF_SIZE-sized chunks so an attacker-controlled length cannot
+ // force a huge up-front allocation; a short stream fails in read_exact instead.
+ while len.0 > 0 {
+ let readamt = cmp::min(len.0 as usize, MAX_BUF_SIZE);
+ let readstart = ret.len();
+ ret.resize(readstart + readamt, 0);
+ r.read_exact(&mut ret[readstart..])?;
+ len.0 -= readamt as u64;
}
- let mut ret = Vec::with_capacity(len as usize);
- for _ in 0..len { ret.push(Readable::read(r)?); }
Ok(ret)
}
}
+// Concrete `Vec` serialization instantiations.
+impl_for_vec!(ecdsa::Signature);
+impl_for_vec!(crate::ln::channelmanager::MonitorUpdateCompletionAction);
+impl_for_vec!((A, B), A, B);
+
impl Writeable for Script {
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
(self.len() as u16).write(w)?;
}
}
-impl Writeable for Signature {
+impl Writeable for ecdsa::Signature {
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ // 64-byte compact encoding (not DER).
self.serialize_compact().write(w)
}
+ // NOTE(review): the explicit `serialized_length` override (COMPACT_SIGNATURE_SIZE) is
+ // being removed here — confirm the default length computation is acceptable.
- #[inline]
- fn serialized_length(&self) -> usize {
- COMPACT_SIGNATURE_SIZE
- }
}
-impl Readable for Signature {
+impl Readable for ecdsa::Signature {
fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
let buf: [u8; COMPACT_SIGNATURE_SIZE] = Readable::read(r)?;
+ // Reject any 64-byte string secp256k1 considers an invalid compact signature.
- match Signature::from_compact(&buf) {
+ match ecdsa::Signature::from_compact(&buf) {
+ Ok(sig) => Ok(sig),
+ Err(_) => return Err(DecodeError::InvalidValue),
+ }
+ }
+}
+
+impl Writeable for schnorr::Signature {
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ // `as_ref()` yields the raw 64 signature bytes.
+ self.as_ref().write(w)
+ }
+}
+
+impl Readable for schnorr::Signature {
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let buf: [u8; SCHNORR_SIGNATURE_SIZE] = Readable::read(r)?;
+ // Reject any 64-byte string secp256k1 considers an invalid Schnorr signature.
+ match schnorr::Signature::from_slice(&buf) {
Ok(sig) => Ok(sig),
Err(_) => return Err(DecodeError::InvalidValue),
}
}
}
+// `ChainHash` is serialized as its raw 32 bytes, with no length prefix.
+impl Writeable for ChainHash {
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ w.write_all(self.as_bytes())
+ }
+}
+
+impl Readable for ChainHash {
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let buf: [u8; 32] = Readable::read(r)?;
+ Ok(ChainHash::from(&buf[..]))
+ }
+}
+
impl Writeable for OutPoint {
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
self.txid.write(w)?;
($bitcoin_type: ty) => {
impl Writeable for $bitcoin_type {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
- match self.consensus_encode(WriterWriteAdaptor(writer)) {
+ match self.consensus_encode(&mut WriterWriteAdaptor(writer)) {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
}
}
+// 4-tuples are (de)serialized element-wise, in order, with no framing between elements.
+impl<A: Readable, B: Readable, C: Readable, D: Readable> Readable for (A, B, C, D) {
+ fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+ let a: A = Readable::read(r)?;
+ let b: B = Readable::read(r)?;
+ let c: C = Readable::read(r)?;
+ let d: D = Readable::read(r)?;
+ Ok((a, b, c, d))
+ }
+}
+impl<A: Writeable, B: Writeable, C: Writeable, D: Writeable> Writeable for (A, B, C, D) {
+ fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ self.0.write(w)?;
+ self.1.write(w)?;
+ self.2.write(w)?;
+ self.3.write(w)
+ }
+}
+
impl Writeable for () {
fn write<W: Writer>(&self, _: &mut W) -> Result<(), io::Error> {
Ok(())
impl Writeable for String {
#[inline]
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+ // CollectionLength (rather than a bare u16) supports strings longer than the old
+ // two-byte length limit; the bytes are the string's UTF-8 encoding.
- (self.len() as u16).write(w)?;
+ CollectionLength(self.len() as u64).write(w)?;
w.write_all(self.as_bytes())
}
}
/// Only the character set and length will be validated.
/// The character set consists of ASCII alphanumeric characters, hyphens, and periods.
/// Its length is guaranteed to be representable by a single byte.
-/// This serialization is used by BOLT 7 hostnames.
-#[derive(Clone, Debug, PartialEq)]
+/// This serialization is used by [`BOLT 7`] hostnames.
+///
+/// [`BOLT 7`]: https://github.com/lightning/bolts/blob/master/07-routing-gossip.md
+#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Hostname(String);
impl Hostname {
/// Returns the length of the hostname.
#[cfg(test)]
mod tests {
use core::convert::TryFrom;
- use util::ser::{Readable, Hostname, Writeable};
+ use crate::util::ser::{Readable, Hostname, Writeable};
#[test]
fn hostname_conversion() {