Correct async `lightning-background-processor` exit check docs
diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index 5b1a86a6a95f27a276f1dc221bd705f57fbf8ed4..1bf30fa9f72e421882b1fe612da08c30b65a29eb 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
 //! A very simple serialization framework which is used to serialize/deserialize messages as well
 //! as ChannelsManagers and ChannelMonitors.
 
-use prelude::*;
-use io::{self, Read, Write};
-use io_extras::{copy, sink};
+use crate::prelude::*;
+use crate::io::{self, Read, Write};
+use crate::io_extras::{copy, sink};
 use core::hash::Hash;
-use sync::Mutex;
+use crate::sync::Mutex;
 use core::cmp;
 use core::convert::TryFrom;
 use core::ops::Deref;
@@ -30,10 +30,10 @@ use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::{Txid, BlockHash};
 use core::marker::Sized;
 use core::time::Duration;
-use ln::msgs::DecodeError;
-use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
+use crate::ln::msgs::DecodeError;
+use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
-use util::byte_utils::{be48_to_array, slice_to_be48};
+use crate::util::byte_utils::{be48_to_array, slice_to_be48};
 
 /// serialization buffer size
 pub const MAX_BUF_SIZE: usize = 64 * 1024;
@@ -244,6 +244,14 @@ pub(crate) trait LengthReadableArgs<P> where Self: Sized
        fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
+/// A trait that various higher-level rust-lightning types implement allowing them to be read in
+/// from a LengthRead, which provides the total number of bytes available to be read.
+pub(crate) trait LengthReadable where Self: Sized
+{
+       /// Reads a Self in from the given LengthRead
+       fn read<R: LengthRead>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
 /// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
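
For context, a minimal sketch of how a type might use the new `LengthReadable` trait. The `LengthRead`/`LengthReadable` definitions and the `OpaqueBlob`/`SliceReader` types below are illustrative stand-ins written for this note, not the crate's actual items:

```rust
use std::io::Read;

// Illustrative stand-ins for the crate-internal traits (not the real definitions).
trait LengthRead: Read {
    /// Total number of bytes this reader will yield.
    fn total_bytes(&self) -> u64;
}

trait LengthReadable: Sized {
    fn read<R: LengthRead>(reader: &mut R) -> Result<Self, ()>;
}

/// A type whose encoding is simply "all remaining bytes", so decoding
/// needs to know the total length up front.
struct OpaqueBlob(Vec<u8>);

impl LengthReadable for OpaqueBlob {
    fn read<R: LengthRead>(reader: &mut R) -> Result<Self, ()> {
        let mut buf = vec![0u8; reader.total_bytes() as usize];
        reader.read_exact(&mut buf).map_err(|_| ())?;
        Ok(OpaqueBlob(buf))
    }
}

// A trivial LengthRead over a byte slice, for demonstration only.
struct SliceReader<'a> { data: &'a [u8], total: u64 }
impl<'a> Read for SliceReader<'a> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> { self.data.read(buf) }
}
impl<'a> LengthRead for SliceReader<'a> {
    fn total_bytes(&self) -> u64 { self.total }
}

fn main() {
    let bytes = [1u8, 2, 3, 4];
    let mut reader = SliceReader { data: &bytes, total: bytes.len() as u64 };
    let blob = OpaqueBlob::read(&mut reader).unwrap();
    assert_eq!(blob.0, bytes);
}
```
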
@@ -261,6 +269,15 @@ impl<T: Readable> MaybeReadable for T {
        }
 }
 
+/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a
+/// Read, given some additional set of arguments which is required to deserialize.
+///
+/// (C-not exported) as we only export serialization to/from byte arrays instead
+pub trait MaybeReadableArgs<P> {
+       /// Reads a Self in from the given Read
+       fn read<R: Read>(reader: &mut R, params: P) -> Result<Option<Self>, DecodeError> where Self: Sized;
+}
+
 pub(crate) struct OptionDeserWrapper<T: Readable>(pub Option<T>);
 impl<T: Readable> Readable for OptionDeserWrapper<T> {
        #[inline]
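
A sketch of the kind of implementation `MaybeReadableArgs` enables: deserialization that needs caller-supplied parameters and may legitimately yield `Ok(None)` rather than an error. All names below are hypothetical stand-ins for illustration:

```rust
use std::io::Read;

/// Illustrative error type standing in for lightning's DecodeError.
#[derive(Debug)]
struct DecodeError;

/// Stand-in for the exported trait: read Self (or None) given extra params.
trait MaybeReadableArgs<P> {
    fn read<R: Read>(reader: &mut R, params: P) -> Result<Option<Self>, DecodeError> where Self: Sized;
}

/// A tagged record whose meaning depends on a bound supplied by the caller.
struct CustomEvent { tag: u8 }

impl MaybeReadableArgs<u8> for CustomEvent {
    fn read<R: Read>(reader: &mut R, max_known_tag: u8) -> Result<Option<Self>, DecodeError> {
        let mut tag = [0u8; 1];
        reader.read_exact(&mut tag).map_err(|_| DecodeError)?;
        if tag[0] > max_known_tag {
            // Unknown-but-tolerated data: report "nothing to surface" rather than an error.
            return Ok(None);
        }
        Ok(Some(CustomEvent { tag: tag[0] }))
    }
}

fn main() {
    let bytes = [7u8];
    assert!(CustomEvent::read(&mut &bytes[..], 10).unwrap().is_some());
    assert!(CustomEvent::read(&mut &bytes[..], 3).unwrap().is_none());
}
```
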
@@ -391,8 +408,8 @@ impl Readable for BigSize {
 /// In TLV we occasionally send fields which only consist of, or potentially end with, a
 /// variable-length integer which is simply truncated by skipping high zero bytes. This type
 /// encapsulates such integers implementing Readable/Writeable for them.
-#[cfg_attr(test, derive(PartialEq, Debug))]
-pub(crate) struct HighZeroBytesDroppedVarInt<T>(pub T);
+#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
+pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
 
 macro_rules! impl_writeable_primitive {
        ($val_type:ty, $len: expr) => {
@@ -402,7 +419,7 @@ macro_rules! impl_writeable_primitive {
                                writer.write_all(&self.to_be_bytes())
                        }
                }
-               impl Writeable for HighZeroBytesDroppedVarInt<$val_type> {
+               impl Writeable for HighZeroBytesDroppedBigSize<$val_type> {
                        #[inline]
                        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
                                // Skip any full leading 0 bytes when writing (in BE):
@@ -417,9 +434,9 @@ macro_rules! impl_writeable_primitive {
                                Ok(<$val_type>::from_be_bytes(buf))
                        }
                }
-               impl Readable for HighZeroBytesDroppedVarInt<$val_type> {
+               impl Readable for HighZeroBytesDroppedBigSize<$val_type> {
                        #[inline]
-                       fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedVarInt<$val_type>, DecodeError> {
+                       fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedBigSize<$val_type>, DecodeError> {
                                // We need to accept short reads (read_len == 0) as "EOF" and handle them as simply
                                // the high bytes being dropped. To do so, we start reading into the middle of buf
                                // and then convert the appropriate number of bytes with extra high bytes out of
@@ -435,7 +452,7 @@ macro_rules! impl_writeable_primitive {
                                        let first_byte = $len - ($len - total_read_len);
                                        let mut bytes = [0; $len];
                                        bytes.copy_from_slice(&buf[first_byte..first_byte + $len]);
-                                       Ok(HighZeroBytesDroppedVarInt(<$val_type>::from_be_bytes(bytes)))
+                                       Ok(HighZeroBytesDroppedBigSize(<$val_type>::from_be_bytes(bytes)))
                                } else {
                                        // If the encoding had extra zero bytes, return a failure even though we know
                                        // what they meant (as the TLV test vectors require this)
@@ -515,6 +532,29 @@ impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
 impl_array!(COMPACT_SIGNATURE_SIZE); // for Signature
 impl_array!(1300); // for OnionPacket.hop_data
 
+impl Writeable for [u16; 8] {
+       #[inline]
+       fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+               for v in self.iter() {
+                       w.write_all(&v.to_be_bytes())?
+               }
+               Ok(())
+       }
+}
+
+impl Readable for [u16; 8] {
+       #[inline]
+       fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+               let mut buf = [0u8; 16];
+               r.read_exact(&mut buf)?;
+               let mut res = [0u16; 8];
+               for (idx, v) in res.iter_mut().enumerate() {
+                       *v = (buf[idx * 2] as u16) << 8 | (buf[idx * 2 + 1] as u16)
+               }
+               Ok(res)
+       }
+}
+
 // HashMap
 impl<K, V> Writeable for HashMap<K, V>
        where K: Writeable + Eq + Hash,
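
The new `[u16; 8]` impls serialize the array as 16 big-endian bytes, two per element. A standalone round-trip sketch of that layout, using hypothetical helpers:

```rust
/// Pack a [u16; 8] into the 16-byte big-endian layout used above.
fn to_bytes(vals: &[u16; 8]) -> [u8; 16] {
    let mut out = [0u8; 16];
    for (idx, v) in vals.iter().enumerate() {
        out[idx * 2..idx * 2 + 2].copy_from_slice(&v.to_be_bytes());
    }
    out
}

/// Unpack 16 big-endian bytes back into a [u16; 8].
fn from_bytes(buf: &[u8; 16]) -> [u16; 8] {
    let mut res = [0u16; 8];
    for (idx, v) in res.iter_mut().enumerate() {
        *v = (buf[idx * 2] as u16) << 8 | (buf[idx * 2 + 1] as u16);
    }
    res
}

fn main() {
    let vals = [0x0102u16, 0x0304, 0x0506, 0x0708, 0x090a, 0x0b0c, 0x0d0e, 0x0f10];
    let encoded = to_bytes(&vals);
    assert_eq!(encoded[0], 0x01);
    assert_eq!(encoded[1], 0x02);
    assert_eq!(from_bytes(&encoded), vals);
}
```
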
@@ -852,7 +892,7 @@ macro_rules! impl_consensus_ser {
        ($bitcoin_type: ty) => {
                impl Writeable for $bitcoin_type {
                        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
-                               match self.consensus_encode(WriterWriteAdaptor(writer)) {
+                               match self.consensus_encode(&mut WriterWriteAdaptor(writer)) {
                                        Ok(_) => Ok(()),
                                        Err(e) => Err(e),
                                }
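
The call site now passes the adaptor by mutable reference, matching a consensus-encoding API that takes `&mut W` rather than consuming the writer by value. A minimal sketch of the pattern, with stand-in types in place of `WriterWriteAdaptor` and the bitcoin crate's `consensus_encode`:

```rust
use std::io::{self, Write};

/// Stand-in adaptor: forwards std::io::Write calls to an inner writer.
struct WriteAdaptor<'a, W: Write>(&'a mut W);
impl<'a, W: Write> Write for WriteAdaptor<'a, W> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.write(buf) }
    fn flush(&mut self) -> io::Result<()> { self.0.flush() }
}

/// Stand-in for an encoding function whose signature takes `&mut W`.
fn consensus_encode_like<W: Write>(data: &[u8], writer: &mut W) -> io::Result<usize> {
    writer.write_all(data)?;
    Ok(data.len())
}

fn main() -> io::Result<()> {
    let mut out = Vec::new();
    // Mirrors the diff: pass `&mut Adaptor(...)` rather than `Adaptor(...)`.
    consensus_encode_like(&[0xde, 0xad], &mut WriteAdaptor(&mut out))?;
    assert_eq!(out, vec![0xde, 0xad]);
    Ok(())
}
```
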
@@ -948,7 +988,7 @@ impl Readable for String {
 /// The character set consists of ASCII alphanumeric characters, hyphens, and periods.
 /// Its length is guaranteed to be representable by a single byte.
 /// This serialization is used by BOLT 7 hostnames.
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Hostname(String);
 impl Hostname {
        /// Returns the length of the hostname.
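
A sketch of the validity rule stated in the doc comment above, written as a hypothetical helper rather than `Hostname`'s actual constructor:

```rust
/// True if the string uses only ASCII alphanumerics, hyphens, and periods,
/// and its length fits in a single byte (illustrative check only).
fn is_valid_hostname(s: &str) -> bool {
    s.len() <= 255 && s.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '.')
}

fn main() {
    assert!(is_valid_hostname("a-test.example.com"));
    assert!(!is_valid_hostname("bad host"));            // space not allowed
    assert!(!is_valid_hostname(&"x".repeat(300)));      // too long for one length byte
}
```
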
@@ -1031,7 +1071,7 @@ impl Readable for Duration {
 #[cfg(test)]
 mod tests {
        use core::convert::TryFrom;
-       use util::ser::{Readable, Hostname, Writeable};
+       use crate::util::ser::{Readable, Hostname, Writeable};
 
        #[test]
        fn hostname_conversion() {