Merge pull request #1735 from naumenkogs/2022-09-prune-channels-if-either-not-upd
[rust-lightning] / lightning / src / util / ser.rs
index bec915bae4efc502381068ce3544d6ef29d58021..5ff6dc86a0bf91d20df6fb110f67185f8cd5f355 100644
 //! A very simple serialization framework which is used to serialize/deserialize messages as well
 //! as ChannelManagers and ChannelMonitors.
 
-use prelude::*;
-use io::{self, Read, Write};
-use io_extras::{copy, sink};
+use crate::prelude::*;
+use crate::io::{self, Read, Write};
+use crate::io_extras::{copy, sink};
 use core::hash::Hash;
-use sync::Mutex;
+use crate::sync::Mutex;
 use core::cmp;
 use core::convert::TryFrom;
 use core::ops::Deref;
@@ -30,10 +30,10 @@ use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::{Txid, BlockHash};
 use core::marker::Sized;
 use core::time::Duration;
-use ln::msgs::DecodeError;
-use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
+use crate::ln::msgs::DecodeError;
+use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
-use util::byte_utils::{be48_to_array, slice_to_be48};
+use crate::util::byte_utils::{be48_to_array, slice_to_be48};
 
 /// serialization buffer size
 pub const MAX_BUF_SIZE: usize = 64 * 1024;
@@ -399,8 +399,8 @@ impl Readable for BigSize {
 /// In TLV we occasionally send fields which only consist of, or potentially end with, a
 /// variable-length integer which is simply truncated by skipping high zero bytes. This type
 /// encapsulates such integers implementing Readable/Writeable for them.
-#[cfg_attr(test, derive(PartialEq, Debug))]
-pub(crate) struct HighZeroBytesDroppedVarInt<T>(pub T);
+#[cfg_attr(test, derive(PartialEq, Eq, Debug))]
+pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
 
 macro_rules! impl_writeable_primitive {
        ($val_type:ty, $len: expr) => {
@@ -410,7 +410,7 @@ macro_rules! impl_writeable_primitive {
                                writer.write_all(&self.to_be_bytes())
                        }
                }
-               impl Writeable for HighZeroBytesDroppedVarInt<$val_type> {
+               impl Writeable for HighZeroBytesDroppedBigSize<$val_type> {
                        #[inline]
                        fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
                                // Skip any full leading 0 bytes when writing (in BE):
@@ -425,9 +425,9 @@ macro_rules! impl_writeable_primitive {
                                Ok(<$val_type>::from_be_bytes(buf))
                        }
                }
-               impl Readable for HighZeroBytesDroppedVarInt<$val_type> {
+               impl Readable for HighZeroBytesDroppedBigSize<$val_type> {
                        #[inline]
-                       fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedVarInt<$val_type>, DecodeError> {
+                       fn read<R: Read>(reader: &mut R) -> Result<HighZeroBytesDroppedBigSize<$val_type>, DecodeError> {
                                // We need to accept short reads (read_len == 0) as "EOF" and handle them as simply
                                // the high bytes being dropped. To do so, we start reading into the middle of buf
                                // and then convert the appropriate number of bytes with extra high bytes out of
@@ -443,7 +443,7 @@ macro_rules! impl_writeable_primitive {
                                        let first_byte = $len - ($len - total_read_len);
                                        let mut bytes = [0; $len];
                                        bytes.copy_from_slice(&buf[first_byte..first_byte + $len]);
-                                       Ok(HighZeroBytesDroppedVarInt(<$val_type>::from_be_bytes(bytes)))
+                                       Ok(HighZeroBytesDroppedBigSize(<$val_type>::from_be_bytes(bytes)))
                                } else {
                                        // If the encoding had extra zero bytes, return a failure even though we know
                                        // what they meant (as the TLV test vectors require this)
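A matching sketch of the read side, including the rejection of non-minimal encodings (extra leading zero bytes) noted in the comment above. Again illustrative only, with hypothetical helper names rather than the crate's Reader-based impl:

```rust
// Illustrative decoder: pad the value back out to 8 bytes, and reject an
// encoding with extra leading zero bytes, as the read impl above does.
fn read_truncated_be(bytes: &[u8]) -> Result<u64, ()> {
    if bytes.len() > 8 {
        return Err(()); // too long for a u64
    }
    if bytes.first() == Some(&0) {
        return Err(()); // encoding had extra zero bytes
    }
    let mut buf = [0u8; 8];
    buf[8 - bytes.len()..].copy_from_slice(bytes);
    Ok(u64::from_be_bytes(buf))
}

fn main() {
    assert_eq!(read_truncated_be(&[]), Ok(0));
    assert_eq!(read_truncated_be(&[0x01, 0x02]), Ok(258));
    assert!(read_truncated_be(&[0x00, 0x01, 0x02]).is_err());
}
```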
@@ -523,6 +523,29 @@ impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
 impl_array!(COMPACT_SIGNATURE_SIZE); // for Signature
 impl_array!(1300); // for OnionPacket.hop_data
 
+impl Writeable for [u16; 8] {
+       #[inline]
+       fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+               for v in self.iter() {
+                       w.write_all(&v.to_be_bytes())?
+               }
+               Ok(())
+       }
+}
+
+impl Readable for [u16; 8] {
+       #[inline]
+       fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+               let mut buf = [0u8; 16];
+               r.read_exact(&mut buf)?;
+               let mut res = [0u16; 8];
+               for (idx, v) in res.iter_mut().enumerate() {
+                       *v = (buf[idx * 2] as u16) << 8 | (buf[idx * 2 + 1] as u16)
+               }
+               Ok(res)
+       }
+}
+
 // HashMap
 impl<K, V> Writeable for HashMap<K, V>
        where K: Writeable + Eq + Hash,
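A standalone sketch of the 16-byte big-endian layout the new [u16; 8] impls above use: each element is written as two big-endian bytes, so element idx occupies bytes idx*2 and idx*2 + 1. The helper names are hypothetical and do not use the crate's Writeable/Readable traits:

```rust
// Sketch of the [u16; 8] wire layout: 8 big-endian u16s packed into 16 bytes.
fn write_u16x8(vals: &[u16; 8]) -> [u8; 16] {
    let mut out = [0u8; 16];
    for (idx, v) in vals.iter().enumerate() {
        out[idx * 2..idx * 2 + 2].copy_from_slice(&v.to_be_bytes());
    }
    out
}

fn read_u16x8(buf: &[u8; 16]) -> [u16; 8] {
    let mut res = [0u16; 8];
    for (idx, v) in res.iter_mut().enumerate() {
        *v = u16::from_be_bytes([buf[idx * 2], buf[idx * 2 + 1]]);
    }
    res
}

fn main() {
    let vals = [0x0001u16, 0x0203, 0x0405, 0x0607, 0x0809, 0x0a0b, 0x0c0d, 0x0e0f];
    // Round-trip: serializing then parsing yields the original array.
    assert_eq!(read_u16x8(&write_u16x8(&vals)), vals);
}
```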
@@ -956,7 +979,7 @@ impl Readable for String {
 /// The character set consists of ASCII alphanumeric characters, hyphens, and periods.
 /// Its length is guaranteed to be representable by a single byte.
 /// This serialization is used by BOLT 7 hostnames.
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Hostname(String);
 impl Hostname {
        /// Returns the length of the hostname.
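A sketch of the documented Hostname constraints (ASCII alphanumerics, hyphens, and periods, with a length that fits in a single byte). The helper name is hypothetical and the real type may enforce additional rules beyond what the doc comment states:

```rust
// Illustrative validity check matching the documented Hostname constraints.
fn is_valid_hostname(s: &str) -> bool {
    s.len() <= u8::MAX as usize
        && s.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '.')
}

fn main() {
    assert!(is_valid_hostname("node.example.com"));
    assert!(!is_valid_hostname("under_score")); // '_' is outside the allowed set
}
```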
@@ -1039,7 +1062,7 @@ impl Readable for Duration {
 #[cfg(test)]
 mod tests {
        use core::convert::TryFrom;
-       use util::ser::{Readable, Hostname, Writeable};
+       use crate::util::ser::{Readable, Hostname, Writeable};
 
        #[test]
        fn hostname_conversion() {