X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Futil%2Fser.rs;h=84d1a2e084feb65e1472969875a586c81ccd08e5;hb=refs%2Fheads%2F2023-01-ser-cleanups;hp=3b82da8446da033d5d579cf7ec5a7d87e85225f8;hpb=ad82c9ea5bf735068c11c8eebdf68ac57f135eec;p=rust-lightning

diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index 3b82da84..84d1a2e0 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -8,20 +8,27 @@
 // licenses.
 
 //! A very simple serialization framework which is used to serialize/deserialize messages as well
-//! as ChannelsManagers and ChannelMonitors.
-
-use prelude::*;
-use io::{self, Read, Write};
-use io_extras::{copy, sink};
+//! as [`ChannelManager`]s and [`ChannelMonitor`]s.
+//!
+//! [`ChannelManager`]: crate::ln::channelmanager::ChannelManager
+//! [`ChannelMonitor`]: crate::chain::channelmonitor::ChannelMonitor
+
+use crate::prelude::*;
+use crate::io::{self, Read, Seek, Write};
+use crate::io_extras::{copy, sink};
 use core::hash::Hash;
-use sync::Mutex;
+use crate::sync::Mutex;
 use core::cmp;
 use core::convert::TryFrom;
 use core::ops::Deref;
+
+use alloc::collections::BTreeMap;
+
 use bitcoin::secp256k1::{PublicKey, SecretKey};
-use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE};
-use bitcoin::secp256k1::ecdsa::Signature;
+use bitcoin::secp256k1::constants::{PUBLIC_KEY_SIZE, SECRET_KEY_SIZE, COMPACT_SIGNATURE_SIZE, SCHNORR_SIGNATURE_SIZE};
+use bitcoin::secp256k1::ecdsa;
+use bitcoin::secp256k1::schnorr;
+use bitcoin::blockdata::constants::ChainHash;
 use bitcoin::blockdata::script::Script;
 use bitcoin::blockdata::transaction::{OutPoint, Transaction, TxOut};
 use bitcoin::consensus;
@@ -30,16 +37,16 @@ use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::{Txid, BlockHash};
 use core::marker::Sized;
 use core::time::Duration;
-use ln::msgs::DecodeError;
-use ln::{PaymentPreimage, PaymentHash, PaymentSecret};
+use crate::ln::msgs::DecodeError;
+use crate::ln::{PaymentPreimage, PaymentHash, PaymentSecret};
 
-use util::byte_utils::{be48_to_array, slice_to_be48};
+use crate::util::byte_utils::{be48_to_array, slice_to_be48};
 
 /// serialization buffer size
 pub const MAX_BUF_SIZE: usize = 64 * 1024;
 
-/// A simplified version of std::io::Write that exists largely for backwards compatibility.
-/// An impl is provided for any type that also impls std::io::Write.
+/// A simplified version of [`std::io::Write`] that exists largely for backwards compatibility.
+/// An impl is provided for any type that also impls [`std::io::Write`].
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
 pub trait Writer {
@@ -82,7 +89,7 @@ impl Writer for VecWriter {
 
 /// Writer that only tracks the amount of data written - useful if you need to calculate the length
 /// of some data when serialized but don't yet need the full data.
-pub(crate) struct LengthCalculatingWriter(pub usize);
+pub struct LengthCalculatingWriter(pub usize);
 impl Writer for LengthCalculatingWriter {
 	#[inline]
 	fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
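The hunk above makes `LengthCalculatingWriter` public: a `Writer` that discards the bytes it is handed and only counts them, so a serialized length can be computed without allocating a buffer. As a rough standalone sketch of the same idea using only `std::io::Write` (the `ByteCounter` name is illustrative and not part of the crate):

use std::io::{self, Write};

// A byte counter in the spirit of LengthCalculatingWriter: it drops the data
// and only tracks how many bytes were written.
struct ByteCounter(usize);

impl Write for ByteCounter {
	fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
		self.0 += buf.len();
		Ok(buf.len())
	}
	fn flush(&mut self) -> io::Result<()> { Ok(()) }
}

fn main() -> io::Result<()> {
	let mut counter = ByteCounter(0);
	counter.write_all(&[0u8; 32])?;	// e.g. a 32-byte hash
	counter.write_all(b"payload")?;	// e.g. a variable-length field
	assert_eq!(counter.0, 39);
	println!("serialized length would be {} bytes", counter.0);
	Ok(())
}
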
@@ -91,23 +98,26 @@ impl Writer for LengthCalculatingWriter {
 	}
 }
 
-/// Essentially std::io::Take but a bit simpler and with a method to walk the underlying stream
+/// Essentially [`std::io::Take`] but a bit simpler and with a method to walk the underlying stream
 /// forward to ensure we always consume exactly the fixed length specified.
-pub(crate) struct FixedLengthReader<R: Read> {
+pub struct FixedLengthReader<R: Read> {
 	read: R,
 	bytes_read: u64,
 	total_bytes: u64,
 }
 impl<R: Read> FixedLengthReader<R> {
+	/// Returns a new [`FixedLengthReader`].
 	pub fn new(read: R, total_bytes: u64) -> Self {
 		Self { read, bytes_read: 0, total_bytes }
 	}
 
+	/// Returns whether some bytes are remaining or not.
 	#[inline]
 	pub fn bytes_remain(&mut self) -> bool {
 		self.bytes_read != self.total_bytes
 	}
 
+	/// Consumes the remaining bytes.
 	#[inline]
 	pub fn eat_remaining(&mut self) -> Result<(), DecodeError> {
 		copy(self, &mut sink()).unwrap();
@@ -143,13 +153,15 @@ impl<T: Read> LengthRead for FixedLengthReader<T> {
 	}
 }
 
-/// A Read which tracks whether any bytes have been read at all. This allows us to distinguish
+/// A [`Read`] implementation which tracks whether any bytes have been read at all. This allows us to distinguish
 /// between "EOF reached before we started" and "EOF reached mid-read".
-pub(crate) struct ReadTrackingReader<R: Read> {
+pub struct ReadTrackingReader<R: Read> {
 	read: R,
+	/// Returns whether we have read from this reader or not yet.
 	pub have_read: bool,
 }
 impl<R: Read> ReadTrackingReader<R> {
+	/// Returns a new [`ReadTrackingReader`].
 	pub fn new(read: R) -> Self {
 		Self { read, have_read: false }
 	}
@@ -168,21 +180,21 @@ impl<R: Read> Read for ReadTrackingReader<R> {
 	}
 }
 
-/// A trait that various rust-lightning types implement allowing them to be written out to a Writer
+/// A trait that various LDK types implement allowing them to be written out to a [`Writer`].
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
 pub trait Writeable {
-	/// Writes self out to the given Writer
+	/// Writes `self` out to the given [`Writer`].
 	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error>;
 
-	/// Writes self out to a Vec<u8>
+	/// Writes `self` out to a `Vec<u8>`.
 	fn encode(&self) -> Vec<u8> {
 		let mut msg = VecWriter(Vec::new());
 		self.write(&mut msg).unwrap();
 		msg.0
 	}
 
-	/// Writes self out to a Vec<u8>
+	/// Writes `self` out to a `Vec<u8>`.
 	#[cfg(test)]
 	fn encode_with_len(&self) -> Vec<u8> {
 		let mut msg = VecWriter(Vec::new());
@@ -208,57 +220,64 @@ impl<'a, T: Writeable> Writeable for &'a T {
 	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> { (*self).write(writer) }
 }
 
-/// A trait that various rust-lightning types implement allowing them to be read in from a Read
+/// A trait that various LDK types implement allowing them to be read in from a [`Read`].
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
 pub trait Readable
 	where Self: Sized
 {
-	/// Reads a Self in from the given Read
+	/// Reads a `Self` in from the given [`Read`].
 	fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError>;
 }
 
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
-/// from a Read given some additional set of arguments which is required to deserialize.
+/// A trait that various LDK types implement allowing them to be read in from a
+/// [`Read`]` + `[`Seek`].
+pub(crate) trait SeekReadable where Self: Sized {
+	/// Reads a `Self` in from the given [`Read`].
+	fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, DecodeError>;
+}
+
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a [`Read`] given some additional set of arguments which is required to deserialize.
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
 pub trait ReadableArgs<P>
 	where Self: Sized
 {
-	/// Reads a Self in from the given Read
+	/// Reads a `Self` in from the given [`Read`].
	fn read<R: Read>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
-/// A std::io::Read that also provides the total bytes available to read.
+/// A [`std::io::Read`] that also provides the total bytes available to be read.
 pub(crate) trait LengthRead: Read {
-	/// The total number of bytes available to read.
+	/// The total number of bytes available to be read.
 	fn total_bytes(&self) -> u64;
 }
 
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
+/// A trait that various higher-level LDK types implement allowing them to be read in
 /// from a Read given some additional set of arguments which is required to deserialize, requiring
 /// the implementer to provide the total length of the read.
 pub(crate) trait LengthReadableArgs<P> where Self: Sized
 {
-	/// Reads a Self in from the given LengthRead
+	/// Reads a `Self` in from the given [`LengthRead`].
 	fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
-/// A trait that various higher-level rust-lightning types implement allowing them to be read in
-/// from a Read, requiring the implementer to provide the total length of the read.
+/// A trait that various higher-level LDK types implement allowing them to be read in
+/// from a [`Read`], requiring the implementer to provide the total length of the read.
 pub(crate) trait LengthReadable where Self: Sized
 {
-	/// Reads a Self in from the given LengthRead
+	/// Reads a `Self` in from the given [`LengthRead`].
 	fn read<R: LengthRead>(reader: &mut R) -> Result<Self, DecodeError>;
 }
 
-/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
+/// A trait that various LDK types implement allowing them to (maybe) be read in from a [`Read`].
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
 pub trait MaybeReadable
 	where Self: Sized
 {
-	/// Reads a Self in from the given Read
+	/// Reads a `Self` in from the given [`Read`].
 	fn read<R: Read>(reader: &mut R) -> Result<Option<Self>, DecodeError>;
 }
 
@@ -269,62 +288,21 @@ impl<T: Readable> MaybeReadable for T {
 	}
 }
 
-/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a
-/// Read, given some additional set of arguments which is required to deserialize.
-///
-/// (C-not exported) as we only export serialization to/from byte arrays instead
-pub trait MaybeReadableArgs<P> {
-	/// Reads a Self in from the given Read
-	fn read<R: Read>(reader: &mut R, params: P) -> Result<Option<Self>, DecodeError> where Self: Sized;
-}
-
-pub(crate) struct OptionDeserWrapper<T: Readable>(pub Option<T>);
+/// Wrapper to read a required (non-optional) TLV record.
+pub struct OptionDeserWrapper<T: Readable>(pub Option<T>);
 impl<T: Readable> Readable for OptionDeserWrapper<T> {
 	#[inline]
 	fn read<R: Read>(reader: &mut R) -> Result<Self, DecodeError> {
 		Ok(Self(Some(Readable::read(reader)?)))
 	}
 }
-/// When handling default_values, we want to map the default-value T directly
-/// to a OptionDeserWrapper<T> in a way that works for `field: T = t;` as
+/// When handling `default_values`, we want to map the default-value T directly
+/// to a `OptionDeserWrapper<T>` in a way that works for `field: T = t;` as
 /// well. Thus, we assume `Into<T> for T` does nothing and use that.
 impl<T: Readable> From<T> for OptionDeserWrapper<T> {
 	fn from(t: T) -> OptionDeserWrapper<T> { OptionDeserWrapper(Some(t)) }
 }
 
-/// Wrapper to write each element of a Vec with no length prefix
-pub(crate) struct VecWriteWrapper<'a, T: Writeable>(pub &'a Vec<T>);
-impl<'a, T: Writeable> Writeable for VecWriteWrapper<'a, T> {
-	#[inline]
-	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
-		for ref v in self.0.iter() {
-			v.write(writer)?;
-		}
-		Ok(())
-	}
-}
-
-/// Wrapper to read elements from a given stream until it reaches the end of the stream.
-pub(crate) struct VecReadWrapper<T>(pub Vec<T>);
-impl<T: MaybeReadable> Readable for VecReadWrapper<T> {
-	#[inline]
-	fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
-		let mut values = Vec::new();
-		loop {
-			let mut track_read = ReadTrackingReader::new(&mut reader);
-			match MaybeReadable::read(&mut track_read) {
-				Ok(Some(v)) => { values.push(v); },
-				Ok(None) => { },
-				// If we failed to read any bytes at all, we reached the end of our TLV
-				// stream and have simply exhausted all entries.
-				Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
-				Err(e) => return Err(e),
-			}
-		}
-		Ok(Self(values))
-	}
-}
-
 pub(crate) struct U48(pub u64);
 impl Writeable for U48 {
 	#[inline]
@@ -341,7 +319,7 @@ impl Readable for U48 {
 	}
 }
 
-/// Lightning TLV uses a custom variable-length integer called BigSize. It is similar to Bitcoin's
+/// Lightning TLV uses a custom variable-length integer called `BigSize`. It is similar to Bitcoin's
 /// variable-length integers except that it is serialized in big-endian instead of little-endian.
 ///
 /// Like Bitcoin's variable-length integer, it exhibits ambiguity in that certain values can be
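The `BigSize` type referenced in the hunk above implements the BOLT-style TLV variable-length integer: values below 0xfd fit in one byte, and the prefixes 0xfd, 0xfe and 0xff introduce big-endian 16-, 32- and 64-bit extensions. A hedged, self-contained sketch of that encoding rule (for illustration only, not the crate's implementation):

// Standalone sketch of the big-endian BigSize encoding described above.
fn encode_bigsize(n: u64) -> Vec<u8> {
	match n {
		0..=0xfc => vec![n as u8],
		0xfd..=0xffff => {
			let mut v = vec![0xfd];
			v.extend_from_slice(&(n as u16).to_be_bytes());
			v
		}
		0x1_0000..=0xffff_ffff => {
			let mut v = vec![0xfe];
			v.extend_from_slice(&(n as u32).to_be_bytes());
			v
		}
		_ => {
			let mut v = vec![0xff];
			v.extend_from_slice(&n.to_be_bytes());
			v
		}
	}
}

fn main() {
	assert_eq!(encode_bigsize(252), vec![0xfc]);
	assert_eq!(encode_bigsize(253), vec![0xfd, 0x00, 0xfd]);
	assert_eq!(encode_bigsize(65_536), vec![0xfe, 0x00, 0x01, 0x00, 0x00]);
}
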
@@ -405,9 +383,43 @@ impl Readable for BigSize {
 	}
 }
 
+/// The lightning protocol uses u16s for lengths in most cases. As our serialization framework
+/// primarily targets that, we must as well. However, because we may serialize objects that have
+/// more than 65K entries, we need to be able to store larger values. Thus, we define a variable
+/// length integer here that is backwards-compatible for values < 0xffff. We treat 0xffff as
+/// "read eight more bytes".
+///
+/// To ensure we only have one valid encoding per value, we add 0xffff to values written as eight
+/// bytes. Thus, 0xfffe is serialized as 0xfffe, whereas 0xffff is serialized as
+/// 0xffff0000000000000000 (i.e. read-eight-bytes then zero).
+struct CollectionLength(pub u64);
+impl Writeable for CollectionLength {
+	#[inline]
+	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+		if self.0 < 0xffff {
+			(self.0 as u16).write(writer)
+		} else {
+			0xffffu16.write(writer)?;
+			(self.0 - 0xffff).write(writer)
+		}
+	}
+}
+
+impl Readable for CollectionLength {
+	#[inline]
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let mut val: u64 = <u16 as Readable>::read(r)? as u64;
+		if val == 0xffff {
+			val = <u64 as Readable>::read(r)?
+				.checked_add(0xffff).ok_or(DecodeError::InvalidValue)?;
+		}
+		Ok(CollectionLength(val))
+	}
+}
 
 /// In TLV we occasionally send fields which only consist of, or potentially end with, a
 /// variable-length integer which is simply truncated by skipping high zero bytes. This type
-/// encapsulates such integers implementing Readable/Writeable for them.
+/// encapsulates such integers implementing [`Readable`]/[`Writeable`] for them.
 #[cfg_attr(test, derive(PartialEq, Eq, Debug))]
 pub(crate) struct HighZeroBytesDroppedBigSize<T>(pub T);
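The `CollectionLength` rule added above can be checked with a small standalone sketch: lengths below 0xffff are written as two big-endian bytes, while larger lengths are written as the 0xffff marker followed by (len - 0xffff) as eight bytes, so each value has exactly one encoding. The function name here is illustrative, not the crate's API:

fn encode_collection_len(len: u64) -> Vec<u8> {
	if len < 0xffff {
		(len as u16).to_be_bytes().to_vec()
	} else {
		// Long form: 0xffff marker, then (len - 0xffff) as eight big-endian bytes.
		let mut bytes = 0xffffu16.to_be_bytes().to_vec();
		bytes.extend_from_slice(&(len - 0xffff).to_be_bytes());
		bytes
	}
}

fn main() {
	assert_eq!(encode_collection_len(0xfffe), vec![0xff, 0xfe]);
	// 0xffff itself must take the long form, matching the doc comment's
	// 0xffff0000000000000000 example.
	assert_eq!(
		encode_collection_len(0xffff),
		vec![0xff, 0xff, 0, 0, 0, 0, 0, 0, 0, 0]
	);
}
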
@@ -460,9 +472,13 @@ macro_rules! impl_writeable_primitive {
 			}
 		}
 	}
+	impl From<$val_type> for HighZeroBytesDroppedBigSize<$val_type> {
+		fn from(val: $val_type) -> Self { Self(val) }
+	}
 }
 }
 
+impl_writeable_primitive!(u128, 16);
 impl_writeable_primitive!(u64, 8);
 impl_writeable_primitive!(u32, 4);
 impl_writeable_primitive!(u16, 2);
@@ -523,13 +539,13 @@ macro_rules! impl_array {
 	);
 }
 
-impl_array!(3); // for rgb
+impl_array!(3); // for rgb, ISO 4712 code
 impl_array!(4); // for IPv4
 impl_array!(12); // for OnionV2
 impl_array!(16); // for IPv6
 impl_array!(32); // for channel id & hmac
 impl_array!(PUBLIC_KEY_SIZE); // for PublicKey
-impl_array!(COMPACT_SIGNATURE_SIZE); // for Signature
+impl_array!(64); // for ecdsa::Signature and schnorr::Signature
 impl_array!(1300); // for OnionPacket.hop_data
 
 impl Writeable for [u16; 8] {
@@ -555,50 +571,107 @@ impl Readable for [u16; 8] {
 	}
 }
 
-// HashMap
-impl<K, V> Writeable for HashMap<K, V>
-	where K: Writeable + Eq + Hash,
-	      V: Writeable
-{
+/// A type for variable-length values within TLV record where the length is encoded as part of the record.
+/// Used to prevent encoding the length twice.
+pub struct WithoutLength<T>(pub T);
+
+impl Writeable for WithoutLength<&String> {
 	#[inline]
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
-		(self.len() as u16).write(w)?;
-		for (key, value) in self.iter() {
-			key.write(w)?;
-			value.write(w)?;
+		w.write_all(self.0.as_bytes())
+	}
+}
+impl Readable for WithoutLength<String> {
+	#[inline]
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let v: WithoutLength<Vec<u8>> = Readable::read(r)?;
+		Ok(Self(String::from_utf8(v.0).map_err(|_| DecodeError::InvalidValue)?))
+	}
+}
+impl<'a> From<&'a String> for WithoutLength<&'a String> {
+	fn from(s: &'a String) -> Self { Self(s) }
+}
+
+impl<'a, T: Writeable> Writeable for WithoutLength<&'a Vec<T>> {
+	#[inline]
+	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+		for ref v in self.0.iter() {
+			v.write(writer)?;
 		}
 		Ok(())
 	}
 }
-
-impl<K, V> Readable for HashMap<K, V>
-	where K: Readable + Eq + Hash,
-	      V: MaybeReadable
-{
+impl<T: MaybeReadable> Readable for WithoutLength<Vec<T>> {
 	#[inline]
-	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
-		let len: u16 = Readable::read(r)?;
-		let mut ret = HashMap::with_capacity(len as usize);
-		for _ in 0..len {
-			let k = K::read(r)?;
-			let v_opt = V::read(r)?;
-			if let Some(v) = v_opt {
-				if ret.insert(k, v).is_some() {
-					return Err(DecodeError::InvalidValue);
-				}
-			}
-		}
-		Ok(ret)
+	fn read<R: Read>(mut reader: &mut R) -> Result<Self, DecodeError> {
+		let mut values = Vec::new();
+		loop {
+			let mut track_read = ReadTrackingReader::new(&mut reader);
+			match MaybeReadable::read(&mut track_read) {
+				Ok(Some(v)) => { values.push(v); },
+				Ok(None) => { },
+				// If we failed to read any bytes at all, we reached the end of our TLV
+				// stream and have simply exhausted all entries.
+				Err(ref e) if e == &DecodeError::ShortRead && !track_read.have_read => break,
+				Err(e) => return Err(e),
+			}
+		}
+		Ok(Self(values))
+	}
+}
+impl<'a, T> From<&'a Vec<T>> for WithoutLength<&'a Vec<T>> {
+	fn from(v: &'a Vec<T>) -> Self { Self(v) }
+}
+
+macro_rules! impl_for_map {
+	($ty: ident, $keybound: ident, $constr: expr) => {
+		impl<K, V> Writeable for $ty<K, V>
+			where K: Writeable + Eq + $keybound, V: Writeable
+		{
+			#[inline]
+			fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+				CollectionLength(self.len() as u64).write(w)?;
+				for (key, value) in self.iter() {
+					key.write(w)?;
+					value.write(w)?;
+				}
+				Ok(())
+			}
+		}
+
+		impl<K, V> Readable for $ty<K, V>
+			where K: Readable + Eq + $keybound, V: MaybeReadable
+		{
+			#[inline]
+			fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+				let len: CollectionLength = Readable::read(r)?;
+				let mut ret = $constr(len.0 as usize);
+				for _ in 0..len.0 {
+					let k = K::read(r)?;
+					let v_opt = V::read(r)?;
+					if let Some(v) = v_opt {
+						if ret.insert(k, v).is_some() {
+							return Err(DecodeError::InvalidValue);
+						}
+					}
+				}
+				Ok(ret)
+			}
+		}
 	}
 }
+
+impl_for_map!(BTreeMap, Ord, |_| BTreeMap::new());
+impl_for_map!(HashMap, Hash, |len| HashMap::with_capacity(len));
+
 // HashSet
 impl<T> Writeable for HashSet<T>
 where T: Writeable + Eq + Hash
 {
 	#[inline]
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
-		(self.len() as u16).write(w)?;
+		CollectionLength(self.len() as u64).write(w)?;
 		for item in self.iter() {
 			item.write(w)?;
 		}
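The `WithoutLength` wrapper added in the hunk above exists because a TLV record already carries its own length, so the value inside should not repeat it. A rough sketch of the difference, computed with plain std rather than the crate's `Writeable` impls (byte layouts illustrative for short strings, where `CollectionLength` degenerates to a two-byte big-endian length):

fn main() {
	let s = String::from("LDK");
	// Normal String encoding in this framework: 2-byte big-endian length, then the bytes.
	let mut prefixed = (s.len() as u16).to_be_bytes().to_vec();
	prefixed.extend_from_slice(s.as_bytes());
	assert_eq!(prefixed, vec![0x00, 0x03, b'L', b'D', b'K']);
	// Inside a TLV record the record length already bounds the value, so a
	// WithoutLength-style encoding is just the raw bytes.
	let bare = s.as_bytes().to_vec();
	assert_eq!(bare, vec![b'L', b'D', b'K']);
}
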
@@ -611,9 +684,9 @@ impl<T> Readable for HashSet<T>
 where T: Readable + Eq + Hash
 {
 	#[inline]
 	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
-		let len: u16 = Readable::read(r)?;
-		let mut ret = HashSet::with_capacity(len as usize);
-		for _ in 0..len {
+		let len: CollectionLength = Readable::read(r)?;
+		let mut ret = HashSet::with_capacity(cmp::min(len.0 as usize, MAX_BUF_SIZE / core::mem::size_of::<T>()));
+		for _ in 0..len.0 {
 			if !ret.insert(T::read(r)?) {
 				return Err(DecodeError::InvalidValue)
 			}
@@ -623,51 +696,62 @@ where T: Readable + Eq + Hash
 	}
 }
 
 // Vectors
-impl Writeable for Vec<u8> {
-	#[inline]
-	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
-		(self.len() as u16).write(w)?;
-		w.write_all(&self)
-	}
-}
+macro_rules! impl_for_vec {
+	($ty: ty $(, $name: ident)*) => {
+		impl<$($name : Writeable),*> Writeable for Vec<$ty> {
+			#[inline]
+			fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+				CollectionLength(self.len() as u64).write(w)?;
+				for elem in self.iter() {
+					elem.write(w)?;
+				}
+				Ok(())
+			}
+		}
 
-impl Readable for Vec<u8> {
-	#[inline]
-	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
-		let len: u16 = Readable::read(r)?;
-		let mut ret = Vec::with_capacity(len as usize);
-		ret.resize(len as usize, 0);
-		r.read_exact(&mut ret)?;
-		Ok(ret)
+		impl<$($name : Readable),*> Readable for Vec<$ty> {
+			#[inline]
+			fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+				let len: CollectionLength = Readable::read(r)?;
+				let mut ret = Vec::with_capacity(cmp::min(len.0 as usize, MAX_BUF_SIZE / core::mem::size_of::<$ty>()));
+				for _ in 0..len.0 {
+					if let Some(val) = MaybeReadable::read(r)? {
+						ret.push(val);
+					}
+				}
+				Ok(ret)
+			}
+		}
 	}
 }
-impl Writeable for Vec<Signature> {
+
+impl Writeable for Vec<u8> {
 	#[inline]
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
-		(self.len() as u16).write(w)?;
-		for e in self.iter() {
-			e.write(w)?;
-		}
-		Ok(())
+		CollectionLength(self.len() as u64).write(w)?;
+		w.write_all(&self)
 	}
 }
 
-impl Readable for Vec<Signature> {
+impl Readable for Vec<u8> {
 	#[inline]
 	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
-		let len: u16 = Readable::read(r)?;
-		let byte_size = (len as usize)
-			.checked_mul(COMPACT_SIGNATURE_SIZE)
-			.ok_or(DecodeError::BadLengthDescriptor)?;
-		if byte_size > MAX_BUF_SIZE {
-			return Err(DecodeError::BadLengthDescriptor);
+		let mut len: CollectionLength = Readable::read(r)?;
+		let mut ret = Vec::new();
+		while len.0 > 0 {
+			let readamt = cmp::min(len.0 as usize, MAX_BUF_SIZE);
+			let readstart = ret.len();
+			ret.resize(readstart + readamt, 0);
+			r.read_exact(&mut ret[readstart..])?;
+			len.0 -= readamt as u64;
 		}
-		let mut ret = Vec::with_capacity(len as usize);
-		for _ in 0..len { ret.push(Readable::read(r)?); }
 		Ok(ret)
 	}
 }
+
+impl_for_vec!(ecdsa::Signature);
+impl_for_vec!((A, B), A, B);
+
 impl Writeable for Script {
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
 		(self.len() as u16).write(w)?;
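The new `Vec<u8>` reader above no longer trusts the claimed length for a single allocation: it grows the buffer at most `MAX_BUF_SIZE` bytes per iteration, so a bogus huge length fails on a short stream instead of forcing a gigantic pre-allocation. A self-contained sketch of that pattern (names mirror the diff but this is not the crate's code):

use std::io::{self, Read};

const MAX_BUF_SIZE: usize = 64 * 1024;

// Read `len` bytes in bounded chunks rather than allocating `len` up front.
fn read_chunked<R: Read>(r: &mut R, mut len: u64) -> io::Result<Vec<u8>> {
	let mut ret = Vec::new();
	while len > 0 {
		let readamt = std::cmp::min(len as usize, MAX_BUF_SIZE);
		let readstart = ret.len();
		ret.resize(readstart + readamt, 0);
		r.read_exact(&mut ret[readstart..])?;
		len -= readamt as u64;
	}
	Ok(ret)
}

fn main() -> io::Result<()> {
	let data = [0u8; 16];
	// A claimed length far larger than the actual data errors out instead of
	// pre-allocating gigabytes.
	assert!(read_chunked(&mut &data[..], 1_000_000_000).is_err());
	assert_eq!(read_chunked(&mut &data[..], 16)?.len(), 16);
	Ok(())
}
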
@@ -741,20 +825,32 @@ impl Readable for Sha256dHash {
 	}
 }
 
-impl Writeable for Signature {
+impl Writeable for ecdsa::Signature {
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
 		self.serialize_compact().write(w)
 	}
-	#[inline]
-	fn serialized_length(&self) -> usize {
-		COMPACT_SIGNATURE_SIZE
-	}
 }
 
-impl Readable for Signature {
+impl Readable for ecdsa::Signature {
 	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
 		let buf: [u8; COMPACT_SIGNATURE_SIZE] = Readable::read(r)?;
-		match Signature::from_compact(&buf) {
+		match ecdsa::Signature::from_compact(&buf) {
+			Ok(sig) => Ok(sig),
+			Err(_) => return Err(DecodeError::InvalidValue),
+		}
+	}
+}
+
+impl Writeable for schnorr::Signature {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		self.as_ref().write(w)
+	}
+}
+
+impl Readable for schnorr::Signature {
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let buf: [u8; SCHNORR_SIGNATURE_SIZE] = Readable::read(r)?;
+		match schnorr::Signature::from_slice(&buf) {
 			Ok(sig) => Ok(sig),
 			Err(_) => return Err(DecodeError::InvalidValue),
 		}
@@ -869,6 +965,19 @@ impl Readable for BlockHash {
 	}
 }
 
+impl Writeable for ChainHash {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		w.write_all(self.as_bytes())
+	}
+}
+
+impl Readable for ChainHash {
+	fn read<R: Read>(r: &mut R) -> Result<Self, DecodeError> {
+		let buf: [u8; 32] = Readable::read(r)?;
+		Ok(ChainHash::from(&buf[..]))
+	}
+}
+
 impl Writeable for OutPoint {
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
 		self.txid.write(w)?;
@@ -970,7 +1079,7 @@ impl Readable for () {
 impl Writeable for String {
 	#[inline]
 	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
-		(self.len() as u16).write(w)?;
+		CollectionLength(self.len() as u64).write(w)?;
 		w.write_all(self.as_bytes())
 	}
 }
@@ -987,7 +1096,9 @@ impl Readable for String {
 /// Only the character set and length will be validated.
 /// The character set consists of ASCII alphanumeric characters, hyphens, and periods.
 /// Its length is guaranteed to be representable by a single byte.
-/// This serialization is used by BOLT 7 hostnames.
+/// This serialization is used by [`BOLT 7`] hostnames.
+///
+/// [`BOLT 7`]: https://github.com/lightning/bolts/blob/master/07-routing-gossip.md
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Hostname(String);
 impl Hostname {
@@ -1071,7 +1182,7 @@ impl Readable for Duration {
 #[cfg(test)]
 mod tests {
 	use core::convert::TryFrom;
-	use util::ser::{Readable, Hostname, Writeable};
+	use crate::util::ser::{Readable, Hostname, Writeable};
 
 	#[test]
 	fn hostname_conversion() {