Update code and Cargo.toml to LDK 0.0.109
diff --git a/src/disk.rs b/src/disk.rs
index 827cb7e9e79326f6a3b8ae5022748267c07bf72a..681d742d5444438784e488d95b09348211b6cf0e 100644
@@ -1,14 +1,17 @@
-use crate::cli;
-use bitcoin::secp256k1::key::PublicKey;
+use crate::{cli, NetworkGraph};
+use bitcoin::secp256k1::PublicKey;
+use bitcoin::BlockHash;
+use chrono::Utc;
+use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringParameters};
 use lightning::util::logger::{Logger, Record};
-use lightning::util::ser::Writer;
+use lightning::util::ser::{ReadableArgs, Writer};
 use std::collections::HashMap;
 use std::fs;
 use std::fs::File;
 use std::io::{BufRead, BufReader};
 use std::net::SocketAddr;
 use std::path::Path;
-use time::OffsetDateTime;
+use std::sync::Arc;
 
 pub(crate) struct FilesystemLogger {
        data_dir: String,
@@ -25,7 +28,10 @@ impl Logger for FilesystemLogger {
                let raw_log = record.args.to_string();
                let log = format!(
                        "{} {:<5} [{}:{}] {}\n",
-                       OffsetDateTime::now_utc().format("%F %T"),
+                       // Note that a "real" lightning node almost certainly does *not* want subsecond
+                       // precision for message-receipt information as it makes log entries a target for
+                       // deanonymization attacks. For testing, however, it's quite useful.
+                       Utc::now().format("%Y-%m-%d %H:%M:%S%.3f"),
                        record.level.to_string(),
                        record.module_path,
                        record.line,
@@ -65,3 +71,27 @@ pub(crate) fn read_channel_peer_data(
        }
        Ok(peer_data)
 }
+
+pub(crate) fn read_network(
+       path: &Path, genesis_hash: BlockHash, logger: Arc<FilesystemLogger>,
+) -> NetworkGraph {
+       if let Ok(file) = File::open(path) {
+               if let Ok(graph) = NetworkGraph::read(&mut BufReader::new(file), logger.clone()) {
+                       return graph;
+               }
+       }
+       NetworkGraph::new(genesis_hash, logger)
+}
+
+pub(crate) fn read_scorer(
+       path: &Path, graph: Arc<NetworkGraph>, logger: Arc<FilesystemLogger>,
+) -> ProbabilisticScorer<Arc<NetworkGraph>, Arc<FilesystemLogger>> {
+       let params = ProbabilisticScoringParameters::default();
+       if let Ok(file) = File::open(path) {
+               let args = (params.clone(), Arc::clone(&graph), Arc::clone(&logger));
+               if let Ok(scorer) = ProbabilisticScorer::read(&mut BufReader::new(file), args) {
+                       return scorer;
+               }
+       }
+       ProbabilisticScorer::new(params, graph, logger)
+}
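
For context, the two new helpers are meant to be called once during node startup: read_network deserializes the gossip graph persisted by a previous run (falling back to an empty graph for the given genesis hash), and read_scorer does the same for the ProbabilisticScorer, whose deserialization takes the scoring parameters, the graph, and the logger as arguments. Below is a minimal sketch of how they might be wired up from main.rs. The on-disk file names ("network_graph", "scorer"), the testnet genesis hash, and the Mutex around the scorer are illustrative assumptions, not part of this diff; the sketch also assumes the crate-level NetworkGraph alias is LDK's graph type parameterized by Arc<FilesystemLogger>, as the calls above imply.

// Sketch for main.rs, assuming `mod disk;` and the crate-level `NetworkGraph` alias.
use std::path::Path;
use std::sync::{Arc, Mutex};

use bitcoin::blockdata::constants::genesis_block;
use bitcoin::network::constants::Network;
use lightning::routing::scoring::ProbabilisticScorer;

use crate::disk::FilesystemLogger;

type Scorer = ProbabilisticScorer<Arc<NetworkGraph>, Arc<FilesystemLogger>>;

fn load_graph_and_scorer(
	ldk_data_dir: &str, logger: Arc<FilesystemLogger>,
) -> (Arc<NetworkGraph>, Arc<Mutex<Scorer>>) {
	// Hypothetical: the node runs on testnet; derive that chain's genesis hash.
	let genesis_hash = genesis_block(Network::Testnet).header.block_hash();

	// Both helpers fall back to a freshly initialized object when the on-disk
	// copy is missing or fails to deserialize.
	let network_graph = Arc::new(disk::read_network(
		Path::new(&format!("{}/network_graph", ldk_data_dir)),
		genesis_hash,
		logger.clone(),
	));

	// Wrapping the scorer in a Mutex (an assumption here) lets the router and
	// any persistence task share and update it after payments complete or fail.
	let scorer = Arc::new(Mutex::new(disk::read_scorer(
		Path::new(&format!("{}/scorer", ldk_data_dir)),
		Arc::clone(&network_graph),
		logger.clone(),
	)));

	(network_graph, scorer)
}

Whatever persistence mechanism the node uses should serialize the graph and scorer back to the same two paths so the next startup can pick them up.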