Upgrade to LDK 0.0.117
ldk-sample, src/main.rs:

diff --git a/src/main.rs b/src/main.rs
index 40240a729711bd33d5c708730b2fb8a44c24615a..69f0981ca4091403f827f5861258166a2087c5ce 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -24,7 +24,7 @@ use lightning::ln::channelmanager::{
 };
 use lightning::ln::msgs::DecodeError;
 use lightning::ln::peer_handler::{IgnoringMessageHandler, MessageHandler, SimpleArcPeerManager};
-use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret};
+use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret};
 use lightning::onion_message::{DefaultMessageRouter, SimpleArcOnionMessenger};
 use lightning::routing::gossip;
 use lightning::routing::gossip::{NodeId, P2PGossipSync};
@@ -32,7 +32,7 @@ use lightning::routing::router::DefaultRouter;
 use lightning::routing::scoring::ProbabilisticScoringFeeParameters;
 use lightning::sign::{EntropySource, InMemorySigner, KeysManager, SpendableOutputDescriptor};
 use lightning::util::config::UserConfig;
-use lightning::util::persist::KVStorePersister;
+use lightning::util::persist::{self, read_channel_monitors, KVStore};
 use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer};
 use lightning::{chain, impl_writeable_tlv_based, impl_writeable_tlv_based_enum};
 use lightning_background_processor::{process_events_async, GossipSync};
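Note: the single-method KVStorePersister trait is replaced by the namespaced KVStore interface,
which FilesystemStore implements. A minimal sketch of the new call shape, assuming LDK 0.0.117's
KVStore signatures; the path and key below are placeholders, not ldk-sample's:

    use lightning::util::persist::KVStore;
    use lightning_persister::fs_store::FilesystemStore;
    use std::path::PathBuf;

    fn kvstore_sketch() -> std::io::Result<()> {
        // FilesystemStore::new takes a PathBuf, hence the `.into()` on the String
        // data dir later in this diff.
        let store = FilesystemStore::new(PathBuf::from("/tmp/ldk-demo"));
        // Keys now live under a (primary, secondary) namespace pair; empty strings
        // place the key at the root of the store, as in the payment-map writes below.
        store.write("", "", "demo_key", b"demo_value")?;
        let _bytes: Vec<u8> = store.read("", "", "demo_key")?;
        Ok(())
    }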
@@ -41,7 +41,7 @@ use lightning_block_sync::poll;
 use lightning_block_sync::SpvClient;
 use lightning_block_sync::UnboundedCache;
 use lightning_net_tokio::SocketDescriptor;
-use lightning_persister::FilesystemPersister;
+use lightning_persister::fs_store::FilesystemStore;
 use rand::{thread_rng, Rng};
 use std::collections::hash_map::Entry;
 use std::collections::HashMap;
@@ -53,7 +53,7 @@ use std::io;
 use std::io::Write;
 use std::path::Path;
 use std::sync::atomic::{AtomicBool, Ordering};
-use std::sync::{Arc, Mutex};
+use std::sync::{Arc, Mutex, RwLock};
 use std::time::{Duration, SystemTime};
 
 pub(crate) const PENDING_SPENDABLE_OUTPUT_DIR: &'static str = "pending_spendable_outputs";
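Note: RwLock joins the std::sync imports because the scorer moves from a Mutex to an RwLock
later in this diff (see the read_scorer hunk below), so that path-finding can take shared read
access while score updates take the exclusive write lock. A minimal sketch of that access
pattern, with u64 standing in for the sample's real scorer type:

    use std::sync::{Arc, RwLock};

    fn scorer_lock_sketch() {
        // Placeholder value; the sample stores its probabilistic scorer here.
        let scorer = Arc::new(RwLock::new(0u64));
        {
            let score = scorer.read().unwrap(); // concurrent readers (route scoring)
            let _ = *score;
        }
        *scorer.write().unwrap() += 1; // exclusive writer (score updates)
    }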
@@ -123,7 +123,7 @@ type ChainMonitor = chainmonitor::ChainMonitor<
        Arc<BitcoindClient>,
        Arc<BitcoindClient>,
        Arc<FilesystemLogger>,
-       Arc<FilesystemPersister>,
+       Arc<FilesystemStore>,
 >;
 
 pub(crate) type PeerManager = SimpleArcPeerManager<
@@ -131,7 +131,7 @@ pub(crate) type PeerManager = SimpleArcPeerManager<
        ChainMonitor,
        BitcoindClient,
        BitcoindClient,
-       BitcoindClient,
+       Arc<BitcoindClient>,
        FilesystemLogger,
 >;
 
@@ -153,7 +153,7 @@ async fn handle_ldk_events(
        channel_manager: &Arc<ChannelManager>, bitcoind_client: &BitcoindClient,
        network_graph: &NetworkGraph, keys_manager: &KeysManager,
        bump_tx_event_handler: &BumpTxEventHandler, inbound_payments: Arc<Mutex<PaymentInfoStorage>>,
-       outbound_payments: Arc<Mutex<PaymentInfoStorage>>, persister: &Arc<FilesystemPersister>,
+       outbound_payments: Arc<Mutex<PaymentInfoStorage>>, persister: &Arc<FilesystemStore>,
        network: Network, event: Event,
 ) {
        match event {
@@ -229,7 +229,14 @@ async fn handle_ldk_events(
                        };
                        channel_manager.claim_funds(payment_preimage.unwrap());
                }
-               Event::PaymentClaimed { payment_hash, purpose, amount_msat, receiver_node_id: _ } => {
+               Event::PaymentClaimed {
+                       payment_hash,
+                       purpose,
+                       amount_msat,
+                       receiver_node_id: _,
+                       htlcs: _,
+                       sender_intended_total_msat: _,
+               } => {
                        println!(
                                "\nEVENT: claimed payment from payment hash {} of {} millisatoshis",
                                hex_utils::hex_str(&payment_hash.0),
@@ -260,7 +267,7 @@ async fn handle_ldk_events(
                                        });
                                }
                        }
-                       persister.persist(INBOUND_PAYMENTS_FNAME, &*inbound).unwrap();
+                       persister.write("", "", INBOUND_PAYMENTS_FNAME, &inbound.encode()).unwrap();
                }
                Event::PaymentSent { payment_preimage, payment_hash, fee_paid_msat, .. } => {
                        let mut outbound = outbound_payments.lock().unwrap();
@@ -284,7 +291,7 @@ async fn handle_ldk_events(
                                        io::stdout().flush().unwrap();
                                }
                        }
-                       persister.persist(OUTBOUND_PAYMENTS_FNAME, &*outbound).unwrap();
+                       persister.write("", "", OUTBOUND_PAYMENTS_FNAME, &outbound.encode()).unwrap();
                }
                Event::OpenChannelRequest {
                        ref temporary_channel_id, ref counterparty_node_id, ..
@@ -301,14 +308,14 @@ async fn handle_ldk_events(
                        if let Err(e) = res {
                                print!(
                                        "\nEVENT: Failed to accept inbound channel ({}) from {}: {:?}",
-                                       hex_utils::hex_str(&temporary_channel_id[..]),
+                                       temporary_channel_id,
                                        hex_utils::hex_str(&counterparty_node_id.serialize()),
                                        e,
                                );
                        } else {
                                print!(
                                        "\nEVENT: Accepted inbound channel ({}) from {}",
-                                       hex_utils::hex_str(&temporary_channel_id[..]),
+                                       temporary_channel_id,
                                        hex_utils::hex_str(&counterparty_node_id.serialize()),
                                );
                        }
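Note: channel IDs are now the dedicated ChannelId newtype rather than a bare [u8; 32], and
ChannelId implements Display as hex, which is why the hex_utils::hex_str calls around channel
IDs are dropped throughout this diff. An illustrative sketch:

    use lightning::ln::ChannelId;

    fn channel_id_sketch() {
        // Illustrative only: ChannelId wraps a [u8; 32].
        let channel_id = ChannelId::from_bytes([0x2a; 32]);
        println!("channel {}", channel_id); // Display renders the id as hex
        let _raw: [u8; 32] = channel_id.0; // the raw bytes remain accessible
    }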
@@ -333,7 +340,7 @@ async fn handle_ldk_events(
                                let payment = outbound.payments.get_mut(&payment_hash).unwrap();
                                payment.status = HTLCStatus::Failed;
                        }
-                       persister.persist(OUTBOUND_PAYMENTS_FNAME, &*outbound).unwrap();
+                       persister.write("", "", OUTBOUND_PAYMENTS_FNAME, &outbound.encode()).unwrap();
                }
                Event::PaymentForwarded {
                        prev_channel_id,
@@ -346,7 +353,7 @@ async fn handle_ldk_events(
                        let nodes = read_only_network_graph.nodes();
                        let channels = channel_manager.list_channels();
 
-                       let node_str = |channel_id: &Option<[u8; 32]>| match channel_id {
+                       let node_str = |channel_id: &Option<ChannelId>| match channel_id {
                                None => String::new(),
                                Some(channel_id) => match channels.iter().find(|c| c.channel_id == *channel_id) {
                                        None => String::new(),
@@ -363,9 +370,9 @@ async fn handle_ldk_events(
                                        }
                                },
                        };
-                       let channel_str = |channel_id: &Option<[u8; 32]>| {
+                       let channel_str = |channel_id: &Option<ChannelId>| {
                                channel_id
-                                       .map(|channel_id| format!(" with channel {}", hex_utils::hex_str(&channel_id)))
+                                       .map(|channel_id| format!(" with channel {}", channel_id))
                                        .unwrap_or_default()
                        };
                        let from_prev_str =
@@ -407,7 +414,7 @@ async fn handle_ldk_events(
                                forwarding_channel_manager.process_pending_htlc_forwards();
                        });
                }
-               Event::SpendableOutputs { outputs } => {
+               Event::SpendableOutputs { outputs, channel_id: _ } => {
                        // SpendableOutputDescriptors, of which outputs is a vec of, are critical to keep track
                        // of! While a `StaticOutput` descriptor is just an output to a static, well-known key,
                        // other descriptors are not currently ever regenerated for you by LDK. Once we return
@@ -421,15 +428,13 @@ async fn handle_ldk_events(
                                let key = hex_utils::hex_str(&keys_manager.get_secure_random_bytes());
                                // Note that if the type here changes our read code needs to change as well.
                                let output: SpendableOutputDescriptor = output;
-                               persister
-                                       .persist(&format!("{}/{}", PENDING_SPENDABLE_OUTPUT_DIR, key), &output)
-                                       .unwrap();
+                               persister.write(PENDING_SPENDABLE_OUTPUT_DIR, "", &key, &output.encode()).unwrap();
                        }
                }
                Event::ChannelPending { channel_id, counterparty_node_id, .. } => {
                        println!(
                                "\nEVENT: Channel {} with peer {} is pending awaiting funding lock-in!",
-                               hex_utils::hex_str(&channel_id),
+                               channel_id,
                                hex_utils::hex_str(&counterparty_node_id.serialize()),
                        );
                        print!("> ");
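Note: unlike the payment maps, each pending spendable output is written under the dedicated
PENDING_SPENDABLE_OUTPUT_DIR primary namespace with a random key. A rough sketch of how such
entries can be enumerated and read back through the KVStore API (illustrative only, not the
sample's actual sweep code):

    // Assumes the KVStore list/read methods of LDK 0.0.117 and the Readable impl
    // on SpendableOutputDescriptor; `persister` is the store used above.
    for key in persister.list(PENDING_SPENDABLE_OUTPUT_DIR, "").unwrap() {
        let bytes = persister.read(PENDING_SPENDABLE_OUTPUT_DIR, "", &key).unwrap();
        let descriptor = SpendableOutputDescriptor::read(&mut &bytes[..]).unwrap();
        // ...sweep `descriptor` once it is safe to do so...
        let _ = descriptor;
    }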
@@ -443,16 +448,23 @@ async fn handle_ldk_events(
                } => {
                        println!(
                                "\nEVENT: Channel {} with peer {} is ready to be used!",
-                               hex_utils::hex_str(channel_id),
+                               channel_id,
                                hex_utils::hex_str(&counterparty_node_id.serialize()),
                        );
                        print!("> ");
                        io::stdout().flush().unwrap();
                }
-               Event::ChannelClosed { channel_id, reason, user_channel_id: _ } => {
+               Event::ChannelClosed {
+                       channel_id,
+                       reason,
+                       user_channel_id: _,
+                       counterparty_node_id,
+                       channel_capacity_sats: _,
+               } => {
                        println!(
-                               "\nEVENT: Channel {} closed due to: {:?}",
-                               hex_utils::hex_str(&channel_id),
+                               "\nEVENT: Channel {} with counterparty {} closed due to: {:?}",
+                               channel_id,
+                               counterparty_node_id.map(|id| format!("{}", id)).unwrap_or("".to_owned()),
                                reason
                        );
                        print!("> ");
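Note: several events gained fields in 0.0.117 (htlcs and sender_intended_total_msat on
PaymentClaimed, channel_id on SpendableOutputs, counterparty_node_id and channel_capacity_sats
on ChannelClosed), which is why these match arms are rewritten. The sample names the unused
fields explicitly so that future additions fail loudly at compile time; a terser but less
upgrade-proof alternative is a rest pattern, sketched below:

    use lightning::events::Event;

    // Illustrative only: `..` silently absorbs any fields added in later releases,
    // whereas the explicit `field: _` style above forces a review on every upgrade.
    fn log_closure(event: &Event) {
        if let Event::ChannelClosed { channel_id, reason, counterparty_node_id, .. } = event {
            println!("Channel {} closed: {:?} (peer: {:?})", channel_id, reason, counterparty_node_id);
        }
    }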
@@ -527,7 +539,7 @@ async fn start_ldk() {
        let broadcaster = bitcoind_client.clone();
 
        // Step 4: Initialize Persist
-       let persister = Arc::new(FilesystemPersister::new(ldk_data_dir.clone()));
+       let persister = Arc::new(FilesystemStore::new(ldk_data_dir.clone().into()));
 
        // Step 5: Initialize the ChainMonitor
        let chain_monitor: Arc<ChainMonitor> = Arc::new(chainmonitor::ChainMonitor::new(
@@ -575,7 +587,8 @@ async fn start_ldk() {
 
        // Step 7: Read ChannelMonitor state from disk
        let mut channelmonitors =
-               persister.read_channelmonitors(keys_manager.clone(), keys_manager.clone()).unwrap();
+               read_channel_monitors(Arc::clone(&persister), keys_manager.clone(), keys_manager.clone())
+                       .unwrap();
 
        // Step 8: Poll for the best chain tip, which may be used by the channel manager & spv client
        let polled_chain_tip = init::validate_best_block_header(bitcoind_client.as_ref())
@@ -588,7 +601,7 @@ async fn start_ldk() {
                Arc::new(disk::read_network(Path::new(&network_graph_path), args.network, logger.clone()));
 
        let scorer_path = format!("{}/scorer", ldk_data_dir.clone());
-       let scorer = Arc::new(Mutex::new(disk::read_scorer(
+       let scorer = Arc::new(RwLock::new(disk::read_scorer(
                Path::new(&scorer_path),
                Arc::clone(&network_graph),
                Arc::clone(&logger),
@@ -697,7 +710,7 @@ async fn start_ldk() {
                let funding_outpoint = item.2;
                assert_eq!(
                        chain_monitor.watch_channel(funding_outpoint, channel_monitor),
-                       ChannelMonitorUpdateStatus::Completed
+                       Ok(ChannelMonitorUpdateStatus::Completed)
                );
        }
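Note: chain_monitor.watch_channel now returns a Result, with Err(()) indicating the monitor
could not be watched at all. An illustrative alternative to the assert_eq! above that
distinguishes a still-pending persist from a hard failure (a sketch only, reusing the variables
in scope in this hunk):

    match chain_monitor.watch_channel(funding_outpoint, channel_monitor) {
        Ok(ChannelMonitorUpdateStatus::Completed) => {}
        Ok(status) => println!("ChannelMonitor persistence still pending: {:?}", status),
        Err(()) => panic!("Failed to watch ChannelMonitor for {:?}", funding_outpoint),
    }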
 
@@ -790,8 +803,9 @@ async fn start_ldk() {
                .into_iter()
                .filter_map(|p| match p {
                        RecentPaymentDetails::Pending { payment_hash, .. } => Some(payment_hash),
-                       RecentPaymentDetails::Fulfilled { payment_hash } => payment_hash,
-                       RecentPaymentDetails::Abandoned { payment_hash } => Some(payment_hash),
+                       RecentPaymentDetails::Fulfilled { payment_hash, .. } => payment_hash,
+                       RecentPaymentDetails::Abandoned { payment_hash, .. } => Some(payment_hash),
+                       RecentPaymentDetails::AwaitingInvoice { payment_id: _ } => todo!(),
                })
                .collect::<Vec<PaymentHash>>();
        for (payment_hash, payment_info) in outbound_payments
@@ -805,7 +819,9 @@ async fn start_ldk() {
                        payment_info.status = HTLCStatus::Failed;
                }
        }
-       persister.persist(OUTBOUND_PAYMENTS_FNAME, &*outbound_payments.lock().unwrap()).unwrap();
+       persister
+               .write("", "", OUTBOUND_PAYMENTS_FNAME, &outbound_payments.lock().unwrap().encode())
+               .unwrap();
 
        // Step 18: Handle LDK Events
        let channel_manager_event_listener = Arc::clone(&channel_manager);
@@ -843,7 +859,7 @@ async fn start_ldk() {
        };
 
        // Step 19: Persist ChannelManager and NetworkGraph
-       let persister = Arc::new(FilesystemPersister::new(ldk_data_dir.clone()));
+       let persister = Arc::new(FilesystemStore::new(ldk_data_dir.clone().into()));
 
        // Step 20: Background Processing
        let (bp_exit, bp_exit_check) = tokio::sync::watch::channel(());
@@ -871,7 +887,7 @@ async fn start_ldk() {
        // Regularly reconnect to channel peers.
        let connect_cm = Arc::clone(&channel_manager);
        let connect_pm = Arc::clone(&peer_manager);
-       let peer_data_path = format!("{}/channel_peer_data", ldk_data_dir.clone());
+       let peer_data_path = format!("{}/channel_peer_data", ldk_data_dir);
        let stop_connect = Arc::clone(&stop_listen_connect);
        tokio::spawn(async move {
                let mut interval = tokio::time::interval(Duration::from_secs(1));
@@ -979,7 +995,14 @@ async fn start_ldk() {
        peer_manager.disconnect_all_peers();
 
        if let Err(e) = bg_res {
-               let persist_res = persister.persist("manager", &*channel_manager).unwrap();
+               let persist_res = persister
+                       .write(
+                               persist::CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE,
+                               persist::CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE,
+                               persist::CHANNEL_MANAGER_PERSISTENCE_KEY,
+                               &channel_manager.encode(),
+                       )
+                       .unwrap();
                use lightning::util::logger::Logger;
                lightning::log_error!(
                        &*logger,