pass
elif cfg == "dual_funding":
pass
+ elif cfg == "splicing":
+ pass
else:
print("Bad cfg tag: " + cfg)
assert False
RUSTFLAGS="--cfg=async_signing" cargo test --verbose --color always -p lightning
[ "$CI_MINIMIZE_DISK_USAGE" != "" ] && cargo clean
RUSTFLAGS="--cfg=dual_funding" cargo test --verbose --color always -p lightning
+[ "$CI_MINIMIZE_DISK_USAGE" != "" ] && cargo clean
+RUSTFLAGS="--cfg=splicing" cargo test --verbose --color always -p lightning
// Background feerate which is <= the minimum Normal feerate.
match conf_target {
ConfirmationTarget::OnChainSweep => MAX_FEE,
- ConfirmationTarget::ChannelCloseMinimum|ConfirmationTarget::AnchorChannelFee|ConfirmationTarget::MinAllowedAnchorChannelRemoteFee|ConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee => 253,
+ ConfirmationTarget::ChannelCloseMinimum|ConfirmationTarget::AnchorChannelFee|ConfirmationTarget::MinAllowedAnchorChannelRemoteFee|ConfirmationTarget::MinAllowedNonAnchorChannelRemoteFee|ConfirmationTarget::OutputSpendingFee => 253,
ConfirmationTarget::NonAnchorChannelFee => cmp::min(self.ret_val.load(atomic::Ordering::Acquire), MAX_FEE),
}
}
fn update_persisted_channel(&self, _funding_txo: OutPoint, _update: Option<&channelmonitor::ChannelMonitorUpdate>, _data: &channelmonitor::ChannelMonitor<TestChannelSigner>, _update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
self.update_ret.lock().unwrap().clone()
}
+
+	// Archiving is a deliberate no-op in this test persister: nothing is written
+	// to disk here, so there is nothing to move to a backup location.
+	fn archive_persisted_channel(&self, _: OutPoint) {
+	}
}
[package]
name = "lightning-background-processor"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Valentine Wallace <vwallace@protonmail.com>"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning"
[dependencies]
bitcoin = { version = "0.30.2", default-features = false }
-lightning = { version = "0.0.121", path = "../lightning", default-features = false }
-lightning-rapid-gossip-sync = { version = "0.0.121", path = "../lightning-rapid-gossip-sync", default-features = false }
+lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false }
+lightning-rapid-gossip-sync = { version = "0.0.123-beta", path = "../lightning-rapid-gossip-sync", default-features = false }
[dev-dependencies]
tokio = { version = "1.35", features = [ "macros", "rt", "rt-multi-thread", "sync", "time" ] }
-lightning = { version = "0.0.121", path = "../lightning", features = ["_test_utils"] }
-lightning-invoice = { version = "0.29.0", path = "../lightning-invoice" }
-lightning-persister = { version = "0.0.121", path = "../lightning-persister" }
+lightning = { version = "0.0.123-beta", path = "../lightning", features = ["_test_utils"] }
+lightning-invoice = { version = "0.31.0-beta", path = "../lightning-invoice" }
+lightning-persister = { version = "0.0.123-beta", path = "../lightning-persister" }
#[cfg(all(feature = "std", test))]
mod tests {
+ use bitcoin::{ScriptBuf, Txid};
use bitcoin::blockdata::constants::{genesis_block, ChainHash};
use bitcoin::blockdata::locktime::absolute::LockTime;
use bitcoin::blockdata::transaction::{Transaction, TxOut};
+ use bitcoin::hashes::Hash;
use bitcoin::network::constants::Network;
use bitcoin::secp256k1::{SecretKey, PublicKey, Secp256k1};
- use lightning::chain::{BestBlock, Confirm, chainmonitor};
+ use lightning::chain::{BestBlock, Confirm, chainmonitor, Filter};
use lightning::chain::channelmonitor::ANTI_REORG_DELAY;
- use lightning::sign::{InMemorySigner, KeysManager};
+ use lightning::sign::{InMemorySigner, KeysManager, ChangeDestinationSource};
use lightning::chain::transaction::OutPoint;
use lightning::events::{Event, PathFailure, MessageSendEventsProvider, MessageSendEvent};
use lightning::{get_event_msg, get_event};
CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_KEY,
NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_KEY,
SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY};
+ use lightning::util::sweep::{OutputSweeper, OutputSpendStatus};
use lightning_persister::fs_store::FilesystemStore;
use std::collections::VecDeque;
use std::{fs, env};
logger: Arc<test_utils::TestLogger>,
best_block: BestBlock,
scorer: Arc<LockingWrapper<TestScorer>>,
+ sweeper: Arc<OutputSweeper<Arc<test_utils::TestBroadcaster>, Arc<TestWallet>,
+ Arc<test_utils::TestFeeEstimator>, Arc<dyn Filter + Sync + Send>, Arc<FilesystemStore>,
+ Arc<test_utils::TestLogger>, Arc<KeysManager>>>,
}
impl Node {
}
}
+	/// Minimal wallet stand-in for tests, providing a change destination for
+	/// the `OutputSweeper` under test.
+	struct TestWallet {}
+
+	impl ChangeDestinationSource for TestWallet {
+		// NOTE(review): an empty script is not actually spendable; this is fine for
+		// tests that only inspect broadcast behavior, but must not be copied into
+		// production code.
+		fn get_change_destination_script(&self) -> Result<ScriptBuf, ()> {
+			Ok(ScriptBuf::new())
+		}
+	}
+
fn get_full_filepath(filepath: String, filename: String) -> String {
let mut path = PathBuf::from(filepath);
path.push(filename);
let router = Arc::new(DefaultRouter::new(network_graph.clone(), logger.clone(), Arc::clone(&keys_manager), scorer.clone(), Default::default()));
let chain_source = Arc::new(test_utils::TestChainSource::new(Network::Bitcoin));
let kv_store = Arc::new(FilesystemStore::new(format!("{}_persister_{}", &persist_dir, i).into()));
+ let now = Duration::from_secs(genesis_block.header.time as u64);
+ let keys_manager = Arc::new(KeysManager::new(&seed, now.as_secs(), now.subsec_nanos()));
let chain_monitor = Arc::new(chainmonitor::ChainMonitor::new(Some(chain_source.clone()), tx_broadcaster.clone(), logger.clone(), fee_estimator.clone(), kv_store.clone()));
let best_block = BestBlock::from_network(network);
let params = ChainParameters { network, best_block };
let manager = Arc::new(ChannelManager::new(fee_estimator.clone(), chain_monitor.clone(), tx_broadcaster.clone(), router.clone(), logger.clone(), keys_manager.clone(), keys_manager.clone(), keys_manager.clone(), UserConfig::default(), params, genesis_block.header.time));
+ let wallet = Arc::new(TestWallet {});
+ let sweeper = Arc::new(OutputSweeper::new(best_block, Arc::clone(&tx_broadcaster), Arc::clone(&fee_estimator),
+ None::<Arc<dyn Filter + Sync + Send>>, Arc::clone(&keys_manager), wallet, Arc::clone(&kv_store), Arc::clone(&logger)));
let p2p_gossip_sync = Arc::new(P2PGossipSync::new(network_graph.clone(), Some(chain_source.clone()), logger.clone()));
let rapid_gossip_sync = Arc::new(RapidGossipSync::new(network_graph.clone(), logger.clone()));
let msg_handler = MessageHandler {
onion_message_handler: IgnoringMessageHandler{}, custom_message_handler: IgnoringMessageHandler{}
};
let peer_manager = Arc::new(PeerManager::new(msg_handler, 0, &seed, logger.clone(), keys_manager.clone()));
- let node = Node { node: manager, p2p_gossip_sync, rapid_gossip_sync, peer_manager, chain_monitor, kv_store, tx_broadcaster, network_graph, logger, best_block, scorer };
+ let node = Node { node: manager, p2p_gossip_sync, rapid_gossip_sync, peer_manager, chain_monitor, kv_store, tx_broadcaster, network_graph, logger, best_block, scorer, sweeper };
nodes.push(node);
}
1 => {
node.node.transactions_confirmed(&header, &txdata, height);
node.chain_monitor.transactions_confirmed(&header, &txdata, height);
+ node.sweeper.transactions_confirmed(&header, &txdata, height);
},
x if x == depth => {
+ // We need the TestBroadcaster to know about the new height so that it doesn't think
+ // we're violating the time lock requirements of transactions broadcasted at that
+ // point.
+ node.tx_broadcaster.blocks.lock().unwrap().push((genesis_block(Network::Bitcoin), height));
node.node.best_block_updated(&header, height);
node.chain_monitor.best_block_updated(&header, height);
+ node.sweeper.best_block_updated(&header, height);
},
_ => {},
}
}
}
+
+	/// Connects `num_blocks` dummy blocks on top of `node`'s current best block.
+	///
+	/// Only the *final* block is announced to the `ChannelManager`, `ChainMonitor`,
+	/// and `OutputSweeper` (see the `i == num_blocks` guard); intermediate heights
+	/// merely advance `node.best_block`.
+	fn advance_chain(node: &mut Node, num_blocks: u32) {
+		for i in 1..=num_blocks {
+			let prev_blockhash = node.best_block.block_hash;
+			let height = node.best_block.height + 1;
+			let header = create_dummy_header(prev_blockhash, height);
+			node.best_block = BestBlock::new(header.block_hash(), height);
+			if i == num_blocks {
+				// We need the TestBroadcaster to know about the new height so that it doesn't think
+				// we're violating the time lock requirements of transactions broadcasted at that
+				// point.
+				node.tx_broadcaster.blocks.lock().unwrap().push((genesis_block(Network::Bitcoin), height));
+				node.node.best_block_updated(&header, height);
+				node.chain_monitor.best_block_updated(&header, height);
+				node.sweeper.best_block_updated(&header, height);
+			}
+		}
+	}
+
fn confirm_transaction(node: &mut Node, tx: &Transaction) {
confirm_transaction_depth(node, tx, ANTI_REORG_DELAY);
}
let _as_channel_update = get_event_msg!(nodes[0], MessageSendEvent::SendChannelUpdate, nodes[1].node.get_our_node_id());
nodes[1].node.handle_channel_ready(&nodes[0].node.get_our_node_id(), &as_funding);
let _bs_channel_update = get_event_msg!(nodes[1], MessageSendEvent::SendChannelUpdate, nodes[0].node.get_our_node_id());
+ let broadcast_funding = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
+ assert_eq!(broadcast_funding.txid(), funding_tx.txid());
+ assert!(nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().is_empty());
if !std::thread::panicking() {
bg_processor.stop().unwrap();
.recv_timeout(Duration::from_secs(EVENT_DEADLINE))
.expect("Events not handled within deadline");
match event {
- Event::SpendableOutputs { .. } => {},
+ Event::SpendableOutputs { outputs, channel_id } => {
+ nodes[0].sweeper.track_spendable_outputs(outputs, channel_id, false, Some(153));
+ },
_ => panic!("Unexpected event: {:?}", event),
}
+ // Check we don't generate an initial sweeping tx until we reach the required height.
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 1);
+ let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
+ if let Some(sweep_tx_0) = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop() {
+ assert!(!tracked_output.is_spent_in(&sweep_tx_0));
+ match tracked_output.status {
+ OutputSpendStatus::PendingInitialBroadcast { delayed_until_height } => {
+ assert_eq!(delayed_until_height, Some(153));
+ }
+ _ => panic!("Unexpected status"),
+ }
+ }
+
+ advance_chain(&mut nodes[0], 3);
+
+ // Check we generate an initial sweeping tx.
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 1);
+ let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
+ let sweep_tx_0 = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
+ match tracked_output.status {
+ OutputSpendStatus::PendingFirstConfirmation { latest_spending_tx, .. } => {
+ assert_eq!(sweep_tx_0.txid(), latest_spending_tx.txid());
+ }
+ _ => panic!("Unexpected status"),
+ }
+
+ // Check we regenerate and rebroadcast the sweeping tx each block.
+ advance_chain(&mut nodes[0], 1);
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 1);
+ let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
+ let sweep_tx_1 = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
+ match tracked_output.status {
+ OutputSpendStatus::PendingFirstConfirmation { latest_spending_tx, .. } => {
+ assert_eq!(sweep_tx_1.txid(), latest_spending_tx.txid());
+ }
+ _ => panic!("Unexpected status"),
+ }
+ assert_ne!(sweep_tx_0, sweep_tx_1);
+
+ advance_chain(&mut nodes[0], 1);
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 1);
+ let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
+ let sweep_tx_2 = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().pop().unwrap();
+ match tracked_output.status {
+ OutputSpendStatus::PendingFirstConfirmation { latest_spending_tx, .. } => {
+ assert_eq!(sweep_tx_2.txid(), latest_spending_tx.txid());
+ }
+ _ => panic!("Unexpected status"),
+ }
+ assert_ne!(sweep_tx_0, sweep_tx_2);
+ assert_ne!(sweep_tx_1, sweep_tx_2);
+
+ // Check we still track the spendable outputs up to ANTI_REORG_DELAY confirmations.
+ confirm_transaction_depth(&mut nodes[0], &sweep_tx_2, 5);
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 1);
+ let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
+ match tracked_output.status {
+ OutputSpendStatus::PendingThresholdConfirmations { latest_spending_tx, .. } => {
+ assert_eq!(sweep_tx_2.txid(), latest_spending_tx.txid());
+ }
+ _ => panic!("Unexpected status"),
+ }
+
+ // Check we still see the transaction as confirmed if we unconfirm any untracked
+ // transaction. (We previously had a bug that would mark tracked transactions as
+ // unconfirmed if any transaction at an unknown block height would be unconfirmed.)
+ let unconf_txid = Txid::from_slice(&[0; 32]).unwrap();
+ nodes[0].sweeper.transaction_unconfirmed(&unconf_txid);
+
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 1);
+ let tracked_output = nodes[0].sweeper.tracked_spendable_outputs().first().unwrap().clone();
+ match tracked_output.status {
+ OutputSpendStatus::PendingThresholdConfirmations { latest_spending_tx, .. } => {
+ assert_eq!(sweep_tx_2.txid(), latest_spending_tx.txid());
+ }
+ _ => panic!("Unexpected status"),
+ }
+
+ // Check we stop tracking the spendable outputs when one of the txs reaches
+ // ANTI_REORG_DELAY confirmations.
+ confirm_transaction_depth(&mut nodes[0], &sweep_tx_0, ANTI_REORG_DELAY);
+ assert_eq!(nodes[0].sweeper.tracked_spendable_outputs().len(), 0);
+
if !std::thread::panicking() {
bg_processor.stop().unwrap();
}
[package]
name = "lightning-block-sync"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Jeffrey Czyz", "Matt Corallo"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning"
[dependencies]
bitcoin = "0.30.2"
hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
-lightning = { version = "0.0.121", path = "../lightning" }
+lightning = { version = "0.0.123-beta", path = "../lightning" }
tokio = { version = "1.35", features = [ "io-util", "net", "time", "rt" ], optional = true }
serde_json = { version = "1.0", optional = true }
chunked_transfer = { version = "1.4", optional = true }
[dev-dependencies]
-lightning = { version = "0.0.121", path = "../lightning", features = ["_test_utils"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", features = ["_test_utils"] }
tokio = { version = "1.35", features = [ "macros", "rt" ] }
[package]
name = "lightning-custom-message"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Jeffrey Czyz"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning"
[dependencies]
bitcoin = "0.30.2"
-lightning = { version = "0.0.121", path = "../lightning" }
+lightning = { version = "0.0.123-beta", path = "../lightning" }
[package]
name = "lightning-invoice"
description = "Data structures to parse and serialize BOLT11 lightning invoices"
-version = "0.29.0"
+version = "0.31.0-beta"
authors = ["Sebastian Geisler <sgeisler@wh2.tu-dresden.de>"]
documentation = "https://docs.rs/lightning-invoice/"
license = "MIT OR Apache-2.0"
[dependencies]
bech32 = { version = "0.9.0", default-features = false }
-lightning = { version = "0.0.121", path = "../lightning", default-features = false }
+lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false }
secp256k1 = { version = "0.27.0", default-features = false, features = ["recovery", "alloc"] }
serde = { version = "1.0.118", optional = true }
bitcoin = { version = "0.30.2", default-features = false }
[dev-dependencies]
-lightning = { version = "0.0.121", path = "../lightning", default-features = false, features = ["_test_utils"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false, features = ["_test_utils"] }
hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
serde_json = { version = "1"}
hashbrown = { version = "0.13", default-features = false }
[package]
name = "lightning-net-tokio"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Matt Corallo"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning/"
[dependencies]
bitcoin = "0.30.2"
-lightning = { version = "0.0.121", path = "../lightning" }
+lightning = { version = "0.0.123-beta", path = "../lightning" }
tokio = { version = "1.35", features = [ "rt", "sync", "net", "time" ] }
[dev-dependencies]
tokio = { version = "1.35", features = [ "macros", "rt", "rt-multi-thread", "sync", "net", "time" ] }
-lightning = { version = "0.0.121", path = "../lightning", features = ["_test_utils"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", features = ["_test_utils"] }
fn handle_open_channel_v2(&self, _their_node_id: &PublicKey, _msg: &OpenChannelV2) {}
fn handle_accept_channel_v2(&self, _their_node_id: &PublicKey, _msg: &AcceptChannelV2) {}
fn handle_stfu(&self, _their_node_id: &PublicKey, _msg: &Stfu) {}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice(&self, _their_node_id: &PublicKey, _msg: &Splice) {}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_ack(&self, _their_node_id: &PublicKey, _msg: &SpliceAck) {}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_locked(&self, _their_node_id: &PublicKey, _msg: &SpliceLocked) {}
fn handle_tx_add_input(&self, _their_node_id: &PublicKey, _msg: &TxAddInput) {}
fn handle_tx_add_output(&self, _their_node_id: &PublicKey, _msg: &TxAddOutput) {}
[package]
name = "lightning-persister"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Valentine Wallace", "Matt Corallo"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning"
[dependencies]
bitcoin = "0.30.2"
-lightning = { version = "0.0.121", path = "../lightning" }
+lightning = { version = "0.0.123-beta", path = "../lightning" }
[target.'cfg(windows)'.dependencies]
windows-sys = { version = "0.48.0", default-features = false, features = ["Win32_Storage_FileSystem", "Win32_Foundation"] }
criterion = { version = "0.4", optional = true, default-features = false }
[dev-dependencies]
-lightning = { version = "0.0.121", path = "../lightning", features = ["_test_utils"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", features = ["_test_utils"] }
bitcoin = { version = "0.30.2", default-features = false }
[package]
name = "lightning-rapid-gossip-sync"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Arik Sosman <git@arik.io>"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning"
std = ["lightning/std"]
[dependencies]
-lightning = { version = "0.0.121", path = "../lightning", default-features = false }
+lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false }
bitcoin = { version = "0.30.2", default-features = false }
[target.'cfg(ldk_bench)'.dependencies]
criterion = { version = "0.4", optional = true, default-features = false }
[dev-dependencies]
-lightning = { version = "0.0.121", path = "../lightning", features = ["_test_utils"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", features = ["_test_utils"] }
[package]
name = "lightning-transaction-sync"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Elias Rohrer"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning"
async-interface = []
[dependencies]
-lightning = { version = "0.0.121", path = "../lightning", default-features = false, features = ["std"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false, features = ["std"] }
bitcoin = { version = "0.30.2", default-features = false }
bdk-macros = "0.6"
futures = { version = "0.3", optional = true }
electrum-client = { version = "0.18.0", optional = true }
[dev-dependencies]
-lightning = { version = "0.0.121", path = "../lightning", default-features = false, features = ["std", "_test_utils"] }
+lightning = { version = "0.0.123-beta", path = "../lightning", default-features = false, features = ["std", "_test_utils"] }
tokio = { version = "1.35.0", features = ["full"] }
[target.'cfg(all(not(target_os = "windows"), not(no_download)))'.dev-dependencies]
[package]
name = "lightning"
-version = "0.0.121"
+version = "0.0.123-beta"
authors = ["Matt Corallo"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/lightningdevkit/rust-lightning/"
use crate::ln::features::BlindedHopFeatures;
use crate::ln::msgs::DecodeError;
use crate::offers::invoice::BlindedPayInfo;
+use crate::offers::invoice_request::InvoiceRequestFields;
+use crate::offers::offer::OfferId;
use crate::util::ser::{HighZeroBytesDroppedBigSize, Readable, Writeable, Writer};
#[allow(unused_imports)]
pub payment_secret: PaymentSecret,
/// Constraints for the receiver of this payment.
pub payment_constraints: PaymentConstraints,
+ /// Context for the receiver of this payment.
+ pub payment_context: PaymentContext,
}
/// Data to construct a [`BlindedHop`] for sending a payment over.
pub htlc_minimum_msat: u64,
}
+/// The context of an inbound payment, which is included in a [`BlindedPath`] via [`ReceiveTlvs`]
+/// and surfaced in [`PaymentPurpose`].
+///
+/// [`BlindedPath`]: crate::blinded_path::BlindedPath
+/// [`PaymentPurpose`]: crate::events::PaymentPurpose
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum PaymentContext {
+ /// The payment context was unknown.
+ Unknown(UnknownPaymentContext),
+
+ /// The payment was made for an invoice requested from a BOLT 12 [`Offer`].
+ ///
+ /// [`Offer`]: crate::offers::offer::Offer
+ Bolt12Offer(Bolt12OfferContext),
+
+ /// The payment was made for an invoice sent for a BOLT 12 [`Refund`].
+ ///
+ /// [`Refund`]: crate::offers::refund::Refund
+ Bolt12Refund(Bolt12RefundContext),
+}
+
+// Used when writing PaymentContext in Event::PaymentClaimable to avoid cloning.
+pub(crate) enum PaymentContextRef<'a> {
+ Bolt12Offer(&'a Bolt12OfferContext),
+ Bolt12Refund(&'a Bolt12RefundContext),
+}
+
+/// An unknown payment context.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct UnknownPaymentContext(());
+
+/// The context of a payment made for an invoice requested from a BOLT 12 [`Offer`].
+///
+/// [`Offer`]: crate::offers::offer::Offer
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Bolt12OfferContext {
+ /// The identifier of the [`Offer`].
+ ///
+ /// [`Offer`]: crate::offers::offer::Offer
+ pub offer_id: OfferId,
+
+ /// Fields from an [`InvoiceRequest`] sent for a [`Bolt12Invoice`].
+ ///
+ /// [`InvoiceRequest`]: crate::offers::invoice_request::InvoiceRequest
+ /// [`Bolt12Invoice`]: crate::offers::invoice::Bolt12Invoice
+ pub invoice_request: InvoiceRequestFields,
+}
+
+/// The context of a payment made for an invoice sent for a BOLT 12 [`Refund`].
+///
+/// [`Refund`]: crate::offers::refund::Refund
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Bolt12RefundContext {}
+
+impl PaymentContext {
+	/// A `PaymentContext` carrying no information, used as the fallback when a
+	/// serialized `ReceiveTlvs` lacks an explicit payment context (the
+	/// `default_value` for TLV type 65537 on read).
+	pub(crate) fn unknown() -> Self {
+		PaymentContext::Unknown(UnknownPaymentContext(()))
+	}
+}
+
impl TryFrom<CounterpartyForwardingInfo> for PaymentRelay {
type Error = ();
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
encode_tlv_stream!(w, {
(12, self.payment_constraints, required),
- (65536, self.payment_secret, required)
+ (65536, self.payment_secret, required),
+ (65537, self.payment_context, required)
});
Ok(())
}
(12, payment_constraints, required),
(14, features, option),
(65536, payment_secret, option),
+ (65537, payment_context, (default_value, PaymentContext::unknown())),
});
let _padding: Option<utils::Padding> = _padding;
if let Some(short_channel_id) = scid {
- if payment_secret.is_some() { return Err(DecodeError::InvalidValue) }
+ if payment_secret.is_some() {
+ return Err(DecodeError::InvalidValue)
+ }
Ok(BlindedPaymentTlvs::Forward(ForwardTlvs {
short_channel_id,
payment_relay: payment_relay.ok_or(DecodeError::InvalidValue)?,
Ok(BlindedPaymentTlvs::Receive(ReceiveTlvs {
payment_secret: payment_secret.ok_or(DecodeError::InvalidValue)?,
payment_constraints: payment_constraints.0.unwrap(),
+ payment_context: payment_context.0.unwrap(),
}))
}
}
}
}
+// Serialization for `PaymentContext`: each variant is identified by the numeric
+// tag below (0 = Unknown, 1 = Bolt12Offer, 2 = Bolt12Refund).
+impl_writeable_tlv_based_enum!(PaymentContext,
+	;
+	(0, Unknown),
+	(1, Bolt12Offer),
+	(2, Bolt12Refund),
+);
+
+// Must mirror the variant tags assigned to `PaymentContext` via
+// `impl_writeable_tlv_based_enum!` (1 = Bolt12Offer, 2 = Bolt12Refund) so that a
+// `PaymentContextRef` serializes identically to the owned enum it borrows from.
+impl<'a> Writeable for PaymentContextRef<'a> {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		match self {
+			PaymentContextRef::Bolt12Offer(context) => {
+				1u8.write(w)?;
+				context.write(w)?;
+			},
+			PaymentContextRef::Bolt12Refund(context) => {
+				2u8.write(w)?;
+				context.write(w)?;
+			},
+		}
+
+		Ok(())
+	}
+}
+
+// An `UnknownPaymentContext` carries no data, so it is written as zero bytes and
+// read back without consuming any.
+impl Writeable for UnknownPaymentContext {
+	fn write<W: Writer>(&self, _w: &mut W) -> Result<(), io::Error> {
+		Ok(())
+	}
+}
+
+impl Readable for UnknownPaymentContext {
+	fn read<R: io::Read>(_r: &mut R) -> Result<Self, DecodeError> {
+		Ok(UnknownPaymentContext(()))
+	}
+}
+
+// TLV-based serialization for the two concrete BOLT 12 payment contexts. The
+// refund context currently has no fields, so its TLV stream is empty.
+impl_writeable_tlv_based!(Bolt12OfferContext, {
+	(0, offer_id, required),
+	(2, invoice_request, required),
+});
+
+impl_writeable_tlv_based!(Bolt12RefundContext, {});
+
#[cfg(test)]
mod tests {
use bitcoin::secp256k1::PublicKey;
- use crate::blinded_path::payment::{ForwardNode, ForwardTlvs, ReceiveTlvs, PaymentConstraints, PaymentRelay};
+ use crate::blinded_path::payment::{ForwardNode, ForwardTlvs, ReceiveTlvs, PaymentConstraints, PaymentContext, PaymentRelay};
use crate::ln::PaymentSecret;
use crate::ln::features::BlindedHopFeatures;
use crate::ln::functional_test_utils::TEST_FINAL_CLTV;
max_cltv_expiry: 0,
htlc_minimum_msat: 1,
},
+ payment_context: PaymentContext::unknown(),
};
let htlc_maximum_msat = 100_000;
let blinded_payinfo = super::compute_payinfo(&intermediate_nodes[..], &recv_tlvs, htlc_maximum_msat, 12).unwrap();
max_cltv_expiry: 0,
htlc_minimum_msat: 1,
},
+ payment_context: PaymentContext::unknown(),
};
let blinded_payinfo = super::compute_payinfo(&[], &recv_tlvs, 4242, TEST_FINAL_CLTV as u16).unwrap();
assert_eq!(blinded_payinfo.fee_base_msat, 0);
max_cltv_expiry: 0,
htlc_minimum_msat: 3,
},
+ payment_context: PaymentContext::unknown(),
};
let htlc_maximum_msat = 100_000;
let blinded_payinfo = super::compute_payinfo(&intermediate_nodes[..], &recv_tlvs, htlc_maximum_msat, TEST_FINAL_CLTV as u16).unwrap();
max_cltv_expiry: 0,
htlc_minimum_msat: 1,
},
+ payment_context: PaymentContext::unknown(),
};
let htlc_minimum_msat = 3798;
assert!(super::compute_payinfo(&intermediate_nodes[..], &recv_tlvs, htlc_minimum_msat - 1, TEST_FINAL_CLTV as u16).is_err());
max_cltv_expiry: 0,
htlc_minimum_msat: 1,
},
+ payment_context: PaymentContext::unknown(),
};
let blinded_payinfo = super::compute_payinfo(&intermediate_nodes[..], &recv_tlvs, 10_000, TEST_FINAL_CLTV as u16).unwrap();
///
/// [`ChannelManager::close_channel_with_feerate_and_script`]: crate::ln::channelmanager::ChannelManager::close_channel_with_feerate_and_script
ChannelCloseMinimum,
+ /// The feerate [`OutputSweeper`] will use on transactions spending
+ /// [`SpendableOutputDescriptor`]s after a channel closure.
+ ///
+ /// Generally spending these outputs is safe as long as they eventually confirm, so a value
+ /// (slightly above) the mempool minimum should suffice. However, as this value will influence
+ /// how long funds will be unavailable after channel closure, [`FeeEstimator`] implementors
+ /// might want to choose a higher feerate to regain control over funds faster.
+ ///
+ /// [`OutputSweeper`]: crate::util::sweep::OutputSweeper
+ /// [`SpendableOutputDescriptor`]: crate::sign::SpendableOutputDescriptor
+ OutputSpendingFee,
}
/// A trait which should be implemented to provide feerate information on a number of time
///
/// [`Writeable::write`]: crate::util::ser::Writeable::write
fn update_persisted_channel(&self, channel_funding_outpoint: OutPoint, update: Option<&ChannelMonitorUpdate>, data: &ChannelMonitor<ChannelSigner>, update_id: MonitorUpdateId) -> ChannelMonitorUpdateStatus;
+	/// Prevents the channel monitor from being loaded on startup.
+	///
+	/// Archiving the data in a backup location (rather than deleting it fully) is useful for
+	/// hedging against data loss in case of unexpected failure.
+	///
+	/// Invoked by `ChainMonitor::archive_fully_resolved_channel_monitors` for each monitor
+	/// reporting itself as fully resolved.
+	fn archive_persisted_channel(&self, channel_funding_outpoint: OutPoint);
}
struct MonitorHolder<ChannelSigner: WriteableEcdsaChannelSigner> {
let mut txn_outputs;
{
txn_outputs = process(monitor, txdata);
+ let chain_sync_update_id = self.sync_persistence_id.get_increment();
let update_id = MonitorUpdateId {
- contents: UpdateOrigin::ChainSync(self.sync_persistence_id.get_increment()),
+ contents: UpdateOrigin::ChainSync(chain_sync_update_id),
};
let mut pending_monitor_updates = monitor_state.pending_monitor_updates.lock().unwrap();
if let Some(height) = best_height {
}
}
- log_trace!(logger, "Syncing Channel Monitor for channel {}", log_funding_info!(monitor));
+ log_trace!(logger, "Syncing Channel Monitor for channel {} for block-data update_id {}",
+ log_funding_info!(monitor),
+ chain_sync_update_id
+ );
match self.persister.update_persisted_channel(*funding_outpoint, None, monitor, update_id) {
ChannelMonitorUpdateStatus::Completed =>
- log_trace!(logger, "Finished syncing Channel Monitor for channel {}", log_funding_info!(monitor)),
+ log_trace!(logger, "Finished syncing Channel Monitor for channel {} for block-data update_id {}",
+ log_funding_info!(monitor),
+ chain_sync_update_id
+ ),
ChannelMonitorUpdateStatus::InProgress => {
log_debug!(logger, "Channel Monitor sync for channel {} in progress, holding events until completion!", log_funding_info!(monitor));
pending_monitor_updates.push(update_id);
pending_monitor_updates.retain(|update_id| *update_id != completed_update_id);
match completed_update_id {
- MonitorUpdateId { contents: UpdateOrigin::OffChain(_) } => {
+ MonitorUpdateId { contents: UpdateOrigin::OffChain(completed_update_id) } => {
// Note that we only check for `UpdateOrigin::OffChain` failures here - if
// we're being told that a `UpdateOrigin::OffChain` monitor update completed,
// we only care about ensuring we don't tell the `ChannelManager` to restore
// `MonitorEvent`s from the monitor back to the `ChannelManager` until they
// complete.
let monitor_is_pending_updates = monitor_data.has_pending_offchain_updates(&pending_monitor_updates);
+ log_debug!(self.logger, "Completed off-chain monitor update {} for channel with funding outpoint {:?}, {}",
+ completed_update_id,
+ funding_txo,
+ if monitor_is_pending_updates {
+ "still have pending off-chain updates"
+ } else {
+ "all off-chain updates complete, returning a MonitorEvent"
+ });
if monitor_is_pending_updates {
// If there are still monitor updates pending, we cannot yet construct a
// Completed event.
monitor_update_id: monitor_data.monitor.get_latest_update_id(),
}], monitor_data.monitor.get_counterparty_node_id()));
},
- MonitorUpdateId { contents: UpdateOrigin::ChainSync(_) } => {
- if !monitor_data.has_pending_chainsync_updates(&pending_monitor_updates) {
+ MonitorUpdateId { contents: UpdateOrigin::ChainSync(completed_update_id) } => {
+ let monitor_has_pending_updates =
+ monitor_data.has_pending_chainsync_updates(&pending_monitor_updates);
+ log_debug!(self.logger, "Completed chain sync monitor update {} for channel with funding outpoint {:?}, {}",
+ completed_update_id,
+ funding_txo,
+ if monitor_has_pending_updates {
+ "still have pending chain sync updates"
+ } else {
+ "all chain sync updates complete, releasing pending MonitorEvents"
+ });
+ if !monitor_has_pending_updates {
monitor_data.last_chain_persist_height.store(self.highest_chain_height.load(Ordering::Acquire), Ordering::Release);
// The next time release_pending_monitor_events is called, any events for this
// ChannelMonitor will be returned.
}
}
}
+
+ /// Archives fully resolved channel monitors by calling [`Persist::archive_persisted_channel`].
+ ///
+ /// This is useful for pruning fully resolved monitors from the monitor set and primary
+ /// storage so they are not kept in memory and reloaded on restart.
+ ///
+ /// Should be called occasionally (once every handful of blocks or on startup).
+ ///
+ /// Depending on the implementation of [`Persist::archive_persisted_channel`] the monitor
+ /// data could be moved to an archive location or removed entirely.
+ pub fn archive_fully_resolved_channel_monitors(&self) {
+ // First pass under the cheaper read lock: only take the write lock below when at least
+ // one monitor is actually prunable.
+ let mut have_monitors_to_prune = false;
+ for (_, monitor_holder) in self.monitors.read().unwrap().iter() {
+ let logger = WithChannelMonitor::from(&self.logger, &monitor_holder.monitor);
+ if monitor_holder.monitor.is_fully_resolved(&logger) {
+ have_monitors_to_prune = true;
+ }
+ }
+ if have_monitors_to_prune {
+ let mut monitors = self.monitors.write().unwrap();
+ // Re-check each monitor under the write lock: resolution status is re-evaluated here
+ // since the monitor set could have changed between dropping the read lock above and
+ // acquiring the write lock.
+ monitors.retain(|funding_txo, monitor_holder| {
+ let logger = WithChannelMonitor::from(&self.logger, &monitor_holder.monitor);
+ if monitor_holder.monitor.is_fully_resolved(&logger) {
+ log_info!(logger,
+ "Archiving fully resolved ChannelMonitor for funding txo {}",
+ funding_txo
+ );
+ // Hand the monitor to the persister for archival, then drop it from the
+ // in-memory set by returning `false` from `retain`.
+ self.persister.archive_persisted_channel(*funding_txo);
+ false
+ } else {
+ true
+ }
+ });
+ }
+ }
}
impl<ChannelSigner: WriteableEcdsaChannelSigner, C: Deref, T: Deref, F: Deref, L: Deref, P: Deref>
Some(monitor_state) => {
let monitor = &monitor_state.monitor;
let logger = WithChannelMonitor::from(&self.logger, &monitor);
- log_trace!(logger, "Updating ChannelMonitor for channel {}", log_funding_info!(monitor));
+ log_trace!(logger, "Updating ChannelMonitor to id {} for channel {}", update.update_id, log_funding_info!(monitor));
let update_res = monitor.update_monitor(update, &self.broadcaster, &self.fee_estimator, &self.logger);
let update_id = MonitorUpdateId::from_monitor_update(update);
match persist_res {
ChannelMonitorUpdateStatus::InProgress => {
pending_monitor_updates.push(update_id);
- log_debug!(logger, "Persistence of ChannelMonitorUpdate for channel {} in progress", log_funding_info!(monitor));
+ log_debug!(logger,
+ "Persistence of ChannelMonitorUpdate id {:?} for channel {} in progress",
+ update_id,
+ log_funding_info!(monitor)
+ );
},
ChannelMonitorUpdateStatus::Completed => {
- log_debug!(logger, "Persistence of ChannelMonitorUpdate for channel {} completed", log_funding_info!(monitor));
+ log_debug!(logger,
+ "Persistence of ChannelMonitorUpdate id {:?} for channel {} completed",
+ update_id,
+ log_funding_info!(monitor)
+ );
},
ChannelMonitorUpdateStatus::UnrecoverableError => {
// Take the monitors lock for writing so that we poison it and any future
/// Ordering of tuple data: (their_per_commitment_point, feerate_per_kw, to_broadcaster_sats,
/// to_countersignatory_sats)
initial_counterparty_commitment_info: Option<(PublicKey, u32, u64, u64)>,
+
+ /// The first block height at which we had no remaining claimable balances.
+ balances_empty_height: Option<u32>,
}
/// Transaction outputs to watch for on-chain spends.
(15, self.counterparty_fulfilled_htlcs, required),
(17, self.initial_counterparty_commitment_info, option),
(19, self.channel_id, required),
+ (21, self.balances_empty_height, option),
});
Ok(())
best_block,
counterparty_node_id: Some(counterparty_node_id),
initial_counterparty_commitment_info: None,
+ balances_empty_height: None,
})
}
spendable_outputs
}
+ /// Checks if the monitor is fully resolved. A resolved monitor is one that has claimed all of
+ /// its outputs and balances (i.e. [`Self::get_claimable_balances`] returns an empty set).
+ ///
+ /// This function returns true only if [`Self::get_claimable_balances`] has been empty for at least
+ /// 4032 blocks (~four weeks) as an additional protection against any bugs resulting in spuriously
+ /// empty balance sets.
+ ///
+ /// Note that this may mutate internal tracking state: the first time the balance set is
+ /// observed to be empty, the current block height is recorded so the waiting period above can
+ /// be measured.
+ pub fn is_fully_resolved<L: Logger>(&self, logger: &L) -> bool {
+ let mut is_all_funds_claimed = self.get_claimable_balances().is_empty();
+ let current_height = self.current_best_block().height;
+ let mut inner = self.inner.lock().unwrap();
+
+ if is_all_funds_claimed {
+ // An empty balance set without having seen the funding spend indicates a bug; treat
+ // the monitor as unresolved in release builds rather than archiving it prematurely.
+ if !inner.funding_spend_seen {
+ debug_assert!(false, "We should see funding spend by the time a monitor clears out");
+ is_all_funds_claimed = false;
+ }
+ }
+
+ match (inner.balances_empty_height, is_all_funds_claimed) {
+ (Some(balances_empty_height), true) => {
+ // Claimed all funds; check whether we've reached the block threshold.
+ const BLOCKS_THRESHOLD: u32 = 4032; // ~four weeks
+ return current_height >= balances_empty_height + BLOCKS_THRESHOLD;
+ },
+ (Some(_), false) => {
+ // Previously assumed we had claimed all funds, but new claimable balances
+ // appeared. Should not happen in practice; reset the timer.
+ debug_assert!(false, "Thought we were done claiming funds, but claimable_balances now has entries");
+ log_error!(logger,
+ "WARNING: LDK thought it was done claiming all the available funds in the ChannelMonitor for channel {}, but later decided it had more to claim. This is potentially an important bug in LDK, please report it at https://github.com/lightningdevkit/rust-lightning/issues/new",
+ inner.get_funding_txo().0);
+ inner.balances_empty_height = None;
+ false
+ },
+ (None, true) => {
+ // First time we've seen all funds claimed: start the countdown by recording
+ // the current block height in `balances_empty_height`.
+ inner.balances_empty_height = Some(current_height);
+ false
+ },
+ (None, false) => {
+ // Still have funds to claim.
+ false
+ },
+ }
+ }
+
#[cfg(test)]
pub fn get_counterparty_payment_script(&self) -> ScriptBuf {
self.inner.lock().unwrap().counterparty_payment_script.clone()
revocation_pubkey: broadcasted_holder_revokable_script.2,
channel_keys_id: self.channel_keys_id,
channel_value_satoshis: self.channel_value_satoshis,
+ channel_transaction_parameters: Some(self.onchain_tx_handler.channel_transaction_parameters.clone()),
}));
}
}
let mut spendable_txids_confirmed = Some(Vec::new());
let mut counterparty_fulfilled_htlcs = Some(new_hash_map());
let mut initial_counterparty_commitment_info = None;
+ let mut balances_empty_height = None;
let mut channel_id = None;
read_tlv_fields!(reader, {
(1, funding_spend_confirmed, option),
(15, counterparty_fulfilled_htlcs, option),
(17, initial_counterparty_commitment_info, option),
(19, channel_id, option),
+ (21, balances_empty_height, option),
});
// `HolderForceClosedWithInfo` replaced `HolderForceClosed` in v0.0.122. If we have both
best_block,
counterparty_node_id,
initial_counterparty_commitment_info,
+ balances_empty_height,
})))
}
}
use crate::ln::ChannelId;
use crate::sign::ecdsa::WriteableEcdsaChannelSigner;
use crate::chain::transaction::{OutPoint, TransactionData};
+use crate::impl_writeable_tlv_based;
#[allow(unused_imports)]
use crate::prelude::*;
}
}
+impl_writeable_tlv_based!(BestBlock, {
+ (0, block_hash, required),
+ (2, height, required),
+});
+
/// The `Listen` trait is used to notify when blocks have been connected or disconnected from the
/// chain.
pub use bump_transaction::BumpTransactionEvent;
+use crate::blinded_path::payment::{Bolt12OfferContext, Bolt12RefundContext, PaymentContext, PaymentContextRef};
use crate::sign::SpendableOutputDescriptor;
use crate::ln::channelmanager::{InterceptId, PaymentId, RecipientOnionFields};
use crate::ln::channel::FUNDING_CONF_DEADLINE_BLOCKS;
/// spontaneous payment or a "conventional" lightning payment that's paying an invoice.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum PaymentPurpose {
- /// Information for receiving a payment that we generated an invoice for.
- InvoicePayment {
+ /// A payment for a BOLT 11 invoice.
+ Bolt11InvoicePayment {
/// The preimage to the payment_hash, if the payment hash (and secret) were fetched via
- /// [`ChannelManager::create_inbound_payment`]. If provided, this can be handed directly to
- /// [`ChannelManager::claim_funds`].
+ /// [`ChannelManager::create_inbound_payment`]. When handling [`Event::PaymentClaimable`],
+ /// this can be passed directly to [`ChannelManager::claim_funds`] to claim the payment. No
+ /// action is needed when seen in [`Event::PaymentClaimed`].
///
/// [`ChannelManager::create_inbound_payment`]: crate::ln::channelmanager::ChannelManager::create_inbound_payment
/// [`ChannelManager::claim_funds`]: crate::ln::channelmanager::ChannelManager::claim_funds
/// [`ChannelManager::create_inbound_payment_for_hash`]: crate::ln::channelmanager::ChannelManager::create_inbound_payment_for_hash
payment_secret: PaymentSecret,
},
+ /// A payment for a BOLT 12 [`Offer`].
+ ///
+ /// [`Offer`]: crate::offers::offer::Offer
+ Bolt12OfferPayment {
+ /// The preimage to the payment hash. When handling [`Event::PaymentClaimable`], this can,
+ /// if provided, be passed directly to [`ChannelManager::claim_funds`] to claim the payment.
+ /// No action is needed when seen in [`Event::PaymentClaimed`].
+ ///
+ /// [`ChannelManager::claim_funds`]: crate::ln::channelmanager::ChannelManager::claim_funds
+ payment_preimage: Option<PaymentPreimage>,
+ /// The secret used to authenticate the sender to the recipient, preventing a number of
+ /// de-anonymization attacks while routing a payment.
+ ///
+ /// See [`PaymentPurpose::Bolt11InvoicePayment::payment_secret`] for further details.
+ payment_secret: PaymentSecret,
+ /// The context of the payment such as information about the corresponding [`Offer`] and
+ /// [`InvoiceRequest`].
+ ///
+ /// [`Offer`]: crate::offers::offer::Offer
+ /// [`InvoiceRequest`]: crate::offers::invoice_request::InvoiceRequest
+ payment_context: Bolt12OfferContext,
+ },
+ /// A payment for a BOLT 12 [`Refund`].
+ ///
+ /// [`Refund`]: crate::offers::refund::Refund
+ Bolt12RefundPayment {
+ /// The preimage to the payment hash. When handling [`Event::PaymentClaimable`], this can,
+ /// if provided, be passed directly to [`ChannelManager::claim_funds`] to claim the payment.
+ /// No action is needed when seen in [`Event::PaymentClaimed`].
+ ///
+ /// [`ChannelManager::claim_funds`]: crate::ln::channelmanager::ChannelManager::claim_funds
+ payment_preimage: Option<PaymentPreimage>,
+ /// The secret used to authenticate the sender to the recipient, preventing a number of
+ /// de-anonymization attacks while routing a payment.
+ ///
+ /// See [`PaymentPurpose::Bolt11InvoicePayment::payment_secret`] for further details.
+ payment_secret: PaymentSecret,
+ /// The context of the payment such as information about the corresponding [`Refund`].
+ ///
+ /// [`Refund`]: crate::offers::refund::Refund
+ payment_context: Bolt12RefundContext,
+ },
/// Because this is a spontaneous payment, the payer generated their own preimage rather than us
/// (the payee) providing a preimage.
SpontaneousPayment(PaymentPreimage),
/// Returns the preimage for this payment, if it is known.
pub fn preimage(&self) -> Option<PaymentPreimage> {
match self {
- PaymentPurpose::InvoicePayment { payment_preimage, .. } => *payment_preimage,
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, .. } => *payment_preimage,
+ PaymentPurpose::Bolt12OfferPayment { payment_preimage, .. } => *payment_preimage,
+ PaymentPurpose::Bolt12RefundPayment { payment_preimage, .. } => *payment_preimage,
PaymentPurpose::SpontaneousPayment(preimage) => Some(*preimage),
}
}
+
+ pub(crate) fn is_keysend(&self) -> bool {
+ match self {
+ PaymentPurpose::Bolt11InvoicePayment { .. } => false,
+ PaymentPurpose::Bolt12OfferPayment { .. } => false,
+ PaymentPurpose::Bolt12RefundPayment { .. } => false,
+ PaymentPurpose::SpontaneousPayment(..) => true,
+ }
+ }
+
+ pub(crate) fn from_parts(
+ payment_preimage: Option<PaymentPreimage>, payment_secret: PaymentSecret,
+ payment_context: Option<PaymentContext>,
+ ) -> Self {
+ match payment_context {
+ Some(PaymentContext::Unknown(_)) | None => {
+ PaymentPurpose::Bolt11InvoicePayment {
+ payment_preimage,
+ payment_secret,
+ }
+ },
+ Some(PaymentContext::Bolt12Offer(context)) => {
+ PaymentPurpose::Bolt12OfferPayment {
+ payment_preimage,
+ payment_secret,
+ payment_context: context,
+ }
+ },
+ Some(PaymentContext::Bolt12Refund(context)) => {
+ PaymentPurpose::Bolt12RefundPayment {
+ payment_preimage,
+ payment_secret,
+ payment_context: context,
+ }
+ },
+ }
+ }
}
impl_writeable_tlv_based_enum!(PaymentPurpose,
- (0, InvoicePayment) => {
+ (0, Bolt11InvoicePayment) => {
(0, payment_preimage, option),
(2, payment_secret, required),
- };
+ },
+ (4, Bolt12OfferPayment) => {
+ (0, payment_preimage, option),
+ (2, payment_secret, required),
+ (4, payment_context, required),
+ },
+ (6, Bolt12RefundPayment) => {
+ (0, payment_preimage, option),
+ (2, payment_secret, required),
+ (4, payment_context, required),
+ },
+ ;
(2, SpontaneousPayment)
);
},
/// Used to indicate that an output which you should know how to spend was confirmed on chain
/// and is now spendable.
- /// Such an output will *not* ever be spent by rust-lightning, and are not at risk of your
+ ///
+ /// Such an output will *never* be spent directly by LDK, and are not at risk of your
/// counterparty spending them due to some kind of timeout. Thus, you need to store them
/// somewhere and spend them when you create on-chain transactions.
+ ///
+ /// You may hand them to the [`OutputSweeper`] utility which will store and (re-)generate spending
+ /// transactions for you.
+ ///
+ /// [`OutputSweeper`]: crate::util::sweep::OutputSweeper
SpendableOutputs {
/// The outputs which you should store as spendable by you.
outputs: Vec<SpendableOutputDescriptor>,
/// The features that this channel will operate with.
channel_type: ChannelTypeFeatures,
},
- /// Used to indicate that a previously opened channel with the given `channel_id` is in the
- /// process of closure.
+ /// Used to indicate that a channel that got past the initial handshake with the given `channel_id` is in the
+ /// process of closure. This includes previously opened channels, and channels that time out from not being funded.
///
/// Note that this event is only triggered for accepted channels: if the
/// [`UserConfig::manually_accept_inbound_channels`] config flag is set to true and the channel is
1u8.write(writer)?;
let mut payment_secret = None;
let payment_preimage;
+ let mut payment_context = None;
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage: preimage, payment_secret: secret } => {
+ PaymentPurpose::Bolt11InvoicePayment {
+ payment_preimage: preimage, payment_secret: secret
+ } => {
+ payment_secret = Some(secret);
+ payment_preimage = *preimage;
+ },
+ PaymentPurpose::Bolt12OfferPayment {
+ payment_preimage: preimage, payment_secret: secret, payment_context: context
+ } => {
+ payment_secret = Some(secret);
+ payment_preimage = *preimage;
+ payment_context = Some(PaymentContextRef::Bolt12Offer(context));
+ },
+ PaymentPurpose::Bolt12RefundPayment {
+ payment_preimage: preimage, payment_secret: secret, payment_context: context
+ } => {
payment_secret = Some(secret);
payment_preimage = *preimage;
+ payment_context = Some(PaymentContextRef::Bolt12Refund(context));
},
PaymentPurpose::SpontaneousPayment(preimage) => {
payment_preimage = Some(*preimage);
(8, payment_preimage, option),
(9, onion_fields, option),
(10, skimmed_fee_opt, option),
+ (11, payment_context, option),
});
},
&Event::PaymentSent { ref payment_id, ref payment_preimage, ref payment_hash, ref fee_paid_msat } => {
let mut claim_deadline = None;
let mut via_user_channel_id = None;
let mut onion_fields = None;
+ let mut payment_context = None;
read_tlv_fields!(reader, {
(0, payment_hash, required),
(1, receiver_node_id, option),
(8, payment_preimage, option),
(9, onion_fields, option),
(10, counterparty_skimmed_fee_msat_opt, option),
+ (11, payment_context, option),
});
let purpose = match payment_secret {
- Some(secret) => PaymentPurpose::InvoicePayment {
- payment_preimage,
- payment_secret: secret
- },
+ Some(secret) => PaymentPurpose::from_parts(payment_preimage, secret, payment_context),
None if payment_preimage.is_some() => PaymentPurpose::SpontaneousPayment(payment_preimage.unwrap()),
None => return Err(msgs::DecodeError::InvalidValue),
};
use bitcoin::secp256k1::{PublicKey, Secp256k1, SecretKey};
use crate::blinded_path::BlindedPath;
-use crate::blinded_path::payment::{ForwardNode, ForwardTlvs, PaymentConstraints, PaymentRelay, ReceiveTlvs};
+use crate::blinded_path::payment::{ForwardNode, ForwardTlvs, PaymentConstraints, PaymentContext, PaymentRelay, ReceiveTlvs};
use crate::events::{Event, HTLCDestination, MessageSendEvent, MessageSendEventsProvider, PaymentFailureReason};
use crate::ln::PaymentSecret;
use crate::ln::channelmanager;
htlc_minimum_msat:
intro_node_min_htlc_opt.unwrap_or_else(|| channel_upds.last().unwrap().htlc_minimum_msat),
},
+ payment_context: PaymentContext::unknown(),
};
let mut secp_ctx = Secp256k1::new();
BlindedPath::new_for_payment(
max_cltv_expiry: u32::max_value(),
htlc_minimum_msat: chan_upd.htlc_minimum_msat,
},
+ payment_context: PaymentContext::unknown(),
};
let mut secp_ctx = Secp256k1::new();
let blinded_path = BlindedPath::one_hop_for_payment(
max_cltv_expiry: u32::max_value(),
htlc_minimum_msat: chan_upd_1_3.htlc_minimum_msat,
},
+ payment_context: PaymentContext::unknown(),
};
let blinded_path = BlindedPath::one_hop_for_payment(
nodes[3].node.get_our_node_id(), payee_tlvs, TEST_FINAL_CLTV as u16,
max_cltv_expiry: u32::max_value(),
htlc_minimum_msat: chan_upd.htlc_minimum_msat,
},
+ payment_context: PaymentContext::unknown(),
};
let mut secp_ctx = Secp256k1::new();
let blinded_path = BlindedPath::one_hop_for_payment(
assert_eq!(receiver_node_id.unwrap(), nodes[1].node.get_our_node_id());
assert_eq!(via_channel_id, Some(channel_id));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(payment_secret_1, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
assert_eq!(receiver_node_id.unwrap(), nodes[1].node.get_our_node_id());
assert_eq!(via_channel_id, Some(channel_id));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(payment_secret_2, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
assert_eq!(receiver_node_id.unwrap(), nodes[1].node.get_our_node_id());
assert_eq!(via_channel_id, Some(channel_id));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(our_payment_secret, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
assert_eq!(via_channel_id, Some(channel_id));
assert_eq!(via_user_channel_id, Some(42));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(payment_secret_2, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
assert_eq!(receiver_node_id.unwrap(), nodes[0].node.get_our_node_id());
assert_eq!(via_channel_id, Some(channel_id));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(payment_secret_3, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
pub(super) enum ChannelPhase<SP: Deref> where SP::Target: SignerProvider {
UnfundedOutboundV1(OutboundV1Channel<SP>),
UnfundedInboundV1(InboundV1Channel<SP>),
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
UnfundedOutboundV2(OutboundV2Channel<SP>),
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
UnfundedInboundV2(InboundV2Channel<SP>),
Funded(Channel<SP>),
}
ChannelPhase::Funded(chan) => &chan.context,
ChannelPhase::UnfundedOutboundV1(chan) => &chan.context,
ChannelPhase::UnfundedInboundV1(chan) => &chan.context,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(chan) => &chan.context,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(chan) => &chan.context,
}
}
ChannelPhase::Funded(ref mut chan) => &mut chan.context,
ChannelPhase::UnfundedOutboundV1(ref mut chan) => &mut chan.context,
ChannelPhase::UnfundedInboundV1(ref mut chan) => &mut chan.context,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(ref mut chan) => &mut chan.context,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(ref mut chan) => &mut chan.context,
}
}
///
/// This is used both for outbound and inbound channels and has lower bound
/// of `dust_limit_satoshis`.
-#[cfg(dual_funding)]
+#[cfg(any(dual_funding, splicing))]
fn get_v2_channel_reserve_satoshis(channel_value_satoshis: u64, dust_limit_satoshis: u64) -> u64 {
// Fixed at 1% of channel value by spec.
let (q, _) = channel_value_satoshis.overflowing_div(100);
}
/// Context for dual-funded channels.
-#[cfg(dual_funding)]
+#[cfg(any(dual_funding, splicing))]
pub(super) struct DualFundingChannelContext {
/// The amount in satoshis we will be contributing to the channel.
pub our_funding_satoshis: u64,
// Counterparty designates channel data owned by the another channel participant entity.
pub(super) struct Channel<SP: Deref> where SP::Target: SignerProvider {
pub context: ChannelContext<SP>,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
pub dual_funding_channel_context: Option<DualFundingChannelContext>,
}
let mut channel = Channel {
context: self.context,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
dual_funding_channel_context: None,
};
// `ChannelMonitor`.
let mut channel = Channel {
context: self.context,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
dual_funding_channel_context: None,
};
let need_channel_ready = channel.check_get_channel_ready(0).is_some();
}
// A not-yet-funded outbound (from holder) channel using V2 channel establishment.
-#[cfg(dual_funding)]
+#[cfg(any(dual_funding, splicing))]
pub(super) struct OutboundV2Channel<SP: Deref> where SP::Target: SignerProvider {
pub context: ChannelContext<SP>,
pub unfunded_context: UnfundedChannelContext,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
pub dual_funding_context: DualFundingChannelContext,
}
-#[cfg(dual_funding)]
+#[cfg(any(dual_funding, splicing))]
impl<SP: Deref> OutboundV2Channel<SP> where SP::Target: SignerProvider {
pub fn new<ES: Deref, F: Deref>(
fee_estimator: &LowerBoundedFeeEstimator<F>, entropy_source: &ES, signer_provider: &SP,
}
// A not-yet-funded inbound (from counterparty) channel using V2 channel establishment.
-#[cfg(dual_funding)]
+#[cfg(any(dual_funding, splicing))]
pub(super) struct InboundV2Channel<SP: Deref> where SP::Target: SignerProvider {
pub context: ChannelContext<SP>,
pub unfunded_context: UnfundedChannelContext,
pub dual_funding_context: DualFundingChannelContext,
}
-#[cfg(dual_funding)]
+#[cfg(any(dual_funding, splicing))]
impl<SP: Deref> InboundV2Channel<SP> where SP::Target: SignerProvider {
/// Creates a new dual-funded channel from a remote side's request for one.
/// Assumes chain_hash has already been checked and corresponds with what we expect!
blocked_monitor_updates: blocked_monitor_updates.unwrap(),
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
dual_funding_channel_context: None,
})
}
//! Keys used to generate commitment transactions.
//! See: <https://github.com/lightning/bolts/blob/master/03-transactions.md#keys>
-use bitcoin::hashes::Hash;
-use bitcoin::hashes::HashEngine;
-use bitcoin::secp256k1::Scalar;
-use bitcoin::secp256k1::SecretKey;
-use bitcoin::secp256k1::Secp256k1;
-use bitcoin::secp256k1;
+use crate::io;
use crate::ln::msgs::DecodeError;
use crate::util::ser::Readable;
-use crate::io;
-use crate::util::ser::Writer;
use crate::util::ser::Writeable;
-use bitcoin::secp256k1::PublicKey;
+use crate::util::ser::Writer;
use bitcoin::hashes::sha256::Hash as Sha256;
+use bitcoin::hashes::Hash;
+use bitcoin::hashes::HashEngine;
+use bitcoin::secp256k1;
+use bitcoin::secp256k1::PublicKey;
+use bitcoin::secp256k1::Scalar;
+use bitcoin::secp256k1::Secp256k1;
+use bitcoin::secp256k1::SecretKey;
macro_rules! doc_comment {
($x:expr, $($tt:tt)*) => {
pub fn to_public_key(&self) -> PublicKey {
self.0
}
+
+ /// Derives a per-commitment-transaction (e.g. an HTLC key or delayed_payment key) private key
+ /// addition tweak from a basepoint and a per_commitment_point:
+ /// `privkey = basepoint_secret + SHA256(per_commitment_point || basepoint)`
+ /// This calculates only the hash part of that tweak derivation, which is used to ensure
+ /// that each key is unique and cannot be guessed by an external party. It is equivalent
+ /// to the `from_basepoint` method, but without the addition operation, providing just the
+ /// tweak from the hash of the per_commitment_point and the basepoint.
+ pub fn derive_add_tweak(&self, per_commitment_point: &PublicKey) -> [u8; 32] {
+ let mut sha = Sha256::engine();
+ // Hash input order matters: per_commitment_point first, then the basepoint, matching
+ // the `SHA256(per_commitment_point || basepoint)` formula above.
+ sha.input(&per_commitment_point.serialize());
+ sha.input(&self.to_public_key().serialize());
+ Sha256::from_engine(sha).to_byte_array()
+ }
}
impl From<PublicKey> for $BasepointT {
Self(value)
}
}
-
- }
+ };
}
macro_rules! key_impl {
($BasepointT:ty, $KeyName:expr) => {
Ok(Self(key))
}
}
- }
+ };
}
-
-
/// Base key used in conjunction with a `per_commitment_point` to generate a [`DelayedPaymentKey`].
///
/// The delayed payment key is used to pay the commitment state broadcaster their
basepoint_impl!(DelayedPaymentBasepoint);
key_read_write!(DelayedPaymentBasepoint);
-
/// A derived key built from a [`DelayedPaymentBasepoint`] and `per_commitment_point`.
///
/// The delayed payment key is used to pay the commitment state broadcaster their
/// Derives a per-commitment-transaction public key (eg an htlc key or a delayed_payment key)
/// from the base point and the per_commitment_key. This is the public equivalent of
/// derive_private_key - using only public keys to derive a public key instead of private keys.
-fn derive_public_key<T: secp256k1::Signing>(secp_ctx: &Secp256k1<T>, per_commitment_point: &PublicKey, base_point: &PublicKey) -> PublicKey {
+fn derive_public_key<T: secp256k1::Signing>(
+ secp_ctx: &Secp256k1<T>, per_commitment_point: &PublicKey, base_point: &PublicKey,
+) -> PublicKey {
let mut sha = Sha256::engine();
sha.input(&per_commitment_point.serialize());
sha.input(&base_point.serialize());
let res = Sha256::from_engine(sha).to_byte_array();
- let hashkey = PublicKey::from_secret_key(&secp_ctx,
- &SecretKey::from_slice(&res).expect("Hashes should always be valid keys unless SHA-256 is broken"));
+ add_public_key_tweak(secp_ctx, base_point, &res)
+}
+
+/// Adds a tweak to a public key to derive a new public key.
+///
+/// May panic if `tweak` is not the output of a cryptographic hash, as such a value may not
+/// correspond to a valid secp256k1 scalar (or may be the inverse of the base point's key).
+pub fn add_public_key_tweak<T: secp256k1::Signing>(
+ secp_ctx: &Secp256k1<T>, base_point: &PublicKey, tweak: &[u8; 32],
+) -> PublicKey {
+ let hashkey = PublicKey::from_secret_key(
+ &secp_ctx,
+ &SecretKey::from_slice(tweak)
+ .expect("Hashes should always be valid keys unless SHA-256 is broken"),
+ );
base_point.combine(&hashkey)
.expect("Addition only fails if the tweak is the inverse of the key. This is not possible when the tweak contains the hash of the key.")
}
basepoint_impl!(RevocationBasepoint);
key_read_write!(RevocationBasepoint);
-
/// The revocation key is used to allow a channel party to revoke their state - giving their
/// counterparty the required material to claim all of their funds if they broadcast that state.
///
///
/// [`chan_utils::derive_private_revocation_key`]: crate::ln::chan_utils::derive_private_revocation_key
pub fn from_basepoint<T: secp256k1::Verification>(
- secp_ctx: &Secp256k1<T>,
- countersignatory_basepoint: &RevocationBasepoint,
+ secp_ctx: &Secp256k1<T>, countersignatory_basepoint: &RevocationBasepoint,
per_commitment_point: &PublicKey,
) -> Self {
let rev_append_commit_hash_key = {
}
key_read_write!(RevocationKey);
-
#[cfg(test)]
mod test {
- use bitcoin::secp256k1::{Secp256k1, SecretKey, PublicKey};
- use bitcoin::hashes::hex::FromHex;
use super::derive_public_key;
+ use bitcoin::hashes::hex::FromHex;
+ use bitcoin::secp256k1::{PublicKey, Secp256k1, SecretKey};
#[test]
fn test_key_derivation() {
// Test vectors from BOLT 3 Appendix E:
let secp_ctx = Secp256k1::new();
- let base_secret = SecretKey::from_slice(&<Vec<u8>>::from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f").unwrap()[..]).unwrap();
- let per_commitment_secret = SecretKey::from_slice(&<Vec<u8>>::from_hex("1f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100").unwrap()[..]).unwrap();
+ let base_secret = SecretKey::from_slice(
+ &<Vec<u8>>::from_hex(
+ "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f",
+ )
+ .unwrap()[..],
+ )
+ .unwrap();
+ let per_commitment_secret = SecretKey::from_slice(
+ &<Vec<u8>>::from_hex(
+ "1f1e1d1c1b1a191817161514131211100f0e0d0c0b0a09080706050403020100",
+ )
+ .unwrap()[..],
+ )
+ .unwrap();
let base_point = PublicKey::from_secret_key(&secp_ctx, &base_secret);
- assert_eq!(base_point.serialize()[..], <Vec<u8>>::from_hex("036d6caac248af96f6afa7f904f550253a0f3ef3f5aa2fe6838a95b216691468e2").unwrap()[..]);
+ assert_eq!(
+ base_point.serialize()[..],
+ <Vec<u8>>::from_hex(
+ "036d6caac248af96f6afa7f904f550253a0f3ef3f5aa2fe6838a95b216691468e2"
+ )
+ .unwrap()[..]
+ );
let per_commitment_point = PublicKey::from_secret_key(&secp_ctx, &per_commitment_secret);
- assert_eq!(per_commitment_point.serialize()[..], <Vec<u8>>::from_hex("025f7117a78150fe2ef97db7cfc83bd57b2e2c0d0dd25eaf467a4a1c2a45ce1486").unwrap()[..]);
-
- assert_eq!(derive_public_key(&secp_ctx, &per_commitment_point, &base_point).serialize()[..],
- <Vec<u8>>::from_hex("0235f2dbfaa89b57ec7b055afe29849ef7ddfeb1cefdb9ebdc43f5494984db29e5").unwrap()[..]);
+ assert_eq!(
+ per_commitment_point.serialize()[..],
+ <Vec<u8>>::from_hex(
+ "025f7117a78150fe2ef97db7cfc83bd57b2e2c0d0dd25eaf467a4a1c2a45ce1486"
+ )
+ .unwrap()[..]
+ );
+
+ assert_eq!(
+ derive_public_key(&secp_ctx, &per_commitment_point, &base_point).serialize()[..],
+ <Vec<u8>>::from_hex(
+ "0235f2dbfaa89b57ec7b055afe29849ef7ddfeb1cefdb9ebdc43f5494984db29e5"
+ )
+ .unwrap()[..]
+ );
}
}
use bitcoin::{secp256k1, Sequence};
use crate::blinded_path::{BlindedPath, NodeIdLookUp};
-use crate::blinded_path::payment::{PaymentConstraints, ReceiveTlvs};
+use crate::blinded_path::payment::{Bolt12OfferContext, Bolt12RefundContext, PaymentConstraints, PaymentContext, ReceiveTlvs};
use crate::chain;
use crate::chain::{Confirm, ChannelMonitorUpdateStatus, Watch, BestBlock};
use crate::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator, LowerBoundedFeeEstimator};
use crate::offers::invoice::{BlindedPayInfo, Bolt12Invoice, DEFAULT_RELATIVE_EXPIRY, DerivedSigningPubkey, ExplicitSigningPubkey, InvoiceBuilder, UnsignedBolt12Invoice};
use crate::offers::invoice_error::InvoiceError;
use crate::offers::invoice_request::{DerivedPayerId, InvoiceRequestBuilder};
-use crate::offers::merkle::SignError;
use crate::offers::offer::{Offer, OfferBuilder};
use crate::offers::parse::Bolt12SemanticError;
use crate::offers::refund::{Refund, RefundBuilder};
/// [`Event::PaymentClaimable::onion_fields`] as
/// [`RecipientOnionFields::payment_metadata`].
payment_metadata: Option<Vec<u8>>,
+ /// The context of the payment included by the recipient in a blinded path, or `None` if a
+ /// blinded path was not used.
+ ///
+ /// Used in part to determine the [`events::PaymentPurpose`].
+ payment_context: Option<PaymentContext>,
/// CLTV expiry of the received HTLC.
///
/// Used to track when we should expire pending HTLCs that go unclaimed.
/// This is only here for backwards-compatibility in serialization, in the future it can be
/// removed, breaking clients running 0.0.106 and earlier.
_legacy_hop_data: Option<msgs::FinalOnionHopData>,
+ /// The context of the payment included by the recipient in a blinded path, or `None` if a
+ /// blinded path was not used.
+ ///
+ /// Used in part to determine the [`events::PaymentPurpose`].
+ payment_context: Option<PaymentContext>,
},
/// Contains the payer-provided preimage.
Spontaneous(PaymentPreimage),
match phase {
ChannelPhase::Funded(_) | ChannelPhase::UnfundedOutboundV1(_) => true,
ChannelPhase::UnfundedInboundV1(_) => false,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(_) => true,
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(_) => false,
}
)
/// // On the event processing thread
/// channel_manager.process_pending_events(&|event| match event {
/// Event::PaymentClaimable { payment_hash, purpose, .. } => match purpose {
-/// PaymentPurpose::InvoicePayment { payment_preimage: Some(payment_preimage), .. } => {
+/// PaymentPurpose::Bolt11InvoicePayment { payment_preimage: Some(payment_preimage), .. } => {
/// assert_eq!(payment_hash, known_payment_hash);
/// println!("Claiming payment {}", payment_hash);
/// channel_manager.claim_funds(payment_preimage);
/// },
-/// PaymentPurpose::InvoicePayment { payment_preimage: None, .. } => {
+/// PaymentPurpose::Bolt11InvoicePayment { payment_preimage: None, .. } => {
/// println!("Unknown payment hash: {}", payment_hash);
/// },
/// PaymentPurpose::SpontaneousPayment(payment_preimage) => {
/// println!("Claiming spontaneous payment {}", payment_hash);
/// channel_manager.claim_funds(payment_preimage);
/// },
+/// // ...
+/// # _ => {},
/// },
/// Event::PaymentClaimed { payment_hash, amount_msat, .. } => {
/// assert_eq!(payment_hash, known_payment_hash);
/// // On the event processing thread
/// channel_manager.process_pending_events(&|event| match event {
/// Event::PaymentClaimable { payment_hash, purpose, .. } => match purpose {
-/// PaymentPurpose::InvoicePayment { payment_preimage: Some(payment_preimage), .. } => {
+/// PaymentPurpose::Bolt12OfferPayment { payment_preimage: Some(payment_preimage), .. } => {
/// println!("Claiming payment {}", payment_hash);
/// channel_manager.claim_funds(payment_preimage);
/// },
-/// PaymentPurpose::InvoicePayment { payment_preimage: None, .. } => {
+/// PaymentPurpose::Bolt12OfferPayment { payment_preimage: None, .. } => {
/// println!("Unknown payment hash: {}", payment_hash);
/// },
/// // ...
/// #
/// # fn example<T: AChannelManager>(channel_manager: T, refund: &Refund) {
/// # let channel_manager = channel_manager.get_cm();
-/// match channel_manager.request_refund_payment(refund) {
-/// Ok(()) => println!("Requesting payment for refund"),
-/// Err(e) => println!("Unable to request payment for refund: {:?}", e),
-/// }
+/// let known_payment_hash = match channel_manager.request_refund_payment(refund) {
+/// Ok(invoice) => {
+/// let payment_hash = invoice.payment_hash();
+/// println!("Requesting refund payment {}", payment_hash);
+/// payment_hash
+/// },
+/// Err(e) => panic!("Unable to request payment for refund: {:?}", e),
+/// };
///
/// // On the event processing thread
/// channel_manager.process_pending_events(&|event| match event {
/// Event::PaymentClaimable { payment_hash, purpose, .. } => match purpose {
-/// PaymentPurpose::InvoicePayment { payment_preimage: Some(payment_preimage), .. } => {
+/// PaymentPurpose::Bolt12RefundPayment { payment_preimage: Some(payment_preimage), .. } => {
+/// assert_eq!(payment_hash, known_payment_hash);
/// println!("Claiming payment {}", payment_hash);
/// channel_manager.claim_funds(payment_preimage);
/// },
-/// PaymentPurpose::InvoicePayment { payment_preimage: None, .. } => {
+/// PaymentPurpose::Bolt12RefundPayment { payment_preimage: None, .. } => {
/// println!("Unknown payment hash: {}", payment_hash);
/// },
/// // ...
/// # _ => {},
/// },
/// Event::PaymentClaimed { payment_hash, amount_msat, .. } => {
+/// assert_eq!(payment_hash, known_payment_hash);
/// println!("Claimed {} msats", amount_msat);
/// },
/// // ...
ChannelPhase::UnfundedInboundV1(channel) => {
convert_chan_phase_err!($self, $err, channel, $channel_id, UNFUNDED_CHANNEL)
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(channel) => {
convert_chan_phase_err!($self, $err, channel, $channel_id, UNFUNDED_CHANNEL)
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(channel) => {
convert_chan_phase_err!($self, $err, channel, $channel_id, UNFUNDED_CHANNEL)
},
// Unfunded channel has no update
(None, chan_phase.context().get_counterparty_node_id())
},
- // TODO(dual_funding): Combine this match arm with above once #[cfg(dual_funding)] is removed.
- #[cfg(dual_funding)]
+ // TODO(dual_funding): Combine this match arm with above once #[cfg(any(dual_funding, splicing))] is removed.
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(_) | ChannelPhase::UnfundedInboundV2(_) => {
self.finish_close_channel(chan_phase.context_mut().force_shutdown(false, closure_reason));
// Unfunded channel has no update
let blinded_failure = routing.blinded_failure();
let (cltv_expiry, onion_payload, payment_data, phantom_shared_secret, mut onion_fields) = match routing {
PendingHTLCRouting::Receive {
- payment_data, payment_metadata, incoming_cltv_expiry, phantom_shared_secret,
- custom_tlvs, requires_blinded_error: _
+ payment_data, payment_metadata, payment_context,
+ incoming_cltv_expiry, phantom_shared_secret, custom_tlvs,
+ requires_blinded_error: _
} => {
let _legacy_hop_data = Some(payment_data.clone());
let onion_fields = RecipientOnionFields { payment_secret: Some(payment_data.payment_secret),
payment_metadata, custom_tlvs };
- (incoming_cltv_expiry, OnionPayload::Invoice { _legacy_hop_data },
+ (incoming_cltv_expiry, OnionPayload::Invoice { _legacy_hop_data, payment_context },
Some(payment_data), phantom_shared_secret, onion_fields)
},
PendingHTLCRouting::ReceiveKeysend {
macro_rules! check_total_value {
($purpose: expr) => {{
let mut payment_claimable_generated = false;
- let is_keysend = match $purpose {
- events::PaymentPurpose::SpontaneousPayment(_) => true,
- events::PaymentPurpose::InvoicePayment { .. } => false,
- };
+ let is_keysend = $purpose.is_keysend();
let mut claimable_payments = self.claimable_payments.lock().unwrap();
if claimable_payments.pending_claiming_payments.contains_key(&payment_hash) {
fail_htlc!(claimable_htlc, payment_hash);
match payment_secrets.entry(payment_hash) {
hash_map::Entry::Vacant(_) => {
match claimable_htlc.onion_payload {
- OnionPayload::Invoice { .. } => {
+ OnionPayload::Invoice { ref payment_context, .. } => {
let payment_data = payment_data.unwrap();
let (payment_preimage, min_final_cltv_expiry_delta) = match inbound_payment::verify(payment_hash, &payment_data, self.highest_seen_timestamp.load(Ordering::Acquire) as u64, &self.inbound_payment_key, &self.logger) {
Ok(result) => result,
fail_htlc!(claimable_htlc, payment_hash);
}
}
- let purpose = events::PaymentPurpose::InvoicePayment {
- payment_preimage: payment_preimage.clone(),
- payment_secret: payment_data.payment_secret,
- };
+ let purpose = events::PaymentPurpose::from_parts(
+ payment_preimage.clone(),
+ payment_data.payment_secret,
+ payment_context.clone(),
+ );
check_total_value!(purpose);
},
OnionPayload::Spontaneous(preimage) => {
}
},
hash_map::Entry::Occupied(inbound_payment) => {
- if let OnionPayload::Spontaneous(_) = claimable_htlc.onion_payload {
- log_trace!(self.logger, "Failing new keysend HTLC with payment_hash {} because we already have an inbound payment with the same payment hash", &payment_hash);
- fail_htlc!(claimable_htlc, payment_hash);
- }
+ let payment_context = match claimable_htlc.onion_payload {
+ OnionPayload::Spontaneous(_) => {
+ log_trace!(self.logger, "Failing new keysend HTLC with payment_hash {} because we already have an inbound payment with the same payment hash", &payment_hash);
+ fail_htlc!(claimable_htlc, payment_hash);
+ },
+ OnionPayload::Invoice { ref payment_context, .. } => payment_context,
+ };
let payment_data = payment_data.unwrap();
if inbound_payment.get().payment_secret != payment_data.payment_secret {
log_trace!(self.logger, "Failing new HTLC with payment_hash {} as it didn't match our expected payment secret.", &payment_hash);
&payment_hash, payment_data.total_msat, inbound_payment.get().min_value_msat.unwrap());
fail_htlc!(claimable_htlc, payment_hash);
} else {
- let purpose = events::PaymentPurpose::InvoicePayment {
- payment_preimage: inbound_payment.get().payment_preimage,
- payment_secret: payment_data.payment_secret,
- };
+ let purpose = events::PaymentPurpose::from_parts(
+ inbound_payment.get().payment_preimage,
+ payment_data.payment_secret,
+ payment_context.clone(),
+ );
let payment_claimable_generated = check_total_value!(purpose);
if payment_claimable_generated {
inbound_payment.remove_entry();
process_unfunded_channel_tick(chan_id, &mut chan.context, &mut chan.unfunded_context,
pending_msg_events, counterparty_node_id)
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(chan) => {
process_unfunded_channel_tick(chan_id, &mut chan.context, &mut chan.unfunded_context,
pending_msg_events, counterparty_node_id)
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(chan) => {
process_unfunded_channel_tick(chan_id, &mut chan.context, &mut chan.unfunded_context,
pending_msg_events, counterparty_node_id)
num_unfunded_channels += 1;
}
},
- // TODO(dual_funding): Combine this match arm with above once #[cfg(dual_funding)] is removed.
- #[cfg(dual_funding)]
+ // TODO(dual_funding): Combine this match arm with above once #[cfg(any(dual_funding, splicing))] is removed.
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(chan) => {
// Only inbound V2 channels that are not 0conf and that we do not contribute to will be
// included in the unfunded count.
// Outbound channels don't contribute to the unfunded count in the DoS context.
continue;
},
- // TODO(dual_funding): Combine this match arm with above once #[cfg(dual_funding)] is removed.
- #[cfg(dual_funding)]
+ // TODO(dual_funding): Combine this match arm with above once #[cfg(any(dual_funding, splicing))] is removed.
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(_) => {
// Outbound channels don't contribute to the unfunded count in the DoS context.
continue;
finish_shutdown = Some(chan.context_mut().force_shutdown(false, ClosureReason::CounterpartyCoopClosedUnfundedChannel));
},
// TODO(dual_funding): Combine this match arm with above.
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(_) | ChannelPhase::UnfundedOutboundV2(_) => {
let context = phase.context_mut();
log_error!(self.logger, "Immediately closing unfunded channel {} as peer asked to cooperatively shut it down (which is unnecessary)", &msg.channel_id);
///
/// The resulting invoice uses a [`PaymentHash`] recognized by the [`ChannelManager`] and a
/// [`BlindedPath`] containing the [`PaymentSecret`] needed to reconstruct the corresponding
- /// [`PaymentPreimage`].
+ /// [`PaymentPreimage`]. It is returned purely for informational purposes.
///
/// # Limitations
///
/// the invoice.
///
/// [`Bolt12Invoice`]: crate::offers::invoice::Bolt12Invoice
- pub fn request_refund_payment(&self, refund: &Refund) -> Result<(), Bolt12SemanticError> {
+ pub fn request_refund_payment(
+ &self, refund: &Refund
+ ) -> Result<Bolt12Invoice, Bolt12SemanticError> {
let expanded_key = &self.inbound_payment_key;
let entropy = &*self.entropy_source;
let secp_ctx = &self.secp_ctx;
match self.create_inbound_payment(Some(amount_msats), relative_expiry, None) {
Ok((payment_hash, payment_secret)) => {
- let payment_paths = self.create_blinded_payment_paths(amount_msats, payment_secret)
+ let payment_context = PaymentContext::Bolt12Refund(Bolt12RefundContext {});
+ let payment_paths = self.create_blinded_payment_paths(
+ amount_msats, payment_secret, payment_context
+ )
.map_err(|_| Bolt12SemanticError::MissingPaths)?;
#[cfg(feature = "std")]
let mut pending_offers_messages = self.pending_offers_messages.lock().unwrap();
if refund.paths().is_empty() {
let message = new_pending_onion_message(
- OffersMessage::Invoice(invoice),
+ OffersMessage::Invoice(invoice.clone()),
Destination::Node(refund.payer_id()),
Some(reply_path),
);
}
}
- Ok(())
+ Ok(invoice)
},
Err(()) => Err(Bolt12SemanticError::InvalidAmount),
}
/// This differs from [`create_inbound_payment_for_hash`] only in that it generates the
/// [`PaymentHash`] and [`PaymentPreimage`] for you.
///
- /// The [`PaymentPreimage`] will ultimately be returned to you in the [`PaymentClaimable`], which
- /// will have the [`PaymentClaimable::purpose`] be [`PaymentPurpose::InvoicePayment`] with
- /// its [`PaymentPurpose::InvoicePayment::payment_preimage`] field filled in. That should then be
- /// passed directly to [`claim_funds`].
+ /// The [`PaymentPreimage`] will ultimately be returned to you in the [`PaymentClaimable`] event,
+ /// whose [`PaymentClaimable::purpose`] will return `Some` from [`PaymentPurpose::preimage`]. That
+ /// preimage should then be passed directly to [`claim_funds`].
///
/// See [`create_inbound_payment_for_hash`] for detailed documentation on behavior and requirements.
///
/// [`claim_funds`]: Self::claim_funds
/// [`PaymentClaimable`]: events::Event::PaymentClaimable
/// [`PaymentClaimable::purpose`]: events::Event::PaymentClaimable::purpose
- /// [`PaymentPurpose::InvoicePayment`]: events::PaymentPurpose::InvoicePayment
- /// [`PaymentPurpose::InvoicePayment::payment_preimage`]: events::PaymentPurpose::InvoicePayment::payment_preimage
+ /// [`PaymentPurpose::preimage`]: events::PaymentPurpose::preimage
/// [`create_inbound_payment_for_hash`]: Self::create_inbound_payment_for_hash
pub fn create_inbound_payment(&self, min_value_msat: Option<u64>, invoice_expiry_delta_secs: u32,
min_final_cltv_expiry_delta: Option<u16>) -> Result<(PaymentHash, PaymentSecret), ()> {
/// Creates multi-hop blinded payment paths for the given `amount_msats` by delegating to
/// [`Router::create_blinded_payment_paths`].
fn create_blinded_payment_paths(
- &self, amount_msats: u64, payment_secret: PaymentSecret
+ &self, amount_msats: u64, payment_secret: PaymentSecret, payment_context: PaymentContext
) -> Result<Vec<(BlindedPayInfo, BlindedPath)>, ()> {
let secp_ctx = &self.secp_ctx;
max_cltv_expiry,
htlc_minimum_msat: 1,
},
+ payment_context,
};
self.router.create_blinded_payment_paths(
payee_node_id, first_hops, payee_tlvs, amount_msats, secp_ctx
// Retain unfunded channels.
ChannelPhase::UnfundedOutboundV1(_) | ChannelPhase::UnfundedInboundV1(_) => true,
// TODO(dual_funding): Combine this match arm with above.
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(_) | ChannelPhase::UnfundedInboundV2(_) => true,
ChannelPhase::Funded(channel) => {
let res = f(channel);
msg.channel_id.clone())), *counterparty_node_id);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice(&self, counterparty_node_id: &PublicKey, msg: &msgs::Splice) {
let _: Result<(), _> = handle_error!(self, Err(MsgHandleErrInternal::send_err_msg_no_close(
"Splicing not supported".to_owned(),
msg.channel_id.clone())), *counterparty_node_id);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_ack(&self, counterparty_node_id: &PublicKey, msg: &msgs::SpliceAck) {
let _: Result<(), _> = handle_error!(self, Err(MsgHandleErrInternal::send_err_msg_no_close(
"Splicing not supported (splice_ack)".to_owned(),
msg.channel_id.clone())), *counterparty_node_id);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_locked(&self, counterparty_node_id: &PublicKey, msg: &msgs::SpliceLocked) {
let _: Result<(), _> = handle_error!(self, Err(MsgHandleErrInternal::send_err_msg_no_close(
"Splicing not supported (splice_locked)".to_owned(),
ChannelPhase::UnfundedInboundV1(chan) => {
&mut chan.context
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(chan) => {
&mut chan.context
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(chan) => {
&mut chan.context
},
});
}
- // TODO(dual_funding): Combine this match arm with above once #[cfg(dual_funding)] is removed.
- #[cfg(dual_funding)]
+ // TODO(dual_funding): Combine this match arm with above once #[cfg(any(dual_funding, splicing))] is removed.
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedOutboundV2(chan) => {
pending_msg_events.push(events::MessageSendEvent::SendOpenChannelV2 {
node_id: chan.context.get_counterparty_node_id(),
debug_assert!(false);
}
- // TODO(dual_funding): Combine this match arm with above once #[cfg(dual_funding)] is removed.
- #[cfg(dual_funding)]
+ // TODO(dual_funding): Combine this match arm with above once #[cfg(any(dual_funding, splicing))] is removed.
+ #[cfg(any(dual_funding, splicing))]
ChannelPhase::UnfundedInboundV2(channel) => {
// Since unfunded inbound channel maps are cleared upon disconnecting a peer,
// they are not persisted and won't be recovered after a crash.
return;
}
},
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
Some(ChannelPhase::UnfundedOutboundV2(ref mut chan)) => {
if let Ok(msg) = chan.maybe_handle_error_without_close(self.chain_hash, &self.fee_estimator) {
peer_state.pending_msg_events.push(events::MessageSendEvent::SendOpenChannelV2 {
}
},
None | Some(ChannelPhase::UnfundedInboundV1(_) | ChannelPhase::Funded(_)) => (),
- #[cfg(dual_funding)]
+ #[cfg(any(dual_funding, splicing))]
Some(ChannelPhase::UnfundedInboundV2(_)) => (),
}
}
},
};
+ let payment_context = PaymentContext::Bolt12Offer(Bolt12OfferContext {
+ offer_id: invoice_request.offer_id,
+ invoice_request: invoice_request.fields(),
+ });
let payment_paths = match self.create_blinded_payment_paths(
- amount_msats, payment_secret
+ amount_msats, payment_secret, payment_context
) {
Ok(payment_paths) => payment_paths,
Err(()) => {
self.highest_seen_timestamp.load(Ordering::Acquire) as u64
);
- if invoice_request.keys.is_some() {
+ let response = if invoice_request.keys.is_some() {
#[cfg(feature = "std")]
let builder = invoice_request.respond_using_derived_keys(
payment_paths, payment_hash
let builder = invoice_request.respond_using_derived_keys_no_std(
payment_paths, payment_hash, created_at
);
- let builder: Result<InvoiceBuilder<DerivedSigningPubkey>, _> =
- builder.map(|b| b.into());
- match builder.and_then(|b| b.allow_mpp().build_and_sign(secp_ctx)) {
- Ok(invoice) => Some(OffersMessage::Invoice(invoice)),
- Err(error) => Some(OffersMessage::InvoiceError(error.into())),
- }
+ builder
+ .map(InvoiceBuilder::<DerivedSigningPubkey>::from)
+ .and_then(|builder| builder.allow_mpp().build_and_sign(secp_ctx))
+ .map_err(InvoiceError::from)
} else {
#[cfg(feature = "std")]
let builder = invoice_request.respond_with(payment_paths, payment_hash);
let builder = invoice_request.respond_with_no_std(
payment_paths, payment_hash, created_at
);
- let builder: Result<InvoiceBuilder<ExplicitSigningPubkey>, _> =
- builder.map(|b| b.into());
- let response = builder.and_then(|builder| builder.allow_mpp().build())
- .map_err(|e| OffersMessage::InvoiceError(e.into()))
+ builder
+ .map(InvoiceBuilder::<ExplicitSigningPubkey>::from)
+ .and_then(|builder| builder.allow_mpp().build())
+ .map_err(InvoiceError::from)
.and_then(|invoice| {
#[cfg(c_bindings)]
let mut invoice = invoice;
- match invoice.sign(|invoice: &UnsignedBolt12Invoice|
- self.node_signer.sign_bolt12_invoice(invoice)
- ) {
- Ok(invoice) => Ok(OffersMessage::Invoice(invoice)),
- Err(SignError::Signing) => Err(OffersMessage::InvoiceError(
- InvoiceError::from_string("Failed signing invoice".to_string())
- )),
- Err(SignError::Verification(_)) => Err(OffersMessage::InvoiceError(
- InvoiceError::from_string("Failed invoice signature verification".to_string())
- )),
- }
- });
- match response {
- Ok(invoice) => Some(invoice),
- Err(error) => Some(error),
- }
+ invoice
+ .sign(|invoice: &UnsignedBolt12Invoice|
+ self.node_signer.sign_bolt12_invoice(invoice)
+ )
+ .map_err(InvoiceError::from)
+ })
+ };
+
+ match response {
+ Ok(invoice) => Some(OffersMessage::Invoice(invoice)),
+ Err(error) => Some(OffersMessage::InvoiceError(error.into())),
}
},
OffersMessage::Invoice(invoice) => {
- match invoice.verify(expanded_key, secp_ctx) {
- Err(()) => {
- Some(OffersMessage::InvoiceError(InvoiceError::from_string("Unrecognized invoice".to_owned())))
- },
- Ok(_) if invoice.invoice_features().requires_unknown_bits_from(&self.bolt12_invoice_features()) => {
- Some(OffersMessage::InvoiceError(Bolt12SemanticError::UnknownRequiredFeatures.into()))
- },
- Ok(payment_id) => {
- if let Err(e) = self.send_payment_for_bolt12_invoice(&invoice, payment_id) {
- log_trace!(self.logger, "Failed paying invoice: {:?}", e);
- Some(OffersMessage::InvoiceError(InvoiceError::from_string(format!("{:?}", e))))
+ let response = invoice
+ .verify(expanded_key, secp_ctx)
+ .map_err(|()| InvoiceError::from_string("Unrecognized invoice".to_owned()))
+ .and_then(|payment_id| {
+ let features = self.bolt12_invoice_features();
+ if invoice.invoice_features().requires_unknown_bits_from(&features) {
+ Err(InvoiceError::from(Bolt12SemanticError::UnknownRequiredFeatures))
} else {
- None
+ self.send_payment_for_bolt12_invoice(&invoice, payment_id)
+ .map_err(|e| {
+ log_trace!(self.logger, "Failed paying invoice: {:?}", e);
+ InvoiceError::from_string(format!("{:?}", e))
+ })
}
- },
+ });
+
+ match response {
+ Ok(()) => None,
+ Err(e) => Some(OffersMessage::InvoiceError(e)),
}
},
OffersMessage::InvoiceError(invoice_error) => {
(3, payment_metadata, option),
(5, custom_tlvs, optional_vec),
(7, requires_blinded_error, (default_value, false)),
+ (9, payment_context, option),
},
(2, ReceiveKeysend) => {
(0, payment_preimage, required),
impl Writeable for ClaimableHTLC {
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
- let (payment_data, keysend_preimage) = match &self.onion_payload {
- OnionPayload::Invoice { _legacy_hop_data } => (_legacy_hop_data.as_ref(), None),
- OnionPayload::Spontaneous(preimage) => (None, Some(preimage)),
+ let (payment_data, keysend_preimage, payment_context) = match &self.onion_payload {
+ OnionPayload::Invoice { _legacy_hop_data, payment_context } => {
+ (_legacy_hop_data.as_ref(), None, payment_context.as_ref())
+ },
+ OnionPayload::Spontaneous(preimage) => (None, Some(preimage), None),
};
write_tlv_fields!(writer, {
(0, self.prev_hop, required),
(6, self.cltv_expiry, required),
(8, keysend_preimage, option),
(10, self.counterparty_skimmed_fee_msat, option),
+ (11, payment_context, option),
});
Ok(())
}
(6, cltv_expiry, required),
(8, keysend_preimage, option),
(10, counterparty_skimmed_fee_msat, option),
+ (11, payment_context, option),
});
let payment_data: Option<msgs::FinalOnionHopData> = payment_data_opt;
let value = value_ser.0.unwrap();
}
total_msat = Some(payment_data.as_ref().unwrap().total_msat);
}
- OnionPayload::Invoice { _legacy_hop_data: payment_data }
+ OnionPayload::Invoice { _legacy_hop_data: payment_data, payment_context }
},
};
Ok(Self {
best_block.block_hash.write(writer)?;
}
+ let per_peer_state = self.per_peer_state.write().unwrap();
+
let mut serializable_peer_count: u64 = 0;
{
- let per_peer_state = self.per_peer_state.read().unwrap();
let mut number_of_funded_channels = 0;
for (_, peer_state_mutex) in per_peer_state.iter() {
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
decode_update_add_htlcs_opt = Some(decode_update_add_htlcs);
}
- let per_peer_state = self.per_peer_state.write().unwrap();
-
let pending_inbound_payments = self.pending_inbound_payments.lock().unwrap();
let claimable_payments = self.claimable_payments.lock().unwrap();
let pending_outbound_payments = self.pending_outbound_payments.pending_outbound_payments.lock().unwrap();
log_error!(logger, " client applications must ensure that ChannelMonitor data is always available and the latest to avoid funds loss!");
log_error!(logger, " Without the latest ChannelMonitor we cannot continue without risking funds.");
log_error!(logger, " Please ensure the chain::Watch API requirements are met and file a bug report at https://github.com/lightningdevkit/rust-lightning");
+ log_error!(logger, " Pending in-flight updates are: {:?}", chan_in_flight_updates);
return Err(DecodeError::InvalidValue);
}
}
return Err(DecodeError::InvalidValue);
}
let purpose = match &htlcs[0].onion_payload {
- OnionPayload::Invoice { _legacy_hop_data } => {
+ OnionPayload::Invoice { _legacy_hop_data, payment_context: _ } => {
if let Some(hop_data) = _legacy_hop_data {
- events::PaymentPurpose::InvoicePayment {
+ events::PaymentPurpose::Bolt11InvoicePayment {
payment_preimage: match pending_inbound_payments.get(&payment_hash) {
Some(inbound_payment) => inbound_payment.payment_preimage,
None => match inbound_payment::verify(payment_hash, &hop_data, 0, &expanded_inbound_key, &args.logger) {
assert_eq!(expected_recv_value, *amount_msat);
assert_eq!(expected_receiver_node_id, receiver_node_id.unwrap());
match purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
+ assert_eq!(&expected_payment_preimage, payment_preimage);
+ assert_eq!(expected_payment_secret, *payment_secret);
+ },
+ PaymentPurpose::Bolt12OfferPayment { payment_preimage, payment_secret, .. } => {
+ assert_eq!(&expected_payment_preimage, payment_preimage);
+ assert_eq!(expected_payment_secret, *payment_secret);
+ },
+ PaymentPurpose::Bolt12RefundPayment { payment_preimage, payment_secret, .. } => {
assert_eq!(&expected_payment_preimage, payment_preimage);
assert_eq!(expected_payment_secret, *payment_secret);
},
assert!(onion_fields.is_some());
assert_eq!(onion_fields.as_ref().unwrap().custom_tlvs, custom_tlvs);
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
+ assert_eq!(expected_preimage, *payment_preimage);
+ assert_eq!(our_payment_secret.unwrap(), *payment_secret);
+ assert_eq!(Some(*payment_secret), onion_fields.as_ref().unwrap().payment_secret);
+ },
+ PaymentPurpose::Bolt12OfferPayment { payment_preimage, payment_secret, .. } => {
+ assert_eq!(expected_preimage, *payment_preimage);
+ assert_eq!(our_payment_secret.unwrap(), *payment_secret);
+ assert_eq!(Some(*payment_secret), onion_fields.as_ref().unwrap().payment_secret);
+ },
+ PaymentPurpose::Bolt12RefundPayment { payment_preimage, payment_secret, .. } => {
assert_eq!(expected_preimage, *payment_preimage);
assert_eq!(our_payment_secret.unwrap(), *payment_secret);
assert_eq!(Some(*payment_secret), onion_fields.as_ref().unwrap().payment_secret);
let mut fwd_amt_msat = 0;
match claim_event[0] {
Event::PaymentClaimed {
- purpose: PaymentPurpose::SpontaneousPayment(preimage),
+ purpose: PaymentPurpose::SpontaneousPayment(preimage)
+ | PaymentPurpose::Bolt11InvoicePayment { payment_preimage: Some(preimage), .. }
+ | PaymentPurpose::Bolt12OfferPayment { payment_preimage: Some(preimage), .. }
+ | PaymentPurpose::Bolt12RefundPayment { payment_preimage: Some(preimage), .. },
amount_msat,
ref htlcs,
- .. }
- | Event::PaymentClaimed {
- purpose: PaymentPurpose::InvoicePayment { payment_preimage: Some(preimage), ..},
- ref htlcs,
- amount_msat,
..
} => {
assert_eq!(preimage, our_payment_preimage);
fwd_amt_msat = amount_msat;
},
Event::PaymentClaimed {
- purpose: PaymentPurpose::InvoicePayment { .. },
+ purpose: PaymentPurpose::Bolt11InvoicePayment { .. }
+ | PaymentPurpose::Bolt12OfferPayment { .. }
+ | PaymentPurpose::Bolt12RefundPayment { .. },
payment_hash,
amount_msat,
ref htlcs,
use crate::chain::channelmonitor;
use crate::chain::channelmonitor::{CLOSED_CHANNEL_UPDATE_ID, CLTV_CLAIM_BUFFER, LATENCY_GRACE_PERIOD_BLOCKS, ANTI_REORG_DELAY};
use crate::chain::transaction::OutPoint;
-use crate::sign::{ecdsa::EcdsaChannelSigner, EntropySource, SignerProvider};
+use crate::sign::{ecdsa::EcdsaChannelSigner, EntropySource, OutputSpender, SignerProvider};
use crate::events::{Event, MessageSendEvent, MessageSendEventsProvider, PathFailure, PaymentPurpose, ClosureReason, HTLCDestination, PaymentFailureReason};
use crate::ln::{ChannelId, PaymentPreimage, PaymentSecret, PaymentHash};
use crate::ln::channel::{commitment_tx_base_weight, COMMITMENT_TX_WEIGHT_PER_HTLC, CONCURRENT_INBOUND_HTLC_FEE_BUFFER, FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE, MIN_AFFORDABLE_HTLC_COUNT, get_holder_selected_channel_reserve_satoshis, OutboundV1Channel, InboundV1Channel, COINBASE_MATURITY, ChannelPhase};
assert_eq!(nodes[2].node.get_our_node_id(), receiver_node_id.unwrap());
assert_eq!(via_channel_id, Some(chan_2.2));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(our_payment_secret_21, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
assert_eq!(nodes[2].node.get_our_node_id(), receiver_node_id.unwrap());
assert_eq!(via_channel_id, Some(chan_2.2));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(our_payment_secret_22, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
assert_eq!(receiver_node_id.unwrap(), nodes[1].node.get_our_node_id());
assert_eq!(via_channel_id, Some(channel_id));
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(payment_secret_1, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
Event::PaymentClaimable { ref payment_hash, ref purpose, .. } => {
assert_eq!(payment_hash_2, *payment_hash);
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
assert_eq!(payment_secret_2, *payment_secret);
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
match events[0] {
Event::PaymentClaimable { ref purpose, .. } => {
match &purpose {
- PaymentPurpose::InvoicePayment { payment_preimage, .. } => {
+ PaymentPurpose::Bolt11InvoicePayment { payment_preimage, .. } => {
claim_payment(&nodes[0], &[&nodes[1]], payment_preimage.unwrap());
},
- _ => panic!("expected PaymentPurpose::InvoicePayment")
+ _ => panic!("expected PaymentPurpose::Bolt11InvoicePayment")
}
},
_ => panic!("Unexpected event"),
let dust_outbound_htlc_on_holder_tx: u64 = max_dust_htlc_exposure_msat / dust_outbound_htlc_on_holder_tx_msat;
	// Subtract 3 sats for multiplier and 2 sats for fixed limit to make sure we are 50% below the dust limit.
- // This is to make sure we fully use the dust limit. If we don't, we could end up with `dust_ibd_htlc_on_holder_tx` being 1
+ // This is to make sure we fully use the dust limit. If we don't, we could end up with `dust_ibd_htlc_on_holder_tx` being 1
// while `max_dust_htlc_exposure_msat` is not equal to `dust_outbound_htlc_on_holder_tx_msat`.
- let dust_inbound_htlc_on_holder_tx_msat: u64 = (dust_buffer_feerate * htlc_success_tx_weight(&channel_type_features) / 1000 + open_channel.common_fields.dust_limit_satoshis - if multiplier_dust_limit { 3 } else { 2 }) * 1000;
+ let dust_inbound_htlc_on_holder_tx_msat: u64 = (dust_buffer_feerate * htlc_success_tx_weight(&channel_type_features) / 1000 + open_channel.common_fields.dust_limit_satoshis - if multiplier_dust_limit { 3 } else { 2 }) * 1000;
let dust_inbound_htlc_on_holder_tx: u64 = max_dust_htlc_exposure_msat / dust_inbound_htlc_on_holder_tx_msat;
let dust_htlc_on_counterparty_tx: u64 = 4;
//! Further functional tests which test blockchain reorganizations.
-use crate::sign::{ecdsa::EcdsaChannelSigner, SpendableOutputDescriptor};
+use crate::sign::{ecdsa::EcdsaChannelSigner, OutputSpender, SpendableOutputDescriptor};
use crate::chain::channelmonitor::{ANTI_REORG_DELAY, LATENCY_GRACE_PERIOD_BLOCKS, Balance};
use crate::chain::transaction::OutPoint;
use crate::chain::chaininterface::{LowerBoundedFeeEstimator, compute_feerate_sat_per_1000_weight};
expect_payment_failed!(nodes[1], payment_hash_1, false);
}
+#[test]
+fn archive_fully_resolved_monitors() {
+	// Test that a fully resolved channel monitor can be archived: after its balances are
+	// empty, a first archive attempt records the height, and a second attempt after the
+	// required delay actually removes the monitor.
+	let chanmon_cfgs = create_chanmon_cfgs(2);
+	let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
+	let mut user_config = test_default_channel_config();
+	let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[Some(user_config), Some(user_config)]);
+	let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
+
+	let (_, _, chan_id, funding_tx) =
+		create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 1_000_000, 1_000_000);
+
+	// Cooperatively close the channel so the monitor ends up with no claimable balances.
+	nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
+	let node_0_shutdown = get_event_msg!(nodes[0], MessageSendEvent::SendShutdown, nodes[1].node.get_our_node_id());
+	nodes[1].node.handle_shutdown(&nodes[0].node.get_our_node_id(), &node_0_shutdown);
+	let node_1_shutdown = get_event_msg!(nodes[1], MessageSendEvent::SendShutdown, nodes[0].node.get_our_node_id());
+	nodes[0].node.handle_shutdown(&nodes[1].node.get_our_node_id(), &node_1_shutdown);
+
+	let node_0_closing_signed = get_event_msg!(nodes[0], MessageSendEvent::SendClosingSigned, nodes[1].node.get_our_node_id());
+	nodes[1].node.handle_closing_signed(&nodes[0].node.get_our_node_id(), &node_0_closing_signed);
+	let node_1_closing_signed = get_event_msg!(nodes[1], MessageSendEvent::SendClosingSigned, nodes[0].node.get_our_node_id());
+	nodes[0].node.handle_closing_signed(&nodes[1].node.get_our_node_id(), &node_1_closing_signed);
+	let (_, node_0_2nd_closing_signed) = get_closing_signed_broadcast!(nodes[0].node, nodes[1].node.get_our_node_id());
+	nodes[1].node.handle_closing_signed(&nodes[0].node.get_our_node_id(), &node_0_2nd_closing_signed.unwrap());
+	let (_, _) = get_closing_signed_broadcast!(nodes[1].node, nodes[0].node.get_our_node_id());
+
+	let shutdown_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
+
+	// Confirm the closing transaction on both nodes and bury it so the balances resolve.
+	mine_transaction(&nodes[0], &shutdown_tx[0]);
+	mine_transaction(&nodes[1], &shutdown_tx[0]);
+
+	connect_blocks(&nodes[0], 6);
+	connect_blocks(&nodes[1], 6);
+
+	check_closed_event!(nodes[0], 1, ClosureReason::LocallyInitiatedCooperativeClosure, [nodes[1].node.get_our_node_id()], 1000000);
+	check_closed_event!(nodes[1], 1, ClosureReason::CounterpartyInitiatedCooperativeClosure, [nodes[0].node.get_our_node_id()], 1000000);
+
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	// First archive should set balances_empty_height to current block height
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 1);
+	connect_blocks(&nodes[0], 4032);
+	// Second call, made 4032 blocks later, should archive the monitor
+	nodes[0].chain_monitor.chain_monitor.archive_fully_resolved_channel_monitors();
+	// Should have no monitors left
+	assert_eq!(nodes[0].chain_monitor.chain_monitor.list_monitors().len(), 0);
+	// Remove the corresponding outputs and transactions the chain source is
+	// watching. This is to make sure the `Drop` function assertions pass.
+	nodes.get_mut(0).unwrap().chain_source.remove_watched_txn_and_outputs(
+		OutPoint { txid: funding_tx.txid(), index: 0 },
+		funding_tx.output[0].script_pubkey.clone()
+	);
+}
+
fn do_chanmon_claim_value_coop_close(anchors: bool) {
// Tests `get_claimable_balances` returns the correct values across a simple cooperative claim.
// Specifically, this tests that the channel non-HTLC balances show up in
// Splicing
/// Handle an incoming `splice` message from the given peer.
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice(&self, their_node_id: &PublicKey, msg: &Splice);
/// Handle an incoming `splice_ack` message from the given peer.
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_ack(&self, their_node_id: &PublicKey, msg: &SpliceAck);
/// Handle an incoming `splice_locked` message from the given peer.
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_locked(&self, their_node_id: &PublicKey, msg: &SpliceLocked);
// Interactive channel construction
mod fuzzy_internal_msgs {
use bitcoin::secp256k1::PublicKey;
- use crate::blinded_path::payment::{PaymentConstraints, PaymentRelay};
+ use crate::blinded_path::payment::{PaymentConstraints, PaymentContext, PaymentRelay};
use crate::ln::{PaymentPreimage, PaymentSecret};
use crate::ln::features::BlindedHopFeatures;
use super::{FinalOnionHopData, TrampolineOnionPacket};
cltv_expiry_height: u32,
payment_secret: PaymentSecret,
payment_constraints: PaymentConstraints,
+ payment_context: PaymentContext,
intro_node_blinding_point: Option<PublicKey>,
keysend_preimage: Option<PaymentPreimage>,
custom_tlvs: Vec<(u64, Vec<u8>)>,
})
},
ChaChaPolyReadAdapter { readable: BlindedPaymentTlvs::Receive(ReceiveTlvs {
- payment_secret, payment_constraints
+ payment_secret, payment_constraints, payment_context
})} => {
if total_msat.unwrap_or(0) > MAX_VALUE_MSAT { return Err(DecodeError::InvalidValue) }
Ok(Self::BlindedReceive {
cltv_expiry_height: cltv_value.ok_or(DecodeError::InvalidValue)?,
payment_secret,
payment_constraints,
+ payment_context,
intro_node_blinding_point,
keysend_preimage,
custom_tlvs,
use bitcoin::network::constants::Network;
use core::time::Duration;
use crate::blinded_path::{BlindedPath, IntroductionNode};
+use crate::blinded_path::payment::{Bolt12OfferContext, Bolt12RefundContext, PaymentContext};
use crate::events::{Event, MessageSendEventsProvider, PaymentPurpose};
use crate::ln::channelmanager::{PaymentId, RecentPaymentDetails, Retry, self};
+use crate::ln::features::InvoiceRequestFeatures;
use crate::ln::functional_test_utils::*;
use crate::ln::msgs::{ChannelMessageHandler, Init, NodeAnnouncement, OnionMessage, OnionMessageHandler, RoutingMessageHandler, SocketAddress, UnsignedGossipMessage, UnsignedNodeAnnouncement};
use crate::offers::invoice::Bolt12Invoice;
use crate::offers::invoice_error::InvoiceError;
-use crate::offers::invoice_request::InvoiceRequest;
+use crate::offers::invoice_request::{InvoiceRequest, InvoiceRequestFields};
use crate::offers::parse::Bolt12SemanticError;
use crate::onion_message::messenger::PeeledOnion;
use crate::onion_message::offers::OffersMessage;
do_pass_along_path(args);
}
-fn claim_bolt12_payment<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, path: &[&Node<'a, 'b, 'c>]) {
+// Claims a BOLT 12 payment at the end of `path`, first asserting that the recipient's
+// `PaymentClaimable` event carries a preimage and the expected offer/refund `PaymentContext`.
+fn claim_bolt12_payment<'a, 'b, 'c>(
+	node: &Node<'a, 'b, 'c>, path: &[&Node<'a, 'b, 'c>], expected_payment_context: PaymentContext
+) {
	let recipient = &path[path.len() - 1];
-	match get_event!(recipient, Event::PaymentClaimable) {
-		Event::PaymentClaimable {
-			purpose: PaymentPurpose::InvoicePayment {
-				payment_preimage: Some(payment_preimage), ..
-			}, ..
-		} => claim_payment(node, path, payment_preimage),
-		_ => panic!(),
+	let payment_purpose = match get_event!(recipient, Event::PaymentClaimable) {
+		Event::PaymentClaimable { purpose, .. } => purpose,
+		_ => panic!("No Event::PaymentClaimable"),
+	};
+	let payment_preimage = match payment_purpose.preimage() {
+		Some(preimage) => preimage,
+		None => panic!("No preimage in Event::PaymentClaimable"),
	};
+	// The purpose's embedded context must match what the caller expects for this flow.
+	match payment_purpose {
+		PaymentPurpose::Bolt12OfferPayment { payment_context, .. } => {
+			assert_eq!(PaymentContext::Bolt12Offer(payment_context), expected_payment_context);
+		},
+		PaymentPurpose::Bolt12RefundPayment { payment_context, .. } => {
+			assert_eq!(PaymentContext::Bolt12Refund(payment_context), expected_payment_context);
+		},
+		_ => panic!("Unexpected payment purpose: {:?}", payment_purpose),
+	}
+	claim_payment(node, path, payment_preimage);
}
fn extract_invoice_request<'a, 'b, 'c>(
disconnect_peers(david, &[bob, &nodes[4], &nodes[5]]);
let offer = alice.node
- .create_offer_builder("coffee".to_string()).unwrap()
+ .create_offer_builder("coffee".to_string())
+ .unwrap()
.amount_msats(10_000_000)
.build().unwrap();
assert_ne!(offer.signing_pubkey(), alice_id);
alice.onion_messenger.handle_onion_message(&bob_id, &onion_message);
let (invoice_request, reply_path) = extract_invoice_request(alice, &onion_message);
+ let payment_context = PaymentContext::Bolt12Offer(Bolt12OfferContext {
+ offer_id: offer.id(),
+ invoice_request: InvoiceRequestFields {
+ payer_id: invoice_request.payer_id(),
+ amount_msats: None,
+ features: InvoiceRequestFeatures::empty(),
+ quantity: None,
+ payer_note_truncated: None,
+ },
+ });
assert_eq!(invoice_request.amount_msats(), None);
assert_ne!(invoice_request.payer_id(), david_id);
assert_eq!(reply_path.unwrap().introduction_node, IntroductionNode::NodeId(charlie_id));
route_bolt12_payment(david, &[charlie, bob, alice], &invoice);
expect_recent_payment!(david, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(david, &[charlie, bob, alice]);
+ claim_bolt12_payment(david, &[charlie, bob, alice], payment_context);
expect_recent_payment!(david, RecentPaymentDetails::Fulfilled, payment_id);
}
}
expect_recent_payment!(david, RecentPaymentDetails::AwaitingInvoice, payment_id);
- alice.node.request_refund_payment(&refund).unwrap();
+ let payment_context = PaymentContext::Bolt12Refund(Bolt12RefundContext {});
+ let expected_invoice = alice.node.request_refund_payment(&refund).unwrap();
connect_peers(alice, charlie);
david.onion_messenger.handle_onion_message(&charlie_id, &onion_message);
let invoice = extract_invoice(david, &onion_message);
+ assert_eq!(invoice, expected_invoice);
+
assert_eq!(invoice.amount_msats(), 10_000_000);
assert_ne!(invoice.signing_pubkey(), alice_id);
assert!(!invoice.payment_paths().is_empty());
route_bolt12_payment(david, &[charlie, bob, alice], &invoice);
expect_recent_payment!(david, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(david, &[charlie, bob, alice]);
+ claim_bolt12_payment(david, &[charlie, bob, alice], payment_context);
expect_recent_payment!(david, RecentPaymentDetails::Fulfilled, payment_id);
}
alice.onion_messenger.handle_onion_message(&bob_id, &onion_message);
let (invoice_request, reply_path) = extract_invoice_request(alice, &onion_message);
+ let payment_context = PaymentContext::Bolt12Offer(Bolt12OfferContext {
+ offer_id: offer.id(),
+ invoice_request: InvoiceRequestFields {
+ payer_id: invoice_request.payer_id(),
+ amount_msats: None,
+ features: InvoiceRequestFeatures::empty(),
+ quantity: None,
+ payer_note_truncated: None,
+ },
+ });
assert_eq!(invoice_request.amount_msats(), None);
assert_ne!(invoice_request.payer_id(), bob_id);
assert_eq!(reply_path.unwrap().introduction_node, IntroductionNode::NodeId(bob_id));
route_bolt12_payment(bob, &[alice], &invoice);
expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(bob, &[alice]);
+ claim_bolt12_payment(bob, &[alice], payment_context);
expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id);
}
}
expect_recent_payment!(bob, RecentPaymentDetails::AwaitingInvoice, payment_id);
- alice.node.request_refund_payment(&refund).unwrap();
+ let payment_context = PaymentContext::Bolt12Refund(Bolt12RefundContext {});
+ let expected_invoice = alice.node.request_refund_payment(&refund).unwrap();
let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap();
bob.onion_messenger.handle_onion_message(&alice_id, &onion_message);
let invoice = extract_invoice(bob, &onion_message);
+ assert_eq!(invoice, expected_invoice);
+
assert_eq!(invoice.amount_msats(), 10_000_000);
assert_ne!(invoice.signing_pubkey(), alice_id);
assert!(!invoice.payment_paths().is_empty());
route_bolt12_payment(bob, &[alice], &invoice);
expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(bob, &[alice]);
+ claim_bolt12_payment(bob, &[alice], payment_context);
expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id);
}
let onion_message = bob.onion_messenger.next_onion_message_for_peer(alice_id).unwrap();
alice.onion_messenger.handle_onion_message(&bob_id, &onion_message);
+ let (invoice_request, _) = extract_invoice_request(alice, &onion_message);
+ let payment_context = PaymentContext::Bolt12Offer(Bolt12OfferContext {
+ offer_id: offer.id(),
+ invoice_request: InvoiceRequestFields {
+ payer_id: invoice_request.payer_id(),
+ amount_msats: None,
+ features: InvoiceRequestFeatures::empty(),
+ quantity: None,
+ payer_note_truncated: None,
+ },
+ });
+
let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap();
bob.onion_messenger.handle_onion_message(&alice_id, &onion_message);
route_bolt12_payment(bob, &[alice], &invoice);
expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(bob, &[alice]);
+ claim_bolt12_payment(bob, &[alice], payment_context);
expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id);
}
assert!(refund.paths().is_empty());
expect_recent_payment!(bob, RecentPaymentDetails::AwaitingInvoice, payment_id);
- alice.node.request_refund_payment(&refund).unwrap();
+ let payment_context = PaymentContext::Bolt12Refund(Bolt12RefundContext {});
+ let expected_invoice = alice.node.request_refund_payment(&refund).unwrap();
let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap();
bob.onion_messenger.handle_onion_message(&alice_id, &onion_message);
let invoice = extract_invoice(bob, &onion_message);
+ assert_eq!(invoice, expected_invoice);
+
route_bolt12_payment(bob, &[alice], &invoice);
expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(bob, &[alice]);
+ claim_bolt12_payment(bob, &[alice], payment_context);
expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id);
}
david.onion_messenger.handle_onion_message(&charlie_id, &onion_message);
// David pays the first invoice
+ let payment_context = PaymentContext::Bolt12Refund(Bolt12RefundContext {});
let invoice1 = extract_invoice(david, &onion_message);
route_bolt12_payment(david, &[charlie, bob, alice], &invoice1);
expect_recent_payment!(david, RecentPaymentDetails::Pending, payment_id);
- claim_bolt12_payment(david, &[charlie, bob, alice]);
+ claim_bolt12_payment(david, &[charlie, bob, alice], payment_context);
expect_recent_payment!(david, RecentPaymentDetails::Fulfilled, payment_id);
disconnect_peers(alice, &[charlie]);
) -> Result<PendingHTLCInfo, InboundHTLCErr> {
let (
payment_data, keysend_preimage, custom_tlvs, onion_amt_msat, onion_cltv_expiry,
- payment_metadata, requires_blinded_error
+ payment_metadata, payment_context, requires_blinded_error
) = match hop_data {
msgs::InboundOnionPayload::Receive {
payment_data, keysend_preimage, custom_tlvs, sender_intended_htlc_amt_msat,
cltv_expiry_height, payment_metadata, ..
} =>
(payment_data, keysend_preimage, custom_tlvs, sender_intended_htlc_amt_msat,
- cltv_expiry_height, payment_metadata, false),
+ cltv_expiry_height, payment_metadata, None, false),
msgs::InboundOnionPayload::BlindedReceive {
sender_intended_htlc_amt_msat, total_msat, cltv_expiry_height, payment_secret,
- intro_node_blinding_point, payment_constraints, keysend_preimage, custom_tlvs
+ intro_node_blinding_point, payment_constraints, payment_context, keysend_preimage,
+ custom_tlvs
} => {
check_blinded_payment_constraints(
sender_intended_htlc_amt_msat, cltv_expiry, &payment_constraints
})?;
let payment_data = msgs::FinalOnionHopData { payment_secret, total_msat };
(Some(payment_data), keysend_preimage, custom_tlvs,
- sender_intended_htlc_amt_msat, cltv_expiry_height, None,
+ sender_intended_htlc_amt_msat, cltv_expiry_height, None, Some(payment_context),
intro_node_blinding_point.is_none())
}
msgs::InboundOnionPayload::Forward { .. } => {
PendingHTLCRouting::Receive {
payment_data: data,
payment_metadata,
+ payment_context,
incoming_cltv_expiry: onion_cltv_expiry,
phantom_shared_secret,
custom_tlvs,
assert_eq!(skimmed_fee_msat * num_mpp_parts as u64, counterparty_skimmed_fee_msat);
assert_eq!(nodes[2].node.get_our_node_id(), receiver_node_id.unwrap());
match purpose {
- crate::events::PaymentPurpose::InvoicePayment { payment_preimage: ev_payment_preimage,
- payment_secret: ev_payment_secret, .. } =>
- {
+ crate::events::PaymentPurpose::Bolt11InvoicePayment {
+ payment_preimage: ev_payment_preimage,
+ payment_secret: ev_payment_secret,
+ ..
+ } => {
assert_eq!(payment_preimage, ev_payment_preimage.unwrap());
assert_eq!(payment_secret, *ev_payment_secret);
},
fn handle_stfu(&self, their_node_id: &PublicKey, msg: &msgs::Stfu) {
ErroringMessageHandler::push_error(&self, their_node_id, msg.channel_id);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice(&self, their_node_id: &PublicKey, msg: &msgs::Splice) {
ErroringMessageHandler::push_error(&self, their_node_id, msg.channel_id);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_ack(&self, their_node_id: &PublicKey, msg: &msgs::SpliceAck) {
ErroringMessageHandler::push_error(&self, their_node_id, msg.channel_id);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_locked(&self, their_node_id: &PublicKey, msg: &msgs::SpliceLocked) {
ErroringMessageHandler::push_error(&self, their_node_id, msg.channel_id);
}
self.message_handler.chan_handler.handle_stfu(&their_node_id, &msg);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
// Splicing messages:
wire::Message::Splice(msg) => {
self.message_handler.chan_handler.handle_splice(&their_node_id, &msg);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
wire::Message::SpliceAck(msg) => {
self.message_handler.chan_handler.handle_splice_ack(&their_node_id, &msg);
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
wire::Message::SpliceLocked(msg) => {
self.message_handler.chan_handler.handle_splice_locked(&their_node_id, &msg);
}
use crate::chain::Confirm;
use crate::events::{Event, MessageSendEventsProvider, ClosureReason, HTLCDestination, MessageSendEvent};
use crate::ln::msgs::{ChannelMessageHandler, Init};
+use crate::sign::OutputSpender;
use crate::util::test_utils;
use crate::util::ser::Writeable;
use crate::util::string::UntrustedString;
FundingCreated(msgs::FundingCreated),
FundingSigned(msgs::FundingSigned),
Stfu(msgs::Stfu),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
Splice(msgs::Splice),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
SpliceAck(msgs::SpliceAck),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
SpliceLocked(msgs::SpliceLocked),
TxAddInput(msgs::TxAddInput),
TxAddOutput(msgs::TxAddOutput),
&Message::FundingCreated(ref msg) => msg.write(writer),
&Message::FundingSigned(ref msg) => msg.write(writer),
&Message::Stfu(ref msg) => msg.write(writer),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
&Message::Splice(ref msg) => msg.write(writer),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
&Message::SpliceAck(ref msg) => msg.write(writer),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
&Message::SpliceLocked(ref msg) => msg.write(writer),
&Message::TxAddInput(ref msg) => msg.write(writer),
&Message::TxAddOutput(ref msg) => msg.write(writer),
&Message::FundingCreated(ref msg) => msg.type_id(),
&Message::FundingSigned(ref msg) => msg.type_id(),
&Message::Stfu(ref msg) => msg.type_id(),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
&Message::Splice(ref msg) => msg.type_id(),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
&Message::SpliceAck(ref msg) => msg.type_id(),
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
&Message::SpliceLocked(ref msg) => msg.type_id(),
&Message::TxAddInput(ref msg) => msg.type_id(),
&Message::TxAddOutput(ref msg) => msg.type_id(),
msgs::FundingSigned::TYPE => {
Ok(Message::FundingSigned(Readable::read(buffer)?))
},
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
msgs::Splice::TYPE => {
Ok(Message::Splice(Readable::read(buffer)?))
},
msgs::Stfu::TYPE => {
Ok(Message::Stfu(Readable::read(buffer)?))
},
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
msgs::SpliceAck::TYPE => {
Ok(Message::SpliceAck(Readable::read(buffer)?))
},
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
msgs::SpliceLocked::TYPE => {
Ok(Message::SpliceLocked(Readable::read(buffer)?))
},
use bitcoin::blockdata::constants::ChainHash;
use bitcoin::hash_types::{WPubkeyHash, WScriptHash};
-use bitcoin::hashes::Hash;
use bitcoin::network::constants::Network;
use bitcoin::secp256k1::{KeyPair, PublicKey, Secp256k1, self};
use bitcoin::secp256k1::schnorr::Signature;
use bitcoin::address::{Address, Payload, WitnessProgram, WitnessVersion};
use bitcoin::key::TweakedPublicKey;
use core::time::Duration;
+use core::hash::{Hash, Hasher};
use crate::io;
use crate::blinded_path::BlindedPath;
use crate::ln::PaymentHash;
/// Successive calls to this method will add another address. Caller is responsible for not
/// adding duplicate addresses and only calling if capable of receiving to P2WSH addresses.
pub fn fallback_v0_p2wsh($($self_mut)* $self: $self_type, script_hash: &WScriptHash) -> $return_type {
+ use bitcoin::hashes::Hash;
let address = FallbackAddress {
version: WitnessVersion::V0.to_num(),
program: Vec::from(script_hash.to_byte_array()),
/// Successive calls to this method will add another address. Caller is responsible for not
/// adding duplicate addresses and only calling if capable of receiving to P2WPKH addresses.
pub fn fallback_v0_p2wpkh($($self_mut)* $self: $self_type, pubkey_hash: &WPubkeyHash) -> $return_type {
+ use bitcoin::hashes::Hash;
let address = FallbackAddress {
version: WitnessVersion::V0.to_num(),
program: Vec::from(pubkey_hash.to_byte_array()),
let mut bytes = Vec::new();
unsigned_tlv_stream.write(&mut bytes).unwrap();
- let tagged_hash = TaggedHash::new(SIGNATURE_TAG, &bytes);
+ let tagged_hash = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &bytes);
Self { bytes, contents, tagged_hash }
}
/// [`Refund`]: crate::offers::refund::Refund
/// [`InvoiceRequest`]: crate::offers::invoice_request::InvoiceRequest
#[derive(Clone, Debug)]
-#[cfg_attr(test, derive(PartialEq))]
pub struct Bolt12Invoice {
bytes: Vec<u8>,
contents: InvoiceContents,
}
}
+// Equality and hashing of `Bolt12Invoice` are defined over the raw serialized bytes, which
+// fully determine the invoice (including its signature).
+impl PartialEq for Bolt12Invoice {
+	fn eq(&self, other: &Self) -> bool {
+		self.bytes.eq(&other.bytes)
+	}
+}
+
+impl Eq for Bolt12Invoice {}
+
+impl Hash for Bolt12Invoice {
+	fn hash<H: Hasher>(&self, state: &mut H) {
+		self.bytes.hash(state);
+	}
+}
+
impl InvoiceContents {
/// Whether the original offer or refund has expired.
#[cfg(feature = "std")]
(payer_tlv_stream, offer_tlv_stream, invoice_request_tlv_stream, invoice_tlv_stream)
)?;
- let tagged_hash = TaggedHash::new(SIGNATURE_TAG, &bytes);
+ let tagged_hash = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &bytes);
Ok(UnsignedBolt12Invoice { bytes, contents, tagged_hash })
}
None => return Err(Bolt12ParseError::InvalidSemantics(Bolt12SemanticError::MissingSignature)),
Some(signature) => signature,
};
- let tagged_hash = TaggedHash::new(SIGNATURE_TAG, &bytes);
+ let tagged_hash = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &bytes);
let pubkey = contents.fields().signing_pubkey;
merkle::verify_signature(&signature, &tagged_hash, pubkey)?;
assert_eq!(invoice.invoice_features(), &Bolt12InvoiceFeatures::empty());
assert_eq!(invoice.signing_pubkey(), recipient_pubkey());
- let message = TaggedHash::new(SIGNATURE_TAG, &invoice.bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice.bytes);
assert!(merkle::verify_signature(&invoice.signature, &message, recipient_pubkey()).is_ok());
let digest = Message::from_slice(&invoice.signable_hash()).unwrap();
assert_eq!(invoice.invoice_features(), &Bolt12InvoiceFeatures::empty());
assert_eq!(invoice.signing_pubkey(), recipient_pubkey());
- let message = TaggedHash::new(SIGNATURE_TAG, &invoice.bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice.bytes);
assert!(merkle::verify_signature(&invoice.signature, &message, recipient_pubkey()).is_ok());
assert_eq!(
use crate::io;
use crate::ln::msgs::DecodeError;
+use crate::offers::merkle::SignError;
use crate::offers::parse::Bolt12SemanticError;
use crate::util::ser::{HighZeroBytesDroppedBigSize, Readable, WithoutLength, Writeable, Writer};
use crate::util::string::UntrustedString;
}
}
+// Converts a signing failure into an `InvoiceError` suitable for sending back to the
+// counterparty. Only a short, fixed message is produced; the inner verification error's
+// details are intentionally not included.
+impl From<SignError> for InvoiceError {
+	fn from(error: SignError) -> Self {
+		let message = match error {
+			SignError::Signing => "Failed signing invoice",
+			SignError::Verification(_) => "Failed invoice signature verification",
+		};
+		InvoiceError {
+			erroneous_field: None,
+			message: UntrustedString(message.to_string()),
+		}
+	}
+}
+
#[cfg(test)]
mod tests {
use super::{ErroneousField, InvoiceError};
use crate::ln::msgs::DecodeError;
use crate::offers::invoice::BlindedPayInfo;
use crate::offers::merkle::{SignError, SignFn, SignatureTlvStream, SignatureTlvStreamRef, TaggedHash, self};
-use crate::offers::offer::{Offer, OfferContents, OfferTlvStream, OfferTlvStreamRef};
+use crate::offers::offer::{Offer, OfferContents, OfferId, OfferTlvStream, OfferTlvStreamRef};
use crate::offers::parse::{Bolt12ParseError, ParsedMessage, Bolt12SemanticError};
use crate::offers::payer::{PayerContents, PayerTlvStream, PayerTlvStreamRef};
use crate::offers::signer::{Metadata, MetadataMaterial};
-use crate::util::ser::{HighZeroBytesDroppedBigSize, SeekReadable, WithoutLength, Writeable, Writer};
-use crate::util::string::PrintableString;
+use crate::util::ser::{HighZeroBytesDroppedBigSize, Readable, SeekReadable, WithoutLength, Writeable, Writer};
+use crate::util::string::{PrintableString, UntrustedString};
#[cfg(not(c_bindings))]
use {
let mut bytes = Vec::new();
unsigned_tlv_stream.write(&mut bytes).unwrap();
- let tagged_hash = TaggedHash::new(SIGNATURE_TAG, &bytes);
+ let tagged_hash = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &bytes);
Self { bytes, contents, tagged_hash }
}
/// ways to respond depending on whether the signing keys were derived.
#[derive(Clone, Debug)]
pub struct VerifiedInvoiceRequest {
+ /// The identifier of the [`Offer`] for which the [`InvoiceRequest`] was made.
+ pub offer_id: OfferId,
+
/// The verified request.
inner: InvoiceRequest,
#[cfg(c_bindings)]
secp_ctx: &Secp256k1<secp256k1::All>,
) -> Result<VerifiedInvoiceRequest, ()> {
- let keys = $self.contents.inner.offer.verify(&$self.bytes, key, secp_ctx)?;
+ let (offer_id, keys) = $self.contents.inner.offer.verify(&$self.bytes, key, secp_ctx)?;
Ok(VerifiedInvoiceRequest {
+ offer_id,
#[cfg(not(c_bindings))]
inner: $self,
#[cfg(c_bindings)]
invoice_request_respond_with_derived_signing_pubkey_methods!(self, self.inner, InvoiceBuilder<DerivedSigningPubkey>);
#[cfg(c_bindings)]
invoice_request_respond_with_derived_signing_pubkey_methods!(self, self.inner, InvoiceWithDerivedSigningPubkeyBuilder);
+
+	/// Extracts this request's fields for inclusion in a `PaymentContext::Bolt12Offer`.
+	pub(crate) fn fields(&self) -> InvoiceRequestFields {
+		// Destructure exhaustively so adding a field to the contents forces a review here.
+		let InvoiceRequestContents {
+			payer_id,
+			inner: InvoiceRequestContentsWithoutPayerId {
+				payer: _, offer: _, chain: _, amount_msats, features, quantity, payer_note
+			},
+		} = &self.inner.contents;
+
+		InvoiceRequestFields {
+			payer_id: *payer_id,
+			amount_msats: *amount_msats,
+			features: features.clone(),
+			quantity: *quantity,
+			// NOTE(review): `String::truncate` panics if `PAYER_NOTE_LIMIT` does not fall on a
+			// char boundary — confirm payer notes are guaranteed ASCII here, or truncate on a
+			// char boundary instead.
+			payer_note_truncated: payer_note.clone()
+				.map(|mut s| { s.truncate(PAYER_NOTE_LIMIT); UntrustedString(s) }),
+		}
+	}
}
impl InvoiceRequestContents {
(payer_tlv_stream, offer_tlv_stream, invoice_request_tlv_stream)
)?;
- let tagged_hash = TaggedHash::new(SIGNATURE_TAG, &bytes);
+ let tagged_hash = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &bytes);
Ok(UnsignedInvoiceRequest { bytes, contents, tagged_hash })
}
None => return Err(Bolt12ParseError::InvalidSemantics(Bolt12SemanticError::MissingSignature)),
Some(signature) => signature,
};
- let message = TaggedHash::new(SIGNATURE_TAG, &bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &bytes);
merkle::verify_signature(&signature, &message, contents.payer_id)?;
Ok(InvoiceRequest { bytes, contents, signature })
}
}
+/// Fields sent in an [`InvoiceRequest`] message to include in [`PaymentContext::Bolt12Offer`].
+///
+/// [`PaymentContext::Bolt12Offer`]: crate::blinded_path::payment::PaymentContext::Bolt12Offer
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct InvoiceRequestFields {
+	/// A possibly transient pubkey used to sign the invoice request.
+	pub payer_id: PublicKey,
+
+	/// The amount to pay in msats (i.e., the minimum lightning-payable unit for [`chain`]), which
+	/// must be greater than or equal to [`Offer::amount`], converted if necessary.
+	///
+	/// [`chain`]: InvoiceRequest::chain
+	pub amount_msats: Option<u64>,
+
+	/// Features pertaining to requesting an invoice.
+	pub features: InvoiceRequestFeatures,
+
+	/// The quantity of the offer's item conforming to [`Offer::is_valid_quantity`].
+	pub quantity: Option<u64>,
+
+	/// A payer-provided note which will be seen by the recipient and reflected back in the invoice
+	/// response. Truncated to [`PAYER_NOTE_LIMIT`] characters.
+	pub payer_note_truncated: Option<UntrustedString>,
+}
+
+/// The maximum number of characters included in [`InvoiceRequestFields::payer_note_truncated`].
+// NOTE(review): truncation is applied with `String::truncate`, which measures bytes rather than
+// characters — confirm the documented "characters" wording matches the intended semantics.
+pub const PAYER_NOTE_LIMIT: usize = 512;
+
+// Serialized as a length-prefixed TLV stream so the fields can round-trip through a blinded
+// path's payment context.
+impl Writeable for InvoiceRequestFields {
+	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
+		write_tlv_fields!(writer, {
+			(0, self.payer_id, required),
+			(2, self.amount_msats.map(|v| HighZeroBytesDroppedBigSize(v)), option),
+			(4, WithoutLength(&self.features), required),
+			(6, self.quantity.map(|v| HighZeroBytesDroppedBigSize(v)), option),
+			(8, self.payer_note_truncated.as_ref().map(|s| WithoutLength(&s.0)), option),
+		});
+		Ok(())
+	}
+}
+
+impl Readable for InvoiceRequestFields {
+	fn read<R: io::Read>(reader: &mut R) -> Result<Self, DecodeError> {
+		_init_and_read_len_prefixed_tlv_fields!(reader, {
+			(0, payer_id, required),
+			(2, amount_msats, (option, encoding: (u64, HighZeroBytesDroppedBigSize))),
+			(4, features, (option, encoding: (InvoiceRequestFeatures, WithoutLength))),
+			(6, quantity, (option, encoding: (u64, HighZeroBytesDroppedBigSize))),
+			(8, payer_note_truncated, (option, encoding: (String, WithoutLength))),
+		});
+		// Features are written unconditionally but read as optional; default to empty when the
+		// TLV record is absent.
+		let features = features.unwrap_or(InvoiceRequestFeatures::empty());
+
+		Ok(InvoiceRequestFields {
+			payer_id: payer_id.0.unwrap(), amount_msats, features, quantity,
+			payer_note_truncated: payer_note_truncated.map(|s| UntrustedString(s)),
+		})
+	}
+}
+
#[cfg(test)]
mod tests {
- use super::{InvoiceRequest, InvoiceRequestTlvStreamRef, SIGNATURE_TAG, UnsignedInvoiceRequest};
+ use super::{InvoiceRequest, InvoiceRequestFields, InvoiceRequestTlvStreamRef, PAYER_NOTE_LIMIT, SIGNATURE_TAG, UnsignedInvoiceRequest};
use bitcoin::blockdata::constants::ChainHash;
use bitcoin::network::constants::Network;
use crate::offers::parse::{Bolt12ParseError, Bolt12SemanticError};
use crate::offers::payer::PayerTlvStreamRef;
use crate::offers::test_utils::*;
- use crate::util::ser::{BigSize, Writeable};
- use crate::util::string::PrintableString;
+ use crate::util::ser::{BigSize, Readable, Writeable};
+ use crate::util::string::{PrintableString, UntrustedString};
#[test]
fn builds_invoice_request_with_defaults() {
assert_eq!(invoice_request.payer_id(), payer_pubkey());
assert_eq!(invoice_request.payer_note(), None);
- let message = TaggedHash::new(SIGNATURE_TAG, &invoice_request.bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_request.bytes);
assert!(merkle::verify_signature(&invoice_request.signature, &message, payer_pubkey()).is_ok());
assert_eq!(
let mut bytes = Vec::new();
tlv_stream.write(&mut bytes).unwrap();
- let message = TaggedHash::new(INVOICE_SIGNATURE_TAG, &bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(INVOICE_SIGNATURE_TAG, &bytes);
let signature = merkle::sign_message(recipient_sign, &message, recipient_pubkey()).unwrap();
signature_tlv_stream.signature = Some(&signature);
let mut bytes = Vec::new();
tlv_stream.write(&mut bytes).unwrap();
- let message = TaggedHash::new(INVOICE_SIGNATURE_TAG, &bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(INVOICE_SIGNATURE_TAG, &bytes);
let signature = merkle::sign_message(recipient_sign, &message, recipient_pubkey()).unwrap();
signature_tlv_stream.signature = Some(&signature);
let mut bytes = Vec::new();
tlv_stream.write(&mut bytes).unwrap();
- let message = TaggedHash::new(INVOICE_SIGNATURE_TAG, &bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(INVOICE_SIGNATURE_TAG, &bytes);
let signature = merkle::sign_message(recipient_sign, &message, recipient_pubkey()).unwrap();
signature_tlv_stream.signature = Some(&signature);
let mut bytes = Vec::new();
tlv_stream.write(&mut bytes).unwrap();
- let message = TaggedHash::new(INVOICE_SIGNATURE_TAG, &bytes);
+ let message = TaggedHash::from_valid_tlv_stream_bytes(INVOICE_SIGNATURE_TAG, &bytes);
let signature = merkle::sign_message(recipient_sign, &message, recipient_pubkey()).unwrap();
signature_tlv_stream.signature = Some(&signature);
Err(e) => assert_eq!(e, Bolt12ParseError::Decode(DecodeError::InvalidValue)),
}
}
+
+	#[test]
+	fn copies_verified_invoice_request_fields() {
+		// Checks that verifying an invoice request copies its fields into
+		// `InvoiceRequestFields` (truncating the payer note to `PAYER_NOTE_LIMIT`),
+		// that the verified request's `offer_id` matches the originating offer's id,
+		// and that the fields survive a serialization round-trip.
+		let desc = "foo".to_string();
+		let node_id = recipient_pubkey();
+		let expanded_key = ExpandedKey::new(&KeyMaterial([42; 32]));
+		let entropy = FixedEntropy {};
+		let secp_ctx = Secp256k1::new();
+
+		#[cfg(c_bindings)]
+		use crate::offers::offer::OfferWithDerivedMetadataBuilder as OfferBuilder;
+		let offer = OfferBuilder
+			::deriving_signing_pubkey(desc, node_id, &expanded_key, &entropy, &secp_ctx)
+			.chain(Network::Testnet)
+			.amount_msats(1000)
+			.supported_quantity(Quantity::Unbounded)
+			.build().unwrap();
+		assert_eq!(offer.signing_pubkey(), node_id);
+
+		// Request with a note twice the limit so truncation is exercised below.
+		let invoice_request = offer.request_invoice(vec![1; 32], payer_pubkey()).unwrap()
+			.chain(Network::Testnet).unwrap()
+			.amount_msats(1001).unwrap()
+			.quantity(1).unwrap()
+			.payer_note("0".repeat(PAYER_NOTE_LIMIT * 2))
+			.build().unwrap()
+			.sign(payer_sign).unwrap();
+		match invoice_request.verify(&expanded_key, &secp_ctx) {
+			Ok(invoice_request) => {
+				let fields = invoice_request.fields();
+				assert_eq!(invoice_request.offer_id, offer.id());
+				assert_eq!(
+					fields,
+					InvoiceRequestFields {
+						payer_id: payer_pubkey(),
+						amount_msats: Some(1001),
+						features: InvoiceRequestFeatures::empty(),
+						quantity: Some(1),
+						payer_note_truncated: Some(UntrustedString("0".repeat(PAYER_NOTE_LIMIT))),
+					}
+				);
+
+				let mut buffer = Vec::new();
+				fields.write(&mut buffer).unwrap();
+
+				let deserialized_fields: InvoiceRequestFields =
+					Readable::read(&mut buffer.as_slice()).unwrap();
+				assert_eq!(deserialized_fields, fields);
+			},
+			Err(_) => panic!("unexpected error"),
+		}
+	}
}
}
impl TaggedHash {
+	/// Creates a tagged hash with the given parameters.
+	///
+	/// Convenience wrapper around [`Self::from_tlv_stream`] which first parses `bytes` as a TLV
+	/// stream.
+	///
+	/// Panics if `bytes` is not a well-formed TLV stream containing at least one TLV record.
+	pub(super) fn from_valid_tlv_stream_bytes(tag: &'static str, bytes: &[u8]) -> Self {
+		let tlv_stream = TlvStream::new(bytes);
+		Self::from_tlv_stream(tag, tlv_stream)
+	}
+
/// Creates a tagged hash with the given parameters.
///
/// Panics if `tlv_stream` is not a well-formed TLV stream containing at least one TLV record.
- pub(super) fn new(tag: &'static str, tlv_stream: &[u8]) -> Self {
+ pub(super) fn from_tlv_stream<'a, I: core::iter::Iterator<Item = TlvRecord<'a>>>(
+ tag: &'static str, tlv_stream: I
+ ) -> Self {
let tag_hash = sha256::Hash::hash(tag.as_bytes());
let merkle_root = root_hash(tlv_stream);
let digest = Message::from_slice(tagged_hash(tag_hash, merkle_root).as_byte_array()).unwrap();
pub fn merkle_root(&self) -> sha256::Hash {
self.merkle_root
}
+
+	/// Returns the underlying digest bytes, e.g., for use in constructing an identifier.
+	pub(super) fn to_bytes(&self) -> [u8; 32] {
+		*self.digest.as_ref()
+	}
}
impl AsRef<TaggedHash> for TaggedHash {
/// Computes a merkle root hash for the given data, which must be a well-formed TLV stream
/// containing at least one TLV record.
-fn root_hash(data: &[u8]) -> sha256::Hash {
+fn root_hash<'a, I: core::iter::Iterator<Item = TlvRecord<'a>>>(tlv_stream: I) -> sha256::Hash {
+ let mut tlv_stream = tlv_stream.peekable();
let nonce_tag = tagged_hash_engine(sha256::Hash::from_engine({
- let first_tlv_record = TlvStream::new(&data[..]).next().unwrap();
+ let first_tlv_record = tlv_stream.peek().unwrap();
let mut engine = sha256::Hash::engine();
engine.input("LnNonce".as_bytes());
engine.input(first_tlv_record.record_bytes);
let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes()));
let mut leaves = Vec::new();
- let tlv_stream = TlvStream::new(&data[..]);
- for record in tlv_stream.skip_signatures() {
+ for record in TlvStream::skip_signatures(tlv_stream) {
leaves.push(tagged_hash_from_engine(leaf_tag.clone(), &record.record_bytes));
leaves.push(tagged_hash_from_engine(nonce_tag.clone(), &record.type_bytes));
}
.take_while(move |record| take_range.contains(&record.r#type))
}
- fn skip_signatures(self) -> core::iter::Filter<TlvStream<'a>, fn(&TlvRecord) -> bool> {
- self.filter(|record| !SIGNATURE_TYPES.contains(&record.r#type))
+	/// Returns an iterator over `tlv_stream`'s records, skipping any signature TLV records
+	/// (i.e., those whose type is in `SIGNATURE_TYPES`).
+	fn skip_signatures(
+		tlv_stream: impl core::iter::Iterator<Item = TlvRecord<'a>>
+	) -> impl core::iter::Iterator<Item = TlvRecord<'a>> {
+		tlv_stream.filter(|record| !SIGNATURE_TYPES.contains(&record.r#type))
+	}
}
#[inline]
fn write<W: Writer>(&self, writer: &mut W) -> Result<(), io::Error> {
let tlv_stream = TlvStream::new(self.0);
- for record in tlv_stream.skip_signatures() {
+ for record in TlvStream::skip_signatures(tlv_stream) {
writer.write_all(record.record_bytes)?;
}
Ok(())
macro_rules! tlv2 { () => { "02080000010000020003" } }
macro_rules! tlv3 { () => { "03310266e4598d1d3c415f572a8488830b60f7e744ed9235eb0b1ba93283b315c0351800000000000000010000000000000002" } }
assert_eq!(
- super::root_hash(&<Vec<u8>>::from_hex(tlv1!()).unwrap()),
+ super::root_hash(TlvStream::new(&<Vec<u8>>::from_hex(tlv1!()).unwrap())),
sha256::Hash::from_slice(&<Vec<u8>>::from_hex("b013756c8fee86503a0b4abdab4cddeb1af5d344ca6fc2fa8b6c08938caa6f93").unwrap()).unwrap(),
);
assert_eq!(
- super::root_hash(&<Vec<u8>>::from_hex(concat!(tlv1!(), tlv2!())).unwrap()),
+ super::root_hash(TlvStream::new(&<Vec<u8>>::from_hex(concat!(tlv1!(), tlv2!())).unwrap())),
sha256::Hash::from_slice(&<Vec<u8>>::from_hex("c3774abbf4815aa54ccaa026bff6581f01f3be5fe814c620a252534f434bc0d1").unwrap()).unwrap(),
);
assert_eq!(
- super::root_hash(&<Vec<u8>>::from_hex(concat!(tlv1!(), tlv2!(), tlv3!())).unwrap()),
+ super::root_hash(TlvStream::new(&<Vec<u8>>::from_hex(concat!(tlv1!(), tlv2!(), tlv3!())).unwrap())),
sha256::Hash::from_slice(&<Vec<u8>>::from_hex("ab2e79b1283b0b31e0b035258de23782df6b89a38cfa7237bde69aed1a658c5d").unwrap()).unwrap(),
);
}
"lnr1qqyqqqqqqqqqqqqqqcp4256ypqqkgzshgysy6ct5dpjk6ct5d93kzmpq23ex2ct5d9ek293pqthvwfzadd7jejes8q9lhc4rvjxd022zv5l44g6qah82ru5rdpnpjkppqvjx204vgdzgsqpvcp4mldl3plscny0rt707gvpdh6ndydfacz43euzqhrurageg3n7kafgsek6gz3e9w52parv8gs2hlxzk95tzeswywffxlkeyhml0hh46kndmwf4m6xma3tkq2lu04qz3slje2rfthc89vss",
);
assert_eq!(
- super::root_hash(&invoice_request.bytes[..]),
+ super::root_hash(TlvStream::new(&invoice_request.bytes[..])),
sha256::Hash::from_slice(&<Vec<u8>>::from_hex("608407c18ad9a94d9ea2bcdbe170b6c20c462a7833a197621c916f78cf18e624").unwrap()).unwrap(),
);
assert_eq!(
use crate::ln::channelmanager::PaymentId;
use crate::ln::features::OfferFeatures;
use crate::ln::inbound_payment::{ExpandedKey, IV_LEN, Nonce};
-use crate::ln::msgs::MAX_VALUE_MSAT;
-use crate::offers::merkle::TlvStream;
+use crate::ln::msgs::{DecodeError, MAX_VALUE_MSAT};
+use crate::offers::merkle::{TaggedHash, TlvStream};
use crate::offers::parse::{Bech32Encode, Bolt12ParseError, Bolt12SemanticError, ParsedMessage};
use crate::offers::signer::{Metadata, MetadataMaterial, self};
-use crate::util::ser::{HighZeroBytesDroppedBigSize, WithoutLength, Writeable, Writer};
+use crate::util::ser::{HighZeroBytesDroppedBigSize, Readable, WithoutLength, Writeable, Writer};
use crate::util::string::PrintableString;
#[cfg(not(c_bindings))]
pub(super) const IV_BYTES: &[u8; IV_LEN] = b"LDK Offer ~~~~~~";
+/// An identifier for an [`Offer`] built using [`DerivedMetadata`].
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub struct OfferId(pub [u8; 32]);
+
+impl OfferId {
+	// Tag for the tagged hash from which the id is derived.
+	const ID_TAG: &'static str = "LDK Offer ID";
+
+	/// Computes the id by tagged-hashing the offer's entire TLV stream.
+	fn from_valid_offer_tlv_stream(bytes: &[u8]) -> Self {
+		let tagged_hash = TaggedHash::from_valid_tlv_stream_bytes(Self::ID_TAG, bytes);
+		Self(tagged_hash.to_bytes())
+	}
+
+	/// Computes the id from an invoice request's TLV stream by hashing only the offer-range
+	/// records (`OFFER_TYPES`), so the result matches the id of the originating offer.
+	fn from_valid_invreq_tlv_stream(bytes: &[u8]) -> Self {
+		let tlv_stream = TlvStream::new(bytes).range(OFFER_TYPES);
+		let tagged_hash = TaggedHash::from_tlv_stream(Self::ID_TAG, tlv_stream);
+		Self(tagged_hash.to_bytes())
+	}
+}
+
+impl Writeable for OfferId {
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		self.0.write(w)
+	}
+}
+
+impl Readable for OfferId {
+	fn read<R: io::Read>(r: &mut R) -> Result<Self, DecodeError> {
+		Ok(OfferId(Readable::read(r)?))
+	}
+}
+
/// Builds an [`Offer`] for the "offer to be paid" flow.
///
/// See [module-level documentation] for usage.
let mut bytes = Vec::new();
$self.offer.write(&mut bytes).unwrap();
+ let id = OfferId::from_valid_offer_tlv_stream(&bytes);
+
Offer {
bytes,
#[cfg(not(c_bindings))]
contents: $self.offer,
#[cfg(c_bindings)]
- contents: $self.offer.clone()
+ contents: $self.offer.clone(),
+ id,
}
}
} }
// fields.
pub(super) bytes: Vec<u8>,
pub(super) contents: OfferContents,
+ id: OfferId,
}
/// The contents of an [`Offer`], which may be shared with an [`InvoiceRequest`] or a
impl Offer {
offer_accessors!(self, self.contents);
+	/// Returns the id of the offer, computed when the offer was built or parsed.
+	pub fn id(&self) -> OfferId {
+		self.id
+	}
+
pub(super) fn implied_chain(&self) -> ChainHash {
self.contents.implied_chain()
}
/// Verifies that the offer metadata was produced from the offer in the TLV stream.
pub(super) fn verify<T: secp256k1::Signing>(
&self, bytes: &[u8], key: &ExpandedKey, secp_ctx: &Secp256k1<T>
- ) -> Result<Option<KeyPair>, ()> {
+ ) -> Result<(OfferId, Option<KeyPair>), ()> {
match self.metadata() {
Some(metadata) => {
let tlv_stream = TlvStream::new(bytes).range(OFFER_TYPES).filter(|record| {
_ => true,
}
});
- signer::verify_recipient_metadata(
+ let keys = signer::verify_recipient_metadata(
metadata, key, IV_BYTES, self.signing_pubkey(), tlv_stream, secp_ctx
- )
+ )?;
+
+ let offer_id = OfferId::from_valid_invreq_tlv_stream(bytes);
+
+ Ok((offer_id, keys))
},
None => Err(()),
}
let offer = ParsedMessage::<OfferTlvStream>::try_from(bytes)?;
let ParsedMessage { bytes, tlv_stream } = offer;
let contents = OfferContents::try_from(tlv_stream)?;
- Ok(Offer { bytes, contents })
+ let id = OfferId::from_valid_offer_tlv_stream(&bytes);
+
+ Ok(Offer { bytes, contents, id })
}
}
let invoice_request = offer.request_invoice(vec![1; 32], payer_pubkey()).unwrap()
.build().unwrap()
.sign(payer_sign).unwrap();
- assert!(invoice_request.verify(&expanded_key, &secp_ctx).is_ok());
+ match invoice_request.verify(&expanded_key, &secp_ctx) {
+ Ok(invoice_request) => assert_eq!(invoice_request.offer_id, offer.id()),
+ Err(_) => panic!("unexpected error"),
+ }
// Fails verification with altered offer field
let mut tlv_stream = offer.as_tlv_stream();
let invoice_request = offer.request_invoice(vec![1; 32], payer_pubkey()).unwrap()
.build().unwrap()
.sign(payer_sign).unwrap();
- assert!(invoice_request.verify(&expanded_key, &secp_ctx).is_ok());
+ match invoice_request.verify(&expanded_key, &secp_ctx) {
+ Ok(invoice_request) => assert_eq!(invoice_request.offer_id, offer.id()),
+ Err(_) => panic!("unexpected error"),
+ }
// Fails verification with altered offer field
let mut tlv_stream = offer.as_tlv_stream();
use crate::chain::transaction::OutPoint;
use crate::crypto::utils::{hkdf_extract_expand_twice, sign, sign_with_aux_rand};
use crate::ln::chan_utils::{
- make_funding_redeemscript, ChannelPublicKeys, ChannelTransactionParameters, ClosingTransaction,
- CommitmentTransaction, HTLCOutputInCommitment, HolderCommitmentTransaction,
+ get_revokeable_redeemscript, make_funding_redeemscript, ChannelPublicKeys,
+ ChannelTransactionParameters, ClosingTransaction, CommitmentTransaction,
+ HTLCOutputInCommitment, HolderCommitmentTransaction,
};
use crate::ln::channel::ANCHOR_OUTPUT_VALUE_SATOSHI;
use crate::ln::channel_keys::{
- DelayedPaymentBasepoint, DelayedPaymentKey, HtlcBasepoint, HtlcKey, RevocationBasepoint,
- RevocationKey,
+ add_public_key_tweak, DelayedPaymentBasepoint, DelayedPaymentKey, HtlcBasepoint, HtlcKey,
+ RevocationBasepoint, RevocationKey,
};
#[cfg(taproot)]
use crate::ln::msgs::PartialSignatureWithNonce;
use crate::sign::taproot::TaprootChannelSigner;
use crate::util::atomic_counter::AtomicCounter;
use crate::util::invoice::construct_invoice_preimage;
+use core::convert::TryInto;
use core::ops::Deref;
use core::sync::atomic::{AtomicUsize, Ordering};
#[cfg(taproot)]
pub channel_keys_id: [u8; 32],
/// The value of the channel which this output originated from, possibly indirectly.
pub channel_value_satoshis: u64,
+ /// The channel public keys and other parameters needed to generate a spending transaction or to provide to a re-derived signer through
+ /// [`ChannelSigner::provide_channel_parameters`].
+ ///
+ /// Added as optional, but always `Some` if the descriptor was produced in v0.0.123 or later.
+ pub channel_transaction_parameters: Option<ChannelTransactionParameters>,
}
+
impl DelayedPaymentOutputDescriptor {
/// The maximum length a well-formed witness spending one of these should have.
/// Note: If you have the grind_signatures feature enabled, this will be at least 1 byte
(8, revocation_pubkey, required),
(10, channel_keys_id, required),
(12, channel_value_satoshis, required),
+ (13, channel_transaction_parameters, option),
});
pub(crate) const P2WPKH_WITNESS_WEIGHT: u64 = 1 /* num stack items */ +
/// Added as optional, but always `Some` if the descriptor was produced in v0.0.117 or later.
pub channel_transaction_parameters: Option<ChannelTransactionParameters>,
}
+
impl StaticPaymentOutputDescriptor {
/// Returns the `witness_script` of the spendable output.
///
///
/// This is not exported to bindings users as there is no standard serialization for an input.
/// See [`Self::create_spendable_outputs_psbt`] instead.
- pub fn to_psbt_input(&self) -> bitcoin::psbt::Input {
+ ///
+	/// The proprietary field is used to store the add tweak for the signing key of this transaction.
+ /// See the [`DelayedPaymentBasepoint::derive_add_tweak`] docs for more info on add tweak and how to use it.
+ ///
+ /// To get the proprietary field use:
+ /// ```
+ /// use bitcoin::psbt::{PartiallySignedTransaction};
+ /// use bitcoin::hashes::hex::FromHex;
+ ///
+ /// # let s = "70736274ff0100520200000001dee978529ab3e61a2987bea5183713d0e6d5ceb5ac81100fdb54a1a2\
+ /// # 69cef505000000000090000000011f26000000000000160014abb3ab63280d4ccc5c11d6b50fd427a8\
+ /// # e19d6470000000000001012b10270000000000002200200afe4736760d814a2651bae63b572d935d9a\
+ /// # b74a1a16c01774e341a32afa763601054d63210394a27a700617f5b7aee72bd4f8076b5770a582b7fb\
+ /// # d1d4ee2ea3802cd3cfbe2067029000b27521034629b1c8fdebfaeb58a74cd181f485e2c462e594cb30\
+ /// # 34dee655875f69f6c7c968ac20fc144c444b5f7370656e6461626c655f6f7574707574006164645f74\
+ /// # 7765616b20a86534f38ad61dc580ef41c3886204adf0911b81619c1ad7a2f5b5de39a2ba600000";
+ /// # let psbt = PartiallySignedTransaction::deserialize(<Vec<u8> as FromHex>::from_hex(s).unwrap().as_slice()).unwrap();
+ /// let key = bitcoin::psbt::raw::ProprietaryKey {
+ /// prefix: "LDK_spendable_output".as_bytes().to_vec(),
+ /// subtype: 0,
+ /// key: "add_tweak".as_bytes().to_vec(),
+ /// };
+ /// let value = psbt
+ /// .inputs
+ /// .first()
+ /// .expect("Unable to get add tweak as there are no inputs")
+ /// .proprietary
+ /// .get(&key)
+ /// .map(|x| x.to_owned());
+ /// ```
+	pub fn to_psbt_input<T: secp256k1::Signing>(
+		&self, secp_ctx: &Secp256k1<T>,
+	) -> bitcoin::psbt::Input {
		match self {
			SpendableOutputDescriptor::StaticOutput { output, .. } => {
				// Is a standard P2WPKH, no need for witness script
				bitcoin::psbt::Input { witness_utxo: Some(output.clone()), ..Default::default() }
			},
-			SpendableOutputDescriptor::DelayedPaymentOutput(descriptor) => {
-				// TODO we could add the witness script as well
+			SpendableOutputDescriptor::DelayedPaymentOutput(DelayedPaymentOutputDescriptor {
+				channel_transaction_parameters,
+				per_commitment_point,
+				revocation_pubkey,
+				to_self_delay,
+				output,
+				..
+			}) => {
+				// The witness script and signing-key add-tweak can only be reconstructed when
+				// the channel parameters were serialized alongside the descriptor (v0.0.123+).
+				let delayed_payment_basepoint = channel_transaction_parameters
+					.as_ref()
+					.map(|params| params.holder_pubkeys.delayed_payment_basepoint);
+
+				let (witness_script, add_tweak) =
+					if let Some(basepoint) = delayed_payment_basepoint.as_ref() {
+						// Required to derive signing key: privkey = basepoint_secret + SHA256(per_commitment_point || basepoint)
+						let add_tweak = basepoint.derive_add_tweak(&per_commitment_point);
+						let payment_key = DelayedPaymentKey(add_public_key_tweak(
+							secp_ctx,
+							&basepoint.to_public_key(),
+							&add_tweak,
+						));
+
+						(
+							Some(get_revokeable_redeemscript(
+								&revocation_pubkey,
+								*to_self_delay,
+								&payment_key,
+							)),
+							Some(add_tweak),
+						)
+					} else {
+						(None, None)
+					};
+
				bitcoin::psbt::Input {
-					witness_utxo: Some(descriptor.output.clone()),
+					witness_utxo: Some(output.clone()),
+					witness_script,
+					proprietary: add_tweak
+						.map(|add_tweak| {
+							[(
+								bitcoin::psbt::raw::ProprietaryKey {
+									// A non standard namespace for spendable outputs, used to store the tweak needed
+									// to derive the private key
+									prefix: "LDK_spendable_output".as_bytes().to_vec(),
+									subtype: 0,
+									key: "add_tweak".as_bytes().to_vec(),
+								},
+								add_tweak.to_vec(),
+							)]
+							.into_iter()
+							.collect()
+						})
+						.unwrap_or_default(),
				..Default::default()
			}
		},
-		SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => {
-			// TODO we could add the witness script as well
-			bitcoin::psbt::Input {
-				witness_utxo: Some(descriptor.output.clone()),
-				..Default::default()
-			}
+			SpendableOutputDescriptor::StaticPaymentOutput(descriptor) => bitcoin::psbt::Input {
+				witness_utxo: Some(descriptor.output.clone()),
+				witness_script: descriptor.witness_script(),
+				..Default::default()
			},
		}
	}
/// does not match the one we can spend.
///
/// We do not enforce that outputs meet the dust limit or that any output scripts are standard.
- pub fn create_spendable_outputs_psbt(
- descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
+ pub fn create_spendable_outputs_psbt<T: secp256k1::Signing>(
+ secp_ctx: &Secp256k1<T>, descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32,
locktime: Option<LockTime>,
) -> Result<(PartiallySignedTransaction, u64), ()> {
change_destination_script,
)?;
- let psbt_inputs = descriptors.iter().map(|d| d.to_psbt_input()).collect::<Vec<_>>();
+ let psbt_inputs =
+ descriptors.iter().map(|d| d.to_psbt_input(&secp_ctx)).collect::<Vec<_>>();
let psbt = PartiallySignedTransaction {
inputs: psbt_inputs,
outputs: vec![Default::default(); tx.output.len()],
fn sign_gossip_message(&self, msg: UnsignedGossipMessage) -> Result<Signature, ()>;
}
+/// A trait that describes a wallet capable of creating a spending [`Transaction`] from a set of
+/// [`SpendableOutputDescriptor`]s.
+pub trait OutputSpender {
+	/// Creates a [`Transaction`] which spends the given descriptors to the given outputs, plus an
+	/// output to the given change destination (if sufficient change value remains). The
+	/// transaction will have a feerate, at least, of the given value.
+	///
+	/// The `locktime` argument is used to set the transaction's locktime. If `None`, the
+	/// transaction will have a locktime of 0. It is recommended to set this to the current block
+	/// height to avoid fee sniping, unless you have some specific reason to use a different
+	/// locktime.
+	///
+	/// Returns `Err(())` if the output value is greater than the input value minus required fee,
+	/// if a descriptor was duplicated, or if an output descriptor `script_pubkey`
+	/// does not match the one we can spend.
+	fn spend_spendable_outputs<C: Signing>(
+		&self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
+		change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32,
+		locktime: Option<LockTime>, secp_ctx: &Secp256k1<C>,
+	) -> Result<Transaction, ()>;
+}
+
// Primarily needed in doctests because of https://github.com/rust-lang/rust/issues/67295
/// A dynamic [`SignerProvider`] temporarily needed for doc tests.
#[cfg(taproot)]
fn get_shutdown_scriptpubkey(&self) -> Result<ShutdownScript, ()>;
}
+/// A helper trait that describes an on-chain wallet capable of returning a (change) destination
+/// script.
+pub trait ChangeDestinationSource {
+	/// Returns a script pubkey which can be used as a change destination for
+	/// [`OutputSpender::spend_spendable_outputs`].
+	///
+	/// This method should return a different value each time it is called, to avoid linking
+	/// on-chain funds controlled by the same user.
+	fn get_change_destination_script(&self) -> Result<ScriptBuf, ()>;
+}
+
/// A simple implementation of [`WriteableEcdsaChannelSigner`] that just keeps the private keys in memory.
///
/// This implementation performs no policy checks and is insufficient by itself as
Ok(psbt)
}
-
- /// Creates a [`Transaction`] which spends the given descriptors to the given outputs, plus an
- /// output to the given change destination (if sufficient change value remains). The
- /// transaction will have a feerate, at least, of the given value.
- ///
- /// The `locktime` argument is used to set the transaction's locktime. If `None`, the
- /// transaction will have a locktime of 0. It it recommended to set this to the current block
- /// height to avoid fee sniping, unless you have some specific reason to use a different
- /// locktime.
- ///
- /// Returns `Err(())` if the output value is greater than the input value minus required fee,
- /// if a descriptor was duplicated, or if an output descriptor `script_pubkey`
- /// does not match the one we can spend.
- ///
- /// We do not enforce that outputs meet the dust limit or that any output scripts are standard.
- ///
- /// May panic if the [`SpendableOutputDescriptor`]s were not generated by channels which used
- /// this [`KeysManager`] or one of the [`InMemorySigner`] created by this [`KeysManager`].
- pub fn spend_spendable_outputs<C: Signing>(
- &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
- change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32,
- locktime: Option<LockTime>, secp_ctx: &Secp256k1<C>,
- ) -> Result<Transaction, ()> {
- let (mut psbt, expected_max_weight) =
- SpendableOutputDescriptor::create_spendable_outputs_psbt(
- descriptors,
- outputs,
- change_destination_script,
- feerate_sat_per_1000_weight,
- locktime,
- )?;
- psbt = self.sign_spendable_outputs_psbt(descriptors, psbt, secp_ctx)?;
-
- let spend_tx = psbt.extract_tx();
-
- debug_assert!(expected_max_weight >= spend_tx.weight().to_wu());
- // Note that witnesses with a signature vary somewhat in size, so allow
- // `expected_max_weight` to overshoot by up to 3 bytes per input.
- debug_assert!(
- expected_max_weight <= spend_tx.weight().to_wu() + descriptors.len() as u64 * 3
- );
-
- Ok(spend_tx)
- }
}
impl EntropySource for KeysManager {
}
}
+impl OutputSpender for KeysManager {
+	/// Creates a [`Transaction`] which spends the given descriptors to the given outputs, plus an
+	/// output to the given change destination (if sufficient change value remains).
+	///
+	/// See [`OutputSpender::spend_spendable_outputs`] documentation for more information.
+	///
+	/// We do not enforce that outputs meet the dust limit or that any output scripts are standard.
+	///
+	/// May panic if the [`SpendableOutputDescriptor`]s were not generated by channels which used
+	/// this [`KeysManager`] or one of the [`InMemorySigner`] created by this [`KeysManager`].
+	fn spend_spendable_outputs<C: Signing>(
+		&self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
+		change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32,
+		locktime: Option<LockTime>, secp_ctx: &Secp256k1<C>,
+	) -> Result<Transaction, ()> {
+		let (mut psbt, expected_max_weight) =
+			SpendableOutputDescriptor::create_spendable_outputs_psbt(
+				secp_ctx,
+				descriptors,
+				outputs,
+				change_destination_script,
+				feerate_sat_per_1000_weight,
+				locktime,
+			)?;
+		psbt = self.sign_spendable_outputs_psbt(descriptors, psbt, secp_ctx)?;
+
+		let spend_tx = psbt.extract_tx();
+
+		// The estimate should be an upper bound on the final transaction's weight.
+		debug_assert!(expected_max_weight >= spend_tx.weight().to_wu());
+		// Note that witnesses with a signature vary somewhat in size, so allow
+		// `expected_max_weight` to overshoot by up to 3 bytes per input.
+		debug_assert!(
+			expected_max_weight <= spend_tx.weight().to_wu() + descriptors.len() as u64 * 3
+		);
+
+		Ok(spend_tx)
+	}
+}
+
impl SignerProvider for KeysManager {
type EcdsaSigner = InMemorySigner;
#[cfg(taproot)]
}
}
+impl OutputSpender for PhantomKeysManager {
+	/// See [`OutputSpender::spend_spendable_outputs`] and [`KeysManager::spend_spendable_outputs`]
+	/// for documentation on this method.
+	fn spend_spendable_outputs<C: Signing>(
+		&self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
+		change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32,
+		locktime: Option<LockTime>, secp_ctx: &Secp256k1<C>,
+	) -> Result<Transaction, ()> {
+		// Simply delegates to the wrapped `KeysManager`.
+		self.inner.spend_spendable_outputs(
+			descriptors,
+			outputs,
+			change_destination_script,
+			feerate_sat_per_1000_weight,
+			locktime,
+			secp_ctx,
+		)
+	}
+}
+
impl SignerProvider for PhantomKeysManager {
type EcdsaSigner = InMemorySigner;
#[cfg(taproot)]
}
}
- /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method.
- pub fn spend_spendable_outputs<C: Signing>(
- &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec<TxOut>,
- change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32,
- locktime: Option<LockTime>, secp_ctx: &Secp256k1<C>,
- ) -> Result<Transaction, ()> {
- self.inner.spend_spendable_outputs(
- descriptors,
- outputs,
- change_destination_script,
- feerate_sat_per_1000_weight,
- locktime,
- secp_ctx,
- )
- }
-
/// See [`KeysManager::derive_channel_keys`] for documentation on this method.
pub fn derive_channel_keys(
&self, channel_value_satoshis: u64, params: &[u8; 32],
pub mod persist;
pub mod scid_utils;
pub mod string;
+pub mod sweep;
pub mod wakers;
#[cfg(fuzzing)]
pub mod base32;
/// The primary namespace under which [`ChannelMonitorUpdate`]s will be persisted.
pub const CHANNEL_MONITOR_UPDATE_PERSISTENCE_PRIMARY_NAMESPACE: &str = "monitor_updates";
+/// The primary namespace under which archived [`ChannelMonitor`]s will be persisted.
+pub const ARCHIVED_CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE: &str = "archived_monitors";
+/// The secondary namespace under which archived [`ChannelMonitor`]s will be persisted.
+pub const ARCHIVED_CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE: &str = "";
+
/// The primary namespace under which the [`NetworkGraph`] will be persisted.
pub const NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE: &str = "";
/// The secondary namespace under which the [`NetworkGraph`] will be persisted.
/// The key under which the [`WriteableScore`] will be persisted.
pub const SCORER_PERSISTENCE_KEY: &str = "scorer";
+/// The primary namespace under which [`OutputSweeper`] state will be persisted.
+///
+/// [`OutputSweeper`]: crate::util::sweep::OutputSweeper
+pub const OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE: &str = "";
+/// The secondary namespace under which [`OutputSweeper`] state will be persisted.
+///
+/// [`OutputSweeper`]: crate::util::sweep::OutputSweeper
+pub const OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE: &str = "";
+/// The key under which [`OutputSweeper`] state will be persisted.
+///
+/// [`OutputSweeper`]: crate::util::sweep::OutputSweeper
+pub const OUTPUT_SWEEPER_PERSISTENCE_KEY: &str = "output_sweeper";
+
+
/// A sentinel value to be prepended to monitors persisted by the [`MonitorUpdatingPersister`].
///
/// This serves to prevent someone from accidentally loading such monitors (which may need
Err(_) => chain::ChannelMonitorUpdateStatus::UnrecoverableError
}
}
+
+	fn archive_persisted_channel(&self, funding_txo: OutPoint) {
+		let monitor_name = MonitorName::from(funding_txo);
+		// If the monitor cannot be read there is nothing to archive; give up silently.
+		let monitor = match self.read(
+			CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
+			CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
+			monitor_name.as_str(),
+		) {
+			Ok(monitor) => monitor,
+			Err(_) => return
+		};
+		// Write the archived copy before removing the live one so the monitor is
+		// never lost entirely if the write fails.
+		match self.write(
+			ARCHIVED_CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
+			ARCHIVED_CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
+			monitor_name.as_str(),
+			&monitor,
+		) {
+			Ok(()) => {}
+			Err(_e) => return
+		};
+		// Removal failure is ignored; the archived copy already exists at this point.
+		let _ = self.remove(
+			CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
+			CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
+			monitor_name.as_str(),
+			true,
+		);
+	}
}
/// Read previously persisted [`ChannelMonitor`]s from the store.
self.persist_new_channel(funding_txo, monitor, monitor_update_call_id)
}
}
+
+	/// Archives a fully-resolved channel monitor by re-serializing it into the
+	/// archived-monitor namespace and removing it from the live monitor namespace.
+	///
+	/// Best-effort: a failure at any step returns early, leaving the monitor in the
+	/// live namespace so archiving can be retried later.
+	fn archive_persisted_channel(&self, funding_txo: OutPoint) {
+		let monitor_name = MonitorName::from(funding_txo);
+		// Deserialize the current monitor; if reading fails there is nothing to archive.
+		let monitor = match self.read_monitor(&monitor_name) {
+			Ok((_block_hash, monitor)) => monitor,
+			Err(_) => return
+		};
+		// Write the archive copy before deleting the original, so a crash between the
+		// two steps can at worst leave the monitor present in both namespaces.
+		match self.kv_store.write(
+			ARCHIVED_CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
+			ARCHIVED_CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
+			monitor_name.as_str(),
+			&monitor.encode()
+		) {
+			Ok(()) => {},
+			Err(_e) => return,
+		};
+		// Removal failure is deliberately ignored; a leftover live copy is harmless.
+		let _ = self.kv_store.remove(
+			CHANNEL_MONITOR_PERSISTENCE_PRIMARY_NAMESPACE,
+			CHANNEL_MONITOR_PERSISTENCE_SECONDARY_NAMESPACE,
+			monitor_name.as_str(),
+			true,
+		);
+	}
}
impl<K: Deref, L: Deref, ES: Deref, SP: Deref> MonitorUpdatingPersister<K, L, ES, SP>
--- /dev/null
+// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
+// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
+// You may not use this file except in accordance with one or both of these
+// licenses.
+
+//! This module contains an [`OutputSweeper`] utility that keeps track of
+//! [`SpendableOutputDescriptor`]s, i.e., persists them in a given [`KVStore`] and regularly retries
+//! sweeping them.
+
+use crate::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator};
+use crate::chain::channelmonitor::ANTI_REORG_DELAY;
+use crate::chain::{self, BestBlock, Confirm, Filter, Listen, WatchedOutput};
+use crate::io;
+use crate::ln::msgs::DecodeError;
+use crate::ln::ChannelId;
+use crate::prelude::Vec;
+use crate::sign::{ChangeDestinationSource, OutputSpender, SpendableOutputDescriptor};
+use crate::sync::Mutex;
+use crate::util::logger::Logger;
+use crate::util::persist::{
+ KVStore, OUTPUT_SWEEPER_PERSISTENCE_KEY, OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE,
+ OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE,
+};
+use crate::util::ser::{Readable, ReadableArgs, Writeable};
+use crate::{impl_writeable_tlv_based, log_debug, log_error};
+
+use bitcoin::blockdata::block::Header;
+use bitcoin::blockdata::locktime::absolute::LockTime;
+use bitcoin::secp256k1::Secp256k1;
+use bitcoin::{BlockHash, Transaction, Txid};
+
+use core::ops::Deref;
+
+/// The state of a spendable output currently tracked by an [`OutputSweeper`].
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct TrackedSpendableOutput {
+	/// The tracked output descriptor.
+	pub descriptor: SpendableOutputDescriptor,
+	/// The channel this output belongs to.
+	///
+	/// Will be `None` if no `channel_id` was given to [`OutputSweeper::track_spendable_outputs`].
+	pub channel_id: Option<ChannelId>,
+	/// The current status of the output spend.
+	pub status: OutputSpendStatus,
+}
+
+impl TrackedSpendableOutput {
+	/// Builds a [`WatchedOutput`] for (re-)registration with a chain [`Filter`].
+	///
+	/// Prefers the hash of the block at which a spend was first broadcast as the
+	/// `block_hash`, falling back to the given current tip hash when no spend has
+	/// been broadcast yet.
+	fn to_watched_output(&self, cur_hash: BlockHash) -> WatchedOutput {
+		let block_hash = self.status.first_broadcast_hash().or(Some(cur_hash));
+		match &self.descriptor {
+			SpendableOutputDescriptor::StaticOutput { outpoint, output, channel_keys_id: _ } => {
+				WatchedOutput {
+					block_hash,
+					outpoint: *outpoint,
+					script_pubkey: output.script_pubkey.clone(),
+				}
+			},
+			SpendableOutputDescriptor::DelayedPaymentOutput(output) => WatchedOutput {
+				block_hash,
+				outpoint: output.outpoint,
+				script_pubkey: output.output.script_pubkey.clone(),
+			},
+			SpendableOutputDescriptor::StaticPaymentOutput(output) => WatchedOutput {
+				block_hash,
+				outpoint: output.outpoint,
+				script_pubkey: output.output.script_pubkey.clone(),
+			},
+		}
+	}
+
+	/// Returns whether the output is spent in the given transaction.
+	pub fn is_spent_in(&self, tx: &Transaction) -> bool {
+		let prev_outpoint = match &self.descriptor {
+			SpendableOutputDescriptor::StaticOutput { outpoint, .. } => *outpoint,
+			SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.outpoint,
+			SpendableOutputDescriptor::StaticPaymentOutput(output) => output.outpoint,
+		}
+		.into_bitcoin_outpoint();
+
+		// The transaction spends this output iff one of its inputs references our outpoint.
+		tx.input.iter().any(|input| input.previous_output == prev_outpoint)
+	}
+}
+
+impl_writeable_tlv_based!(TrackedSpendableOutput, {
+ (0, descriptor, required),
+ (2, channel_id, option),
+ (4, status, required),
+});
+
+/// The current status of the output spend.
+///
+/// Progresses from [`Self::PendingInitialBroadcast`] via
+/// [`Self::PendingFirstConfirmation`] to [`Self::PendingThresholdConfirmations`],
+/// and may move back one step if the confirming block is reorged out.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum OutputSpendStatus {
+	/// The output is tracked but an initial spending transaction hasn't been generated and
+	/// broadcasted yet.
+	PendingInitialBroadcast {
+		/// The height at which we will first generate and broadcast a spending transaction.
+		delayed_until_height: Option<u32>,
+	},
+	/// A transaction spending the output has been broadcasted but is pending its first confirmation on-chain.
+	PendingFirstConfirmation {
+		/// The hash of the chain tip when we first broadcast a transaction spending this output.
+		first_broadcast_hash: BlockHash,
+		/// The best height when we last broadcast a transaction spending this output.
+		latest_broadcast_height: u32,
+		/// The transaction spending this output we last broadcasted.
+		latest_spending_tx: Transaction,
+	},
+	/// A transaction spending the output has been confirmed on-chain but will be tracked until it
+	/// reaches [`ANTI_REORG_DELAY`] confirmations.
+	PendingThresholdConfirmations {
+		/// The hash of the chain tip when we first broadcast a transaction spending this output.
+		first_broadcast_hash: BlockHash,
+		/// The best height when we last broadcast a transaction spending this output.
+		latest_broadcast_height: u32,
+		/// The transaction spending this output we saw confirmed on-chain.
+		latest_spending_tx: Transaction,
+		/// The height at which the spending transaction was confirmed.
+		confirmation_height: u32,
+		/// The hash of the block in which the spending transaction was confirmed.
+		confirmation_hash: BlockHash,
+	},
+}
+
+impl OutputSpendStatus {
+	/// Records that a spending transaction was (re-)broadcast at the given tip,
+	/// transitioning to / staying in `PendingFirstConfirmation`.
+	fn broadcast(&mut self, cur_hash: BlockHash, cur_height: u32, latest_spending_tx: Transaction) {
+		match self {
+			Self::PendingInitialBroadcast { delayed_until_height } => {
+				if let Some(delayed_until_height) = delayed_until_height {
+					debug_assert!(
+						cur_height >= *delayed_until_height,
+						"We should never broadcast before the required height is reached."
+					);
+				}
+				*self = Self::PendingFirstConfirmation {
+					first_broadcast_hash: cur_hash,
+					latest_broadcast_height: cur_height,
+					latest_spending_tx,
+				};
+			},
+			Self::PendingFirstConfirmation { first_broadcast_hash, .. } => {
+				// Rebroadcast: keep the original first-broadcast hash, update the rest.
+				*self = Self::PendingFirstConfirmation {
+					first_broadcast_hash: *first_broadcast_hash,
+					latest_broadcast_height: cur_height,
+					latest_spending_tx,
+				};
+			},
+			Self::PendingThresholdConfirmations { .. } => {
+				debug_assert!(false, "We should never rebroadcast confirmed transactions.");
+			},
+		}
+	}
+
+	/// Records an on-chain confirmation of a spending transaction, transitioning to
+	/// `PendingThresholdConfirmations`.
+	fn confirmed(
+		&mut self, confirmation_hash: BlockHash, confirmation_height: u32,
+		latest_spending_tx: Transaction,
+	) {
+		match self {
+			Self::PendingInitialBroadcast { .. } => {
+				// Generally we can't see any of our transactions confirmed if they haven't been
+				// broadcasted yet, so this should never be reachable via `transactions_confirmed`.
+				debug_assert!(false, "We should never confirm when we haven't broadcasted. This a bug and should never happen, please report.");
+				*self = Self::PendingThresholdConfirmations {
+					first_broadcast_hash: confirmation_hash,
+					latest_broadcast_height: confirmation_height,
+					latest_spending_tx,
+					confirmation_height,
+					confirmation_hash,
+				};
+			},
+			Self::PendingFirstConfirmation {
+				first_broadcast_hash,
+				latest_broadcast_height,
+				..
+			} => {
+				debug_assert!(confirmation_height >= *latest_broadcast_height);
+				*self = Self::PendingThresholdConfirmations {
+					first_broadcast_hash: *first_broadcast_hash,
+					latest_broadcast_height: *latest_broadcast_height,
+					latest_spending_tx,
+					confirmation_height,
+					confirmation_hash,
+				};
+			},
+			Self::PendingThresholdConfirmations {
+				first_broadcast_hash,
+				latest_broadcast_height,
+				..
+			} => {
+				// Already confirmed: refresh the confirmation data (e.g., after a reorg
+				// confirmed a different spending transaction).
+				*self = Self::PendingThresholdConfirmations {
+					first_broadcast_hash: *first_broadcast_hash,
+					latest_broadcast_height: *latest_broadcast_height,
+					latest_spending_tx,
+					confirmation_height,
+					confirmation_hash,
+				};
+			},
+		}
+	}
+
+	/// Reverts a confirmed spend back to `PendingFirstConfirmation`, e.g., after the
+	/// confirming block was reorged out.
+	fn unconfirmed(&mut self) {
+		match self {
+			Self::PendingInitialBroadcast { .. } => {
+				debug_assert!(
+					false,
+					"We should only mark a spend as unconfirmed if it used to be confirmed."
+				);
+			},
+			Self::PendingFirstConfirmation { .. } => {
+				debug_assert!(
+					false,
+					"We should only mark a spend as unconfirmed if it used to be confirmed."
+				);
+			},
+			Self::PendingThresholdConfirmations {
+				first_broadcast_hash,
+				latest_broadcast_height,
+				latest_spending_tx,
+				..
+			} => {
+				*self = Self::PendingFirstConfirmation {
+					first_broadcast_hash: *first_broadcast_hash,
+					latest_broadcast_height: *latest_broadcast_height,
+					latest_spending_tx: latest_spending_tx.clone(),
+				};
+			},
+		}
+	}
+
+	/// Whether spending is still delayed, i.e., `delayed_until_height` has not been reached.
+	fn is_delayed(&self, cur_height: u32) -> bool {
+		match self {
+			Self::PendingInitialBroadcast { delayed_until_height } => {
+				delayed_until_height.map_or(false, |req_height| cur_height < req_height)
+			},
+			Self::PendingFirstConfirmation { .. } => false,
+			Self::PendingThresholdConfirmations { .. } => false,
+		}
+	}
+
+	/// The chain tip hash at first broadcast, if any spend has been broadcast.
+	fn first_broadcast_hash(&self) -> Option<BlockHash> {
+		match self {
+			Self::PendingInitialBroadcast { .. } => None,
+			Self::PendingFirstConfirmation { first_broadcast_hash, .. } => {
+				Some(*first_broadcast_hash)
+			},
+			Self::PendingThresholdConfirmations { first_broadcast_hash, .. } => {
+				Some(*first_broadcast_hash)
+			},
+		}
+	}
+
+	/// The height at which we last broadcast a spend, if any.
+	fn latest_broadcast_height(&self) -> Option<u32> {
+		match self {
+			Self::PendingInitialBroadcast { .. } => None,
+			Self::PendingFirstConfirmation { latest_broadcast_height, .. } => {
+				Some(*latest_broadcast_height)
+			},
+			Self::PendingThresholdConfirmations { latest_broadcast_height, .. } => {
+				Some(*latest_broadcast_height)
+			},
+		}
+	}
+
+	/// The height at which the spend confirmed, if it has confirmed.
+	fn confirmation_height(&self) -> Option<u32> {
+		match self {
+			Self::PendingInitialBroadcast { .. } => None,
+			Self::PendingFirstConfirmation { .. } => None,
+			Self::PendingThresholdConfirmations { confirmation_height, .. } => {
+				Some(*confirmation_height)
+			},
+		}
+	}
+
+	/// The hash of the block that confirmed the spend, if it has confirmed.
+	fn confirmation_hash(&self) -> Option<BlockHash> {
+		match self {
+			Self::PendingInitialBroadcast { .. } => None,
+			Self::PendingFirstConfirmation { .. } => None,
+			Self::PendingThresholdConfirmations { confirmation_hash, .. } => {
+				Some(*confirmation_hash)
+			},
+		}
+	}
+
+	/// The most recently broadcast (or confirmed) spending transaction, if any.
+	fn latest_spending_tx(&self) -> Option<&Transaction> {
+		match self {
+			Self::PendingInitialBroadcast { .. } => None,
+			Self::PendingFirstConfirmation { latest_spending_tx, .. } => Some(latest_spending_tx),
+			Self::PendingThresholdConfirmations { latest_spending_tx, .. } => {
+				Some(latest_spending_tx)
+			},
+		}
+	}
+
+	/// Whether a spending transaction has at least one on-chain confirmation.
+	fn is_confirmed(&self) -> bool {
+		match self {
+			Self::PendingInitialBroadcast { .. } => false,
+			Self::PendingFirstConfirmation { .. } => false,
+			Self::PendingThresholdConfirmations { .. } => true,
+		}
+	}
+}
+
+impl_writeable_tlv_based_enum!(OutputSpendStatus,
+ (0, PendingInitialBroadcast) => {
+ (0, delayed_until_height, option),
+ },
+ (2, PendingFirstConfirmation) => {
+ (0, first_broadcast_hash, required),
+ (2, latest_broadcast_height, required),
+ (4, latest_spending_tx, required),
+ },
+ (4, PendingThresholdConfirmations) => {
+ (0, first_broadcast_hash, required),
+ (2, latest_broadcast_height, required),
+ (4, latest_spending_tx, required),
+ (6, confirmation_height, required),
+ (8, confirmation_hash, required),
+ };
+);
+
+/// A utility that keeps track of [`SpendableOutputDescriptor`]s, persists them in a given
+/// [`KVStore`] and regularly retries sweeping them based on a callback given to the constructor
+/// methods.
+///
+/// Users should call [`Self::track_spendable_outputs`] for any [`SpendableOutputDescriptor`]s received via [`Event::SpendableOutputs`].
+///
+/// This needs to be notified of chain state changes either via its [`Listen`] or [`Confirm`]
+/// implementation and hence has to be connected with the utilized chain data sources.
+///
+/// If chain data is provided via the [`Confirm`] interface or via filtered blocks, users are
+/// required to give their chain data sources (i.e., [`Filter`] implementation) to the respective
+/// constructor.
+///
+/// [`Event::SpendableOutputs`]: crate::events::Event::SpendableOutputs
+pub struct OutputSweeper<B: Deref, D: Deref, E: Deref, F: Deref, K: Deref, L: Deref, O: Deref>
+where
+	B::Target: BroadcasterInterface,
+	D::Target: ChangeDestinationSource,
+	E::Target: FeeEstimator,
+	F::Target: Filter + Sync + Send,
+	K::Target: KVStore,
+	L::Target: Logger,
+	O::Target: OutputSpender,
+{
+	// Mutable state (tracked outputs + best block), guarded by a single mutex and
+	// persisted to `kv_store` after every change.
+	sweeper_state: Mutex<SweeperState>,
+	// Used to broadcast generated spending transactions.
+	broadcaster: B,
+	// Used to estimate the feerate for spending transactions.
+	fee_estimator: E,
+	// Optional chain filter; tracked outputs are registered with it when present.
+	chain_data_source: Option<F>,
+	// Builds the actual spending transactions from the tracked descriptors.
+	output_spender: O,
+	// Supplies the change/destination script for spending transactions.
+	change_destination_source: D,
+	// Store used to persist the sweeper state under the OUTPUT_SWEEPER_PERSISTENCE_* keys.
+	kv_store: K,
+	logger: L,
+}
+
+impl<B: Deref, D: Deref, E: Deref, F: Deref, K: Deref, L: Deref, O: Deref>
+	OutputSweeper<B, D, E, F, K, L, O>
+where
+	B::Target: BroadcasterInterface,
+	D::Target: ChangeDestinationSource,
+	E::Target: FeeEstimator,
+	F::Target: Filter + Sync + Send,
+	K::Target: KVStore,
+	L::Target: Logger,
+	O::Target: OutputSpender,
+{
+	/// Constructs a new [`OutputSweeper`].
+	///
+	/// If chain data is provided via the [`Confirm`] interface or via filtered blocks, users also
+	/// need to register their [`Filter`] implementation via the given `chain_data_source`.
+	pub fn new(
+		best_block: BestBlock, broadcaster: B, fee_estimator: E, chain_data_source: Option<F>,
+		output_spender: O, change_destination_source: D, kv_store: K, logger: L,
+	) -> Self {
+		let outputs = Vec::new();
+		let sweeper_state = Mutex::new(SweeperState { outputs, best_block });
+		Self {
+			sweeper_state,
+			broadcaster,
+			fee_estimator,
+			chain_data_source,
+			output_spender,
+			change_destination_source,
+			kv_store,
+			logger,
+		}
+	}
+
+	/// Tells the sweeper to track the given outputs descriptors.
+	///
+	/// Usually, this should be called based on the values emitted by the
+	/// [`Event::SpendableOutputs`].
+	///
+	/// The given `exclude_static_outputs` flag controls whether the sweeper will filter out
+	/// [`SpendableOutputDescriptor::StaticOutput`]s, which may be handled directly by the on-chain
+	/// wallet implementation.
+	///
+	/// If `delay_until_height` is set, we will delay the spending until the respective block
+	/// height is reached. This can be used to batch spends, e.g., to reduce on-chain fees.
+	///
+	/// [`Event::SpendableOutputs`]: crate::events::Event::SpendableOutputs
+	pub fn track_spendable_outputs(
+		&self, output_descriptors: Vec<SpendableOutputDescriptor>, channel_id: Option<ChannelId>,
+		exclude_static_outputs: bool, delay_until_height: Option<u32>,
+	) {
+		let mut relevant_descriptors = output_descriptors
+			.into_iter()
+			.filter(|desc| {
+				!(exclude_static_outputs
+					&& matches!(desc, SpendableOutputDescriptor::StaticOutput { .. }))
+			})
+			.peekable();
+
+		// Nothing to track (and no state change to persist) if everything was filtered out.
+		if relevant_descriptors.peek().is_none() {
+			return;
+		}
+
+		let mut spending_tx_opt;
+		{
+			let mut state_lock = self.sweeper_state.lock().unwrap();
+			for descriptor in relevant_descriptors {
+				let output_info = TrackedSpendableOutput {
+					descriptor,
+					channel_id,
+					status: OutputSpendStatus::PendingInitialBroadcast {
+						delayed_until_height: delay_until_height,
+					},
+				};
+
+				// Skip descriptors we're already tracking to keep entries unique.
+				if state_lock
+					.outputs
+					.iter()
+					.find(|o| o.descriptor == output_info.descriptor)
+					.is_some()
+				{
+					continue;
+				}
+
+				state_lock.outputs.push(output_info);
+			}
+			spending_tx_opt = self.regenerate_spend_if_necessary(&mut *state_lock);
+			self.persist_state(&*state_lock).unwrap_or_else(|e| {
+				log_error!(self.logger, "Error persisting OutputSweeper: {:?}", e);
+				// Skip broadcasting if the persist failed.
+				spending_tx_opt = None;
+			});
+		}
+
+		// Broadcast outside the lock.
+		if let Some(spending_tx) = spending_tx_opt {
+			self.broadcaster.broadcast_transactions(&[&spending_tx]);
+		}
+	}
+
+	/// Returns a list of the currently tracked spendable outputs.
+	pub fn tracked_spendable_outputs(&self) -> Vec<TrackedSpendableOutput> {
+		self.sweeper_state.lock().unwrap().outputs.clone()
+	}
+
+	/// Gets the latest best block which was connected either via the [`Listen`] or
+	/// [`Confirm`] interfaces.
+	pub fn current_best_block(&self) -> BestBlock {
+		self.sweeper_state.lock().unwrap().best_block
+	}
+
+	/// Generates a fresh spending transaction for all tracked outputs that should be
+	/// (re-)broadcast at the current tip, updating their status accordingly.
+	///
+	/// Returns the transaction to broadcast, or `None` if there is nothing to do or
+	/// the spend could not be generated.
+	fn regenerate_spend_if_necessary(
+		&self, sweeper_state: &mut SweeperState,
+	) -> Option<Transaction> {
+		let cur_height = sweeper_state.best_block.height;
+		let cur_hash = sweeper_state.best_block.block_hash;
+		let filter_fn = |o: &TrackedSpendableOutput| {
+			if o.status.is_confirmed() {
+				// Don't rebroadcast confirmed txs.
+				return false;
+			}
+
+			if o.status.is_delayed(cur_height) {
+				// Don't generate and broadcast if still delayed
+				return false;
+			}
+
+			if o.status.latest_broadcast_height() >= Some(cur_height) {
+				// Only broadcast once per block height.
+				return false;
+			}
+
+			true
+		};
+
+		let respend_descriptors: Vec<&SpendableOutputDescriptor> =
+			sweeper_state.outputs.iter().filter(|o| filter_fn(*o)).map(|o| &o.descriptor).collect();
+
+		if respend_descriptors.is_empty() {
+			// Nothing to do.
+			return None;
+		}
+
+		let spending_tx = match self.spend_outputs(&*sweeper_state, respend_descriptors) {
+			Ok(spending_tx) => {
+				log_debug!(
+					self.logger,
+					"Generating and broadcasting sweeping transaction {}",
+					spending_tx.txid()
+				);
+				spending_tx
+			},
+			Err(e) => {
+				log_error!(self.logger, "Error spending outputs: {:?}", e);
+				return None;
+			},
+		};
+
+		// As we didn't modify the state so far, the same filter_fn yields the same elements as
+		// above.
+		let respend_outputs = sweeper_state.outputs.iter_mut().filter(|o| filter_fn(&**o));
+		for output_info in respend_outputs {
+			if let Some(filter) = self.chain_data_source.as_ref() {
+				// Re-register with the filter so we see confirmations of the new spend.
+				let watched_output = output_info.to_watched_output(cur_hash);
+				filter.register_output(watched_output);
+			}
+
+			output_info.status.broadcast(cur_hash, cur_height, spending_tx.clone());
+		}
+
+		Some(spending_tx)
+	}
+
+	/// Drops tracked outputs whose spend has reached [`ANTI_REORG_DELAY`] confirmations.
+	fn prune_confirmed_outputs(&self, sweeper_state: &mut SweeperState) {
+		let cur_height = sweeper_state.best_block.height;
+
+		// Prune all outputs that have sufficient depth by now.
+		sweeper_state.outputs.retain(|o| {
+			if let Some(confirmation_height) = o.status.confirmation_height() {
+				if cur_height >= confirmation_height + ANTI_REORG_DELAY - 1 {
+					log_debug!(self.logger,
+						"Pruning swept output as sufficiently confirmed via spend in transaction {:?}. Pruned descriptor: {:?}",
+						o.status.latest_spending_tx().map(|t| t.txid()), o.descriptor
+					);
+					return false;
+				}
+			}
+			true
+		});
+	}
+
+	/// Serializes and writes the sweeper state to the [`KVStore`], logging on failure.
+	fn persist_state(&self, sweeper_state: &SweeperState) -> Result<(), io::Error> {
+		self.kv_store
+			.write(
+				OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE,
+				OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE,
+				OUTPUT_SWEEPER_PERSISTENCE_KEY,
+				&sweeper_state.encode(),
+			)
+			.map_err(|e| {
+				log_error!(
+					self.logger,
+					"Write for key {}/{}/{} failed due to: {}",
+					OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE,
+					OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE,
+					OUTPUT_SWEEPER_PERSISTENCE_KEY,
+					e
+				);
+				e
+			})
+	}
+
+	/// Builds a transaction spending the given descriptors to a fresh change destination
+	/// at the current [`ConfirmationTarget::OutputSpendingFee`] feerate.
+	fn spend_outputs(
+		&self, sweeper_state: &SweeperState, descriptors: Vec<&SpendableOutputDescriptor>,
+	) -> Result<Transaction, ()> {
+		let tx_feerate =
+			self.fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::OutputSpendingFee);
+		let change_destination_script =
+			self.change_destination_source.get_change_destination_script()?;
+		let cur_height = sweeper_state.best_block.height;
+		// NOTE(review): locktime is set to the current height, presumably to discourage
+		// fee-sniping — confirm intent before relying on it.
+		let locktime = Some(LockTime::from_height(cur_height).unwrap_or(LockTime::ZERO));
+		self.output_spender.spend_spendable_outputs(
+			&descriptors,
+			Vec::new(),
+			change_destination_script,
+			tx_feerate,
+			locktime,
+			&Secp256k1::new(),
+		)
+	}
+
+	/// Marks any tracked outputs spent by the given confirmed transactions as confirmed.
+	fn transactions_confirmed_internal(
+		&self, sweeper_state: &mut SweeperState, header: &Header,
+		txdata: &chain::transaction::TransactionData, height: u32,
+	) {
+		let confirmation_hash = header.block_hash();
+		for (_, tx) in txdata {
+			for output_info in sweeper_state.outputs.iter_mut() {
+				if output_info.is_spent_in(*tx) {
+					output_info.status.confirmed(confirmation_hash, height, (*tx).clone())
+				}
+			}
+		}
+	}
+
+	/// Advances the best block, prunes fully-confirmed outputs, and regenerates a spend
+	/// if any tracked outputs now require (re-)broadcast.
+	fn best_block_updated_internal(
+		&self, sweeper_state: &mut SweeperState, header: &Header, height: u32,
+	) -> Option<Transaction> {
+		sweeper_state.best_block = BestBlock::new(header.block_hash(), height);
+		self.prune_confirmed_outputs(sweeper_state);
+		let spending_tx_opt = self.regenerate_spend_if_necessary(sweeper_state);
+		spending_tx_opt
+	}
+}
+
+impl<B: Deref, D: Deref, E: Deref, F: Deref, K: Deref, L: Deref, O: Deref> Listen
+	for OutputSweeper<B, D, E, F, K, L, O>
+where
+	B::Target: BroadcasterInterface,
+	D::Target: ChangeDestinationSource,
+	E::Target: FeeEstimator,
+	F::Target: Filter + Sync + Send,
+	K::Target: KVStore,
+	L::Target: Logger,
+	O::Target: OutputSpender,
+{
+	fn filtered_block_connected(
+		&self, header: &Header, txdata: &chain::transaction::TransactionData, height: u32,
+	) {
+		let mut spending_tx_opt;
+		{
+			let mut state_lock = self.sweeper_state.lock().unwrap();
+			assert_eq!(state_lock.best_block.block_hash, header.prev_blockhash,
+				"Blocks must be connected in chain-order - the connected header must build on the last connected header");
+			assert_eq!(state_lock.best_block.height, height - 1,
+				"Blocks must be connected in chain-order - the connected block height must be one greater than the previous height");
+
+			// First record confirmations from this block, then advance the tip (which may
+			// prune and/or regenerate a spend to broadcast).
+			self.transactions_confirmed_internal(&mut *state_lock, header, txdata, height);
+			spending_tx_opt = self.best_block_updated_internal(&mut *state_lock, header, height);
+
+			self.persist_state(&*state_lock).unwrap_or_else(|e| {
+				log_error!(self.logger, "Error persisting OutputSweeper: {:?}", e);
+				// Skip broadcasting if the persist failed.
+				spending_tx_opt = None;
+			});
+		}
+
+		// Broadcast outside the lock.
+		if let Some(spending_tx) = spending_tx_opt {
+			self.broadcaster.broadcast_transactions(&[&spending_tx]);
+		}
+	}
+
+	fn block_disconnected(&self, header: &Header, height: u32) {
+		let mut state_lock = self.sweeper_state.lock().unwrap();
+
+		let new_height = height - 1;
+		let block_hash = header.block_hash();
+
+		assert_eq!(state_lock.best_block.block_hash, block_hash,
+			"Blocks must be disconnected in chain-order - the disconnected header must be the last connected header");
+		assert_eq!(state_lock.best_block.height, height,
+			"Blocks must be disconnected in chain-order - the disconnected block must have the correct height");
+		state_lock.best_block = BestBlock::new(header.prev_blockhash, new_height);
+
+		// Any spend confirmed in the disconnected block is no longer confirmed.
+		for output_info in state_lock.outputs.iter_mut() {
+			if output_info.status.confirmation_hash() == Some(block_hash) {
+				debug_assert_eq!(output_info.status.confirmation_height(), Some(height));
+				output_info.status.unconfirmed();
+			}
+		}
+
+		self.persist_state(&*state_lock).unwrap_or_else(|e| {
+			log_error!(self.logger, "Error persisting OutputSweeper: {:?}", e);
+		});
+	}
+}
+
+impl<B: Deref, D: Deref, E: Deref, F: Deref, K: Deref, L: Deref, O: Deref> Confirm
+	for OutputSweeper<B, D, E, F, K, L, O>
+where
+	B::Target: BroadcasterInterface,
+	D::Target: ChangeDestinationSource,
+	E::Target: FeeEstimator,
+	F::Target: Filter + Sync + Send,
+	K::Target: KVStore,
+	L::Target: Logger,
+	O::Target: OutputSpender,
+{
+	fn transactions_confirmed(
+		&self, header: &Header, txdata: &chain::transaction::TransactionData, height: u32,
+	) {
+		let mut state_lock = self.sweeper_state.lock().unwrap();
+		self.transactions_confirmed_internal(&mut *state_lock, header, txdata, height);
+		self.persist_state(&*state_lock).unwrap_or_else(|e| {
+			log_error!(self.logger, "Error persisting OutputSweeper: {:?}", e);
+		});
+	}
+
+	fn transaction_unconfirmed(&self, txid: &Txid) {
+		let mut state_lock = self.sweeper_state.lock().unwrap();
+
+		// Get what height was unconfirmed.
+		let unconf_height = state_lock
+			.outputs
+			.iter()
+			.find(|o| o.status.latest_spending_tx().map(|tx| tx.txid()) == Some(*txid))
+			.and_then(|o| o.status.confirmation_height());
+
+		if let Some(unconf_height) = unconf_height {
+			// Unconfirm all >= this height.
+			// Note `Option`'s `Ord` treats `None` as less than any `Some`, so outputs
+			// without a confirmation height are correctly left untouched here.
+			state_lock
+				.outputs
+				.iter_mut()
+				.filter(|o| o.status.confirmation_height() >= Some(unconf_height))
+				.for_each(|o| o.status.unconfirmed());
+
+			self.persist_state(&*state_lock).unwrap_or_else(|e| {
+				log_error!(self.logger, "Error persisting OutputSweeper: {:?}", e);
+			});
+		}
+	}
+
+	fn best_block_updated(&self, header: &Header, height: u32) {
+		let mut spending_tx_opt;
+		{
+			let mut state_lock = self.sweeper_state.lock().unwrap();
+			spending_tx_opt = self.best_block_updated_internal(&mut *state_lock, header, height);
+			self.persist_state(&*state_lock).unwrap_or_else(|e| {
+				log_error!(self.logger, "Error persisting OutputSweeper: {:?}", e);
+				// Skip broadcasting if the persist failed.
+				spending_tx_opt = None;
+			});
+		}
+
+		// Broadcast outside the lock.
+		if let Some(spending_tx) = spending_tx_opt {
+			self.broadcaster.broadcast_transactions(&[&spending_tx]);
+		}
+	}
+
+	fn get_relevant_txids(&self) -> Vec<(Txid, u32, Option<BlockHash>)> {
+		let state_lock = self.sweeper_state.lock().unwrap();
+		// Only spends that already confirmed are relevant for reorg monitoring.
+		state_lock
+			.outputs
+			.iter()
+			.filter_map(|o| match o.status {
+				OutputSpendStatus::PendingThresholdConfirmations {
+					ref latest_spending_tx,
+					confirmation_height,
+					confirmation_hash,
+					..
+				} => Some((latest_spending_tx.txid(), confirmation_height, Some(confirmation_hash))),
+				_ => None,
+			})
+			.collect::<Vec<_>>()
+	}
+}
+
+/// The persisted state of an [`OutputSweeper`]: the tracked outputs and the best
+/// block as of the last chain update.
+#[derive(Debug, Clone)]
+struct SweeperState {
+	// All outputs currently being tracked, in insertion order.
+	outputs: Vec<TrackedSpendableOutput>,
+	// The best block last connected via `Listen`/`Confirm`.
+	best_block: BestBlock,
+}
+
+impl_writeable_tlv_based!(SweeperState, {
+	(0, outputs, required_vec),
+	(2, best_block, required),
+});
+
+/// An `enum` signalling to the [`OutputSweeper`] that it should delay spending an output until a
+/// future block height is reached.
+#[derive(Debug, Clone)]
+pub enum SpendingDelay {
+	/// A relative delay indicating we shouldn't spend the output before `cur_height + num_blocks`
+	/// is reached.
+	Relative {
+		/// The number of blocks until we'll generate and broadcast the spending transaction.
+		num_blocks: u32,
+	},
+	/// An absolute delay indicating we shouldn't spend the output before `height` is reached.
+	Absolute {
+		/// The height at which we'll generate and broadcast the spending transaction.
+		height: u32,
+	},
+}
+
+impl<B: Deref, D: Deref, E: Deref, F: Deref, K: Deref, L: Deref, O: Deref>
+	ReadableArgs<(B, E, Option<F>, O, D, K, L)> for OutputSweeper<B, D, E, F, K, L, O>
+where
+	B::Target: BroadcasterInterface,
+	D::Target: ChangeDestinationSource,
+	E::Target: FeeEstimator,
+	F::Target: Filter + Sync + Send,
+	K::Target: KVStore,
+	L::Target: Logger,
+	O::Target: OutputSpender,
+{
+	#[inline]
+	fn read<R: io::Read>(
+		reader: &mut R, args: (B, E, Option<F>, O, D, K, L),
+	) -> Result<Self, DecodeError> {
+		let (
+			broadcaster,
+			fee_estimator,
+			chain_data_source,
+			output_spender,
+			change_destination_source,
+			kv_store,
+			logger,
+		) = args;
+		let state = SweeperState::read(reader)?;
+		let best_block = state.best_block;
+
+		// Re-register all tracked outputs with the chain filter so confirmations of
+		// their spends are surfaced again after restart.
+		if let Some(filter) = chain_data_source.as_ref() {
+			for output_info in &state.outputs {
+				let watched_output = output_info.to_watched_output(best_block.block_hash);
+				filter.register_output(watched_output);
+			}
+		}
+
+		let sweeper_state = Mutex::new(state);
+		Ok(Self {
+			sweeper_state,
+			broadcaster,
+			fee_estimator,
+			chain_data_source,
+			output_spender,
+			change_destination_source,
+			kv_store,
+			logger,
+		})
+	}
+}
+
+// Mirrors the `ReadableArgs` impl for `OutputSweeper` itself, but additionally hands
+// back the deserialized best block so callers can sync chain sources before use.
+impl<B: Deref, D: Deref, E: Deref, F: Deref, K: Deref, L: Deref, O: Deref>
+	ReadableArgs<(B, E, Option<F>, O, D, K, L)> for (BestBlock, OutputSweeper<B, D, E, F, K, L, O>)
+where
+	B::Target: BroadcasterInterface,
+	D::Target: ChangeDestinationSource,
+	E::Target: FeeEstimator,
+	F::Target: Filter + Sync + Send,
+	K::Target: KVStore,
+	L::Target: Logger,
+	O::Target: OutputSpender,
+{
+	#[inline]
+	fn read<R: io::Read>(
+		reader: &mut R, args: (B, E, Option<F>, O, D, K, L),
+	) -> Result<Self, DecodeError> {
+		let (
+			broadcaster,
+			fee_estimator,
+			chain_data_source,
+			output_spender,
+			change_destination_source,
+			kv_store,
+			logger,
+		) = args;
+		let state = SweeperState::read(reader)?;
+		let best_block = state.best_block;
+
+		// Re-register all tracked outputs with the chain filter so confirmations of
+		// their spends are surfaced again after restart.
+		if let Some(filter) = chain_data_source.as_ref() {
+			for output_info in &state.outputs {
+				let watched_output = output_info.to_watched_output(best_block.block_hash);
+				filter.register_output(watched_output);
+			}
+		}
+
+		let sweeper_state = Mutex::new(state);
+		Ok((
+			best_block,
+			OutputSweeper {
+				sweeper_state,
+				broadcaster,
+				fee_estimator,
+				chain_data_source,
+				output_spender,
+				change_destination_source,
+				kv_store,
+				logger,
+			},
+		))
+	}
+}
}
res
}
+
+	// Forwards to the inner `TestPersister`'s `Persist` implementation.
+	fn archive_persisted_channel(&self, funding_txo: OutPoint) {
+		<TestPersister as chainmonitor::Persist<TestChannelSigner>>::archive_persisted_channel(&self.persister, funding_txo);
+	}
}
pub struct TestPersister {
}
ret
}
+
+	// Test-only archiving: drops our in-memory bookkeeping for the channel and
+	// asserts it was tracked in exactly one of the two maps.
+	fn archive_persisted_channel(&self, funding_txo: OutPoint) {
+		// remove the channel from the offchain_monitor_updates map
+		match self.offchain_monitor_updates.lock().unwrap().remove(&funding_txo) {
+			Some(_) => {},
+			None => {
+				// If the channel was not in the offchain_monitor_updates map, it should be in the
+				// chain_sync_monitor_persistences map.
+				assert!(self.chain_sync_monitor_persistences.lock().unwrap().remove(&funding_txo).is_some());
+			}
+		};
+	}
}
pub struct TestStore {
fn handle_stfu(&self, _their_node_id: &PublicKey, msg: &msgs::Stfu) {
self.received_msg(wire::Message::Stfu(msg.clone()));
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice(&self, _their_node_id: &PublicKey, msg: &msgs::Splice) {
self.received_msg(wire::Message::Splice(msg.clone()));
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_ack(&self, _their_node_id: &PublicKey, msg: &msgs::SpliceAck) {
self.received_msg(wire::Message::SpliceAck(msg.clone()));
}
- #[cfg(dual_funding)]
+ #[cfg(splicing)]
fn handle_splice_locked(&self, _their_node_id: &PublicKey, msg: &msgs::SpliceLocked) {
self.received_msg(wire::Message::SpliceLocked(msg.clone()));
}
watched_outputs: Mutex::new(new_hash_set()),
}
}
+	/// Deregisters the given outpoint/script from both the watched-outputs and
+	/// watched-transactions sets (test helper).
+	pub fn remove_watched_txn_and_outputs(&self, outpoint: OutPoint, script_pubkey: ScriptBuf) {
+		self.watched_outputs.lock().unwrap().remove(&(outpoint, script_pubkey.clone()));
+		self.watched_txn.lock().unwrap().remove(&(outpoint.txid, script_pubkey));
+	}
}
impl UtxoLookup for TestChainSource {