X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Fchain%2Fchannelmonitor.rs;h=3ca15e48337f3bb2da922814be39f0acde240818;hb=e4e6e09b672dc929b4d8571975bd923a7a4486be;hp=4352076e94d009928689dfd524c6214e5c3d06de;hpb=b8b1ef3149f26992625a03d45c0307bfad70e8bd;p=rust-lightning

diff --git a/lightning/src/chain/channelmonitor.rs b/lightning/src/chain/channelmonitor.rs
index 4352076e..3ca15e48 100644
--- a/lightning/src/chain/channelmonitor.rs
+++ b/lightning/src/chain/channelmonitor.rs
@@ -34,7 +34,7 @@ use bitcoin::secp256k1;
 use bitcoin::sighash::EcdsaSighashType;
 
 use crate::ln::channel::INITIAL_COMMITMENT_NUMBER;
-use crate::ln::{PaymentHash, PaymentPreimage, ChannelId};
+use crate::ln::types::{PaymentHash, PaymentPreimage, ChannelId};
 use crate::ln::msgs::DecodeError;
 use crate::ln::channel_keys::{DelayedPaymentKey, DelayedPaymentBasepoint, HtlcBasepoint, HtlcKey, RevocationKey, RevocationBasepoint};
 use crate::ln::chan_utils::{self,CommitmentTransaction, CounterpartyCommitmentSecrets, HTLCOutputInCommitment, HTLCClaim, ChannelTransactionParameters, HolderCommitmentTransaction, TxCreationKeys};
@@ -43,7 +43,7 @@ use crate::chain;
 use crate::chain::{BestBlock, WatchedOutput};
 use crate::chain::chaininterface::{BroadcasterInterface, FeeEstimator, LowerBoundedFeeEstimator};
 use crate::chain::transaction::{OutPoint, TransactionData};
-use crate::sign::{ChannelDerivationParameters, HTLCDescriptor, SpendableOutputDescriptor, StaticPaymentOutputDescriptor, DelayedPaymentOutputDescriptor, ecdsa::WriteableEcdsaChannelSigner, SignerProvider, EntropySource};
+use crate::sign::{ChannelDerivationParameters, HTLCDescriptor, SpendableOutputDescriptor, StaticPaymentOutputDescriptor, DelayedPaymentOutputDescriptor, ecdsa::EcdsaChannelSigner, SignerProvider, EntropySource};
 use crate::chain::onchaintx::{ClaimEvent, FeerateStrategy, OnchainTxHandler};
 use crate::chain::package::{CounterpartyOfferedHTLCOutput, CounterpartyReceivedHTLCOutput, HolderFundingOutput, HolderHTLCOutput, PackageSolvingData, PackageTemplate, RevokedOutput, RevokedHTLCOutput};
 use crate::chain::Filter;
@@ -53,10 +53,11 @@ use crate::util::byte_utils;
 use crate::events::{ClosureReason, Event, EventHandler};
 use crate::events::bump_transaction::{AnchorDescriptor, BumpTransactionEvent};
 
+#[allow(unused_imports)]
 use crate::prelude::*;
+
 use core::{cmp, mem};
 use crate::io::{self, Error};
-use core::convert::TryInto;
 use core::ops::Deref;
 use crate::sync::{Mutex, LockTestExt};
 
@@ -773,14 +774,14 @@ impl Readable for IrrevocablyResolvedHTLC {
 /// the "reorg path" (ie disconnecting blocks until you find a common ancestor from both the
 /// returned block hash and the the current chain and then reconnecting blocks to get to the
 /// best chain) upon deserializing the object!
-pub struct ChannelMonitor<Signer: WriteableEcdsaChannelSigner> {
+pub struct ChannelMonitor<Signer: EcdsaChannelSigner> {
 	#[cfg(test)]
 	pub(crate) inner: Mutex<ChannelMonitorImpl<Signer>>,
 	#[cfg(not(test))]
 	pub(super) inner: Mutex<ChannelMonitorImpl<Signer>>,
 }
 
-impl<Signer: WriteableEcdsaChannelSigner> Clone for ChannelMonitor<Signer> where Signer: Clone {
+impl<Signer: EcdsaChannelSigner> Clone for ChannelMonitor<Signer> where Signer: Clone {
 	fn clone(&self) -> Self {
 		let inner = self.inner.lock().unwrap().clone();
 		ChannelMonitor::from_impl(inner)
@@ -788,7 +789,7 @@ impl<Signer: WriteableEcdsaChannelSigner> Clone for ChannelMonitor<Signer> where
 }
 
 #[derive(Clone, PartialEq)]
-pub(crate) struct ChannelMonitorImpl<Signer: WriteableEcdsaChannelSigner> {
+pub(crate) struct ChannelMonitorImpl<Signer: EcdsaChannelSigner> {
 	latest_update_id: u64,
 	commitment_transaction_number_obscure_factor: u64,
 
@@ -934,12 +935,15 @@ pub(crate) struct ChannelMonitorImpl<Signer: WriteableEcdsaChannelSigner> {
 	/// Ordering of tuple data: (their_per_commitment_point, feerate_per_kw, to_broadcaster_sats,
 	/// to_countersignatory_sats)
 	initial_counterparty_commitment_info: Option<(PublicKey, u32, u64, u64)>,
+
+	/// The first block height at which we had no remaining claimable balances.
+	balances_empty_height: Option<u32>,
 }
 
 /// Transaction outputs to watch for on-chain spends.
 pub type TransactionOutputs = (Txid, Vec<(u32, TxOut)>);
 
-impl<Signer: WriteableEcdsaChannelSigner> PartialEq for ChannelMonitor<Signer> where Signer: PartialEq {
+impl<Signer: EcdsaChannelSigner> PartialEq for ChannelMonitor<Signer> where Signer: PartialEq {
 	fn eq(&self, other: &Self) -> bool {
 		// We need some kind of total lockorder. Absent a better idea, we sort by position in
 		// memory and take locks in that order (assuming that we can't move within memory while a
@@ -951,7 +955,7 @@ impl<Signer: WriteableEcdsaChannelSigner> PartialEq for ChannelMonitor<Signer> w
 	}
 }
 
-impl<Signer: WriteableEcdsaChannelSigner> Writeable for ChannelMonitor<Signer> {
+impl<Signer: EcdsaChannelSigner> Writeable for ChannelMonitor<Signer> {
 	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
 		self.inner.lock().unwrap().write(writer)
 	}
@@ -961,7 +965,7 @@ impl<Signer: WriteableEcdsaChannelSigner> Writeable for ChannelMonitor<Signer> {
 const SERIALIZATION_VERSION: u8 = 1;
 const MIN_SERIALIZATION_VERSION: u8 = 1;
 
-impl<Signer: WriteableEcdsaChannelSigner> Writeable for ChannelMonitorImpl<Signer> {
+impl<Signer: EcdsaChannelSigner> Writeable for ChannelMonitorImpl<Signer> {
 	fn write<W: Writer>(&self, writer: &mut W) -> Result<(), Error> {
 		write_ver_prefix!(writer, SERIALIZATION_VERSION, MIN_SERIALIZATION_VERSION);
 
@@ -1144,6 +1148,7 @@ impl<Signer: WriteableEcdsaChannelSigner> Writeable for ChannelMonitorImpl<Signe
 pub(crate) struct WithChannelMonitor<'a, L: Deref> where L::Target: Logger {
 	logger: &'a L,
 	peer_id: Option<PublicKey>,
 	channel_id: Option<ChannelId>,
+	payment_hash: Option<PaymentHash>,
 }
 
 impl<'a, L: Deref> Logger for WithChannelMonitor<'a, L> where L::Target: Logger {
 	fn log(&self, mut record: Record) {
 		record.peer_id = self.peer_id;
 		record.channel_id = self.channel_id;
+		record.payment_hash = self.payment_hash;
 		self.logger.log(record)
 	}
 }
 
 impl<'a, L: Deref> WithChannelMonitor<'a, L> where L::Target: Logger {
-	pub(crate) fn from<S: WriteableEcdsaChannelSigner>(logger: &'a L, monitor: &ChannelMonitor<S>) -> Self {
-		Self::from_impl(logger, &*monitor.inner.lock().unwrap())
+	pub(crate) fn from<S: EcdsaChannelSigner>(logger: &'a L, monitor: &ChannelMonitor<S>, payment_hash: Option<PaymentHash>) -> Self {
+		Self::from_impl(logger, &*monitor.inner.lock().unwrap(), payment_hash)
 	}
-	pub(crate) fn from_impl<S: WriteableEcdsaChannelSigner>(logger: &'a L, monitor_impl: &ChannelMonitorImpl<S>) -> Self {
+	pub(crate) fn from_impl<S: EcdsaChannelSigner>(logger: &'a L, monitor_impl: &ChannelMonitorImpl<S>, payment_hash: Option<PaymentHash>) -> Self {
 		let peer_id = monitor_impl.counterparty_node_id;
 		let channel_id = Some(monitor_impl.channel_id());
 		WithChannelMonitor {
-			logger, peer_id, channel_id,
+			logger, peer_id, channel_id, payment_hash,
 		}
 	}
 }
 
-impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
+impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
 	/// For lockorder enforcement purposes, we need to have a single site which constructs the
 	/// `inner` mutex, otherwise cases where we lock two monitors at the same time (eg in our
 	/// PartialEq implementation) we may decide a lockorder violation has occurred.
@@ -1327,6 +1334,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 			best_block,
 			counterparty_node_id: Some(counterparty_node_id),
 			initial_counterparty_commitment_info: None,
+			balances_empty_height: None,
 		})
 	}
 
@@ -1350,7 +1358,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	where L::Target: Logger
 	{
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.provide_initial_counterparty_commitment_tx(txid,
 			htlc_outputs, commitment_number, their_cur_per_commitment_point, feerate_per_kw,
 			to_broadcaster_value_sat, to_countersignatory_value_sat, &logger);
@@ -1370,7 +1378,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		logger: &L,
 	) where L::Target: Logger {
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.provide_latest_counterparty_commitment_tx(
 			txid, htlc_outputs, commitment_number, their_per_commitment_point, &logger)
 	}
@@ -1398,7 +1406,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		L::Target: Logger,
 	{
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, Some(*payment_hash));
 		inner.provide_payment_preimage(
 			payment_hash, payment_preimage, broadcaster, fee_estimator, &logger)
 	}
@@ -1420,7 +1428,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		L::Target: Logger,
 	{
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.update_monitor(updates, broadcaster, fee_estimator, &logger)
 	}
 
@@ -1455,7 +1463,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		F::Target: chain::Filter, L::Target: Logger,
 	{
 		let lock = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*lock);
+		let logger = WithChannelMonitor::from_impl(logger, &*lock, None);
 		log_trace!(&logger, "Registering funding outpoint {}", &lock.get_funding_txo().0);
 		filter.register_tx(&lock.get_funding_txo().0.txid, &lock.get_funding_txo().1);
 		for (txid, outputs) in lock.get_outputs_to_watch().iter() {
@@ -1615,7 +1623,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	{
 		let mut inner = self.inner.lock().unwrap();
 		let fee_estimator = LowerBoundedFeeEstimator::new(&**fee_estimator);
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.queue_latest_holder_commitment_txn_for_broadcast(broadcaster, &fee_estimator, &logger);
 	}
 
@@ -1626,7 +1634,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	pub fn unsafe_get_latest_holder_commitment_txn<L: Deref>(&self, logger: &L) -> Vec<Transaction>
 	where L::Target: Logger {
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.unsafe_get_latest_holder_commitment_txn(&logger)
 	}
 
@@ -1656,7 +1664,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		L::Target: Logger,
 	{
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.block_connected(
 			header, txdata, height, broadcaster, fee_estimator, &logger)
 	}
@@ -1676,7 +1684,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		L::Target: Logger,
 	{
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.block_disconnected(
 			header, height, broadcaster, fee_estimator, &logger)
 	}
 
@@ -1704,7 +1712,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	{
 		let bounded_fee_estimator = LowerBoundedFeeEstimator::new(fee_estimator);
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.transactions_confirmed(
 			header, txdata, height, broadcaster, &bounded_fee_estimator, &logger)
 	}
@@ -1728,7 +1736,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	{
 		let bounded_fee_estimator = LowerBoundedFeeEstimator::new(fee_estimator);
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.transaction_unconfirmed(
 			txid, broadcaster, &bounded_fee_estimator, &logger
 		);
@@ -1756,7 +1764,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	{
 		let bounded_fee_estimator = LowerBoundedFeeEstimator::new(fee_estimator);
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		inner.best_block_updated(
 			header, height, broadcaster, &bounded_fee_estimator, &logger
 		)
@@ -1796,7 +1804,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	{
 		let fee_estimator = LowerBoundedFeeEstimator::new(fee_estimator);
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		let current_height = inner.best_block.height;
 		inner.onchain_tx_handler.rebroadcast_pending_claims(
 			current_height, FeerateStrategy::HighestOfPreviousOrNew, &broadcaster, &fee_estimator, &logger,
@@ -1815,7 +1823,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	{
 		let fee_estimator = LowerBoundedFeeEstimator::new(fee_estimator);
 		let mut inner = self.inner.lock().unwrap();
-		let logger = WithChannelMonitor::from_impl(logger, &*inner);
+		let logger = WithChannelMonitor::from_impl(logger, &*inner, None);
 		let current_height = inner.best_block.height;
 		inner.onchain_tx_handler.rebroadcast_pending_claims(
 			current_height, FeerateStrategy::RetryPrevious, &broadcaster, &fee_estimator, &logger,
@@ -1855,6 +1863,55 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 		spendable_outputs
 	}
+
+	/// Checks if the monitor is fully resolved. Resolved monitor is one that has claimed all of
+	/// its outputs and balances (i.e. [`Self::get_claimable_balances`] returns an empty set).
+	///
+	/// This function returns true only if [`Self::get_claimable_balances`] has been empty for at least
+	/// 4032 blocks as an additional protection against any bugs resulting in spuriously empty balance sets.
+	pub fn is_fully_resolved<L: Logger>(&self, logger: &L) -> bool {
+		let mut is_all_funds_claimed = self.get_claimable_balances().is_empty();
+		let current_height = self.current_best_block().height;
+		let mut inner = self.inner.lock().unwrap();
+
+		if is_all_funds_claimed {
+			if !inner.funding_spend_seen {
+				debug_assert!(false, "We should see funding spend by the time a monitor clears out");
+				is_all_funds_claimed = false;
+			}
+		}
+
+		const BLOCKS_THRESHOLD: u32 = 4032; // ~four weeks
+		match (inner.balances_empty_height, is_all_funds_claimed) {
+			(Some(balances_empty_height), true) => {
+				// Claimed all funds, check if reached the blocks threshold.
+				return current_height >= balances_empty_height + BLOCKS_THRESHOLD;
+			},
+			(Some(_), false) => {
+				// previously assumed we claimed all funds, but we have new funds to claim.
+				// Should not happen in practice.
+				debug_assert!(false, "Thought we were done claiming funds, but claimable_balances now has entries");
+				log_error!(logger,
+					"WARNING: LDK thought it was done claiming all the available funds in the ChannelMonitor for channel {}, but later decided it had more to claim. This is potentially an important bug in LDK, please report it at https://github.com/lightningdevkit/rust-lightning/issues/new",
+					inner.get_funding_txo().0);
+				inner.balances_empty_height = None;
+				false
+			},
+			(None, true) => {
+				// Claimed all funds but `balances_empty_height` is None. It is set to the
+				// current block height.
+				log_debug!(logger,
+					"ChannelMonitor funded at {} is now fully resolved. It will become archivable in {} blocks",
+					inner.get_funding_txo().0, BLOCKS_THRESHOLD);
+				inner.balances_empty_height = Some(current_height);
+				false
+			},
+			(None, false) => {
+				// Have funds to claim.
+				false
+			},
+		}
+	}
+
 	#[cfg(test)]
 	pub fn get_counterparty_payment_script(&self) -> ScriptBuf {
 		self.inner.lock().unwrap().counterparty_payment_script.clone()
 	}
@@ -1872,7 +1929,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
 	}
 }
 
-impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
+impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 	/// Helper for get_claimable_balances which does the work for an individual HTLC, generating up
 	/// to one `Balance` for the HTLC.
 	fn get_htlc_balance(&self, htlc: &HTLCOutputInCommitment, holder_commitment: bool,
@@ -2051,7 +2108,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 	}
 }
 
-impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
+impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
 	/// Gets the balances in this channel which are either claimable by us if we were to
 	/// force-close the channel now or which are claimable on-chain (possibly awaiting
 	/// confirmation).
@@ -2463,7 +2520,7 @@ pub fn deliberately_bogus_accepted_htlc_witness() -> Vec<Vec<u8>> {
 	vec![Vec::new(), Vec::new(), Vec::new(), Vec::new(), deliberately_bogus_accepted_htlc_witness_program().into()].into()
 }
 
-impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
+impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 	/// Inserts a revocation secret into this channel monitor. Prunes old preimages if neither
 	/// needed by holder commitment transactions HTCLs nor by counterparty ones. Unless we haven't already seen
 	/// counterparty commitment transaction's secret, they are de facto pruned (we can use revocation key).
@@ -4329,6 +4386,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 						revocation_pubkey: broadcasted_holder_revokable_script.2,
 						channel_keys_id: self.channel_keys_id,
 						channel_value_satoshis: self.channel_value_satoshis,
+						channel_transaction_parameters: Some(self.onchain_tx_handler.channel_transaction_parameters.clone()),
 					}));
 				}
 			}
@@ -4371,7 +4429,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 	}
 }
 
-impl<Signer: WriteableEcdsaChannelSigner, T: Deref, F: Deref, L: Deref> chain::Listen for (ChannelMonitor<Signer>, T, F, L)
+impl<Signer: EcdsaChannelSigner, T: Deref, F: Deref, L: Deref> chain::Listen for (ChannelMonitor<Signer>, T, F, L)
 where
 	T::Target: BroadcasterInterface,
 	F::Target: FeeEstimator,
@@ -4386,7 +4444,7 @@ where
 	}
 }
 
-impl<Signer: WriteableEcdsaChannelSigner, M, T: Deref, F: Deref, L: Deref> chain::Confirm for (M, T, F, L)
+impl<Signer: EcdsaChannelSigner, M, T: Deref, F: Deref, L: Deref> chain::Confirm for (M, T, F, L)
 where
 	M: Deref<Target = ChannelMonitor<Signer>>,
 	T::Target: BroadcasterInterface,
@@ -4631,6 +4689,7 @@ impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP
 		let mut spendable_txids_confirmed = Some(Vec::new());
 		let mut counterparty_fulfilled_htlcs = Some(new_hash_map());
 		let mut initial_counterparty_commitment_info = None;
+		let mut balances_empty_height = None;
 		let mut channel_id = None;
 		read_tlv_fields!(reader, {
 			(1, funding_spend_confirmed, option),
@@ -4643,6 +4702,7 @@ impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP
 			(15, counterparty_fulfilled_htlcs, option),
 			(17, initial_counterparty_commitment_info, option),
 			(19, channel_id, option),
+			(21, balances_empty_height, option),
 		});
 
 		// `HolderForceClosedWithInfo` replaced `HolderForceClosed` in v0.0.122. If we have both
@@ -4721,6 +4781,7 @@ impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP
 			best_block,
 			counterparty_node_id,
 			initial_counterparty_commitment_info,
+			balances_empty_height,
 		})))
 	}
 }
@@ -4752,7 +4813,7 @@ mod tests {
 	use crate::chain::package::{weight_offered_htlc, weight_received_htlc, weight_revoked_offered_htlc, weight_revoked_received_htlc, WEIGHT_REVOKED_OUTPUT};
 	use crate::chain::transaction::OutPoint;
 	use crate::sign::InMemorySigner;
-	use crate::ln::{PaymentPreimage, PaymentHash, ChannelId};
+	use crate::ln::types::{PaymentPreimage, PaymentHash, ChannelId};
 	use crate::ln::channel_keys::{DelayedPaymentBasepoint, DelayedPaymentKey, HtlcBasepoint, RevocationBasepoint, RevocationKey};
 	use crate::ln::chan_utils::{self,HTLCOutputInCommitment, ChannelPublicKeys, ChannelTransactionParameters, HolderCommitmentTransaction, CounterpartyChannelTransactionParameters};
 	use crate::ln::channelmanager::{PaymentSendFailure, PaymentId, RecipientOnionFields};
@@ -4765,6 +4826,8 @@ mod tests {
 	use crate::sync::{Arc, Mutex};
 	use crate::io;
 	use crate::ln::features::ChannelTypeFeatures;
+
+	#[allow(unused_imports)]
 	use crate::prelude::*;
 
 	use std::str::FromStr;
@@ -5199,7 +5262,8 @@ mod tests {
 			best_block, dummy_key, channel_id);
 
 		let chan_id = monitor.inner.lock().unwrap().channel_id();
-		let context_logger = WithChannelMonitor::from(&logger, &monitor);
+		let payment_hash = PaymentHash([1; 32]);
+		let context_logger = WithChannelMonitor::from(&logger, &monitor, Some(payment_hash));
 		log_error!(context_logger, "This is an error");
 		log_warn!(context_logger, "This is an error");
 		log_debug!(context_logger, "This is an error");
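
The new `ChannelMonitor::is_fully_resolved` method added in this patch only reports whether a monitor's claimable balances have stayed empty (and the funding spend has been seen) for the 4032-block threshold; what to do with such a monitor is left to the caller. The following is a minimal caller-side sketch, not part of the patch: the `ArchivingPersister` trait and its `archive_persisted_channel` method are hypothetical names used only for illustration, and the sketch assumes the `L: Logger` bound shown in the hunk above.

use lightning::chain::channelmonitor::ChannelMonitor;
use lightning::chain::transaction::OutPoint;
use lightning::sign::InMemorySigner;
use lightning::util::logger::Logger;

/// Hypothetical persister hook that moves a monitor's stored copy out of the hot path
/// (for example into an "archive" namespace). Not an LDK API.
trait ArchivingPersister {
	fn archive_persisted_channel(&self, funding_txo: OutPoint);
}

/// Drops monitors that `is_fully_resolved` reports as having had no claimable balances
/// for roughly four weeks, asking the (assumed) persister to archive them first.
fn archive_resolved_monitors<P: ArchivingPersister, L: Logger>(
	monitors: &mut Vec<ChannelMonitor<InMemorySigner>>, persister: &P, logger: &L,
) {
	monitors.retain(|monitor| {
		if monitor.is_fully_resolved(logger) {
			// Balances have been empty past the 4032-block threshold; stop feeding
			// this monitor new blocks and archive its persisted state.
			persister.archive_persisted_channel(monitor.get_funding_txo().0);
			false
		} else {
			true
		}
	});
}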