X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Fln%2Fshutdown_tests.rs;h=802ee1e563bf971b441604a830d1fee10f4015ec;hb=88e1b56d66ff550b36a6d422f47c9b9729406f61;hp=847ae784345910bc6730595495f553925b2c00a4;hpb=bfda1b683bb5a87b123c3986a029e043addef159;p=rust-lightning

diff --git a/lightning/src/ln/shutdown_tests.rs b/lightning/src/ln/shutdown_tests.rs
index 847ae784..802ee1e5 100644
--- a/lightning/src/ln/shutdown_tests.rs
+++ b/lightning/src/ln/shutdown_tests.rs
@@ -14,7 +14,8 @@ use crate::sign::{EntropySource, SignerProvider};
 use crate::chain::ChannelMonitorUpdateStatus;
 use crate::chain::transaction::OutPoint;
 use crate::events::{Event, MessageSendEvent, HTLCDestination, MessageSendEventsProvider, ClosureReason};
-use crate::ln::channelmanager::{self, PaymentSendFailure, PaymentId, RecipientOnionFields, Retry, ChannelShutdownState, ChannelDetails};
+use crate::ln::channel_state::{ChannelDetails, ChannelShutdownState};
+use crate::ln::channelmanager::{self, PaymentSendFailure, PaymentId, RecipientOnionFields, Retry};
 use crate::routing::router::{PaymentParameters, get_route, RouteParameters};
 use crate::ln::msgs;
 use crate::ln::types::ChannelId;
@@ -290,7 +291,7 @@ fn close_on_unfunded_channel() {
 let _open_chan = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
 nodes[0].node.close_channel(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
- check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 1_000_000);
+ check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, [nodes[1].node.get_our_node_id()], 1_000_000);
 }
 
 #[test]
@@ -301,11 +302,12 @@ fn expect_channel_shutdown_state_with_force_closure() {
 let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
 let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
 let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1);
+ let error_message = "Channel force-closed";
 
 expect_channel_shutdown_state!(nodes[0], chan_1.2, ChannelShutdownState::NotShuttingDown);
 expect_channel_shutdown_state!(nodes[1], chan_1.2, ChannelShutdownState::NotShuttingDown);
 
- nodes[1].node.force_close_broadcasting_latest_txn(&chan_1.2, &nodes[0].node.get_our_node_id()).unwrap();
+ nodes[1].node.force_close_broadcasting_latest_txn(&chan_1.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 
 check_closed_broadcast!(nodes[1], true);
 check_added_monitors!(nodes[1], 1);
@@ -322,7 +324,7 @@ fn expect_channel_shutdown_state_with_force_closure() {
 assert!(nodes[1].node.list_channels().is_empty());
 check_closed_broadcast!(nodes[0], true);
 check_closed_event!(nodes[0], 1, ClosureReason::CommitmentTxConfirmed, [nodes[1].node.get_our_node_id()], 100000);
- check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+ check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 }
 
 #[test]
@@ -1462,3 +1464,59 @@ fn batch_funding_failure() {
 check_closed_events(&nodes[0], &close);
 assert_eq!(nodes[0].node.list_channels().len(), 0);
 }
+
+#[test]
+fn test_force_closure_on_low_stale_fee() {
+ // Check that we force-close channels if their feerate is low and has gotten stale (i.e. it
+ // hasn't been updated).
+
+ let chanmon_cfgs = create_chanmon_cfgs(2);
+ let node_cfgs = create_node_cfgs(2, &chanmon_cfgs);
+ let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
+ let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
+
+ let chan_id = create_announced_chan_between_nodes(&nodes, 0, 1).2;
+
+ // Start by connecting lots of blocks to give LDK some feerate history
+ for _ in 0..super::channelmanager::FEERATE_TRACKING_BLOCKS * 2 {
+ connect_blocks(&nodes[1], 1);
+ }
+
+ // Now connect a handful of blocks with a "high" feerate
+ {
+ let mut feerate_lock = chanmon_cfgs[1].fee_estimator.sat_per_kw.lock().unwrap();
+ *feerate_lock *= 2;
+ }
+ for _ in 0..super::channelmanager::FEERATE_TRACKING_BLOCKS - 1 {
+ connect_blocks(&nodes[1], 1);
+ }
+ assert!(nodes[1].node.get_and_clear_pending_events().is_empty());
+
+ // Now, note that one more block at the high feerate would cause us to force-close, but it
+ // won't because we drop the feerate back down first
+ {
+ let mut feerate_lock = chanmon_cfgs[1].fee_estimator.sat_per_kw.lock().unwrap();
+ *feerate_lock /= 2;
+ }
+ connect_blocks(&nodes[1], super::channelmanager::FEERATE_TRACKING_BLOCKS as u32 * 2);
+ assert!(nodes[1].node.get_and_clear_pending_events().is_empty());
+
+ // Now, connect another FEERATE_TRACKING_BLOCKS - 1 blocks at a high feerate; note that none of
+ // these will cause a force-closure because LDK only looks at the minimum feerate over the
+ // last FEERATE_TRACKING_BLOCKS blocks.
+ {
+ let mut feerate_lock = chanmon_cfgs[1].fee_estimator.sat_per_kw.lock().unwrap();
+ *feerate_lock *= 2;
+ }
+
+ for _ in 0..super::channelmanager::FEERATE_TRACKING_BLOCKS - 1 {
+ connect_blocks(&nodes[1], 1);
+ }
+ assert!(nodes[1].node.get_and_clear_pending_events().is_empty());
+
+ // Finally, connect one more block and check that the force-closure happened.
+ connect_blocks(&nodes[1], 1);
+ check_added_monitors!(nodes[1], 1);
+ check_closed_broadcast(&nodes[1], 1, true);
+ let reason = ClosureReason::PeerFeerateTooLow { peer_feerate_sat_per_kw: 253, required_feerate_sat_per_kw: 253 * 2 };
+ check_closed_events(&nodes[1], &[ExpectedCloseEvent::from_id_reason(chan_id, false, reason)]);
+}
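
Editor's note: the comments in the new test describe the behaviour it exercises: a force-closure only fires once even the minimum feerate estimate seen over the last FEERATE_TRACKING_BLOCKS blocks is above the channel's stale feerate, which is why FEERATE_TRACKING_BLOCKS - 1 high-feerate blocks are not enough and a single low-feerate block resets the countdown. The sketch below only illustrates that rolling-minimum idea; the FeerateWindow type, its methods, and the window length of 5 are hypothetical and are not LDK's implementation (FEERATE_TRACKING_BLOCKS, 253 and 253 * 2 are the values the test itself uses).

// Illustrative sketch only -- not LDK code. Mirrors the behaviour described in the
// test's comments: only the minimum estimate over the last `window_len` blocks matters.
use std::collections::VecDeque;

struct FeerateWindow {
    estimates_sat_per_kw: VecDeque<u32>,
    window_len: usize,
}

impl FeerateWindow {
    fn new(window_len: usize) -> Self {
        Self { estimates_sat_per_kw: VecDeque::new(), window_len }
    }

    // Record the fee estimator's value each time a block is connected.
    fn block_connected(&mut self, estimate_sat_per_kw: u32) {
        self.estimates_sat_per_kw.push_back(estimate_sat_per_kw);
        if self.estimates_sat_per_kw.len() > self.window_len {
            self.estimates_sat_per_kw.pop_front();
        }
    }

    // Stale-fee check: the channel only looks stale once *every* estimate in the
    // window (i.e. the minimum) exceeds the channel's current feerate.
    fn channel_fee_is_stale(&self, channel_feerate_sat_per_kw: u32) -> bool {
        self.estimates_sat_per_kw.len() == self.window_len
            && self.estimates_sat_per_kw.iter().copied().min()
                .map_or(false, |min| min > channel_feerate_sat_per_kw)
    }
}

fn main() {
    // Hypothetical numbers: channel stuck at 253 sat/kW, estimator now returning 506.
    let mut window = FeerateWindow::new(5);
    for _ in 0..4 {
        window.block_connected(506);
    }
    assert!(!window.channel_fee_is_stale(253)); // one block short of a full high-fee window
    window.block_connected(253); // a single low estimate resets the countdown
    assert!(!window.channel_fee_is_stale(253));
    for _ in 0..5 {
        window.block_connected(506);
    }
    assert!(window.channel_fee_is_stale(253)); // whole window is now above the channel feerate
}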
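Separately, the earlier hunks update the tests for two API changes: force_close_broadcasting_latest_txn now takes an error-message String, and ClosureReason::HolderForceClosed carries a broadcasted_latest_txn field, asserted as Some(false) for the never-funded channel and Some(true) after an actual broadcast. The following is a hedged sketch of how event-handling code outside the crate might match on that variant, assuming its only payload is the Option<bool> field the assertions show; describe_closure is a hypothetical helper, not an LDK API.

use lightning::events::ClosureReason;

// Hypothetical helper (not part of LDK): distinguish the cases the
// `broadcasted_latest_txn` field encodes, as asserted in the updated tests.
fn describe_closure(reason: &ClosureReason) -> String {
    match reason {
        ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true), .. } =>
            "we force-closed and broadcast our latest commitment transaction".to_string(),
        ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false), .. } =>
            "we force-closed without broadcasting (e.g. the channel was never funded)".to_string(),
        ClosureReason::HolderForceClosed { .. } =>
            "we force-closed (broadcast status unknown)".to_string(),
        other => format!("channel closed: {:?}", other),
    }
}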