/// Returns the holder signer for this channel.
#[cfg(test)]
- pub fn get_signer(&self) -> &ChannelSignerType<SP> {
- return &self.holder_signer
+ // Test-only: exposes the holder signer as a mutable reference (renamed from `get_signer`).
+ pub fn get_mut_signer(&mut self) -> &mut ChannelSignerType<SP> {
+ return &mut self.holder_signer
}
/// Only allowed immediately after deserialization if get_outbound_scid_alias returns 0,
}
}
+ /// Performs checks against necessary constraints after receiving either an `accept_channel` or
+ /// `accept_channel2` message.
+ ///
+ /// On success, records the counterparty's negotiated parameters in `self` and moves the
+ /// channel state forward to reflect that both init messages have been exchanged.
+ ///
+ /// # Errors
+ ///
+ /// Returns a closing `ChannelError` if we are not expecting an accept message, if any
+ /// message field is outside protocol bounds, or if a field violates the configured
+ /// handshake limits (`peer_limits`).
+ pub fn do_accept_channel_checks(
+ &mut self, default_limits: &ChannelHandshakeLimits, their_features: &InitFeatures,
+ common_fields: &msgs::CommonAcceptChannelFields, channel_reserve_satoshis: u64,
+ ) -> Result<(), ChannelError> {
+ // Prefer any per-channel override of the handshake limits over the global defaults.
+ let peer_limits = if let Some(ref limits) = self.inbound_handshake_limits_override { limits } else { default_limits };
+
+ // Check sanity of message fields:
+ if !self.is_outbound() {
+ return Err(ChannelError::close("Got an accept_channel message from an inbound peer".to_owned()));
+ }
+ if !matches!(self.channel_state, ChannelState::NegotiatingFunding(flags) if flags == NegotiatingFundingFlags::OUR_INIT_SENT) {
+ return Err(ChannelError::close("Got an accept_channel message at a strange time".to_owned()));
+ }
+ if common_fields.dust_limit_satoshis > 21000000 * 100000000 {
+ return Err(ChannelError::close(format!("Peer never wants payout outputs? dust_limit_satoshis was {}", common_fields.dust_limit_satoshis)));
+ }
+ if channel_reserve_satoshis > self.channel_value_satoshis {
+ return Err(ChannelError::close(format!("Bogus channel_reserve_satoshis ({}). Must not be greater than ({})", channel_reserve_satoshis, self.channel_value_satoshis)));
+ }
+ if common_fields.dust_limit_satoshis > self.holder_selected_channel_reserve_satoshis {
+ return Err(ChannelError::close(format!("Dust limit ({}) is bigger than our channel reserve ({})", common_fields.dust_limit_satoshis, self.holder_selected_channel_reserve_satoshis)));
+ }
+ if channel_reserve_satoshis > self.channel_value_satoshis - self.holder_selected_channel_reserve_satoshis {
+ return Err(ChannelError::close(format!("Bogus channel_reserve_satoshis ({}). Must not be greater than channel value minus our reserve ({})",
+ channel_reserve_satoshis, self.channel_value_satoshis - self.holder_selected_channel_reserve_satoshis)));
+ }
+ let full_channel_value_msat = (self.channel_value_satoshis - channel_reserve_satoshis) * 1000;
+ if common_fields.htlc_minimum_msat >= full_channel_value_msat {
+ return Err(ChannelError::close(format!("Minimum htlc value ({}) is full channel value ({})", common_fields.htlc_minimum_msat, full_channel_value_msat)));
+ }
+ let max_delay_acceptable = u16::min(peer_limits.their_to_self_delay, MAX_LOCAL_BREAKDOWN_TIMEOUT);
+ if common_fields.to_self_delay > max_delay_acceptable {
+ return Err(ChannelError::close(format!("They wanted our payments to be delayed by a needlessly long period. Upper limit: {}. Actual: {}", max_delay_acceptable, common_fields.to_self_delay)));
+ }
+ if common_fields.max_accepted_htlcs < 1 {
+ return Err(ChannelError::close("0 max_accepted_htlcs makes for a useless channel".to_owned()));
+ }
+ if common_fields.max_accepted_htlcs > MAX_HTLCS {
+ return Err(ChannelError::close(format!("max_accepted_htlcs was {}. It must not be larger than {}", common_fields.max_accepted_htlcs, MAX_HTLCS)));
+ }
+
+ // Now check against optional parameters as set by config...
+ if common_fields.htlc_minimum_msat > peer_limits.max_htlc_minimum_msat {
+ return Err(ChannelError::close(format!("htlc_minimum_msat ({}) is higher than the user specified limit ({})", common_fields.htlc_minimum_msat, peer_limits.max_htlc_minimum_msat)));
+ }
+ if common_fields.max_htlc_value_in_flight_msat < peer_limits.min_max_htlc_value_in_flight_msat {
+ return Err(ChannelError::close(format!("max_htlc_value_in_flight_msat ({}) is less than the user specified limit ({})", common_fields.max_htlc_value_in_flight_msat, peer_limits.min_max_htlc_value_in_flight_msat)));
+ }
+ if channel_reserve_satoshis > peer_limits.max_channel_reserve_satoshis {
+ return Err(ChannelError::close(format!("channel_reserve_satoshis ({}) is higher than the user specified limit ({})", channel_reserve_satoshis, peer_limits.max_channel_reserve_satoshis)));
+ }
+ if common_fields.max_accepted_htlcs < peer_limits.min_max_accepted_htlcs {
+ return Err(ChannelError::close(format!("max_accepted_htlcs ({}) is less than the user specified limit ({})", common_fields.max_accepted_htlcs, peer_limits.min_max_accepted_htlcs)));
+ }
+ if common_fields.dust_limit_satoshis < MIN_CHAN_DUST_LIMIT_SATOSHIS {
+ return Err(ChannelError::close(format!("dust_limit_satoshis ({}) is less than the implementation limit ({})", common_fields.dust_limit_satoshis, MIN_CHAN_DUST_LIMIT_SATOSHIS)));
+ }
+ if common_fields.dust_limit_satoshis > MAX_CHAN_DUST_LIMIT_SATOSHIS {
+ return Err(ChannelError::close(format!("dust_limit_satoshis ({}) is greater than the implementation limit ({})", common_fields.dust_limit_satoshis, MAX_CHAN_DUST_LIMIT_SATOSHIS)));
+ }
+ if common_fields.minimum_depth > peer_limits.max_minimum_depth {
+ return Err(ChannelError::close(format!("We consider the minimum depth to be unreasonably large. Expected minimum: ({}). Actual: ({})", peer_limits.max_minimum_depth, common_fields.minimum_depth)));
+ }
+
+ // Validate the negotiated channel type (if present) against the one we sent in open_channel.
+ if let Some(ty) = &common_fields.channel_type {
+ if *ty != self.channel_type {
+ return Err(ChannelError::close("Channel Type in accept_channel didn't match the one sent in open_channel.".to_owned()));
+ }
+ } else if their_features.supports_channel_type() {
+ // Assume they've accepted the channel type as they said they understand it.
+ } else {
+ let channel_type = ChannelTypeFeatures::from_init(&their_features);
+ if channel_type != ChannelTypeFeatures::only_static_remote_key() {
+ return Err(ChannelError::close("Only static_remote_key is supported for non-negotiated channel types".to_owned()));
+ }
+ self.channel_type = channel_type.clone();
+ self.channel_transaction_parameters.channel_type_features = channel_type;
+ }
+
+ // Parse the counterparty's upfront shutdown script, if they signal support for the feature.
+ let counterparty_shutdown_scriptpubkey = if their_features.supports_upfront_shutdown_script() {
+ match &common_fields.shutdown_scriptpubkey {
+ &Some(ref script) => {
+ // Peer is signaling upfront_shutdown and has opted out with a 0-length script. We don't enforce anything
+ if script.len() == 0 {
+ None
+ } else {
+ if !script::is_bolt2_compliant(&script, their_features) {
+ return Err(ChannelError::close(format!("Peer is signaling upfront_shutdown but has provided an unacceptable scriptpubkey format: {}", script)));
+ }
+ Some(script.clone())
+ }
+ },
+ // Peer is signaling upfront shutdown but didn't opt out with the correct mechanism (a 0-length script). Peer looks buggy; we fail the channel
+ &None => {
+ return Err(ChannelError::close("Peer is signaling upfront_shutdown but we don't get any script. Use 0-length script to opt-out".to_owned()));
+ }
+ }
+ } else { None };
+
+ // All checks passed: persist the counterparty's negotiated parameters.
+ self.counterparty_dust_limit_satoshis = common_fields.dust_limit_satoshis;
+ self.counterparty_max_htlc_value_in_flight_msat = cmp::min(common_fields.max_htlc_value_in_flight_msat, self.channel_value_satoshis * 1000);
+ self.counterparty_selected_channel_reserve_satoshis = Some(channel_reserve_satoshis);
+ self.counterparty_htlc_minimum_msat = common_fields.htlc_minimum_msat;
+ self.counterparty_max_accepted_htlcs = common_fields.max_accepted_htlcs;
+
+ // Unless configured to trust our own 0-conf funding, require at least one confirmation.
+ if peer_limits.trust_own_funding_0conf {
+ self.minimum_depth = Some(common_fields.minimum_depth);
+ } else {
+ self.minimum_depth = Some(cmp::max(1, common_fields.minimum_depth));
+ }
+
+ let counterparty_pubkeys = ChannelPublicKeys {
+ funding_pubkey: common_fields.funding_pubkey,
+ revocation_basepoint: RevocationBasepoint::from(common_fields.revocation_basepoint),
+ payment_point: common_fields.payment_basepoint,
+ delayed_payment_basepoint: DelayedPaymentBasepoint::from(common_fields.delayed_payment_basepoint),
+ htlc_basepoint: HtlcBasepoint::from(common_fields.htlc_basepoint)
+ };
+
+ self.channel_transaction_parameters.counterparty_parameters = Some(CounterpartyChannelTransactionParameters {
+ selected_contest_delay: common_fields.to_self_delay,
+ pubkeys: counterparty_pubkeys,
+ });
+
+ self.counterparty_cur_commitment_point = Some(common_fields.first_per_commitment_point);
+ self.counterparty_shutdown_scriptpubkey = counterparty_shutdown_scriptpubkey;
+
+ // Both init messages have now been exchanged; reflect that in the channel state.
+ self.channel_state = ChannelState::NegotiatingFunding(
+ NegotiatingFundingFlags::OUR_INIT_SENT | NegotiatingFundingFlags::THEIR_INIT_SENT
+ );
+ self.inbound_handshake_limits_override = None; // We're done enforcing limits on our peer's handshake now.
+
+ Ok(())
+ }
+
+
/// Returns the block hash in which our funding transaction was confirmed.
pub fn get_funding_tx_confirmed_in(&self) -> Option<BlockHash> {
self.funding_tx_confirmed_in
feerate_per_kw = cmp::max(feerate_per_kw, feerate);
}
let feerate_plus_quarter = feerate_per_kw.checked_mul(1250).map(|v| v / 1000);
- cmp::max(feerate_per_kw + 2530, feerate_plus_quarter.unwrap_or(u32::max_value()))
+ cmp::max(feerate_per_kw.saturating_add(2530), feerate_plus_quarter.unwrap_or(u32::MAX))
}
/// Get forwarding information for the counterparty.
channel_id: self.context.channel_id,
signature,
htlc_signatures,
+ batch: None,
#[cfg(taproot)]
partial_signature_with_nonce: None,
}, (counterparty_commitment_txid, commitment_stats.htlcs_included)))
}
// Message handlers
- pub fn accept_channel(&mut self, msg: &msgs::AcceptChannel, default_limits: &ChannelHandshakeLimits, their_features: &InitFeatures) -> Result<(), ChannelError> {
- let peer_limits = if let Some(ref limits) = self.context.inbound_handshake_limits_override { limits } else { default_limits };
-
- // Check sanity of message fields:
- if !self.context.is_outbound() {
- return Err(ChannelError::close("Got an accept_channel message from an inbound peer".to_owned()));
- }
- if !matches!(self.context.channel_state, ChannelState::NegotiatingFunding(flags) if flags == NegotiatingFundingFlags::OUR_INIT_SENT) {
- return Err(ChannelError::close("Got an accept_channel message at a strange time".to_owned()));
- }
- if msg.common_fields.dust_limit_satoshis > 21000000 * 100000000 {
- return Err(ChannelError::close(format!("Peer never wants payout outputs? dust_limit_satoshis was {}", msg.common_fields.dust_limit_satoshis)));
- }
- if msg.channel_reserve_satoshis > self.context.channel_value_satoshis {
- return Err(ChannelError::close(format!("Bogus channel_reserve_satoshis ({}). Must not be greater than ({})", msg.channel_reserve_satoshis, self.context.channel_value_satoshis)));
- }
- if msg.common_fields.dust_limit_satoshis > self.context.holder_selected_channel_reserve_satoshis {
- return Err(ChannelError::close(format!("Dust limit ({}) is bigger than our channel reserve ({})", msg.common_fields.dust_limit_satoshis, self.context.holder_selected_channel_reserve_satoshis)));
- }
- if msg.channel_reserve_satoshis > self.context.channel_value_satoshis - self.context.holder_selected_channel_reserve_satoshis {
- return Err(ChannelError::close(format!("Bogus channel_reserve_satoshis ({}). Must not be greater than channel value minus our reserve ({})",
- msg.channel_reserve_satoshis, self.context.channel_value_satoshis - self.context.holder_selected_channel_reserve_satoshis)));
- }
- let full_channel_value_msat = (self.context.channel_value_satoshis - msg.channel_reserve_satoshis) * 1000;
- if msg.common_fields.htlc_minimum_msat >= full_channel_value_msat {
- return Err(ChannelError::close(format!("Minimum htlc value ({}) is full channel value ({})", msg.common_fields.htlc_minimum_msat, full_channel_value_msat)));
- }
- let max_delay_acceptable = u16::min(peer_limits.their_to_self_delay, MAX_LOCAL_BREAKDOWN_TIMEOUT);
- if msg.common_fields.to_self_delay > max_delay_acceptable {
- return Err(ChannelError::close(format!("They wanted our payments to be delayed by a needlessly long period. Upper limit: {}. Actual: {}", max_delay_acceptable, msg.common_fields.to_self_delay)));
- }
- if msg.common_fields.max_accepted_htlcs < 1 {
- return Err(ChannelError::close("0 max_accepted_htlcs makes for a useless channel".to_owned()));
- }
- if msg.common_fields.max_accepted_htlcs > MAX_HTLCS {
- return Err(ChannelError::close(format!("max_accepted_htlcs was {}. It must not be larger than {}", msg.common_fields.max_accepted_htlcs, MAX_HTLCS)));
- }
-
- // Now check against optional parameters as set by config...
- if msg.common_fields.htlc_minimum_msat > peer_limits.max_htlc_minimum_msat {
- return Err(ChannelError::close(format!("htlc_minimum_msat ({}) is higher than the user specified limit ({})", msg.common_fields.htlc_minimum_msat, peer_limits.max_htlc_minimum_msat)));
- }
- if msg.common_fields.max_htlc_value_in_flight_msat < peer_limits.min_max_htlc_value_in_flight_msat {
- return Err(ChannelError::close(format!("max_htlc_value_in_flight_msat ({}) is less than the user specified limit ({})", msg.common_fields.max_htlc_value_in_flight_msat, peer_limits.min_max_htlc_value_in_flight_msat)));
- }
- if msg.channel_reserve_satoshis > peer_limits.max_channel_reserve_satoshis {
- return Err(ChannelError::close(format!("channel_reserve_satoshis ({}) is higher than the user specified limit ({})", msg.channel_reserve_satoshis, peer_limits.max_channel_reserve_satoshis)));
- }
- if msg.common_fields.max_accepted_htlcs < peer_limits.min_max_accepted_htlcs {
- return Err(ChannelError::close(format!("max_accepted_htlcs ({}) is less than the user specified limit ({})", msg.common_fields.max_accepted_htlcs, peer_limits.min_max_accepted_htlcs)));
- }
- if msg.common_fields.dust_limit_satoshis < MIN_CHAN_DUST_LIMIT_SATOSHIS {
- return Err(ChannelError::close(format!("dust_limit_satoshis ({}) is less than the implementation limit ({})", msg.common_fields.dust_limit_satoshis, MIN_CHAN_DUST_LIMIT_SATOSHIS)));
- }
- if msg.common_fields.dust_limit_satoshis > MAX_CHAN_DUST_LIMIT_SATOSHIS {
- return Err(ChannelError::close(format!("dust_limit_satoshis ({}) is greater than the implementation limit ({})", msg.common_fields.dust_limit_satoshis, MAX_CHAN_DUST_LIMIT_SATOSHIS)));
- }
- if msg.common_fields.minimum_depth > peer_limits.max_minimum_depth {
- return Err(ChannelError::close(format!("We consider the minimum depth to be unreasonably large. Expected minimum: ({}). Actual: ({})", peer_limits.max_minimum_depth, msg.common_fields.minimum_depth)));
- }
-
- if let Some(ty) = &msg.common_fields.channel_type {
- if *ty != self.context.channel_type {
- return Err(ChannelError::close("Channel Type in accept_channel didn't match the one sent in open_channel.".to_owned()));
- }
- } else if their_features.supports_channel_type() {
- // Assume they've accepted the channel type as they said they understand it.
- } else {
- let channel_type = ChannelTypeFeatures::from_init(&their_features);
- if channel_type != ChannelTypeFeatures::only_static_remote_key() {
- return Err(ChannelError::close("Only static_remote_key is supported for non-negotiated channel types".to_owned()));
- }
- self.context.channel_type = channel_type.clone();
- self.context.channel_transaction_parameters.channel_type_features = channel_type;
- }
-
- let counterparty_shutdown_scriptpubkey = if their_features.supports_upfront_shutdown_script() {
- match &msg.common_fields.shutdown_scriptpubkey {
- &Some(ref script) => {
- // Peer is signaling upfront_shutdown and has opt-out with a 0-length script. We don't enforce anything
- if script.len() == 0 {
- None
- } else {
- if !script::is_bolt2_compliant(&script, their_features) {
- return Err(ChannelError::close(format!("Peer is signaling upfront_shutdown but has provided an unacceptable scriptpubkey format: {}", script)));
- }
- Some(script.clone())
- }
- },
- // Peer is signaling upfront shutdown but don't opt-out with correct mechanism (a.k.a 0-length script). Peer looks buggy, we fail the channel
- &None => {
- return Err(ChannelError::close("Peer is signaling upfront_shutdown but we don't get any script. Use 0-length script to opt-out".to_owned()));
- }
- }
- } else { None };
-
- self.context.counterparty_dust_limit_satoshis = msg.common_fields.dust_limit_satoshis;
- self.context.counterparty_max_htlc_value_in_flight_msat = cmp::min(msg.common_fields.max_htlc_value_in_flight_msat, self.context.channel_value_satoshis * 1000);
- self.context.counterparty_selected_channel_reserve_satoshis = Some(msg.channel_reserve_satoshis);
- self.context.counterparty_htlc_minimum_msat = msg.common_fields.htlc_minimum_msat;
- self.context.counterparty_max_accepted_htlcs = msg.common_fields.max_accepted_htlcs;
-
- if peer_limits.trust_own_funding_0conf {
- self.context.minimum_depth = Some(msg.common_fields.minimum_depth);
- } else {
- self.context.minimum_depth = Some(cmp::max(1, msg.common_fields.minimum_depth));
- }
-
- let counterparty_pubkeys = ChannelPublicKeys {
- funding_pubkey: msg.common_fields.funding_pubkey,
- revocation_basepoint: RevocationBasepoint::from(msg.common_fields.revocation_basepoint),
- payment_point: msg.common_fields.payment_basepoint,
- delayed_payment_basepoint: DelayedPaymentBasepoint::from(msg.common_fields.delayed_payment_basepoint),
- htlc_basepoint: HtlcBasepoint::from(msg.common_fields.htlc_basepoint)
- };
-
- self.context.channel_transaction_parameters.counterparty_parameters = Some(CounterpartyChannelTransactionParameters {
- selected_contest_delay: msg.common_fields.to_self_delay,
- pubkeys: counterparty_pubkeys,
- });
-
- self.context.counterparty_cur_commitment_point = Some(msg.common_fields.first_per_commitment_point);
- self.context.counterparty_shutdown_scriptpubkey = counterparty_shutdown_scriptpubkey;
-
- self.context.channel_state = ChannelState::NegotiatingFunding(
- NegotiatingFundingFlags::OUR_INIT_SENT | NegotiatingFundingFlags::THEIR_INIT_SENT
- );
- self.context.inbound_handshake_limits_override = None; // We're done enforcing limits on our peer's handshake now.
-
- Ok(())
+ /// Handles an incoming `accept_channel` message, delegating validation of the message's
+ /// fields (and state updates on success) to `do_accept_channel_checks` on the channel
+ /// context.
+ pub fn accept_channel(
+ &mut self, msg: &msgs::AcceptChannel, default_limits: &ChannelHandshakeLimits,
+ their_features: &InitFeatures
+ ) -> Result<(), ChannelError> {
+ self.context.do_accept_channel_checks(default_limits, their_features, &msg.common_fields, msg.channel_reserve_satoshis)
}
/// Handles a funding_signed message from the remote end.
use crate::offers::offer::{Offer, OfferBuilder};
use crate::offers::parse::Bolt12SemanticError;
use crate::offers::refund::{Refund, RefundBuilder};
+use crate::onion_message::async_payments::{AsyncPaymentsMessage, HeldHtlcAvailable, ReleaseHeldHtlc, AsyncPaymentsMessageHandler};
use crate::onion_message::messenger::{new_pending_onion_message, Destination, MessageRouter, PendingOnionMessage, Responder, ResponseInstruction};
use crate::onion_message::offers::{OffersMessage, OffersMessageHandler};
use crate::sign::{EntropySource, NodeSigner, Recipient, SignerProvider};
self.pending_outbound_payments
.send_payment_for_bolt12_invoice(
invoice, payment_id, &self.router, self.list_usable_channels(),
- || self.compute_inflight_htlcs(), &self.entropy_source, &self.node_signer,
- best_block_height, &self.logger, &self.pending_events,
+ || self.compute_inflight_htlcs(), &self.entropy_source, &self.node_signer, &self,
+ &self.secp_ctx, best_block_height, &self.logger, &self.pending_events,
|args| self.send_payment_along_path(args)
)
}
if short_chan_id != 0 {
let mut forwarding_counterparty = None;
macro_rules! forwarding_channel_not_found {
- () => {
- for forward_info in pending_forwards.drain(..) {
+ ($forward_infos: expr) => {
+ for forward_info in $forward_infos {
match forward_info {
HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint,
let (counterparty_node_id, forward_chan_id) = match chan_info_opt {
Some((cp_id, chan_id)) => (cp_id, chan_id),
None => {
- forwarding_channel_not_found!();
+ forwarding_channel_not_found!(pending_forwards.drain(..));
continue;
}
};
let per_peer_state = self.per_peer_state.read().unwrap();
let peer_state_mutex_opt = per_peer_state.get(&counterparty_node_id);
if peer_state_mutex_opt.is_none() {
- forwarding_channel_not_found!();
+ forwarding_channel_not_found!(pending_forwards.drain(..));
continue;
}
let mut peer_state_lock = peer_state_mutex_opt.unwrap().lock().unwrap();
let peer_state = &mut *peer_state_lock;
- if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
- let logger = WithChannelContext::from(&self.logger, &chan.context, None);
- for forward_info in pending_forwards.drain(..) {
- let queue_fail_htlc_res = match forward_info {
- HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
- prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint,
- prev_user_channel_id, forward_info: PendingHTLCInfo {
- incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value,
- routing: PendingHTLCRouting::Forward {
- onion_packet, blinded, ..
- }, skimmed_fee_msat, ..
+ let mut draining_pending_forwards = pending_forwards.drain(..);
+ while let Some(forward_info) = draining_pending_forwards.next() {
+ let queue_fail_htlc_res = match forward_info {
+ HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
+ prev_short_channel_id, prev_htlc_id, prev_channel_id, prev_funding_outpoint,
+ prev_user_channel_id, forward_info: PendingHTLCInfo {
+ incoming_shared_secret, payment_hash, outgoing_amt_msat, outgoing_cltv_value,
+ routing: PendingHTLCRouting::Forward {
+ ref onion_packet, blinded, ..
+ }, skimmed_fee_msat, ..
+ },
+ }) => {
+ let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+ short_channel_id: prev_short_channel_id,
+ user_channel_id: Some(prev_user_channel_id),
+ channel_id: prev_channel_id,
+ outpoint: prev_funding_outpoint,
+ htlc_id: prev_htlc_id,
+ incoming_packet_shared_secret: incoming_shared_secret,
+ // Phantom payments are only PendingHTLCRouting::Receive.
+ phantom_shared_secret: None,
+ blinded_failure: blinded.map(|b| b.failure),
+ });
+ let next_blinding_point = blinded.and_then(|b| {
+ let encrypted_tlvs_ss = self.node_signer.ecdh(
+ Recipient::Node, &b.inbound_blinding_point, None
+ ).unwrap().secret_bytes();
+ onion_utils::next_hop_pubkey(
+ &self.secp_ctx, b.inbound_blinding_point, &encrypted_tlvs_ss
+ ).ok()
+ });
+
+ // Forward the HTLC over the most appropriate channel with the corresponding peer,
+ // applying non-strict forwarding.
+ // The channel with the least amount of outbound liquidity will be used to maximize the
+ // probability of being able to successfully forward a subsequent HTLC.
+ let maybe_optimal_channel = peer_state.channel_by_id.values_mut().filter_map(|phase| match phase {
+ ChannelPhase::Funded(chan) => {
+ let balances = chan.context.get_available_balances(&self.fee_estimator);
+ if outgoing_amt_msat <= balances.next_outbound_htlc_limit_msat &&
+ outgoing_amt_msat >= balances.next_outbound_htlc_minimum_msat &&
+ chan.context.is_usable() {
+ Some((chan, balances))
+ } else {
+ None
+ }
},
- }) => {
- let logger = WithChannelContext::from(&self.logger, &chan.context, Some(payment_hash));
- log_trace!(logger, "Adding HTLC from short id {} with payment_hash {} to channel with short id {} after delay", prev_short_channel_id, &payment_hash, short_chan_id);
- let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
- short_channel_id: prev_short_channel_id,
- user_channel_id: Some(prev_user_channel_id),
- channel_id: prev_channel_id,
- outpoint: prev_funding_outpoint,
- htlc_id: prev_htlc_id,
- incoming_packet_shared_secret: incoming_shared_secret,
- // Phantom payments are only PendingHTLCRouting::Receive.
- phantom_shared_secret: None,
- blinded_failure: blinded.map(|b| b.failure),
- });
- let next_blinding_point = blinded.and_then(|b| {
- let encrypted_tlvs_ss = self.node_signer.ecdh(
- Recipient::Node, &b.inbound_blinding_point, None
- ).unwrap().secret_bytes();
- onion_utils::next_hop_pubkey(
- &self.secp_ctx, b.inbound_blinding_point, &encrypted_tlvs_ss
- ).ok()
- });
- if let Err(e) = chan.queue_add_htlc(outgoing_amt_msat,
- payment_hash, outgoing_cltv_value, htlc_source.clone(),
- onion_packet, skimmed_fee_msat, next_blinding_point, &self.fee_estimator,
- &&logger)
- {
- if let ChannelError::Ignore(msg) = e {
- log_trace!(logger, "Failed to forward HTLC with payment_hash {}: {}", &payment_hash, msg);
+ _ => None,
+ }).min_by_key(|(_, balances)| balances.next_outbound_htlc_limit_msat).map(|(c, _)| c);
+ let optimal_channel = match maybe_optimal_channel {
+ Some(chan) => chan,
+ None => {
+ // Fall back to the specified channel to return an appropriate error.
+ if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+ chan
} else {
- panic!("Stated return value requirements in send_htlc() were not met");
+ forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+ break;
}
+ }
+ };
+
+ let logger = WithChannelContext::from(&self.logger, &optimal_channel.context, Some(payment_hash));
+ let channel_description = if optimal_channel.context.get_short_channel_id() == Some(short_chan_id) {
+ "specified"
+ } else {
+ "alternate"
+ };
+ log_trace!(logger, "Forwarding HTLC from SCID {} with payment_hash {} and next hop SCID {} over {} channel {} with corresponding peer {}",
+ prev_short_channel_id, &payment_hash, short_chan_id, channel_description, optimal_channel.context.channel_id(), &counterparty_node_id);
+ if let Err(e) = optimal_channel.queue_add_htlc(outgoing_amt_msat,
+ payment_hash, outgoing_cltv_value, htlc_source.clone(),
+ onion_packet.clone(), skimmed_fee_msat, next_blinding_point, &self.fee_estimator,
+ &&logger)
+ {
+ if let ChannelError::Ignore(msg) = e {
+ log_trace!(logger, "Failed to forward HTLC with payment_hash {} to peer {}: {}", &payment_hash, &counterparty_node_id, msg);
+ } else {
+ panic!("Stated return value requirements in send_htlc() were not met");
+ }
+
+ if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan);
failed_forwards.push((htlc_source, payment_hash,
HTLCFailReason::reason(failure_code, data),
HTLCDestination::NextHopChannel { node_id: Some(chan.context.get_counterparty_node_id()), channel_id: forward_chan_id }
));
- continue;
+ } else {
+ forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+ break;
}
- None
- },
- HTLCForwardInfo::AddHTLC { .. } => {
- panic!("short_channel_id != 0 should imply any pending_forward entries are of type Forward");
- },
- HTLCForwardInfo::FailHTLC { htlc_id, err_packet } => {
+ }
+ None
+ },
+ HTLCForwardInfo::AddHTLC { .. } => {
+ panic!("short_channel_id != 0 should imply any pending_forward entries are of type Forward");
+ },
+ HTLCForwardInfo::FailHTLC { htlc_id, ref err_packet } => {
+ if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+ let logger = WithChannelContext::from(&self.logger, &chan.context, None);
log_trace!(logger, "Failing HTLC back to channel with short id {} (backward HTLC ID {}) after delay", short_chan_id, htlc_id);
- Some((chan.queue_fail_htlc(htlc_id, err_packet, &&logger), htlc_id))
- },
- HTLCForwardInfo::FailMalformedHTLC { htlc_id, failure_code, sha256_of_onion } => {
+ Some((chan.queue_fail_htlc(htlc_id, err_packet.clone(), &&logger), htlc_id))
+ } else {
+ forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+ break;
+ }
+ },
+ HTLCForwardInfo::FailMalformedHTLC { htlc_id, failure_code, sha256_of_onion } => {
+ if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+ let logger = WithChannelContext::from(&self.logger, &chan.context, None);
log_trace!(logger, "Failing malformed HTLC back to channel with short id {} (backward HTLC ID {}) after delay", short_chan_id, htlc_id);
let res = chan.queue_fail_malformed_htlc(
htlc_id, failure_code, sha256_of_onion, &&logger
);
Some((res, htlc_id))
- },
- };
- if let Some((queue_fail_htlc_res, htlc_id)) = queue_fail_htlc_res {
- if let Err(e) = queue_fail_htlc_res {
- if let ChannelError::Ignore(msg) = e {
+ } else {
+ forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));
+ break;
+ }
+ },
+ };
+ if let Some((queue_fail_htlc_res, htlc_id)) = queue_fail_htlc_res {
+ if let Err(e) = queue_fail_htlc_res {
+ if let ChannelError::Ignore(msg) = e {
+ if let Some(ChannelPhase::Funded(ref mut chan)) = peer_state.channel_by_id.get_mut(&forward_chan_id) {
+ let logger = WithChannelContext::from(&self.logger, &chan.context, None);
log_trace!(logger, "Failed to fail HTLC with ID {} backwards to short_id {}: {}", htlc_id, short_chan_id, msg);
- } else {
- panic!("Stated return value requirements in queue_fail_{{malformed_}}htlc() were not met");
}
- // fail-backs are best-effort, we probably already have one
- // pending, and if not that's OK, if not, the channel is on
- // the chain and sending the HTLC-Timeout is their problem.
- continue;
+ } else {
+ panic!("Stated return value requirements in queue_fail_{{malformed_}}htlc() were not met");
}
+ // fail-backs are best-effort, we probably already have one
+ // pending, and if not that's OK, if not, the channel is on
+ // the chain and sending the HTLC-Timeout is their problem.
+ continue;
}
}
- } else {
- forwarding_channel_not_found!();
- continue;
}
} else {
'next_forwardable_htlc: for forward_info in pending_forwards.drain(..) {
}
if valid_mpp {
for htlc in sources.drain(..) {
- let prev_hop_chan_id = htlc.prev_hop.channel_id;
- if let Err((pk, err)) = self.claim_funds_from_hop(
+ self.claim_funds_from_hop(
htlc.prev_hop, payment_preimage,
|_, definitely_duplicate| {
debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash })
}
- ) {
- if let msgs::ErrorAction::IgnoreError = err.err.action {
- // We got a temporary failure updating monitor, but will claim the
- // HTLC when the monitor updating is restored (or on chain).
- let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id), Some(payment_hash));
- log_error!(logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
- } else { errs.push((pk, err)); }
- }
+ );
}
}
if !valid_mpp {
}
}
- fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>, bool) -> Option<MonitorUpdateCompletionAction>>(&self,
- prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage, completion_action: ComplFunc)
- -> Result<(), (PublicKey, MsgHandleErrInternal)> {
+ fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>, bool) -> Option<MonitorUpdateCompletionAction>>(
+ &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
+ completion_action: ComplFunc,
+ ) {
//TODO: Delay the claimed_funds relaying just like we do outbound relay!
// If we haven't yet run background events assume we're still deserializing and shouldn't
let action = if let Some(action) = completion_action(None, true) {
action
} else {
- return Ok(());
+ return;
};
mem::drop(peer_state_lock);
} else {
debug_assert!(false,
"Duplicate claims should always free another channel immediately");
- return Ok(());
+ return;
};
if let Some(peer_state_mtx) = per_peer_state.get(&node_id) {
let mut peer_state = peer_state_mtx.lock().unwrap();
}
}
}
- return Ok(());
+ return;
}
}
}
// generally always allowed to be duplicative (and it's specifically noted in
// `PaymentForwarded`).
self.handle_monitor_update_completion_actions(completion_action(None, false));
- Ok(())
}
fn finalize_claims(&self, sources: Vec<HTLCSource>) {
let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
#[cfg(debug_assertions)]
let claiming_chan_funding_outpoint = hop_data.outpoint;
- let res = self.claim_funds_from_hop(hop_data, payment_preimage,
+ self.claim_funds_from_hop(hop_data, payment_preimage,
|htlc_claim_value_msat, definitely_duplicate| {
let chan_to_release =
if let Some(node_id) = next_channel_counterparty_node_id {
})
}
});
- if let Err((pk, err)) = res {
- let result: Result<(), _> = Err(err);
- let _ = handle_error!(self, result, pk);
- }
},
}
}
}
#[cfg(splicing)]
- fn handle_splice(&self, counterparty_node_id: &PublicKey, msg: &msgs::Splice) {
+ fn handle_splice_init(&self, counterparty_node_id: &PublicKey, msg: &msgs::SpliceInit) {
let _: Result<(), _> = handle_error!(self, Err(MsgHandleErrInternal::send_err_msg_no_close(
"Splicing not supported".to_owned(),
msg.channel_id.clone())), *counterparty_node_id);
// Quiescence
&events::MessageSendEvent::SendStfu { .. } => false,
// Splicing
- &events::MessageSendEvent::SendSplice { .. } => false,
+ &events::MessageSendEvent::SendSpliceInit { .. } => false,
&events::MessageSendEvent::SendSpliceAck { .. } => false,
&events::MessageSendEvent::SendSpliceLocked { .. } => false,
// Interactive Transaction Construction
},
}
},
+ #[cfg(async_payments)]
+ OffersMessage::StaticInvoice(_invoice) => {
+ match responder {
+ Some(responder) => {
+ responder.respond(OffersMessage::InvoiceError(
+ InvoiceError::from_string("Static invoices not yet supported".to_string())
+ ))
+ },
+ None => return ResponseInstruction::NoResponse,
+ }
+ },
OffersMessage::InvoiceError(invoice_error) => {
log_trace!(self.logger, "Received invoice_error: {}", invoice_error);
ResponseInstruction::NoResponse
}
}
+impl<M: Deref, T: Deref, ES: Deref, NS: Deref, SP: Deref, F: Deref, R: Deref, L: Deref>
+AsyncPaymentsMessageHandler for ChannelManager<M, T, ES, NS, SP, F, R, L>
+where
+ M::Target: chain::Watch<<SP::Target as SignerProvider>::EcdsaSigner>,
+ T::Target: BroadcasterInterface,
+ ES::Target: EntropySource,
+ NS::Target: NodeSigner,
+ SP::Target: SignerProvider,
+ F::Target: FeeEstimator,
+ R::Target: Router,
+ L::Target: Logger,
+{
+ /// Stub: held HTLCs are not yet supported, so no response is ever produced.
+ fn held_htlc_available(
+ &self, _message: HeldHtlcAvailable, _responder: Option<Responder>
+ ) -> ResponseInstruction<ReleaseHeldHtlc> {
+ ResponseInstruction::NoResponse
+ }
+
+ /// Stub: the release notification is ignored (no HTLCs are ever held yet).
+ fn release_held_htlc(&self, _message: ReleaseHeldHtlc) {}
+
+ /// Stub: no async-payments onion messages are ever queued for sending.
+ fn release_pending_messages(&self) -> Vec<PendingOnionMessage<AsyncPaymentsMessage>> {
+ Vec::new()
+ }
+}
+
impl<M: Deref, T: Deref, ES: Deref, NS: Deref, SP: Deref, F: Deref, R: Deref, L: Deref>
NodeIdLookUp for ChannelManager<M, T, ES, NS, SP, F, R, L>
where
use crate::io_extras::sink;
use crate::prelude::*;
-use core::ops::Deref;
+use bitcoin::absolute::LockTime as AbsoluteLockTime;
use bitcoin::amount::Amount;
use bitcoin::blockdata::constants::WITNESS_SCALE_FACTOR;
use bitcoin::consensus::Encodable;
use bitcoin::policy::MAX_STANDARD_TX_WEIGHT;
use bitcoin::transaction::Version;
-use bitcoin::{
- absolute::LockTime as AbsoluteLockTime, OutPoint, ScriptBuf, Sequence, Transaction, TxIn,
- TxOut, Weight,
-};
+use bitcoin::{OutPoint, ScriptBuf, Sequence, Transaction, TxIn, TxOut, Weight};
use crate::chain::chaininterface::fee_for_weight;
use crate::events::bump_transaction::{BASE_INPUT_WEIGHT, EMPTY_SCRIPT_SIG_WEIGHT};
use crate::sign::{EntropySource, P2TR_KEY_PATH_WITNESS_WEIGHT, P2WPKH_WITNESS_WEIGHT};
use crate::util::ser::TransactionU16LenLimited;
+use core::ops::Deref;
+
/// The number of received `tx_add_input` messages during a negotiation at which point the
/// negotiation MUST be failed.
const MAX_RECEIVED_TX_ADD_INPUT_COUNT: u16 = 4096;
InsufficientFees,
OutputsValueExceedsInputsValue,
InvalidTx,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct InteractiveTxInput {
- serial_id: SerialId,
- input: TxIn,
- prev_output: TxOut,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct InteractiveTxOutput {
- serial_id: SerialId,
- tx_out: TxOut,
+ /// No funding (shared) output found.
+ MissingFundingOutput,
+ /// More than one funding (shared) output found.
+ DuplicateFundingOutput,
+ /// The intended local part of the funding output is higher than the actual shared funding output,
+ /// if funding output is provided by the peer this is an interop error,
+ /// if provided by the same node, then it is an internal input-consistency error.
+ InvalidLowFundingOutputValue,
}
#[derive(Debug, Clone, PartialEq, Eq)]
let local_inputs_value_satoshis = context
.inputs
.iter()
- .filter(|(serial_id, _)| {
- !is_serial_id_valid_for_counterparty(context.holder_is_initiator, serial_id)
- })
- .fold(0u64, |value, (_, input)| value.saturating_add(input.prev_output.value.to_sat()));
+ .fold(0u64, |value, (_, input)| value.saturating_add(input.local_value()));
let local_outputs_value_satoshis = context
.outputs
.iter()
- .filter(|(serial_id, _)| {
- !is_serial_id_valid_for_counterparty(context.holder_is_initiator, serial_id)
- })
- .fold(0u64, |value, (_, output)| value.saturating_add(output.tx_out.value.to_sat()));
+ .fold(0u64, |value, (_, output)| value.saturating_add(output.local_value()));
Self {
holder_is_initiator: context.holder_is_initiator,
}
pub fn weight(&self) -> Weight {
- let inputs_weight = self.inputs.iter().fold(
- Weight::from_wu(0),
- |weight, InteractiveTxInput { prev_output, .. }| {
- weight.checked_add(estimate_input_weight(prev_output)).unwrap_or(Weight::MAX)
- },
- );
- let outputs_weight = self.outputs.iter().fold(
- Weight::from_wu(0),
- |weight, InteractiveTxOutput { tx_out, .. }| {
- weight.checked_add(get_output_weight(&tx_out.script_pubkey)).unwrap_or(Weight::MAX)
- },
- );
+ let inputs_weight = self.inputs.iter().fold(Weight::from_wu(0), |weight, input| {
+ weight.checked_add(estimate_input_weight(input.prev_output())).unwrap_or(Weight::MAX)
+ });
+ let outputs_weight = self.outputs.iter().fold(Weight::from_wu(0), |weight, output| {
+ weight.checked_add(get_output_weight(&output.script_pubkey())).unwrap_or(Weight::MAX)
+ });
Weight::from_wu(TX_COMMON_FIELDS_WEIGHT)
.checked_add(inputs_weight)
.and_then(|weight| weight.checked_add(outputs_weight))
// Inputs and outputs must be sorted by serial_id
let ConstructedTransaction { mut inputs, mut outputs, .. } = self;
- inputs.sort_unstable_by_key(|InteractiveTxInput { serial_id, .. }| *serial_id);
- outputs.sort_unstable_by_key(|InteractiveTxOutput { serial_id, .. }| *serial_id);
+ inputs.sort_unstable_by_key(|input| input.serial_id());
+ outputs.sort_unstable_by_key(|output| output.serial_id);
- let input: Vec<TxIn> =
- inputs.into_iter().map(|InteractiveTxInput { input, .. }| input).collect();
+ let input: Vec<TxIn> = inputs.into_iter().map(|input| input.txin().clone()).collect();
let output: Vec<TxOut> =
- outputs.into_iter().map(|InteractiveTxOutput { tx_out, .. }| tx_out).collect();
+ outputs.into_iter().map(|output| output.tx_out().clone()).collect();
Transaction { version: Version::TWO, lock_time: self.lock_time, input, output }
}
received_tx_add_input_count: u16,
received_tx_add_output_count: u16,
inputs: HashMap<SerialId, InteractiveTxInput>,
+ /// The output script intended to be the new funding output script.
+ /// The script pubkey is used to determine which output is the funding output.
+ /// When an output with the same script pubkey is added by any of the nodes, it will be
+ /// treated as the shared output.
+ /// The value is the holder's intended contribution to the shared funding output.
+ /// The rest is the counterparty's contribution.
+ /// When the funding output is added (recognized by its output script pubkey), it will be marked
+ /// as shared, and split between the peers according to the local value.
+ /// If the local value is found to be larger than the actual funding output, an error is generated.
+ expected_shared_funding_output: (ScriptBuf, u64),
+ /// The actual new funding output, set only after the output has actually been added.
+ /// NOTE: this output is also included in `outputs`.
+ actual_new_funding_output: Option<SharedOwnedOutput>,
prevtx_outpoints: HashSet<OutPoint>,
+ /// The outputs added so far.
outputs: HashMap<SerialId, InteractiveTxOutput>,
+ /// The locktime of the funding transaction.
tx_locktime: AbsoluteLockTime,
+ /// The fee rate used for the transaction
feerate_sat_per_kw: u32,
}
}
impl NegotiationContext {
+ /// Constructs a fresh negotiation context with empty input/output maps and zeroed
+ /// received-message counters.
+ ///
+ /// `expected_shared_funding_output` is the script pubkey used to recognize the shared
+ /// funding output, paired with the holder's intended contribution to its value (sats).
+ fn new(
+ holder_is_initiator: bool, expected_shared_funding_output: (ScriptBuf, u64),
+ tx_locktime: AbsoluteLockTime, feerate_sat_per_kw: u32,
+ ) -> Self {
+ NegotiationContext {
+ holder_is_initiator,
+ received_tx_add_input_count: 0,
+ received_tx_add_output_count: 0,
+ inputs: new_hash_map(),
+ expected_shared_funding_output,
+ actual_new_funding_output: None,
+ prevtx_outpoints: new_hash_set(),
+ outputs: new_hash_map(),
+ tx_locktime,
+ feerate_sat_per_kw,
+ }
+ }
+
+ /// Marks `tx_out` as the actual shared funding output, splitting its value according to
+ /// the previously recorded expected local contribution.
+ ///
+ /// Errors with `DuplicateFundingOutput` if a funding output was already recorded, or
+ /// with `InvalidLowFundingOutputValue` if the expected local contribution exceeds the
+ /// output's actual value.
+ fn set_actual_new_funding_output(
+ &mut self, tx_out: TxOut,
+ ) -> Result<SharedOwnedOutput, AbortReason> {
+ if self.actual_new_funding_output.is_some() {
+ return Err(AbortReason::DuplicateFundingOutput);
+ }
+ let value = tx_out.value.to_sat();
+ let local_owned = self.expected_shared_funding_output.1;
+ // Sanity check
+ if local_owned > value {
+ return Err(AbortReason::InvalidLowFundingOutputValue);
+ }
+ let shared_output = SharedOwnedOutput::new(tx_out, local_owned);
+ self.actual_new_funding_output = Some(shared_output.clone());
+ Ok(shared_output)
+ }
+
fn is_serial_id_valid_for_counterparty(&self, serial_id: &SerialId) -> bool {
is_serial_id_valid_for_counterparty(self.holder_is_initiator, serial_id)
}
fn remote_inputs_value(&self) -> u64 {
- self.inputs
- .iter()
- .filter(|(serial_id, _)| self.is_serial_id_valid_for_counterparty(serial_id))
- .fold(0u64, |acc, (_, InteractiveTxInput { prev_output, .. })| {
- acc.saturating_add(prev_output.value.to_sat())
- })
+ self.inputs.iter().fold(0u64, |acc, (_, input)| acc.saturating_add(input.remote_value()))
}
fn remote_outputs_value(&self) -> u64 {
- self.outputs
- .iter()
- .filter(|(serial_id, _)| self.is_serial_id_valid_for_counterparty(serial_id))
- .fold(0u64, |acc, (_, InteractiveTxOutput { tx_out, .. })| {
- acc.saturating_add(tx_out.value.to_sat())
- })
+ self.outputs.iter().fold(0u64, |acc, (_, output)| acc.saturating_add(output.remote_value()))
}
fn remote_inputs_weight(&self) -> Weight {
self.inputs
.iter()
.filter(|(serial_id, _)| self.is_serial_id_valid_for_counterparty(serial_id))
- .fold(0u64, |weight, (_, InteractiveTxInput { prev_output, .. })| {
- weight.saturating_add(estimate_input_weight(prev_output).to_wu())
+ .fold(0u64, |weight, (_, input)| {
+ weight.saturating_add(estimate_input_weight(input.prev_output()).to_wu())
}),
)
}
self.outputs
.iter()
.filter(|(serial_id, _)| self.is_serial_id_valid_for_counterparty(serial_id))
- .fold(0u64, |weight, (_, InteractiveTxOutput { tx_out, .. })| {
- weight.saturating_add(get_output_weight(&tx_out.script_pubkey).to_wu())
+ .fold(0u64, |weight, (_, output)| {
+ weight.saturating_add(get_output_weight(&output.script_pubkey()).to_wu())
}),
)
}
},
hash_map::Entry::Vacant(entry) => {
let prev_outpoint = OutPoint { txid, vout: msg.prevtx_out };
- entry.insert(InteractiveTxInput {
+ entry.insert(InteractiveTxInput::Remote(LocalOrRemoteInput {
serial_id: msg.serial_id,
input: TxIn {
previous_output: prev_outpoint,
..Default::default()
},
prev_output: prev_out,
- });
+ }));
self.prevtx_outpoints.insert(prev_outpoint);
Ok(())
},
// bitcoin supply.
let mut outputs_value: u64 = 0;
for output in self.outputs.iter() {
- outputs_value = outputs_value.saturating_add(output.1.tx_out.value.to_sat());
+ outputs_value = outputs_value.saturating_add(output.1.value());
}
if outputs_value.saturating_add(msg.sats) > TOTAL_BITCOIN_SUPPLY_SATOSHIS {
// The receiving node:
return Err(AbortReason::InvalidOutputScript);
}
+ let txout = TxOut { value: Amount::from_sat(msg.sats), script_pubkey: msg.script.clone() };
+ let is_shared = msg.script == self.expected_shared_funding_output.0;
+ let output = if is_shared {
+ // this is a shared funding output
+ let shared_output = self.set_actual_new_funding_output(txout)?;
+ InteractiveTxOutput {
+ serial_id: msg.serial_id,
+ added_by: AddingRole::Remote,
+ output: OutputOwned::Shared(shared_output),
+ }
+ } else {
+ InteractiveTxOutput {
+ serial_id: msg.serial_id,
+ added_by: AddingRole::Remote,
+ output: OutputOwned::Single(txout),
+ }
+ };
match self.outputs.entry(msg.serial_id) {
hash_map::Entry::Occupied(_) => {
// The receiving node:
Err(AbortReason::DuplicateSerialId)
},
hash_map::Entry::Vacant(entry) => {
- entry.insert(InteractiveTxOutput {
- serial_id: msg.serial_id,
- tx_out: TxOut {
- value: Amount::from_sat(msg.sats),
- script_pubkey: msg.script.clone(),
- },
- });
+ entry.insert(output);
Ok(())
},
}
fn sent_tx_add_input(&mut self, msg: &msgs::TxAddInput) -> Result<(), AbortReason> {
let tx = msg.prevtx.as_transaction();
- let input = TxIn {
+ let txin = TxIn {
previous_output: OutPoint { txid: tx.txid(), vout: msg.prevtx_out },
sequence: Sequence(msg.sequence),
..Default::default()
};
- let prev_output =
- tx.output.get(msg.prevtx_out as usize).ok_or(AbortReason::PrevTxOutInvalid)?.clone();
- if !self.prevtx_outpoints.insert(input.previous_output) {
+ if !self.prevtx_outpoints.insert(txin.previous_output.clone()) {
// We have added an input that already exists
return Err(AbortReason::PrevTxOutInvalid);
}
+ let vout = txin.previous_output.vout as usize;
+ // NOTE(review): unlike the previous code, the outpoint is inserted into
+ // `prevtx_outpoints` *before* the `prevtx_out` index is validated below, so a
+ // `PrevTxOutInvalid` error here leaves the outpoint recorded. Presumably harmless
+ // because an AbortReason ends the whole negotiation -- TODO confirm.
+ let prev_output = tx.output.get(vout).ok_or(AbortReason::PrevTxOutInvalid)?.clone();
+ let input = InteractiveTxInput::Local(LocalOrRemoteInput {
+ serial_id: msg.serial_id,
+ input: txin,
+ prev_output,
+ });
+ self.inputs.insert(msg.serial_id, input);
Ok(())
}
fn sent_tx_add_output(&mut self, msg: &msgs::TxAddOutput) -> Result<(), AbortReason> {
- self.outputs.insert(
- msg.serial_id,
+ let txout = TxOut { value: Amount::from_sat(msg.sats), script_pubkey: msg.script.clone() };
+ let is_shared = msg.script == self.expected_shared_funding_output.0;
+ let output = if is_shared {
+ // this is a shared funding output
+ let shared_output = self.set_actual_new_funding_output(txout)?;
InteractiveTxOutput {
serial_id: msg.serial_id,
- tx_out: TxOut {
- value: Amount::from_sat(msg.sats),
- script_pubkey: msg.script.clone(),
- },
- },
- );
+ added_by: AddingRole::Local,
+ output: OutputOwned::Shared(shared_output),
+ }
+ } else {
+ InteractiveTxOutput {
+ serial_id: msg.serial_id,
+ added_by: AddingRole::Local,
+ output: OutputOwned::Single(txout),
+ }
+ };
+ self.outputs.insert(msg.serial_id, output);
Ok(())
}
return Err(AbortReason::ExceededNumberOfInputsOrOutputs);
}
+ if self.actual_new_funding_output.is_none() {
+ return Err(AbortReason::MissingFundingOutput);
+ }
+
// - the peer's paid feerate does not meet or exceed the agreed feerate (based on the minimum fee).
self.check_counterparty_fees(remote_inputs_value.saturating_sub(remote_outputs_value))?;
}
impl StateMachine {
- fn new(feerate_sat_per_kw: u32, is_initiator: bool, tx_locktime: AbsoluteLockTime) -> Self {
- let context = NegotiationContext {
+ fn new(
+ feerate_sat_per_kw: u32, is_initiator: bool, tx_locktime: AbsoluteLockTime,
+ expected_shared_funding_output: (ScriptBuf, u64),
+ ) -> Self {
+ let context = NegotiationContext::new(
+ is_initiator,
+ expected_shared_funding_output,
tx_locktime,
- holder_is_initiator: is_initiator,
- received_tx_add_input_count: 0,
- received_tx_add_output_count: 0,
- inputs: new_hash_map(),
- prevtx_outpoints: new_hash_set(),
- outputs: new_hash_map(),
feerate_sat_per_kw,
- };
+ );
if is_initiator {
Self::ReceivedChangeMsg(ReceivedChangeMsg(context))
} else {
]);
}
+/// Identifies which side -- the local or the remote node -- added a given input or output
+/// during interactive transaction construction.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum AddingRole {
+ Local,
+ Remote,
+}
+
+/// Represents an input -- local or remote (both have the same fields)
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct LocalOrRemoteInput {
+ serial_id: SerialId,
+ input: TxIn,
+ prev_output: TxOut,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum InteractiveTxInput {
+ Local(LocalOrRemoteInput),
+ Remote(LocalOrRemoteInput),
+ // TODO(splicing) SharedInput should be added
+}
+
+/// A jointly-funded (shared) output together with the local node's stake in its value.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct SharedOwnedOutput {
+ // The shared output itself
+ tx_out: TxOut,
+ // The portion of `tx_out`'s value (sats) contributed by the local node
+ local_owned: u64,
+}
+
+impl SharedOwnedOutput {
+ /// Builds a shared output, debug-asserting that `local_owned` does not exceed the
+ /// output's total value.
+ fn new(tx_out: TxOut, local_owned: u64) -> SharedOwnedOutput {
+ debug_assert!(
+ local_owned <= tx_out.value.to_sat(),
+ "SharedOwnedOutput: Inconsistent local_owned value {}, larger than output value {}",
+ local_owned,
+ tx_out.value
+ );
+ SharedOwnedOutput { tx_out, local_owned }
+ }
+
+ /// The counterparty's stake: total value minus `local_owned`, saturating at 0.
+ fn remote_owned(&self) -> u64 {
+ self.tx_out.value.to_sat().saturating_sub(self.local_owned)
+ }
+}
+
+/// Represents an output, with information about
+/// its control -- exclusive by the adder or shared --, and
+/// its ownership -- value fully owned by the adder or jointly
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum OutputOwned {
+ /// Belongs to local node -- controlled exclusively and fully belonging to local node
+ Single(TxOut),
+ /// Output with shared control, but fully belonging to local node
+ SharedControlFullyOwned(TxOut),
+ /// Output with shared control and joint ownership
+ Shared(SharedOwnedOutput),
+}
+
+impl OutputOwned {
+ /// The underlying transaction output, regardless of ownership variant.
+ fn tx_out(&self) -> &TxOut {
+ match self {
+ OutputOwned::Single(tx_out) | OutputOwned::SharedControlFullyOwned(tx_out) => tx_out,
+ OutputOwned::Shared(output) => &output.tx_out,
+ }
+ }
+
+ /// The total value of the output, in satoshis.
+ fn value(&self) -> u64 {
+ self.tx_out().value.to_sat()
+ }
+
+ /// Whether control of the output is shared with the counterparty
+ /// (true for both `SharedControlFullyOwned` and `Shared`).
+ fn is_shared(&self) -> bool {
+ match self {
+ OutputOwned::Single(_) => false,
+ OutputOwned::SharedControlFullyOwned(_) => true,
+ OutputOwned::Shared(_) => true,
+ }
+ }
+
+ /// The value (sats) attributed to the local node: for exclusively owned outputs the
+ /// full value when added locally and 0 when added remotely; for `Shared` outputs
+ /// the recorded `local_owned` portion regardless of adder.
+ fn local_value(&self, local_role: AddingRole) -> u64 {
+ match self {
+ OutputOwned::Single(tx_out) | OutputOwned::SharedControlFullyOwned(tx_out) => {
+ match local_role {
+ AddingRole::Local => tx_out.value.to_sat(),
+ AddingRole::Remote => 0,
+ }
+ },
+ OutputOwned::Shared(output) => output.local_owned,
+ }
+ }
+
+ /// The value (sats) attributed to the remote node -- the mirror of `local_value`.
+ fn remote_value(&self, local_role: AddingRole) -> u64 {
+ match self {
+ OutputOwned::Single(tx_out) | OutputOwned::SharedControlFullyOwned(tx_out) => {
+ match local_role {
+ AddingRole::Local => 0,
+ AddingRole::Remote => tx_out.value.to_sat(),
+ }
+ },
+ OutputOwned::Shared(output) => output.remote_owned(),
+ }
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct InteractiveTxOutput {
+ serial_id: SerialId,
+ added_by: AddingRole,
+ output: OutputOwned,
+}
+
+impl InteractiveTxOutput {
+ /// The underlying transaction output.
+ fn tx_out(&self) -> &TxOut {
+ self.output.tx_out()
+ }
+
+ /// The total value of the output, in satoshis.
+ fn value(&self) -> u64 {
+ self.tx_out().value.to_sat()
+ }
+
+ /// Satoshis of this output attributed to the local node, given who added it.
+ fn local_value(&self) -> u64 {
+ self.output.local_value(self.added_by)
+ }
+
+ /// Satoshis of this output attributed to the remote node, given who added it.
+ fn remote_value(&self) -> u64 {
+ self.output.remote_value(self.added_by)
+ }
+
+ /// The output's script pubkey.
+ fn script_pubkey(&self) -> &ScriptBuf {
+ &self.output.tx_out().script_pubkey
+ }
+}
+
+impl InteractiveTxInput {
+ /// The serial id assigned to this input during negotiation.
+ pub fn serial_id(&self) -> SerialId {
+ match self {
+ InteractiveTxInput::Local(input) => input.serial_id,
+ InteractiveTxInput::Remote(input) => input.serial_id,
+ }
+ }
+
+ /// The `TxIn` as it will appear in the constructed transaction.
+ pub fn txin(&self) -> &TxIn {
+ match self {
+ InteractiveTxInput::Local(input) => &input.input,
+ InteractiveTxInput::Remote(input) => &input.input,
+ }
+ }
+
+ /// The previous output being spent by this input.
+ pub fn prev_output(&self) -> &TxOut {
+ match self {
+ InteractiveTxInput::Local(input) => &input.prev_output,
+ InteractiveTxInput::Remote(input) => &input.prev_output,
+ }
+ }
+
+ /// The value (sats) of the spent previous output.
+ pub fn value(&self) -> u64 {
+ self.prev_output().value.to_sat()
+ }
+
+ /// The spent value if this input was added by the local node, otherwise 0.
+ pub fn local_value(&self) -> u64 {
+ match self {
+ InteractiveTxInput::Local(input) => input.prev_output.value.to_sat(),
+ InteractiveTxInput::Remote(_input) => 0,
+ }
+ }
+
+ /// The spent value if this input was added by the remote node, otherwise 0.
+ pub fn remote_value(&self) -> u64 {
+ match self {
+ InteractiveTxInput::Local(_input) => 0,
+ InteractiveTxInput::Remote(input) => input.prev_output.value.to_sat(),
+ }
+ }
+}
+
pub(crate) struct InteractiveTxConstructor {
state_machine: StateMachine,
channel_id: ChannelId,
inputs_to_contribute: Vec<(SerialId, TxIn, TransactionU16LenLimited)>,
- outputs_to_contribute: Vec<(SerialId, TxOut)>,
+ outputs_to_contribute: Vec<(SerialId, OutputOwned)>,
}
pub(crate) enum InteractiveTxMessageSend {
impl InteractiveTxConstructor {
/// Instantiates a new `InteractiveTxConstructor`.
///
+ /// `expected_remote_shared_funding_output`: In the case when the local node doesn't
+ /// add a shared output, but it expects a shared output to be added by the remote node,
+ /// it has to specify the script pubkey, used to determine the shared output,
+ /// and its (local) contribution from the shared output:
+ /// 0 when the whole value belongs to the remote node, or
+ /// positive if owned also by local.
+ /// Note: The local value cannot be larger than the actual shared output.
+ ///
/// A tuple is returned containing the newly instantiate `InteractiveTxConstructor` and optionally
/// an initial wrapped `Tx_` message which the holder needs to send to the counterparty.
pub fn new<ES: Deref>(
entropy_source: &ES, channel_id: ChannelId, feerate_sat_per_kw: u32, is_initiator: bool,
funding_tx_locktime: AbsoluteLockTime,
inputs_to_contribute: Vec<(TxIn, TransactionU16LenLimited)>,
- outputs_to_contribute: Vec<TxOut>,
- ) -> (Self, Option<InteractiveTxMessageSend>)
+ outputs_to_contribute: Vec<OutputOwned>,
+ expected_remote_shared_funding_output: Option<(ScriptBuf, u64)>,
+ ) -> Result<(Self, Option<InteractiveTxMessageSend>), AbortReason>
where
ES::Target: EntropySource,
{
- let state_machine =
- StateMachine::new(feerate_sat_per_kw, is_initiator, funding_tx_locktime);
- let mut inputs_to_contribute: Vec<(SerialId, TxIn, TransactionU16LenLimited)> =
- inputs_to_contribute
+ // Sanity check: There can be at most one shared output, local-added or remote-added
+ let mut expected_shared_funding_output: Option<(ScriptBuf, u64)> = None;
+ for output in &outputs_to_contribute {
+ let new_output = match output {
+ OutputOwned::Single(_tx_out) => None,
+ OutputOwned::SharedControlFullyOwned(tx_out) => {
+ Some((tx_out.script_pubkey.clone(), tx_out.value.to_sat()))
+ },
+ OutputOwned::Shared(output) => {
+ // Sanity check
+ if output.local_owned > output.tx_out.value.to_sat() {
+ return Err(AbortReason::InvalidLowFundingOutputValue);
+ }
+ Some((output.tx_out.script_pubkey.clone(), output.local_owned))
+ },
+ };
+ if new_output.is_some() {
+ if expected_shared_funding_output.is_some()
+ || expected_remote_shared_funding_output.is_some()
+ {
+ // more than one local-added shared output or
+ // one local-added and one remote-expected shared output
+ return Err(AbortReason::DuplicateFundingOutput);
+ }
+ expected_shared_funding_output = new_output;
+ }
+ }
+ if let Some(expected_remote_shared_funding_output) = expected_remote_shared_funding_output {
+ expected_shared_funding_output = Some(expected_remote_shared_funding_output);
+ }
+ if let Some(expected_shared_funding_output) = expected_shared_funding_output {
+ let state_machine = StateMachine::new(
+ feerate_sat_per_kw,
+ is_initiator,
+ funding_tx_locktime,
+ expected_shared_funding_output,
+ );
+ let mut inputs_to_contribute: Vec<(SerialId, TxIn, TransactionU16LenLimited)> =
+ inputs_to_contribute
+ .into_iter()
+ .map(|(input, tx)| {
+ let serial_id = generate_holder_serial_id(entropy_source, is_initiator);
+ (serial_id, input, tx)
+ })
+ .collect();
+ // We'll sort by the randomly generated serial IDs, effectively shuffling the order of the inputs
+ // as the user passed them to us to avoid leaking any potential categorization of transactions
+ // before we pass any of the inputs to the counterparty.
+ inputs_to_contribute.sort_unstable_by_key(|(serial_id, _, _)| *serial_id);
+ let mut outputs_to_contribute: Vec<_> = outputs_to_contribute
.into_iter()
- .map(|(input, tx)| {
+ .map(|output| {
let serial_id = generate_holder_serial_id(entropy_source, is_initiator);
- (serial_id, input, tx)
+ (serial_id, output)
})
.collect();
- // We'll sort by the randomly generated serial IDs, effectively shuffling the order of the inputs
- // as the user passed them to us to avoid leaking any potential categorization of transactions
- // before we pass any of the inputs to the counterparty.
- inputs_to_contribute.sort_unstable_by_key(|(serial_id, _, _)| *serial_id);
- let mut outputs_to_contribute: Vec<(SerialId, TxOut)> = outputs_to_contribute
- .into_iter()
- .map(|output| {
- let serial_id = generate_holder_serial_id(entropy_source, is_initiator);
- (serial_id, output)
- })
- .collect();
- // In the same manner and for the same rationale as the inputs above, we'll shuffle the outputs.
- outputs_to_contribute.sort_unstable_by_key(|(serial_id, _)| *serial_id);
- let mut constructor =
- Self { state_machine, channel_id, inputs_to_contribute, outputs_to_contribute };
- let message_send = if is_initiator {
- match constructor.maybe_send_message() {
- Ok(msg_send) => Some(msg_send),
- Err(_) => {
- debug_assert!(
- false,
- "We should always be able to start our state machine successfully"
- );
- None
- },
- }
+ // In the same manner and for the same rationale as the inputs above, we'll shuffle the outputs.
+ outputs_to_contribute.sort_unstable_by_key(|(serial_id, _)| *serial_id);
+ let mut constructor =
+ Self { state_machine, channel_id, inputs_to_contribute, outputs_to_contribute };
+ let message_send = if is_initiator {
+ match constructor.maybe_send_message() {
+ Ok(msg_send) => Some(msg_send),
+ Err(_) => {
+ debug_assert!(
+ false,
+ "We should always be able to start our state machine successfully"
+ );
+ None
+ },
+ }
+ } else {
+ None
+ };
+ Ok((constructor, message_send))
} else {
- None
- };
- (constructor, message_send)
+ Err(AbortReason::MissingFundingOutput)
+ }
}
fn maybe_send_message(&mut self) -> Result<InteractiveTxMessageSend, AbortReason> {
prevtx,
prevtx_out: input.previous_output.vout,
sequence: input.sequence.to_consensus_u32(),
+ shared_input_txid: None,
};
do_state_transition!(self, sent_tx_add_input, &msg)?;
Ok(InteractiveTxMessageSend::TxAddInput(msg))
let msg = msgs::TxAddOutput {
channel_id: self.channel_id,
serial_id,
- sats: output.value.to_sat(),
- script: output.script_pubkey,
+ sats: output.tx_out().value.to_sat(),
+ script: output.tx_out().script_pubkey.clone(),
};
do_state_transition!(self, sent_tx_add_output, &msg)?;
Ok(InteractiveTxMessageSend::TxAddOutput(msg))
use crate::sign::EntropySource;
use crate::util::atomic_counter::AtomicCounter;
use crate::util::ser::TransactionU16LenLimited;
+ use bitcoin::absolute::LockTime as AbsoluteLockTime;
use bitcoin::amount::Amount;
use bitcoin::blockdata::opcodes;
use bitcoin::blockdata::script::Builder;
use bitcoin::secp256k1::{Keypair, Secp256k1};
use bitcoin::transaction::Version;
use bitcoin::{
- absolute::LockTime as AbsoluteLockTime, OutPoint, Sequence, Transaction, TxIn, TxOut,
+ OutPoint, PubkeyHash, ScriptBuf, Sequence, Transaction, TxIn, TxOut, WPubkeyHash,
};
- use bitcoin::{PubkeyHash, ScriptBuf, WPubkeyHash, WScriptHash};
use core::ops::Deref;
use super::{
- get_output_weight, P2TR_INPUT_WEIGHT_LOWER_BOUND, P2WPKH_INPUT_WEIGHT_LOWER_BOUND,
+ get_output_weight, AddingRole, OutputOwned, SharedOwnedOutput,
+ P2TR_INPUT_WEIGHT_LOWER_BOUND, P2WPKH_INPUT_WEIGHT_LOWER_BOUND,
P2WSH_INPUT_WEIGHT_LOWER_BOUND, TX_COMMON_FIELDS_WEIGHT,
};
struct TestSession {
description: &'static str,
inputs_a: Vec<(TxIn, TransactionU16LenLimited)>,
- outputs_a: Vec<TxOut>,
+ outputs_a: Vec<OutputOwned>,
inputs_b: Vec<(TxIn, TransactionU16LenLimited)>,
- outputs_b: Vec<TxOut>,
+ outputs_b: Vec<OutputOwned>,
expect_error: Option<(AbortReason, ErrorCulprit)>,
+ /// Node A adds no shared output, but expects the peer to add one, with the specific script pubkey, and local contribution
+ a_expected_remote_shared_output: Option<(ScriptBuf, u64)>,
+ /// Node B adds no shared output, but expects the peer to add one, with the specific script pubkey, and local contribution
+ b_expected_remote_shared_output: Option<(ScriptBuf, u64)>,
}
fn do_test_interactive_tx_constructor(session: TestSession) {
let channel_id = ChannelId(entropy_source.get_secure_random_bytes());
let tx_locktime = AbsoluteLockTime::from_height(1337).unwrap();
- let (mut constructor_a, first_message_a) = InteractiveTxConstructor::new(
+ // funding output sanity check
+ let shared_outputs_by_a: Vec<_> =
+ session.outputs_a.iter().filter(|o| o.is_shared()).collect();
+ if shared_outputs_by_a.len() > 1 {
+ println!("Test warning: Expected at most one shared output. NodeA");
+ }
+ let shared_output_by_a = if shared_outputs_by_a.len() >= 1 {
+ Some(shared_outputs_by_a[0].value())
+ } else {
+ None
+ };
+ let shared_outputs_by_b: Vec<_> =
+ session.outputs_b.iter().filter(|o| o.is_shared()).collect();
+ if shared_outputs_by_b.len() > 1 {
+ println!("Test warning: Expected at most one shared output. NodeB");
+ }
+ let shared_output_by_b = if shared_outputs_by_b.len() >= 1 {
+ Some(shared_outputs_by_b[0].value())
+ } else {
+ None
+ };
+ if session.a_expected_remote_shared_output.is_some()
+ || session.b_expected_remote_shared_output.is_some()
+ {
+ let expected_by_a = if let Some(a_expected_remote_shared_output) =
+ &session.a_expected_remote_shared_output
+ {
+ a_expected_remote_shared_output.1
+ } else {
+ if shared_outputs_by_a.len() >= 1 {
+ shared_outputs_by_a[0].local_value(AddingRole::Local)
+ } else {
+ 0
+ }
+ };
+ let expected_by_b = if let Some(b_expected_remote_shared_output) =
+ &session.b_expected_remote_shared_output
+ {
+ b_expected_remote_shared_output.1
+ } else {
+ if shared_outputs_by_b.len() >= 1 {
+ shared_outputs_by_b[0].local_value(AddingRole::Local)
+ } else {
+ 0
+ }
+ };
+
+ let expected_sum = expected_by_a + expected_by_b;
+ let actual_shared_output =
+ shared_output_by_a.unwrap_or(shared_output_by_b.unwrap_or(0));
+ if expected_sum != actual_shared_output {
+ println!("Test warning: Sum of expected shared output values does not match actual shared output value, {} {} {} {} {} {}", expected_sum, actual_shared_output, expected_by_a, expected_by_b, shared_output_by_a.unwrap_or(0), shared_output_by_b.unwrap_or(0));
+ }
+ }
+
+ let (mut constructor_a, first_message_a) = match InteractiveTxConstructor::new(
entropy_source,
channel_id,
TEST_FEERATE_SATS_PER_KW,
true,
tx_locktime,
session.inputs_a,
- session.outputs_a,
- );
- let (mut constructor_b, first_message_b) = InteractiveTxConstructor::new(
+ session.outputs_a.iter().map(|o| o.clone()).collect(),
+ session.a_expected_remote_shared_output,
+ ) {
+ Ok(r) => r,
+ Err(abort_reason) => {
+ assert_eq!(
+ Some((abort_reason, ErrorCulprit::NodeA)),
+ session.expect_error,
+ "Test: {}",
+ session.description
+ );
+ return;
+ },
+ };
+ let (mut constructor_b, first_message_b) = match InteractiveTxConstructor::new(
entropy_source,
channel_id,
TEST_FEERATE_SATS_PER_KW,
false,
tx_locktime,
session.inputs_b,
- session.outputs_b,
- );
+ session.outputs_b.iter().map(|o| o.clone()).collect(),
+ session.b_expected_remote_shared_output,
+ ) {
+ Ok(r) => r,
+ Err(abort_reason) => {
+ assert_eq!(
+ Some((abort_reason, ErrorCulprit::NodeB)),
+ session.expect_error,
+ "Test: {}",
+ session.description
+ );
+ return;
+ },
+ };
let handle_message_send =
|msg: InteractiveTxMessageSend, for_constructor: &mut InteractiveTxConstructor| {
"Test: {}",
session.description
);
- assert!(message_send_b.is_none());
+ assert!(message_send_b.is_none(), "Test: {}", session.description);
return;
},
}
"Test: {}",
session.description
);
- assert!(message_send_a.is_none());
+ assert!(message_send_a.is_none(), "Test: {}", session.description);
return;
},
}
assert!(message_send_a.is_none());
assert!(message_send_b.is_none());
assert_eq!(final_tx_a.unwrap().into_unsigned_tx(), final_tx_b.unwrap().into_unsigned_tx());
- assert!(session.expect_error.is_none(), "Test: {}", session.description);
+ assert!(
+ session.expect_error.is_none(),
+ "Missing expected error {:?}, Test: {}",
+ session.expect_error,
+ session.description,
+ );
}
#[derive(Debug, Clone, Copy)]
enum TestOutput {
P2WPKH(u64),
+ /// P2WSH, but with the specific script used for the funding output
P2WSH(u64),
P2TR(u64),
// Non-witness type to test rejection.
fn generate_txout(output: &TestOutput) -> TxOut {
let secp_ctx = Secp256k1::new();
let (value, script_pubkey) = match output {
- TestOutput::P2WPKH(value) => {
- (*value, ScriptBuf::new_p2wpkh(&WPubkeyHash::from_slice(&[1; 20]).unwrap()))
- },
- TestOutput::P2WSH(value) => {
- (*value, ScriptBuf::new_p2wsh(&WScriptHash::from_slice(&[2; 32]).unwrap()))
- },
+ TestOutput::P2WPKH(value) => (*value, generate_p2wpkh_script_pubkey()),
+ TestOutput::P2WSH(value) => (*value, generate_funding_script_pubkey()),
TestOutput::P2TR(value) => (
*value,
ScriptBuf::new_p2tr(
ScriptBuf::new_p2wpkh(&WPubkeyHash::from_slice(&[1; 20]).unwrap())
}
- fn generate_outputs(outputs: &[TestOutput]) -> Vec<TxOut> {
- outputs.iter().map(generate_txout).collect()
+ /// Builds the dummy P2WSH script pubkey that these tests use to identify the shared
+ /// funding output (an arbitrary `push_int(33)` script wrapped in P2WSH).
+ fn generate_funding_script_pubkey() -> ScriptBuf {
+ Builder::new().push_int(33).into_script().to_p2wsh()
+ }
+
+ /// Wraps a generated `TxOut` as a plain, non-funding output (`OutputOwned::Single`).
+ fn generate_output_nonfunding_one(output: &TestOutput) -> OutputOwned {
+ OutputOwned::Single(generate_txout(output))
+ }
+
+ /// Generates one non-funding (`OutputOwned::Single`) output per entry in `outputs`.
+ fn generate_outputs(outputs: &[TestOutput]) -> Vec<OutputOwned> {
+ outputs.iter().map(|o| generate_output_nonfunding_one(o)).collect()
+ }
+
+ /// Generate a single output that is the funding output
+ /// (shared control between the peers, but fully owned/funded by the local node).
+ fn generate_output(output: &TestOutput) -> Vec<OutputOwned> {
+ vec![OutputOwned::SharedControlFullyOwned(generate_txout(output))]
+ }
+
+ /// Generate a single P2WSH output that is the funding output
+ fn generate_funding_output(value: u64) -> Vec<OutputOwned> {
+ generate_output(&TestOutput::P2WSH(value))
+ }
+
+ /// Generate a single P2WSH output with shared contribution that is the funding output (returned as a bare `OutputOwned`, not a `Vec`)
+ fn generate_shared_funding_output_one(value: u64, local_value: u64) -> OutputOwned {
+ OutputOwned::Shared(SharedOwnedOutput {
+ tx_out: generate_txout(&TestOutput::P2WSH(value)),
+ local_owned: local_value,
+ })
+ }
+
+ /// Generate a single P2WSH output with shared contribution that is the funding output
+ fn generate_shared_funding_output(value: u64, local_value: u64) -> Vec<OutputOwned> {
+ vec![generate_shared_funding_output_one(value, local_value)]
}
fn generate_fixed_number_of_inputs(count: u16) -> Vec<(TxIn, TransactionU16LenLimited)> {
inputs
}
- fn generate_fixed_number_of_outputs(count: u16) -> Vec<TxOut> {
+ fn generate_fixed_number_of_outputs(count: u16) -> Vec<OutputOwned> {
// Set a constant value for each TxOut
generate_outputs(&vec![TestOutput::P2WPKH(1_000_000); count as usize])
}
Builder::new().push_opcode(opcodes::OP_TRUE).into_script().to_p2sh()
}
- fn generate_non_witness_output(value: u64) -> TxOut {
- TxOut { value: Amount::from_sat(value), script_pubkey: generate_p2sh_script_pubkey() }
+ fn generate_non_witness_output(value: u64) -> OutputOwned {
+ OutputOwned::Single(TxOut {
+ value: Amount::from_sat(value),
+ script_pubkey: generate_p2sh_script_pubkey(),
+ })
}
#[test]
outputs_a: vec![],
inputs_b: vec![],
outputs_b: vec![],
- expect_error: Some((AbortReason::InsufficientFees, ErrorCulprit::NodeA)),
+ expect_error: Some((AbortReason::MissingFundingOutput, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: None,
});
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution, no initiator inputs",
inputs_a: vec![],
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(1_000_000)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::OutputsValueExceedsInputsValue, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution, no initiator outputs",
outputs_a: vec![],
inputs_b: vec![],
outputs_b: vec![],
- expect_error: None,
+ expect_error: Some((AbortReason::MissingFundingOutput, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: None,
});
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution, no fees",
inputs_a: generate_inputs(&[TestOutput::P2WPKH(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(1_000_000)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::InsufficientFees, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
let p2wpkh_fee = fee_for_weight(TEST_FEERATE_SATS_PER_KW, P2WPKH_INPUT_WEIGHT_LOWER_BOUND);
let outputs_fee = fee_for_weight(
);
let tx_common_fields_fee =
fee_for_weight(TEST_FEERATE_SATS_PER_KW, TX_COMMON_FIELDS_WEIGHT);
+
+ let amount_adjusted_with_p2wpkh_fee =
+ 1_000_000 - p2wpkh_fee - outputs_fee - tx_common_fields_fee;
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution, with P2WPKH input, insufficient fees",
inputs_a: generate_inputs(&[TestOutput::P2WPKH(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(
- 1_000_000 - p2wpkh_fee - outputs_fee - tx_common_fields_fee + 1, /* makes fees insuffcient for initiator */
- )]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(
+ amount_adjusted_with_p2wpkh_fee + 1, /* makes fees insufficient for initiator */
+ )),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::InsufficientFees, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution with P2WPKH input, sufficient fees",
inputs_a: generate_inputs(&[TestOutput::P2WPKH(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(
- 1_000_000 - p2wpkh_fee - outputs_fee - tx_common_fields_fee,
- )]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(amount_adjusted_with_p2wpkh_fee)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
let p2wsh_fee = fee_for_weight(TEST_FEERATE_SATS_PER_KW, P2WSH_INPUT_WEIGHT_LOWER_BOUND);
+ let amount_adjusted_with_p2wsh_fee =
+ 1_000_000 - p2wsh_fee - outputs_fee - tx_common_fields_fee;
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution, with P2WSH input, insufficient fees",
inputs_a: generate_inputs(&[TestOutput::P2WSH(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(
- 1_000_000 - p2wsh_fee - outputs_fee - tx_common_fields_fee + 1, /* makes fees insuffcient for initiator */
- )]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(
+ amount_adjusted_with_p2wsh_fee + 1, /* makes fees insufficient for initiator */
+ )),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::InsufficientFees, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution with P2WSH input, sufficient fees",
inputs_a: generate_inputs(&[TestOutput::P2WSH(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(
- 1_000_000 - p2wsh_fee - outputs_fee - tx_common_fields_fee,
- )]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(amount_adjusted_with_p2wsh_fee)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
let p2tr_fee = fee_for_weight(TEST_FEERATE_SATS_PER_KW, P2TR_INPUT_WEIGHT_LOWER_BOUND);
+ let amount_adjusted_with_p2tr_fee =
+ 1_000_000 - p2tr_fee - outputs_fee - tx_common_fields_fee;
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution, with P2TR input, insufficient fees",
inputs_a: generate_inputs(&[TestOutput::P2TR(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(
- 1_000_000 - p2tr_fee - outputs_fee - tx_common_fields_fee + 1, /* makes fees insuffcient for initiator */
- )]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(
+ amount_adjusted_with_p2tr_fee + 1, /* makes fees insufficient for initiator */
+ )),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::InsufficientFees, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Single contribution with P2TR input, sufficient fees",
inputs_a: generate_inputs(&[TestOutput::P2TR(1_000_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(
- 1_000_000 - p2tr_fee - outputs_fee - tx_common_fields_fee,
- )]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(amount_adjusted_with_p2tr_fee)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Initiator contributes sufficient fees, but non-initiator does not",
inputs_a: generate_inputs(&[TestOutput::P2WPKH(1_000_000)]),
outputs_a: vec![],
inputs_b: generate_inputs(&[TestOutput::P2WPKH(100_000)]),
- outputs_b: generate_outputs(&[TestOutput::P2WPKH(100_000)]),
+ outputs_b: generate_output(&TestOutput::P2WPKH(100_000)),
expect_error: Some((AbortReason::InsufficientFees, ErrorCulprit::NodeB)),
+ a_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
+ b_expected_remote_shared_output: None,
});
do_test_interactive_tx_constructor(TestSession {
description: "Multi-input-output contributions from both sides",
inputs_a: generate_inputs(&[TestOutput::P2WPKH(1_000_000); 2]),
- outputs_a: generate_outputs(&[
- TestOutput::P2WPKH(1_000_000),
- TestOutput::P2WPKH(200_000),
- ]),
+ outputs_a: vec![
+ generate_shared_funding_output_one(1_000_000, 200_000),
+ generate_output_nonfunding_one(&TestOutput::P2WPKH(200_000)),
+ ],
inputs_b: generate_inputs(&[
TestOutput::P2WPKH(1_000_000),
TestOutput::P2WPKH(500_000),
]),
- outputs_b: generate_outputs(&[
- TestOutput::P2WPKH(1_000_000),
- TestOutput::P2WPKH(400_000),
- ]),
+ outputs_b: vec![generate_output_nonfunding_one(&TestOutput::P2WPKH(400_000))],
expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 800_000)),
});
do_test_interactive_tx_constructor(TestSession {
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::PrevTxOutInvalid, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
let tx =
do_test_interactive_tx_constructor(TestSession {
description: "Invalid input sequence from initiator",
inputs_a: vec![(invalid_sequence_input, tx.clone())],
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(1_000_000)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::IncorrectInputSequenceValue, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
let duplicate_input = TxIn {
previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
do_test_interactive_tx_constructor(TestSession {
description: "Duplicate prevout from initiator",
inputs_a: vec![(duplicate_input.clone(), tx.clone()), (duplicate_input, tx.clone())],
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(1_000_000)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::PrevTxOutInvalid, ErrorCulprit::NodeB)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
+ // Non-initiator uses same prevout as initiator.
let duplicate_input = TxIn {
previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
do_test_interactive_tx_constructor(TestSession {
description: "Non-initiator uses same prevout as initiator",
inputs_a: vec![(duplicate_input.clone(), tx.clone())],
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_shared_funding_output(1_000_000, 905_000),
inputs_b: vec![(duplicate_input.clone(), tx.clone())],
outputs_b: vec![],
expect_error: Some((AbortReason::PrevTxOutInvalid, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 95_000)),
+ });
+ let duplicate_input = TxIn {
+ previous_output: OutPoint { txid: tx.as_transaction().txid(), vout: 0 },
+ sequence: Sequence::ENABLE_RBF_NO_LOCKTIME,
+ ..Default::default()
+ };
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Non-initiator uses same prevout as initiator",
+ inputs_a: vec![(duplicate_input.clone(), tx.clone())],
+ outputs_a: generate_output(&TestOutput::P2WPKH(1_000_000)),
+ inputs_b: vec![(duplicate_input.clone(), tx.clone())],
+ outputs_b: vec![],
+ expect_error: Some((AbortReason::PrevTxOutInvalid, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_p2wpkh_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Initiator sends too many TxAddInputs",
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::ReceivedTooManyTxAddInputs, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor_with_entropy_source(
TestSession {
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::DuplicateSerialId, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
},
&DuplicateEntropySource,
);
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::ReceivedTooManyTxAddOutputs, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Initiator sends an output below dust value",
inputs_a: vec![],
- outputs_a: generate_outputs(&[TestOutput::P2WSH(
+ outputs_a: generate_funding_output(
generate_p2wsh_script_pubkey().dust_value().to_sat() - 1,
- )]),
+ ),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::BelowDustLimit, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Initiator sends an output above maximum sats allowed",
inputs_a: vec![],
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(TOTAL_BITCOIN_SUPPLY_SATOSHIS + 1)]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(TOTAL_BITCOIN_SUPPLY_SATOSHIS + 1)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::ExceededMaximumSatsAllowed, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Initiator sends an output without a witness program",
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::InvalidOutputScript, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor_with_entropy_source(
TestSession {
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::DuplicateSerialId, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
},
&DuplicateEntropySource,
);
do_test_interactive_tx_constructor(TestSession {
description: "Peer contributed more output value than inputs",
inputs_a: generate_inputs(&[TestOutput::P2WPKH(100_000)]),
- outputs_a: generate_outputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_output(&TestOutput::P2WPKH(1_000_000)),
inputs_b: vec![],
outputs_b: vec![],
expect_error: Some((AbortReason::OutputsValueExceedsInputsValue, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
AbortReason::ExceededNumberOfInputsOrOutputs,
ErrorCulprit::Indeterminate,
)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
});
do_test_interactive_tx_constructor(TestSession {
description: "Peer contributed more than allowed number of outputs",
AbortReason::ExceededNumberOfInputsOrOutputs,
ErrorCulprit::Indeterminate,
)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ });
+
+ // Adding multiple outputs to the funding output pubkey is an error
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Adding two outputs to the funding output pubkey",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(1_000_000)]),
+ outputs_a: generate_funding_output(100_000),
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(1_001_000)]),
+ outputs_b: generate_funding_output(100_000),
+ expect_error: Some((AbortReason::DuplicateFundingOutput, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: None,
+ });
+
+ // We add the funding output, but we contribute a little
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Funding output by us, small contribution",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(12_000)]),
+ outputs_a: generate_shared_funding_output(1_000_000, 10_000),
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(992_000)]),
+ outputs_b: vec![],
+ expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 990_000)),
+ });
+
+ // They add the funding output, and we contribute a little
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Funding output by them, small contribution",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(12_000)]),
+ outputs_a: vec![],
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(992_000)]),
+ outputs_b: generate_shared_funding_output(1_000_000, 990_000),
+ expect_error: None,
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 10_000)),
+ b_expected_remote_shared_output: None,
+ });
+
+ // We add the funding output, and we contribute most
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Funding output by us, large contribution",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(992_000)]),
+ outputs_a: generate_shared_funding_output(1_000_000, 990_000),
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(12_000)]),
+ outputs_b: vec![],
+ expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 10_000)),
+ });
+
+ // They add the funding output, but we contribute most
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Funding output by them, large contribution",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(992_000)]),
+ outputs_a: vec![],
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(12_000)]),
+ outputs_b: generate_shared_funding_output(1_000_000, 10_000),
+ expect_error: None,
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 990_000)),
+ b_expected_remote_shared_output: None,
+ });
+
+ // During a splice-out, with peer providing more output value than input value
+ // but still pays enough fees due to their to_remote_value_satoshis portion in
+ // the shared input.
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Splice out with sufficient initiator balance",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(100_000), TestOutput::P2WPKH(50_000)]),
+ outputs_a: generate_funding_output(120_000),
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(50_000)]),
+ outputs_b: vec![],
+ expect_error: None,
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ });
+
+ // During a splice-out, with peer providing more output value than input value
+ // and the to_remote_value_satoshis portion in
+ // the shared input cannot cover fees
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Splice out with insufficient initiator balance",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(100_000), TestOutput::P2WPKH(15_000)]),
+ outputs_a: generate_funding_output(120_000),
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(85_000)]),
+ outputs_b: vec![],
+ expect_error: Some((AbortReason::OutputsValueExceedsInputsValue, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 0)),
+ });
+
+ // The actual funding output value is lower than the intended local contribution by the same node
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Splice in, invalid intended local contribution",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(100_000), TestOutput::P2WPKH(15_000)]),
+ outputs_a: generate_shared_funding_output(100_000, 120_000), // local value is higher than the output value
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(85_000)]),
+ outputs_b: vec![],
+ expect_error: Some((AbortReason::InvalidLowFundingOutputValue, ErrorCulprit::NodeA)),
+ a_expected_remote_shared_output: None,
+ b_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 20_000)),
+ });
+
+ // The actual funding output value is lower than the intended local contribution of the other node
+ do_test_interactive_tx_constructor(TestSession {
+ description: "Splice in, invalid intended local contribution",
+ inputs_a: generate_inputs(&[TestOutput::P2WPKH(100_000), TestOutput::P2WPKH(15_000)]),
+ outputs_a: vec![],
+ inputs_b: generate_inputs(&[TestOutput::P2WPKH(85_000)]),
+ outputs_b: generate_funding_output(100_000),
+ // The error is caused by NodeA; it occurs when NodeA prepares the message to be sent to NodeB, which is why it shows up here as NodeB
+ expect_error: Some((AbortReason::InvalidLowFundingOutputValue, ErrorCulprit::NodeB)),
+ a_expected_remote_shared_output: Some((generate_funding_script_pubkey(), 120_000)), // this is higher than the actual output value
+ b_expected_remote_shared_output: None,
});
}