Lean on the holding cell when batch-forwarding/failing HTLCs
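This change drops the per-forward `send_htlc`/`get_update_fail_htlc` calls and the manual `send_commitment` handling in `forward_htlcs`, and instead queues forwards and fail-backs into each channel's holding cell via `queue_add_htlc`/`queue_fail_htlc`, freeing the cells once at the end with `check_free_holding_cells()` so pending HTLCs are turned into batched commitment updates in one place. Below is a minimal, self-contained sketch of that batching pattern (illustrative only; `HoldingCell`, `Update`, `queue`, and `flush` are invented names for the example, not LDK APIs):

    // Queue updates without building a commitment update per call, then flush
    // them all at once -- the same idea the patch leans on.
    enum Update {
        Add { htlc_id: u64, amt_msat: u64 },
        Fail { htlc_id: u64 },
    }

    #[derive(Default)]
    struct HoldingCell {
        pending: Vec<Update>,
    }

    impl HoldingCell {
        // Analogous to queue_add_htlc/queue_fail_htlc: just record the work.
        fn queue(&mut self, upd: Update) {
            self.pending.push(upd);
        }

        // Analogous to check_free_holding_cells(): one batch per flush,
        // regardless of how many HTLCs were queued in the meantime.
        fn flush(&mut self) -> Option<Vec<Update>> {
            if self.pending.is_empty() {
                None
            } else {
                Some(std::mem::take(&mut self.pending))
            }
        }
    }

    fn main() {
        let mut cell = HoldingCell::default();
        cell.queue(Update::Add { htlc_id: 0, amt_msat: 10_000 });
        cell.queue(Update::Fail { htlc_id: 7 });
        if let Some(batch) = cell.flush() {
            println!("one commitment update covering {} HTLCs", batch.len());
        }
    }
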
[rust-lightning] / lightning / src / ln / channelmanager.rs
index f27ca53bcf07a3c42334ee0b8cec05e681cdfc34..fdd0bb202417ba5f62d1a339bb001633db9ade68 100644
@@ -287,6 +287,16 @@ pub(super) enum HTLCFailReason {
        }
 }
 
+impl HTLCFailReason {
+       pub(super) fn reason(failure_code: u16, data: Vec<u8>) -> Self {
+               Self::Reason { failure_code, data }
+       }
+
+       pub(super) fn from_failure_code(failure_code: u16) -> Self {
+               Self::Reason { failure_code, data: Vec::new() }
+       }
+}
+
 struct ReceiveError {
        err_code: u16,
        err_data: Vec<u8>,
@@ -1876,8 +1886,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                };
 
                for htlc_source in failed_htlcs.drain(..) {
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
                }
 
                let _ = handle_error!(self, result, *counterparty_node_id);
@@ -1934,8 +1945,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                log_debug!(self.logger, "Finishing force-closure of channel with {} HTLCs to fail", failed_htlcs.len());
                for htlc_source in failed_htlcs.drain(..) {
                        let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
-                       self.fail_htlc_backwards_internal(source, &payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
                if let Some((funding_txo, monitor_update)) = monitor_update_option {
                        // There isn't anything we can do if we get an update failure - we're already
@@ -2407,10 +2419,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let session_priv = SecretKey::from_slice(&session_priv_bytes[..]).expect("RNG is busted");
 
                let onion_keys = onion_utils::construct_onion_keys(&self.secp_ctx, &path, &session_priv)
-                       .map_err(|_| APIError::RouteError{err: "Pubkey along hop was maliciously selected"})?;
+                       .map_err(|_| APIError::InvalidRoute{err: "Pubkey along hop was maliciously selected"})?;
                let (onion_payloads, htlc_msat, htlc_cltv) = onion_utils::build_onion_payloads(path, total_value, payment_secret, cur_height, keysend_preimage)?;
                if onion_utils::route_size_insane(&onion_payloads) {
-                       return Err(APIError::RouteError{err: "Route size too large considering onion data"});
+                       return Err(APIError::InvalidRoute{err: "Route size too large considering onion data"});
                }
                let onion_packet = onion_utils::construct_onion_packet(onion_payloads, onion_keys, prng_seed, payment_hash);
 
@@ -2427,7 +2439,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(id) {
                                match {
                                        if chan.get().get_counterparty_node_id() != path.first().unwrap().pubkey {
-                                               return Err(APIError::RouteError{err: "Node ID mismatch on first hop!"});
+                                               return Err(APIError::InvalidRoute{err: "Node ID mismatch on first hop!"});
                                        }
                                        if !chan.get().is_live() {
                                                return Err(APIError::ChannelUnavailable{err: "Peer for first hop currently disconnected/pending monitor update!".to_owned()});
@@ -2502,7 +2514,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// fields for more info.
        ///
        /// If a pending payment is currently in-flight with the same [`PaymentId`] provided, this
-       /// method will error with an [`APIError::RouteError`]. Note, however, that once a payment
+       /// method will error with an [`APIError::InvalidRoute`]. Note, however, that once a payment
        /// is no longer pending (either via [`ChannelManager::abandon_payment`], or handling of an
        /// [`Event::PaymentSent`]) LDK will not stop you from sending a second payment with the same
        /// [`PaymentId`].
@@ -2521,7 +2533,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// PaymentSendFailure for more info.
        ///
        /// In general, a path may raise:
-       ///  * [`APIError::RouteError`] when an invalid route or forwarding parameter (cltv_delta, fee,
+       ///  * [`APIError::InvalidRoute`] when an invalid route or forwarding parameter (cltv_delta, fee,
        ///    node public key) is specified.
        ///  * [`APIError::ChannelUnavailable`] if the next-hop channel is not available for updates
        ///    (including due to previous monitor update failure or new permanent monitor update
@@ -2586,7 +2598,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
        fn send_payment_internal(&self, route: &Route, payment_hash: PaymentHash, payment_secret: &Option<PaymentSecret>, keysend_preimage: Option<PaymentPreimage>, payment_id: PaymentId, recv_value_msat: Option<u64>, onion_session_privs: Vec<[u8; 32]>) -> Result<(), PaymentSendFailure> {
                if route.paths.len() < 1 {
-                       return Err(PaymentSendFailure::ParameterError(APIError::RouteError{err: "There must be at least one path to send over"}));
+                       return Err(PaymentSendFailure::ParameterError(APIError::InvalidRoute{err: "There must be at least one path to send over"}));
                }
                if payment_secret.is_none() && route.paths.len() > 1 {
                        return Err(PaymentSendFailure::ParameterError(APIError::APIMisuseError{err: "Payment secret is required for multi-path payments".to_string()}));
@@ -2596,12 +2608,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let mut path_errs = Vec::with_capacity(route.paths.len());
                'path_check: for path in route.paths.iter() {
                        if path.len() < 1 || path.len() > 20 {
-                               path_errs.push(Err(APIError::RouteError{err: "Path didn't go anywhere/had bogus size"}));
+                               path_errs.push(Err(APIError::InvalidRoute{err: "Path didn't go anywhere/had bogus size"}));
                                continue 'path_check;
                        }
                        for (idx, hop) in path.iter().enumerate() {
                                if idx != path.len() - 1 && hop.pubkey == our_node_id {
-                                       path_errs.push(Err(APIError::RouteError{err: "Path went through us but wasn't a simple rebalance loop to us"}));
+                                       path_errs.push(Err(APIError::InvalidRoute{err: "Path went through us but wasn't a simple rebalance loop to us"}));
                                        continue 'path_check;
                                }
                        }
@@ -3078,15 +3090,22 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
                let next_hop_scid = match self.channel_state.lock().unwrap().by_id.get(next_hop_channel_id) {
-                       Some(chan) => chan.get_short_channel_id().unwrap_or(chan.outbound_scid_alias()),
-                       None => return Err(APIError::APIMisuseError {
-                               err: format!("Channel with id {:?} not found", next_hop_channel_id)
+                       Some(chan) => {
+                               if !chan.is_usable() {
+                                       return Err(APIError::ChannelUnavailable {
+                                               err: format!("Channel with id {} not fully established", log_bytes!(*next_hop_channel_id))
+                                       })
+                               }
+                               chan.get_short_channel_id().unwrap_or(chan.outbound_scid_alias())
+                       },
+                       None => return Err(APIError::ChannelUnavailable {
+                               err: format!("Channel with id {} not found", log_bytes!(*next_hop_channel_id))
                        })
                };
 
                let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
                        .ok_or_else(|| APIError::APIMisuseError {
-                               err: format!("Payment with intercept id {:?} not found", intercept_id.0)
+                               err: format!("Payment with intercept id {} not found", log_bytes!(intercept_id.0))
                        })?;
 
                let routing = match payment.forward_info.routing {
@@ -3121,7 +3140,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
                        .ok_or_else(|| APIError::APIMisuseError {
-                               err: format!("Payment with InterceptId {:?} not found", intercept_id)
+                               err: format!("Payment with intercept id {} not found", log_bytes!(intercept_id.0))
                        })?;
 
                if let PendingHTLCRouting::Forward { short_channel_id, .. } = payment.forward_info.routing {
@@ -3133,9 +3152,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                phantom_shared_secret: None,
                        });
 
-                       let failure_reason = HTLCFailReason::Reason { failure_code: 0x4000 | 10, data: Vec::new() };
+                       let failure_reason = HTLCFailReason::from_failure_code(0x4000 | 10);
                        let destination = HTLCDestination::UnknownNextHop { requested_forward_scid: short_channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source, &payment.forward_info.payment_hash, failure_reason, destination);
+                       self.fail_htlc_backwards_internal(&htlc_source, &payment.forward_info.payment_hash, &failure_reason, destination);
                } else { unreachable!() } // Only `PendingHTLCRouting::Forward`s are intercepted
 
                Ok(())
@@ -3151,7 +3170,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let mut new_events = Vec::new();
                let mut failed_forwards = Vec::new();
                let mut phantom_receives: Vec<(u64, OutPoint, u128, Vec<(PendingHTLCInfo, u64)>)> = Vec::new();
-               let mut handle_errors = Vec::new();
                {
                        let mut forward_htlcs = HashMap::new();
                        mem::swap(&mut forward_htlcs, &mut self.forward_htlcs.lock().unwrap());
@@ -3188,7 +3206,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                };
 
                                                                                                failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::Reason { failure_code: $err_code, data: $err_data },
+                                                                                                       HTLCFailReason::reason($err_code, $err_data),
                                                                                                        reason
                                                                                                ));
                                                                                                continue;
@@ -3267,8 +3285,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                        continue;
                                                },
                                                hash_map::Entry::Occupied(mut chan) => {
-                                                       let mut add_htlc_msgs = Vec::new();
-                                                       let mut fail_htlc_msgs = Vec::new();
                                                        for forward_info in pending_forwards.drain(..) {
                                                                match forward_info {
                                                                        HTLCForwardInfo::AddHTLC(PendingAddHTLCInfo {
@@ -3287,34 +3303,21 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                        // Phantom payments are only PendingHTLCRouting::Receive.
                                                                                        phantom_shared_secret: None,
                                                                                });
-                                                                               match chan.get_mut().send_htlc(outgoing_amt_msat, payment_hash, outgoing_cltv_value, htlc_source.clone(), onion_packet, &self.logger) {
-                                                                                       Err(e) => {
-                                                                                               if let ChannelError::Ignore(msg) = e {
-                                                                                                       log_trace!(self.logger, "Failed to forward HTLC with payment_hash {}: {}", log_bytes!(payment_hash.0), msg);
-                                                                                               } else {
-                                                                                                       panic!("Stated return value requirements in send_htlc() were not met");
-                                                                                               }
-                                                                                               let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan.get());
-                                                                                               failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::Reason { failure_code, data },
-                                                                                                       HTLCDestination::NextHopChannel { node_id: Some(chan.get().get_counterparty_node_id()), channel_id: forward_chan_id }
-                                                                                               ));
-                                                                                               continue;
-                                                                                       },
-                                                                                       Ok(update_add) => {
-                                                                                               match update_add {
-                                                                                                       Some(msg) => { add_htlc_msgs.push(msg); },
-                                                                                                       None => {
-                                                                                                               // Nothing to do here...we're waiting on a remote
-                                                                                                               // revoke_and_ack before we can add anymore HTLCs. The Channel
-                                                                                                               // will automatically handle building the update_add_htlc and
-                                                                                                               // commitment_signed messages when we can.
-                                                                                                               // TODO: Do some kind of timer to set the channel as !is_live()
-                                                                                                               // as we don't really want others relying on us relaying through
-                                                                                                               // this channel currently :/.
-                                                                                                       }
-                                                                                               }
+                                                                               if let Err(e) = chan.get_mut().queue_add_htlc(outgoing_amt_msat,
+                                                                                       payment_hash, outgoing_cltv_value, htlc_source.clone(),
+                                                                                       onion_packet, &self.logger)
+                                                                               {
+                                                                                       if let ChannelError::Ignore(msg) = e {
+                                                                                               log_trace!(self.logger, "Failed to forward HTLC with payment_hash {}: {}", log_bytes!(payment_hash.0), msg);
+                                                                                       } else {
+                                                                                               panic!("Stated return value requirements in send_htlc() were not met");
                                                                                        }
+                                                                                       let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan.get());
+                                                                                       failed_forwards.push((htlc_source, payment_hash,
+                                                                                               HTLCFailReason::reason(failure_code, data),
+                                                                                               HTLCDestination::NextHopChannel { node_id: Some(chan.get().get_counterparty_node_id()), channel_id: forward_chan_id }
+                                                                                       ));
+                                                                                       continue;
                                                                                }
                                                                        },
                                                                        HTLCForwardInfo::AddHTLC { .. } => {
@@ -3322,77 +3325,22 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                        },
                                                                        HTLCForwardInfo::FailHTLC { htlc_id, err_packet } => {
                                                                                log_trace!(self.logger, "Failing HTLC back to channel with short id {} (backward HTLC ID {}) after delay", short_chan_id, htlc_id);
-                                                                               match chan.get_mut().get_update_fail_htlc(htlc_id, err_packet, &self.logger) {
-                                                                                       Err(e) => {
-                                                                                               if let ChannelError::Ignore(msg) = e {
-                                                                                                       log_trace!(self.logger, "Failed to fail HTLC with ID {} backwards to short_id {}: {}", htlc_id, short_chan_id, msg);
-                                                                                               } else {
-                                                                                                       panic!("Stated return value requirements in get_update_fail_htlc() were not met");
-                                                                                               }
-                                                                                               // fail-backs are best-effort, we probably already have one
-                                                                                               // pending, and if not that's OK, if not, the channel is on
-                                                                                               // the chain and sending the HTLC-Timeout is their problem.
-                                                                                               continue;
-                                                                                       },
-                                                                                       Ok(Some(msg)) => { fail_htlc_msgs.push(msg); },
-                                                                                       Ok(None) => {
-                                                                                               // Nothing to do here...we're waiting on a remote
-                                                                                               // revoke_and_ack before we can update the commitment
-                                                                                               // transaction. The Channel will automatically handle
-                                                                                               // building the update_fail_htlc and commitment_signed
-                                                                                               // messages when we can.
-                                                                                               // We don't need any kind of timer here as they should fail
-                                                                                               // the channel onto the chain if they can't get our
-                                                                                               // update_fail_htlc in time, it's not our problem.
+                                                                               if let Err(e) = chan.get_mut().queue_fail_htlc(
+                                                                                       htlc_id, err_packet, &self.logger
+                                                                               ) {
+                                                                                       if let ChannelError::Ignore(msg) = e {
+                                                                                               log_trace!(self.logger, "Failed to fail HTLC with ID {} backwards to short_id {}: {}", htlc_id, short_chan_id, msg);
+                                                                                       } else {
+                                                                                               panic!("Stated return value requirements in queue_fail_htlc() were not met");
                                                                                        }
+                                                                                       // fail-backs are best-effort, we probably already have one
+                                                                                       // pending, and if not that's OK, if not, the channel is on
+                                                                                       // the chain and sending the HTLC-Timeout is their problem.
+                                                                                       continue;
                                                                                }
                                                                        },
                                                                }
                                                        }
-
-                                                       if !add_htlc_msgs.is_empty() || !fail_htlc_msgs.is_empty() {
-                                                               let (commitment_msg, monitor_update) = match chan.get_mut().send_commitment(&self.logger) {
-                                                                       Ok(res) => res,
-                                                                       Err(e) => {
-                                                                               // We surely failed send_commitment due to bad keys, in that case
-                                                                               // close channel and then send error message to peer.
-                                                                               let counterparty_node_id = chan.get().get_counterparty_node_id();
-                                                                               let err: Result<(), _>  = match e {
-                                                                                       ChannelError::Ignore(_) | ChannelError::Warn(_) => {
-                                                                                               panic!("Stated return value requirements in send_commitment() were not met");
-                                                                                       }
-                                                                                       ChannelError::Close(msg) => {
-                                                                                               log_trace!(self.logger, "Closing channel {} due to Close-required error: {}", log_bytes!(chan.key()[..]), msg);
-                                                                                               let mut channel = remove_channel!(self, chan);
-                                                                                               // ChannelClosed event is generated by handle_error for us.
-                                                                                               Err(MsgHandleErrInternal::from_finish_shutdown(msg, channel.channel_id(), channel.get_user_id(), channel.force_shutdown(true), self.get_channel_update_for_broadcast(&channel).ok()))
-                                                                                       },
-                                                                               };
-                                                                               handle_errors.push((counterparty_node_id, err));
-                                                                               continue;
-                                                                       }
-                                                               };
-                                                               match self.chain_monitor.update_channel(chan.get().get_funding_txo().unwrap(), monitor_update) {
-                                                                       ChannelMonitorUpdateStatus::Completed => {},
-                                                                       e => {
-                                                                               handle_errors.push((chan.get().get_counterparty_node_id(), handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, true)));
-                                                                               continue;
-                                                                       }
-                                                               }
-                                                               log_debug!(self.logger, "Forwarding HTLCs resulted in a commitment update with {} HTLCs added and {} HTLCs failed for channel {}",
-                                                                       add_htlc_msgs.len(), fail_htlc_msgs.len(), log_bytes!(chan.get().channel_id()));
-                                                               channel_state.pending_msg_events.push(events::MessageSendEvent::UpdateHTLCs {
-                                                                       node_id: chan.get().get_counterparty_node_id(),
-                                                                       updates: msgs::CommitmentUpdate {
-                                                                               update_add_htlcs: add_htlc_msgs,
-                                                                               update_fulfill_htlcs: Vec::new(),
-                                                                               update_fail_htlcs: fail_htlc_msgs,
-                                                                               update_fail_malformed_htlcs: Vec::new(),
-                                                                               update_fee: None,
-                                                                               commitment_signed: commitment_msg,
-                                                                       },
-                                                               });
-                                                       }
                                                }
                                        }
                                } else {
@@ -3443,7 +3391,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                incoming_packet_shared_secret: $htlc.prev_hop.incoming_packet_shared_secret,
                                                                                                phantom_shared_secret,
                                                                                        }), payment_hash,
-                                                                                       HTLCFailReason::Reason { failure_code: 0x4000 | 15, data: htlc_msat_height_data },
+                                                                                       HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
                                                                                        HTLCDestination::FailedPayment { payment_hash: $payment_hash },
                                                                                ));
                                                                        }
@@ -3592,13 +3540,15 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                }
 
                for (htlc_source, payment_hash, failure_reason, destination) in failed_forwards.drain(..) {
-                       self.fail_htlc_backwards_internal(htlc_source, &payment_hash, failure_reason, destination);
+                       self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
                }
                self.forward_htlcs(&mut phantom_receives);
 
-               for (counterparty_node_id, err) in handle_errors.drain(..) {
-                       let _ = handle_error!(self, err, counterparty_node_id);
-               }
+               // Freeing the holding cell here is relatively redundant - in practice we'll do it when we
+               // next get a `get_and_clear_pending_msg_events` call, but some tests rely on it, and it's
+               // nice to do the work now if we can rather than while we're trying to get messages in the
+               // network stack.
+               self.check_free_holding_cells();
 
                if new_events.is_empty() { return }
                let mut events = self.pending_events.lock().unwrap();
@@ -3855,8 +3805,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        });
 
                        for htlc_source in timed_out_mpp_htlcs.drain(..) {
+                               let source = HTLCSource::PreviousHopData(htlc_source.0.clone());
+                               let reason = HTLCFailReason::from_failure_code(23);
                                let receiver = HTLCDestination::FailedPayment { payment_hash: htlc_source.1 };
-                               self.fail_htlc_backwards_internal(HTLCSource::PreviousHopData(htlc_source.0.clone()), &htlc_source.1, HTLCFailReason::Reason { failure_code: 23, data: Vec::new() }, receiver );
+                               self.fail_htlc_backwards_internal(&source, &htlc_source.1, &reason, receiver);
                        }
 
                        for (err, counterparty_node_id) in handle_errors.drain(..) {
@@ -3891,10 +3843,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
                                                self.best_block.read().unwrap().height()));
-                               self.fail_htlc_backwards_internal(
-                                               HTLCSource::PreviousHopData(htlc.prev_hop), payment_hash,
-                                               HTLCFailReason::Reason { failure_code: 0x4000 | 15, data: htlc_msat_height_data },
-                                               HTLCDestination::FailedPayment { payment_hash: *payment_hash });
+                               let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                               let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                               let receiver = HTLCDestination::FailedPayment { payment_hash: *payment_hash };
+                               self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                        }
                }
        }
@@ -3953,23 +3905,24 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                &self, mut htlcs_to_fail: Vec<(HTLCSource, PaymentHash)>, channel_id: [u8; 32],
                counterparty_node_id: &PublicKey
        ) {
-               for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
-                       let (failure_code, onion_failure_data) =
-                               match self.channel_state.lock().unwrap().by_id.entry(channel_id) {
-                                       hash_map::Entry::Occupied(chan_entry) => {
-                                               self.get_htlc_inbound_temp_fail_err_and_data(0x1000|7, &chan_entry.get())
-                                       },
-                                       hash_map::Entry::Vacant(_) => (0x4000|10, Vec::new())
-                               };
+               let (failure_code, onion_failure_data) =
+                       match self.channel_state.lock().unwrap().by_id.entry(channel_id) {
+                               hash_map::Entry::Occupied(chan_entry) => {
+                                       self.get_htlc_inbound_temp_fail_err_and_data(0x1000|7, &chan_entry.get())
+                               },
+                               hash_map::Entry::Vacant(_) => (0x4000|10, Vec::new())
+                       };
 
+               for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
+                       let reason = HTLCFailReason::reason(failure_code, onion_failure_data.clone());
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id };
-                       self.fail_htlc_backwards_internal(htlc_src, &payment_hash, HTLCFailReason::Reason { failure_code, data: onion_failure_data }, receiver);
+                       self.fail_htlc_backwards_internal(&htlc_src, &payment_hash, &reason, receiver);
                }
        }
 
        /// Fails an HTLC backwards to the sender of it to us.
        /// Note that we do not assume that channels corresponding to failed HTLCs are still available.
-       fn fail_htlc_backwards_internal(&self, source: HTLCSource, payment_hash: &PaymentHash, onion_error: HTLCFailReason,destination: HTLCDestination) {
+       fn fail_htlc_backwards_internal(&self, source: &HTLCSource, payment_hash: &PaymentHash, onion_error: &HTLCFailReason, destination: HTLCDestination) {
                #[cfg(debug_assertions)]
                {
                        // Ensure that the `channel_state` lock is not held when calling this function.
@@ -3988,13 +3941,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                // from block_connected which may run during initialization prior to the chain_monitor
                // being fully configured. See the docs for `ChannelManagerReadArgs` for more.
                match source {
-                       HTLCSource::OutboundRoute { ref path, session_priv, payment_id, ref payment_params, .. } => {
+                       HTLCSource::OutboundRoute { ref path, ref session_priv, ref payment_id, ref payment_params, .. } => {
                                let mut session_priv_bytes = [0; 32];
                                session_priv_bytes.copy_from_slice(&session_priv[..]);
                                let mut outbounds = self.pending_outbound_payments.lock().unwrap();
                                let mut all_paths_failed = false;
                                let mut full_failure_ev = None;
-                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(payment_id) {
+                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(*payment_id) {
                                        if !payment.get_mut().remove(&session_priv_bytes, Some(&path)) {
                                                log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
                                                return;
@@ -4007,7 +3960,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                all_paths_failed = true;
                                                if payment.get().abandoned() {
                                                        full_failure_ev = Some(events::Event::PaymentFailed {
-                                                               payment_id,
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment.get().payment_hash().expect("PendingOutboundPayments::RetriesExceeded always has a payment hash set"),
                                                        });
                                                        payment.remove();
@@ -4037,13 +3990,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                if self.payment_is_probe(payment_hash, &payment_id) {
                                                        if !payment_retryable {
                                                                events::Event::ProbeSuccessful {
-                                                                       payment_id,
+                                                                       payment_id: *payment_id,
                                                                        payment_hash: payment_hash.clone(),
                                                                        path: path.clone(),
                                                                }
                                                        } else {
                                                                events::Event::ProbeFailed {
-                                                                       payment_id,
+                                                                       payment_id: *payment_id,
                                                                        payment_hash: payment_hash.clone(),
                                                                        path: path.clone(),
                                                                        short_channel_id,
@@ -4057,7 +4010,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                retry.as_mut().map(|r| r.payment_params.previously_failed_channels.push(scid));
                                                        }
                                                        events::Event::PaymentPathFailed {
-                                                               payment_id: Some(payment_id),
+                                                               payment_id: Some(*payment_id),
                                                                payment_hash: payment_hash.clone(),
                                                                payment_failed_permanently: !payment_retryable,
                                                                network_update,
@@ -4090,14 +4043,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
 
                                                if self.payment_is_probe(payment_hash, &payment_id) {
                                                        events::Event::ProbeFailed {
-                                                               payment_id,
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment_hash.clone(),
                                                                path: path.clone(),
                                                                short_channel_id: Some(scid),
                                                        }
                                                } else {
                                                        events::Event::PaymentPathFailed {
-                                                               payment_id: Some(payment_id),
+                                                               payment_id: Some(*payment_id),
                                                                payment_hash: payment_hash.clone(),
                                                                payment_failed_permanently: false,
                                                                network_update: None,
@@ -4117,22 +4070,22 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                pending_events.push(path_failure);
                                if let Some(ev) = full_failure_ev { pending_events.push(ev); }
                        },
-                       HTLCSource::PreviousHopData(HTLCPreviousHopData { short_channel_id, htlc_id, incoming_packet_shared_secret, phantom_shared_secret, outpoint }) => {
+                       HTLCSource::PreviousHopData(HTLCPreviousHopData { ref short_channel_id, ref htlc_id, ref incoming_packet_shared_secret, ref phantom_shared_secret, ref outpoint }) => {
                                let err_packet = match onion_error {
-                                       HTLCFailReason::Reason { failure_code, data } => {
+                                       HTLCFailReason::Reason { ref failure_code, ref data } => {
                                                log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with code {}", log_bytes!(payment_hash.0), failure_code);
                                                if let Some(phantom_ss) = phantom_shared_secret {
-                                                       let phantom_packet = onion_utils::build_failure_packet(&phantom_ss, failure_code, &data[..]).encode();
-                                                       let encrypted_phantom_packet = onion_utils::encrypt_failure_packet(&phantom_ss, &phantom_packet);
-                                                       onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &encrypted_phantom_packet.data[..])
+                                                       let phantom_packet = onion_utils::build_failure_packet(phantom_ss, *failure_code, &data[..]).encode();
+                                                       let encrypted_phantom_packet = onion_utils::encrypt_failure_packet(phantom_ss, &phantom_packet);
+                                                       onion_utils::encrypt_failure_packet(incoming_packet_shared_secret, &encrypted_phantom_packet.data[..])
                                                } else {
-                                                       let packet = onion_utils::build_failure_packet(&incoming_packet_shared_secret, failure_code, &data[..]).encode();
-                                                       onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &packet)
+                                                       let packet = onion_utils::build_failure_packet(incoming_packet_shared_secret, *failure_code, &data[..]).encode();
+                                                       onion_utils::encrypt_failure_packet(incoming_packet_shared_secret, &packet)
                                                }
                                        },
                                        HTLCFailReason::LightningError { err } => {
                                                log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards with pre-built LightningError", log_bytes!(payment_hash.0));
-                                               onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &err.data)
+                                               onion_utils::encrypt_failure_packet(incoming_packet_shared_secret, &err.data)
                                        }
                                };
 
@@ -4141,12 +4094,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                if forward_htlcs.is_empty() {
                                        forward_event = Some(Duration::from_millis(MIN_HTLC_RELAY_HOLDING_CELL_MILLIS));
                                }
-                               match forward_htlcs.entry(short_channel_id) {
+                               match forward_htlcs.entry(*short_channel_id) {
                                        hash_map::Entry::Occupied(mut entry) => {
-                                               entry.get_mut().push(HTLCForwardInfo::FailHTLC { htlc_id, err_packet });
+                                               entry.get_mut().push(HTLCForwardInfo::FailHTLC { htlc_id: *htlc_id, err_packet });
                                        },
                                        hash_map::Entry::Vacant(entry) => {
-                                               entry.insert(vec!(HTLCForwardInfo::FailHTLC { htlc_id, err_packet }));
+                                               entry.insert(vec!(HTLCForwardInfo::FailHTLC { htlc_id: *htlc_id, err_packet }));
                                        }
                                }
                                mem::drop(forward_htlcs);
@@ -4158,7 +4111,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                }
                                pending_events.push(events::Event::HTLCHandlingFailed {
                                        prev_channel_id: outpoint.to_channel_id(),
-                                       failed_next_destination: destination
+                                       failed_next_destination: destination,
                                });
                        },
                }
@@ -4287,10 +4240,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                        htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
                                                self.best_block.read().unwrap().height()));
-                                       self.fail_htlc_backwards_internal(
-                                               HTLCSource::PreviousHopData(htlc.prev_hop), &payment_hash,
-                                               HTLCFailReason::Reason { failure_code: 0x4000|15, data: htlc_msat_height_data },
-                                               HTLCDestination::FailedPayment { payment_hash } );
+                                       let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                                       let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                                       let receiver = HTLCDestination::FailedPayment { payment_hash };
+                                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                                }
                        }
 
@@ -4614,7 +4567,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                self.finalize_claims(finalized_claims);
                for failure in pending_failures.drain(..) {
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id: funding_txo.to_channel_id() };
-                       self.fail_htlc_backwards_internal(failure.0, &failure.1, failure.2, receiver);
+                       self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
                }
        }
 
@@ -4982,7 +4935,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                };
                for htlc_source in dropped_htlcs.drain(..) {
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                       self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
                }
 
                let _ = handle_error!(self, result, *counterparty_node_id);
@@ -5127,7 +5081,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let chan_err: ChannelError = ChannelError::Close("Got update_fail_malformed_htlc with BADONION not set".to_owned());
                                        try_chan_entry!(self, Err(chan_err), chan);
                                }
-                               try_chan_entry!(self, chan.get_mut().update_fail_malformed_htlc(&msg, HTLCFailReason::Reason { failure_code: msg.failure_code, data: Vec::new() }), chan);
+                               try_chan_entry!(self, chan.get_mut().update_fail_malformed_htlc(&msg, HTLCFailReason::from_failure_code(msg.failure_code)), chan);
                                Ok(())
                        },
                        hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
@@ -5233,7 +5187,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                });
 
                                                                                failed_intercept_forwards.push((htlc_source, forward_info.payment_hash,
-                                                                                               HTLCFailReason::Reason { failure_code: 0x4000 | 10, data: Vec::new() },
+                                                                                               HTLCFailReason::from_failure_code(0x4000 | 10),
                                                                                                HTLCDestination::InvalidForward { requested_forward_scid: scid },
                                                                                ));
                                                                        }
@@ -5253,7 +5207,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        }
 
                        for (htlc_source, payment_hash, failure_reason, destination) in failed_intercept_forwards.drain(..) {
-                               self.fail_htlc_backwards_internal(htlc_source, &payment_hash, failure_reason, destination);
+                               self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
                        }
 
                        if !new_intercept_events.is_empty() {
@@ -5328,7 +5282,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        {
                                for failure in pending_failures.drain(..) {
                                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: channel_outpoint.to_channel_id() };
-                                       self.fail_htlc_backwards_internal(failure.0, &failure.1, failure.2, receiver);
+                                       self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
                                }
                                self.forward_htlcs(&mut [(short_channel_id, channel_outpoint, user_channel_id, pending_forwards)]);
                                self.finalize_claims(finalized_claim_htlcs);
@@ -5488,7 +5442,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                } else {
                                                        log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
                                                        let receiver = HTLCDestination::NextHopChannel { node_id: counterparty_node_id, channel_id: funding_outpoint.to_channel_id() };
-                                                       self.fail_htlc_backwards_internal(htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                                                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                                                       self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
                                                }
                                        },
                                        MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
@@ -5544,11 +5499,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Check the holding cell in each channel and free any pending HTLCs in them if possible.
        /// Returns whether there were any updates such as if pending HTLCs were freed or a monitor
        /// update was applied.
-       ///
-       /// This should only apply to HTLCs which were added to the holding cell because we were
-       /// waiting on a monitor update to finish. In that case, we don't want to free the holding cell
-       /// directly in `channel_monitor_updated` as it may introduce deadlocks calling back into user
-       /// code to inform them of a channel monitor update.
        fn check_free_holding_cells(&self) -> bool {
                let mut has_monitor_update = false;
                let mut failed_htlcs = Vec::new();
@@ -6174,9 +6124,8 @@ where
                                if let Ok((channel_ready_opt, mut timed_out_pending_htlcs, announcement_sigs)) = res {
                                        for (source, payment_hash) in timed_out_pending_htlcs.drain(..) {
                                                let (failure_code, data) = self.get_htlc_inbound_temp_fail_err_and_data(0x1000|14 /* expiry_too_soon */, &channel);
-                                               timed_out_htlcs.push((source, payment_hash, HTLCFailReason::Reason {
-                                                       failure_code, data,
-                                               }, HTLCDestination::NextHopChannel { node_id: Some(channel.get_counterparty_node_id()), channel_id: channel.channel_id() }));
+                                               timed_out_htlcs.push((source, payment_hash, HTLCFailReason::reason(failure_code, data),
+                                                       HTLCDestination::NextHopChannel { node_id: Some(channel.get_counterparty_node_id()), channel_id: channel.channel_id() }));
                                        }
                                        if let Some(channel_ready) = channel_ready_opt {
                                                send_channel_ready!(self, pending_msg_events, channel, channel_ready);
@@ -6262,10 +6211,10 @@ where
                                        if height >= htlc.cltv_expiry - HTLC_FAIL_BACK_BUFFER {
                                                let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                                htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(height));
-                                               timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(), HTLCFailReason::Reason {
-                                                       failure_code: 0x4000 | 15,
-                                                       data: htlc_msat_height_data
-                                               }, HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
+
+                                               timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(),
+                                                       HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
+                                                       HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
                                                false
                                        } else { true }
                                });
@@ -6288,7 +6237,7 @@ where
                                                _ => unreachable!(),
                                        };
                                        timed_out_htlcs.push((prev_hop_data, htlc.forward_info.payment_hash,
-                                                       HTLCFailReason::Reason { failure_code: 0x2000 | 2, data: Vec::new() },
+                                                       HTLCFailReason::from_failure_code(0x2000 | 2),
                                                        HTLCDestination::InvalidForward { requested_forward_scid }));
                                        log_trace!(self.logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
                                        false
@@ -6299,7 +6248,7 @@ where
                self.handle_init_event_channel_failures(failed_channels);
 
                for (source, payment_hash, reason, destination) in timed_out_htlcs.drain(..) {
-                       self.fail_htlc_backwards_internal(source, &payment_hash, reason, destination);
+                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, destination);
                }
        }
 
@@ -7786,7 +7735,8 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                for htlc_source in failed_htlcs.drain(..) {
                        let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
-                       channel_manager.fail_htlc_backwards_internal(source, &payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                       channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
 
                //TODO: Broadcast channel update for closed channels, but only after we've made a