Correctly handle any `UPDATE` errors to phantom invoices
diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index de86a8e703c44ce59cfb8b5664412755f8d7d2f3..6e5df15908adb097ea5b96655b234e46ac66907a 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -49,6 +49,7 @@ use crate::ln::features::InvoiceFeatures;
 use crate::routing::router::{InFlightHtlcs, PaymentParameters, Route, RouteHop, RoutePath, RouteParameters};
 use crate::ln::msgs;
 use crate::ln::onion_utils;
+use crate::ln::onion_utils::HTLCFailReason;
 use crate::ln::msgs::{ChannelMessageHandler, DecodeError, LightningError, MAX_VALUE_MSAT};
 use crate::ln::wire::Encode;
 use crate::chain::keysinterface::{Sign, KeysInterface, KeysManager, Recipient};
@@ -276,17 +277,6 @@ impl HTLCSource {
        }
 }
 
-#[derive(Clone)] // See Channel::revoke_and_ack for why, tl;dr: Rust bug
-pub(super) enum HTLCFailReason {
-       LightningError {
-               err: msgs::OnionErrorPacket,
-       },
-       Reason {
-               failure_code: u16,
-               data: Vec<u8>,
-       }
-}
-
 struct ReceiveError {
        err_code: u16,
        err_data: Vec<u8>,
@@ -1876,8 +1866,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                };
 
                for htlc_source in failed_htlcs.drain(..) {
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
                }
 
                let _ = handle_error!(self, result, *counterparty_node_id);
@@ -1934,8 +1925,9 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                log_debug!(self.logger, "Finishing force-closure of channel with {} HTLCs to fail", failed_htlcs.len());
                for htlc_source in failed_htlcs.drain(..) {
                        let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
-                       self.fail_htlc_backwards_internal(source, &payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
                if let Some((funding_txo, monitor_update)) = monitor_update_option {
                        // There isn't anything we can do if we get an update failure - we're already
@@ -2050,10 +2042,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                // Also, ensure that, in the case of an unknown preimage for the received payment hash, our
                // payment logic has enough time to fail the HTLC backward before our onchain logic triggers a
                // channel closure (see HTLC_FAIL_BACK_BUFFER rationale).
-               if (hop_data.outgoing_cltv_value as u64) <= self.best_block.read().unwrap().height() as u64 + HTLC_FAIL_BACK_BUFFER as u64 + 1  {
+               let current_height: u32 = self.best_block.read().unwrap().height();
+               if (hop_data.outgoing_cltv_value as u64) <= current_height as u64 + HTLC_FAIL_BACK_BUFFER as u64 + 1 {
+                       let mut err_data = Vec::with_capacity(12);
+                       err_data.extend_from_slice(&amt_msat.to_be_bytes());
+                       err_data.extend_from_slice(&current_height.to_be_bytes());
                        return Err(ReceiveError {
-                               err_code: 17,
-                               err_data: Vec::new(),
+                               err_code: 0x4000 | 15, err_data,
                                msg: "The final CLTV expiry is too soon to handle",
                        });
                }
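The hunk above replaces the bare error code 17 with incorrect_or_unknown_payment_details (0x4000 | 15) and attaches the 12-byte error data BOLT 4 defines for it: the HTLC amount as a big-endian u64 followed by the current block height as a big-endian u32. A minimal standalone sketch of that layout, with a helper name of our own rather than anything in LDK:

/// Builds the 12-byte error data for incorrect_or_unknown_payment_details
/// (0x4000 | 15): htlc amount as big-endian u64, then height as big-endian u32.
/// Hypothetical helper for illustration; the diff inlines this directly.
fn incorrect_details_err_data(amt_msat: u64, current_height: u32) -> Vec<u8> {
    let mut err_data = Vec::with_capacity(12);
    err_data.extend_from_slice(&amt_msat.to_be_bytes());
    err_data.extend_from_slice(&current_height.to_be_bytes());
    err_data
}

#[test]
fn err_data_is_twelve_bytes() {
    let data = incorrect_details_err_data(100_000, 750_000);
    assert_eq!(data.len(), 12);
    assert_eq!(&data[..8], &100_000u64.to_be_bytes());
    assert_eq!(&data[8..], &750_000u32.to_be_bytes());
}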
@@ -2161,7 +2156,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        return PendingHTLCStatus::Fail(HTLCFailureMsg::Relay(msgs::UpdateFailHTLC {
                                                channel_id: msg.channel_id,
                                                htlc_id: msg.htlc_id,
-                                               reason: onion_utils::build_first_hop_failure_packet(&shared_secret, $err_code, $data),
+                                               reason: HTLCFailReason::reason($err_code, $data.to_vec())
+                                                       .get_encrypted_failure_packet(&shared_secret, &None),
                                        }));
                                }
                        }
@@ -2226,15 +2222,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        // with a short_channel_id of 0. This is important as various things later assume
                        // short_channel_id is non-0 in any ::Forward.
                        if let &PendingHTLCRouting::Forward { ref short_channel_id, .. } = routing {
-                               if let Some((err, code, chan_update)) = loop {
+                               if let Some((err, mut code, chan_update)) = loop {
                                        let id_option = self.short_to_chan_info.read().unwrap().get(&short_channel_id).cloned();
                                        let mut channel_state = self.channel_state.lock().unwrap();
                                        let forwarding_id_opt = match id_option {
                                                None => { // unknown_next_peer
                                                        // Note that this is likely a timing oracle for detecting whether an scid is a
                                                        // phantom or an intercept.
-                                                       if fake_scid::is_valid_phantom(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash) ||
-                                                          fake_scid::is_valid_intercept(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash)
+                                                       if (self.default_configuration.accept_intercept_htlcs &&
+                                                          fake_scid::is_valid_intercept(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash)) ||
+                                                          fake_scid::is_valid_phantom(&self.fake_scid_rand_bytes, *short_channel_id, &self.genesis_hash)
                                                        {
                                                                None
                                                        } else {
@@ -2282,10 +2279,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                }
                                                chan_update_opt
                                        } else {
-                                               if (msg.cltv_expiry as u64) < (*outgoing_cltv_value) as u64 + MIN_CLTV_EXPIRY_DELTA as u64 { // incorrect_cltv_expiry
+                                               if (msg.cltv_expiry as u64) < (*outgoing_cltv_value) as u64 + MIN_CLTV_EXPIRY_DELTA as u64 {
+                                                       // We really should set `incorrect_cltv_expiry` here but as we're not
+                                                       // forwarding over a real channel we can't generate a channel_update
+                                                       // for it. Instead we just return a generic temporary_node_failure.
                                                        break Some((
                                                                "Forwarding node has tampered with the intended HTLC values or origin node has an obsolete cltv_expiry_delta",
-                                                               0x1000 | 13, None,
+                                                               0x2000 | 2, None,
                                                        ));
                                                }
                                                None
@@ -2331,6 +2331,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                (chan_update.serialized_length() as u16 + 2).write(&mut res).expect("Writes cannot fail");
                                                msgs::ChannelUpdate::TYPE.write(&mut res).expect("Writes cannot fail");
                                                chan_update.write(&mut res).expect("Writes cannot fail");
+                                       } else if code & 0x1000 == 0x1000 {
+                                               // If we're trying to return an error that requires a `channel_update` but
+                                               // we're forwarding to a phantom or intercept "channel" (i.e. cannot
+                                               // generate an update), just use the generic "temporary_node_failure"
+                                               // instead.
+                                               code = 0x2000 | 2;
                                        }
                                        return_err!(err, code, &res.0[..]);
                                }
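This hunk carries the core of the change: failure codes with the UPDATE flag (0x1000) must be accompanied by a channel_update, which cannot be generated for a phantom or intercept SCID, so such errors are downgraded to the generic temporary_node_failure (0x2000 | 2). A standalone sketch of that mapping, using constant and helper names of our own (LDK performs the check inline as shown above):

const UPDATE: u16 = 0x1000;
const TEMPORARY_NODE_FAILURE: u16 = 0x2000 | 2;

/// Hypothetical helper mirroring the inline logic above: if the failure code
/// requires a channel_update but the "next hop" is a phantom/intercept scid
/// (no real channel to describe), fall back to temporary_node_failure.
fn failure_code_for_fake_scid(code: u16, is_fake_scid: bool) -> u16 {
    if is_fake_scid && (code & UPDATE) == UPDATE {
        TEMPORARY_NODE_FAILURE
    } else {
        code
    }
}

#[test]
fn update_errors_are_downgraded_for_fake_scids() {
    // incorrect_cltv_expiry (UPDATE | 13) cannot be sent without an update...
    assert_eq!(failure_code_for_fake_scid(0x1000 | 13, true), 0x2000 | 2);
    // ...while non-UPDATE codes and real channels pass through unchanged.
    assert_eq!(failure_code_for_fake_scid(0x4000 | 15, true), 0x4000 | 15);
    assert_eq!(failure_code_for_fake_scid(0x1000 | 13, false), 0x1000 | 13);
}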
@@ -3057,13 +3063,19 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
        /// Intercepted HTLCs can be useful for Lightning Service Providers (LSPs) to open a just-in-time
        /// channel to a receiving node if the node lacks sufficient inbound liquidity.
        ///
-       /// To make use of intercepted HTLCs, use [`ChannelManager::get_intercept_scid`] to generate short
-       /// channel id(s) to put in the receiver's invoice route hints. These route hints will signal to
-       /// LDK to generate an [`HTLCIntercepted`] event when it receives the forwarded HTLC.
+       /// To make use of intercepted HTLCs, set [`UserConfig::accept_intercept_htlcs`] and use
+       /// [`ChannelManager::get_intercept_scid`] to generate short channel id(s) to put in the
+       /// receiver's invoice route hints. These route hints will signal to LDK to generate an
+       /// [`HTLCIntercepted`] event when it receives the forwarded HTLC, and this method or
+       /// [`ChannelManager::fail_intercepted_htlc`] MUST be called in response to the event.
        ///
        /// Note that LDK does not enforce fee requirements in `amt_to_forward_msat`, and will not stop
        /// you from forwarding more than you received.
        ///
+       /// Errors if the event was not handled in time, in which case the HTLC was automatically failed
+       /// backwards.
+       ///
+       /// [`UserConfig::accept_intercept_htlcs`]: crate::util::config::UserConfig::accept_intercept_htlcs
        /// [`HTLCIntercepted`]: events::Event::HTLCIntercepted
        // TODO: when we move to deciding the best outbound channel at forward time, only take
        // `next_node_id` and not `next_hop_channel_id`
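The documentation above spells out the interception contract: with `accept_intercept_htlcs` set and an scid from `get_intercept_scid` in the route hints, LDK emits an `HTLCIntercepted` event and expects exactly one of `forward_intercepted_htlc` or `fail_intercepted_htlc` in response before the HTLC's CLTV gets too close (otherwise it is failed back automatically). A hedged sketch of that decision using local stand-in types; only `fail_intercepted_htlc(InterceptId)`'s signature appears verbatim in this diff, so the `forward_intercepted_htlc` call shown in the comment is an assumption to be checked against the LDK docs:

/// Local stand-ins for illustration only; the real types live in the `lightning` crate.
#[derive(Clone, Copy)]
struct InterceptId([u8; 32]);

enum LspDecision {
    /// A suitable outbound channel exists (or was just opened); forward over it.
    Forward { next_hop_channel_id: [u8; 32], amt_to_forward_msat: u64 },
    /// No channel and no intention to open one; fail the HTLC back to the sender.
    Fail,
}

fn respond_to_intercept(id: InterceptId, decision: LspDecision) {
    match decision {
        LspDecision::Forward { next_hop_channel_id, amt_to_forward_msat } => {
            // In real code (parameter order is an assumption, check the LDK docs):
            // channel_manager.forward_intercepted_htlc(id, &next_hop_channel_id,
            //     next_node_id, amt_to_forward_msat)?;
            let _ = (id, next_hop_channel_id, amt_to_forward_msat);
        }
        LspDecision::Fail => {
            // In real code: channel_manager.fail_intercepted_htlc(id)?;
            let _ = id;
        }
    }
}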
@@ -3071,7 +3083,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
 
                let next_hop_scid = match self.channel_state.lock().unwrap().by_id.get(next_hop_channel_id) {
-                       Some(chan) => chan.get_short_channel_id().unwrap_or(chan.outbound_scid_alias()),
+                       Some(chan) => {
+                               if !chan.is_usable() {
+                                       return Err(APIError::APIMisuseError {
+                                               err: format!("Channel with id {:?} not fully established", next_hop_channel_id)
+                                       })
+                               }
+                               chan.get_short_channel_id().unwrap_or(chan.outbound_scid_alias())
+                       },
                        None => return Err(APIError::APIMisuseError {
                                err: format!("Channel with id {:?} not found", next_hop_channel_id)
                        })
@@ -3102,6 +3121,38 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                Ok(())
        }
 
+       /// Fails the intercepted HTLC indicated by intercept_id. Should only be called in response to
+       /// an [`HTLCIntercepted`] event. See [`ChannelManager::forward_intercepted_htlc`].
+       ///
+       /// Errors if the event was not handled in time, in which case the HTLC was automatically failed
+       /// backwards.
+       ///
+       /// [`HTLCIntercepted`]: events::Event::HTLCIntercepted
+       pub fn fail_intercepted_htlc(&self, intercept_id: InterceptId) -> Result<(), APIError> {
+               let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
+
+               let payment = self.pending_intercepted_htlcs.lock().unwrap().remove(&intercept_id)
+                       .ok_or_else(|| APIError::APIMisuseError {
+                               err: format!("Payment with InterceptId {:?} not found", intercept_id)
+                       })?;
+
+               if let PendingHTLCRouting::Forward { short_channel_id, .. } = payment.forward_info.routing {
+                       let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+                               short_channel_id: payment.prev_short_channel_id,
+                               outpoint: payment.prev_funding_outpoint,
+                               htlc_id: payment.prev_htlc_id,
+                               incoming_packet_shared_secret: payment.forward_info.incoming_shared_secret,
+                               phantom_shared_secret: None,
+                       });
+
+                       let failure_reason = HTLCFailReason::from_failure_code(0x4000 | 10);
+                       let destination = HTLCDestination::UnknownNextHop { requested_forward_scid: short_channel_id };
+                       self.fail_htlc_backwards_internal(&htlc_source, &payment.forward_info.payment_hash, &failure_reason, destination);
+               } else { unreachable!() } // Only `PendingHTLCRouting::Forward`s are intercepted
+
+               Ok(())
+       }
+
        /// Processes HTLCs which are pending waiting on random forward delay.
        ///
        /// Should only really ever be called in response to a PendingHTLCsForwardable event.
@@ -3149,7 +3200,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                };
 
                                                                                                failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::Reason { failure_code: $err_code, data: $err_data },
+                                                                                                       HTLCFailReason::reason($err_code, $err_data),
                                                                                                        reason
                                                                                                ));
                                                                                                continue;
@@ -3257,7 +3308,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                }
                                                                                                let (failure_code, data) = self.get_htlc_temp_fail_err_and_data(0x1000|7, short_chan_id, chan.get());
                                                                                                failed_forwards.push((htlc_source, payment_hash,
-                                                                                                       HTLCFailReason::Reason { failure_code, data },
+                                                                                                       HTLCFailReason::reason(failure_code, data),
                                                                                                        HTLCDestination::NextHopChannel { node_id: Some(chan.get().get_counterparty_node_id()), channel_id: forward_chan_id }
                                                                                                ));
                                                                                                continue;
@@ -3404,7 +3455,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                                incoming_packet_shared_secret: $htlc.prev_hop.incoming_packet_shared_secret,
                                                                                                phantom_shared_secret,
                                                                                        }), payment_hash,
-                                                                                       HTLCFailReason::Reason { failure_code: 0x4000 | 15, data: htlc_msat_height_data },
+                                                                                       HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
                                                                                        HTLCDestination::FailedPayment { payment_hash: $payment_hash },
                                                                                ));
                                                                        }
@@ -3553,7 +3604,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                }
 
                for (htlc_source, payment_hash, failure_reason, destination) in failed_forwards.drain(..) {
-                       self.fail_htlc_backwards_internal(htlc_source, &payment_hash, failure_reason, destination);
+                       self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
                }
                self.forward_htlcs(&mut phantom_receives);
 
@@ -3816,8 +3867,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        });
 
                        for htlc_source in timed_out_mpp_htlcs.drain(..) {
+                               let source = HTLCSource::PreviousHopData(htlc_source.0.clone());
+                               let reason = HTLCFailReason::from_failure_code(23);
                                let receiver = HTLCDestination::FailedPayment { payment_hash: htlc_source.1 };
-                               self.fail_htlc_backwards_internal(HTLCSource::PreviousHopData(htlc_source.0.clone()), &htlc_source.1, HTLCFailReason::Reason { failure_code: 23, data: Vec::new() }, receiver );
+                               self.fail_htlc_backwards_internal(&source, &htlc_source.1, &reason, receiver);
                        }
 
                        for (err, counterparty_node_id) in handle_errors.drain(..) {
@@ -3852,10 +3905,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
                                                self.best_block.read().unwrap().height()));
-                               self.fail_htlc_backwards_internal(
-                                               HTLCSource::PreviousHopData(htlc.prev_hop), payment_hash,
-                                               HTLCFailReason::Reason { failure_code: 0x4000 | 15, data: htlc_msat_height_data },
-                                               HTLCDestination::FailedPayment { payment_hash: *payment_hash });
+                               let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                               let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                               let receiver = HTLCDestination::FailedPayment { payment_hash: *payment_hash };
+                               self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                        }
                }
        }
@@ -3914,23 +3967,24 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                &self, mut htlcs_to_fail: Vec<(HTLCSource, PaymentHash)>, channel_id: [u8; 32],
                counterparty_node_id: &PublicKey
        ) {
-               for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
-                       let (failure_code, onion_failure_data) =
-                               match self.channel_state.lock().unwrap().by_id.entry(channel_id) {
-                                       hash_map::Entry::Occupied(chan_entry) => {
-                                               self.get_htlc_inbound_temp_fail_err_and_data(0x1000|7, &chan_entry.get())
-                                       },
-                                       hash_map::Entry::Vacant(_) => (0x4000|10, Vec::new())
-                               };
+               let (failure_code, onion_failure_data) =
+                       match self.channel_state.lock().unwrap().by_id.entry(channel_id) {
+                               hash_map::Entry::Occupied(chan_entry) => {
+                                       self.get_htlc_inbound_temp_fail_err_and_data(0x1000|7, &chan_entry.get())
+                               },
+                               hash_map::Entry::Vacant(_) => (0x4000|10, Vec::new())
+                       };
 
+               for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
+                       let reason = HTLCFailReason::reason(failure_code, onion_failure_data.clone());
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id };
-                       self.fail_htlc_backwards_internal(htlc_src, &payment_hash, HTLCFailReason::Reason { failure_code, data: onion_failure_data }, receiver);
+                       self.fail_htlc_backwards_internal(&htlc_src, &payment_hash, &reason, receiver);
                }
        }
 
        /// Fails an HTLC backwards to the sender of it to us.
        /// Note that we do not assume that channels corresponding to failed HTLCs are still available.
-       fn fail_htlc_backwards_internal(&self, source: HTLCSource, payment_hash: &PaymentHash, onion_error: HTLCFailReason,destination: HTLCDestination) {
+       fn fail_htlc_backwards_internal(&self, source: &HTLCSource, payment_hash: &PaymentHash, onion_error: &HTLCFailReason, destination: HTLCDestination) {
                #[cfg(debug_assertions)]
                {
                        // Ensure that the `channel_state` lock is not held when calling this function.
@@ -3949,13 +4003,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                // from block_connected which may run during initialization prior to the chain_monitor
                // being fully configured. See the docs for `ChannelManagerReadArgs` for more.
                match source {
-                       HTLCSource::OutboundRoute { ref path, session_priv, payment_id, ref payment_params, .. } => {
+                       HTLCSource::OutboundRoute { ref path, ref session_priv, ref payment_id, ref payment_params, .. } => {
                                let mut session_priv_bytes = [0; 32];
                                session_priv_bytes.copy_from_slice(&session_priv[..]);
                                let mut outbounds = self.pending_outbound_payments.lock().unwrap();
                                let mut all_paths_failed = false;
                                let mut full_failure_ev = None;
-                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(payment_id) {
+                               if let hash_map::Entry::Occupied(mut payment) = outbounds.entry(*payment_id) {
                                        if !payment.get_mut().remove(&session_priv_bytes, Some(&path)) {
                                                log_trace!(self.logger, "Received duplicative fail for HTLC with payment_hash {}", log_bytes!(payment_hash.0));
                                                return;
@@ -3968,7 +4022,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                all_paths_failed = true;
                                                if payment.get().abandoned() {
                                                        full_failure_ev = Some(events::Event::PaymentFailed {
-                                                               payment_id,
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment.get().payment_hash().expect("PendingOutboundPayments::RetriesExceeded always has a payment hash set"),
                                                        });
                                                        payment.remove();
@@ -3988,126 +4042,69 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                } else { None };
                                log_trace!(self.logger, "Failing outbound payment HTLC with payment_hash {}", log_bytes!(payment_hash.0));
 
-                               let path_failure = match &onion_error {
-                                       &HTLCFailReason::LightningError { ref err } => {
+                               let path_failure = {
 #[cfg(test)]
-                                               let (network_update, short_channel_id, payment_retryable, onion_error_code, onion_error_data) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
+                                       let (network_update, short_channel_id, payment_retryable, onion_error_code, onion_error_data) = onion_error.decode_onion_failure(&self.secp_ctx, &self.logger, &source);
 #[cfg(not(test))]
-                                               let (network_update, short_channel_id, payment_retryable, _, _) = onion_utils::process_onion_failure(&self.secp_ctx, &self.logger, &source, err.data.clone());
+                                       let (network_update, short_channel_id, payment_retryable, _, _) = onion_error.decode_onion_failure(&self.secp_ctx, &self.logger, &source);
 
-                                               if self.payment_is_probe(payment_hash, &payment_id) {
-                                                       if !payment_retryable {
-                                                               events::Event::ProbeSuccessful {
-                                                                       payment_id,
-                                                                       payment_hash: payment_hash.clone(),
-                                                                       path: path.clone(),
-                                                               }
-                                                       } else {
-                                                               events::Event::ProbeFailed {
-                                                                       payment_id,
-                                                                       payment_hash: payment_hash.clone(),
-                                                                       path: path.clone(),
-                                                                       short_channel_id,
-                                                               }
-                                                       }
-                                               } else {
-                                                       // TODO: If we decided to blame ourselves (or one of our channels) in
-                                                       // process_onion_failure we should close that channel as it implies our
-                                                       // next-hop is needlessly blaming us!
-                                                       if let Some(scid) = short_channel_id {
-                                                               retry.as_mut().map(|r| r.payment_params.previously_failed_channels.push(scid));
-                                                       }
-                                                       events::Event::PaymentPathFailed {
-                                                               payment_id: Some(payment_id),
-                                                               payment_hash: payment_hash.clone(),
-                                                               payment_failed_permanently: !payment_retryable,
-                                                               network_update,
-                                                               all_paths_failed,
-                                                               path: path.clone(),
-                                                               short_channel_id,
-                                                               retry,
-                                                               #[cfg(test)]
-                                                               error_code: onion_error_code,
-                                                               #[cfg(test)]
-                                                               error_data: onion_error_data
-                                                       }
-                                               }
-                                       },
-                                       &HTLCFailReason::Reason {
-#[cfg(test)]
-                                                       ref failure_code,
-#[cfg(test)]
-                                                       ref data,
-                                                       .. } => {
-                                               // we get a fail_malformed_htlc from the first hop
-                                               // TODO: We'd like to generate a NetworkUpdate for temporary
-                                               // failures here, but that would be insufficient as find_route
-                                               // generally ignores its view of our own channels as we provide them via
-                                               // ChannelDetails.
-                                               // TODO: For non-temporary failures, we really should be closing the
-                                               // channel here as we apparently can't relay through them anyway.
-                                               let scid = path.first().unwrap().short_channel_id;
-                                               retry.as_mut().map(|r| r.payment_params.previously_failed_channels.push(scid));
-
-                                               if self.payment_is_probe(payment_hash, &payment_id) {
-                                                       events::Event::ProbeFailed {
-                                                               payment_id,
+                                       if self.payment_is_probe(payment_hash, &payment_id) {
+                                               if !payment_retryable {
+                                                       events::Event::ProbeSuccessful {
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment_hash.clone(),
                                                                path: path.clone(),
-                                                               short_channel_id: Some(scid),
                                                        }
                                                } else {
-                                                       events::Event::PaymentPathFailed {
-                                                               payment_id: Some(payment_id),
+                                                       events::Event::ProbeFailed {
+                                                               payment_id: *payment_id,
                                                                payment_hash: payment_hash.clone(),
-                                                               payment_failed_permanently: false,
-                                                               network_update: None,
-                                                               all_paths_failed,
                                                                path: path.clone(),
-                                                               short_channel_id: Some(scid),
-                                                               retry,
-#[cfg(test)]
-                                                               error_code: Some(*failure_code),
-#[cfg(test)]
-                                                               error_data: Some(data.clone()),
+                                                               short_channel_id,
                                                        }
                                                }
+                                       } else {
+                                               // TODO: If we decided to blame ourselves (or one of our channels) in
+                                               // process_onion_failure we should close that channel as it implies our
+                                               // next-hop is needlessly blaming us!
+                                               if let Some(scid) = short_channel_id {
+                                                       retry.as_mut().map(|r| r.payment_params.previously_failed_channels.push(scid));
+                                               }
+                                               events::Event::PaymentPathFailed {
+                                                       payment_id: Some(*payment_id),
+                                                       payment_hash: payment_hash.clone(),
+                                                       payment_failed_permanently: !payment_retryable,
+                                                       network_update,
+                                                       all_paths_failed,
+                                                       path: path.clone(),
+                                                       short_channel_id,
+                                                       retry,
+                                                       #[cfg(test)]
+                                                       error_code: onion_error_code,
+                                                       #[cfg(test)]
+                                                       error_data: onion_error_data
+                                               }
                                        }
                                };
                                let mut pending_events = self.pending_events.lock().unwrap();
                                pending_events.push(path_failure);
                                if let Some(ev) = full_failure_ev { pending_events.push(ev); }
                        },
-                       HTLCSource::PreviousHopData(HTLCPreviousHopData { short_channel_id, htlc_id, incoming_packet_shared_secret, phantom_shared_secret, outpoint }) => {
-                               let err_packet = match onion_error {
-                                       HTLCFailReason::Reason { failure_code, data } => {
-                                               log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with code {}", log_bytes!(payment_hash.0), failure_code);
-                                               if let Some(phantom_ss) = phantom_shared_secret {
-                                                       let phantom_packet = onion_utils::build_failure_packet(&phantom_ss, failure_code, &data[..]).encode();
-                                                       let encrypted_phantom_packet = onion_utils::encrypt_failure_packet(&phantom_ss, &phantom_packet);
-                                                       onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &encrypted_phantom_packet.data[..])
-                                               } else {
-                                                       let packet = onion_utils::build_failure_packet(&incoming_packet_shared_secret, failure_code, &data[..]).encode();
-                                                       onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &packet)
-                                               }
-                                       },
-                                       HTLCFailReason::LightningError { err } => {
-                                               log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards with pre-built LightningError", log_bytes!(payment_hash.0));
-                                               onion_utils::encrypt_failure_packet(&incoming_packet_shared_secret, &err.data)
-                                       }
-                               };
+                       HTLCSource::PreviousHopData(HTLCPreviousHopData { ref short_channel_id, ref htlc_id, ref incoming_packet_shared_secret, ref phantom_shared_secret, ref outpoint }) => {
+                               log_trace!(self.logger, "Failing HTLC with payment_hash {} backwards from us with {:?}", log_bytes!(payment_hash.0), onion_error);
+                               let err_packet = onion_error.get_encrypted_failure_packet(incoming_packet_shared_secret, phantom_shared_secret);
 
                                let mut forward_event = None;
                                let mut forward_htlcs = self.forward_htlcs.lock().unwrap();
                                if forward_htlcs.is_empty() {
                                        forward_event = Some(Duration::from_millis(MIN_HTLC_RELAY_HOLDING_CELL_MILLIS));
                                }
-                               match forward_htlcs.entry(short_channel_id) {
+                               match forward_htlcs.entry(*short_channel_id) {
                                        hash_map::Entry::Occupied(mut entry) => {
-                                               entry.get_mut().push(HTLCForwardInfo::FailHTLC { htlc_id, err_packet });
+                                               entry.get_mut().push(HTLCForwardInfo::FailHTLC { htlc_id: *htlc_id, err_packet });
                                        },
                                        hash_map::Entry::Vacant(entry) => {
-                                               entry.insert(vec!(HTLCForwardInfo::FailHTLC { htlc_id, err_packet }));
+                                               entry.insert(vec!(HTLCForwardInfo::FailHTLC { htlc_id: *htlc_id, err_packet }));
                                        }
                                }
                                mem::drop(forward_htlcs);
@@ -4119,7 +4116,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                }
                                pending_events.push(events::Event::HTLCHandlingFailed {
                                        prev_channel_id: outpoint.to_channel_id(),
-                                       failed_next_destination: destination
+                                       failed_next_destination: destination,
                                });
                        },
                }
@@ -4248,10 +4245,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                        htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
                                                self.best_block.read().unwrap().height()));
-                                       self.fail_htlc_backwards_internal(
-                                               HTLCSource::PreviousHopData(htlc.prev_hop), &payment_hash,
-                                               HTLCFailReason::Reason { failure_code: 0x4000|15, data: htlc_msat_height_data },
-                                               HTLCDestination::FailedPayment { payment_hash } );
+                                       let source = HTLCSource::PreviousHopData(htlc.prev_hop);
+                                       let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
+                                       let receiver = HTLCDestination::FailedPayment { payment_hash };
+                                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                                }
                        }
 
@@ -4575,7 +4572,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                self.finalize_claims(finalized_claims);
                for failure in pending_failures.drain(..) {
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id: funding_txo.to_channel_id() };
-                       self.fail_htlc_backwards_internal(failure.0, &failure.1, failure.2, receiver);
+                       self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
                }
        }
 
@@ -4943,7 +4940,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                };
                for htlc_source in dropped_htlcs.drain(..) {
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
-                       self.fail_htlc_backwards_internal(htlc_source.0, &htlc_source.1, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                       self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
                }
 
                let _ = handle_error!(self, result, *counterparty_node_id);
@@ -5022,10 +5020,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                PendingHTLCStatus::Forward(PendingHTLCInfo { ref incoming_shared_secret, .. }) => {
                                                        let reason = if (error_code & 0x1000) != 0 {
                                                                let (real_code, error_data) = self.get_htlc_inbound_temp_fail_err_and_data(error_code, chan);
-                                                               onion_utils::build_first_hop_failure_packet(incoming_shared_secret, real_code, &error_data)
+                                                               HTLCFailReason::reason(real_code, error_data)
                                                        } else {
-                                                               onion_utils::build_first_hop_failure_packet(incoming_shared_secret, error_code, &[])
-                                                       };
+                                                               HTLCFailReason::from_failure_code(error_code)
+                                                       }.get_encrypted_failure_packet(incoming_shared_secret, &None);
                                                        let msg = msgs::UpdateFailHTLC {
                                                                channel_id: msg.channel_id,
                                                                htlc_id: msg.htlc_id,
@@ -5069,7 +5067,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                if chan.get().get_counterparty_node_id() != *counterparty_node_id {
                                        return Err(MsgHandleErrInternal::send_err_msg_no_close("Got a message for a channel from the wrong node!".to_owned(), msg.channel_id));
                                }
-                               try_chan_entry!(self, chan.get_mut().update_fail_htlc(&msg, HTLCFailReason::LightningError { err: msg.reason.clone() }), chan);
+                               try_chan_entry!(self, chan.get_mut().update_fail_htlc(&msg, HTLCFailReason::from_msg(msg)), chan);
                        },
                        hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
                }
@@ -5088,7 +5086,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                        let chan_err: ChannelError = ChannelError::Close("Got update_fail_malformed_htlc with BADONION not set".to_owned());
                                        try_chan_entry!(self, Err(chan_err), chan);
                                }
-                               try_chan_entry!(self, chan.get_mut().update_fail_malformed_htlc(&msg, HTLCFailReason::Reason { failure_code: msg.failure_code, data: Vec::new() }), chan);
+                               try_chan_entry!(self, chan.get_mut().update_fail_malformed_htlc(&msg, HTLCFailReason::reason(msg.failure_code, msg.sha256_of_onion.to_vec())), chan);
                                Ok(())
                        },
                        hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close("Failed to find corresponding channel".to_owned(), msg.channel_id))
@@ -5194,7 +5192,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                                                });
 
                                                                                failed_intercept_forwards.push((htlc_source, forward_info.payment_hash,
-                                                                                               HTLCFailReason::Reason { failure_code: 0x4000 | 10, data: Vec::new() },
+                                                                                               HTLCFailReason::from_failure_code(0x4000 | 10),
                                                                                                HTLCDestination::InvalidForward { requested_forward_scid: scid },
                                                                                ));
                                                                        }
@@ -5214,7 +5212,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        }
 
                        for (htlc_source, payment_hash, failure_reason, destination) in failed_intercept_forwards.drain(..) {
-                               self.fail_htlc_backwards_internal(htlc_source, &payment_hash, failure_reason, destination);
+                               self.fail_htlc_backwards_internal(&htlc_source, &payment_hash, &failure_reason, destination);
                        }
 
                        if !new_intercept_events.is_empty() {
@@ -5289,7 +5287,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                        {
                                for failure in pending_failures.drain(..) {
                                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: channel_outpoint.to_channel_id() };
-                                       self.fail_htlc_backwards_internal(failure.0, &failure.1, failure.2, receiver);
+                                       self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
                                }
                                self.forward_htlcs(&mut [(short_channel_id, channel_outpoint, user_channel_id, pending_forwards)]);
                                self.finalize_claims(finalized_claim_htlcs);
@@ -5449,7 +5447,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
                                                } else {
                                                        log_trace!(self.logger, "Failing HTLC with hash {} from our monitor", log_bytes!(htlc_update.payment_hash.0));
                                                        let receiver = HTLCDestination::NextHopChannel { node_id: counterparty_node_id, channel_id: funding_outpoint.to_channel_id() };
-                                                       self.fail_htlc_backwards_internal(htlc_update.source, &htlc_update.payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                                                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                                                       self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
                                                }
                                        },
                                        MonitorEvent::CommitmentTxConfirmed(funding_outpoint) |
@@ -6135,9 +6134,8 @@ where
                                if let Ok((channel_ready_opt, mut timed_out_pending_htlcs, announcement_sigs)) = res {
                                        for (source, payment_hash) in timed_out_pending_htlcs.drain(..) {
                                                let (failure_code, data) = self.get_htlc_inbound_temp_fail_err_and_data(0x1000|14 /* expiry_too_soon */, &channel);
-                                               timed_out_htlcs.push((source, payment_hash, HTLCFailReason::Reason {
-                                                       failure_code, data,
-                                               }, HTLCDestination::NextHopChannel { node_id: Some(channel.get_counterparty_node_id()), channel_id: channel.channel_id() }));
+                                               timed_out_htlcs.push((source, payment_hash, HTLCFailReason::reason(failure_code, data),
+                                                       HTLCDestination::NextHopChannel { node_id: Some(channel.get_counterparty_node_id()), channel_id: channel.channel_id() }));
                                        }
                                        if let Some(channel_ready) = channel_ready_opt {
                                                send_channel_ready!(self, pending_msg_events, channel, channel_ready);
@@ -6224,21 +6222,43 @@ where
                                                let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
                                                htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(height));
 
-                                               timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(), HTLCFailReason::Reason {
-                                                       failure_code: 0x4000 | 15,
-                                                       data: htlc_msat_height_data
-                                               }, HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
+                                               timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(),
+                                                       HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data),
+                                                       HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
                                                false
                                        } else { true }
                                });
                                !htlcs.is_empty() // Only retain this entry if htlcs has at least one entry.
                        });
+
+                       let mut intercepted_htlcs = self.pending_intercepted_htlcs.lock().unwrap();
+                       intercepted_htlcs.retain(|_, htlc| {
+                               if height >= htlc.forward_info.outgoing_cltv_value - HTLC_FAIL_BACK_BUFFER {
+                                       let prev_hop_data = HTLCSource::PreviousHopData(HTLCPreviousHopData {
+                                               short_channel_id: htlc.prev_short_channel_id,
+                                               htlc_id: htlc.prev_htlc_id,
+                                               incoming_packet_shared_secret: htlc.forward_info.incoming_shared_secret,
+                                               phantom_shared_secret: None,
+                                               outpoint: htlc.prev_funding_outpoint,
+                                       });
+
+                                       let requested_forward_scid /* intercept scid */ = match htlc.forward_info.routing {
+                                               PendingHTLCRouting::Forward { short_channel_id, .. } => short_channel_id,
+                                               _ => unreachable!(),
+                                       };
+                                       timed_out_htlcs.push((prev_hop_data, htlc.forward_info.payment_hash,
+                                                       HTLCFailReason::from_failure_code(0x2000 | 2),
+                                                       HTLCDestination::InvalidForward { requested_forward_scid }));
+                                       log_trace!(self.logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
+                                       false
+                               } else { true }
+                       });
                }
 
                self.handle_init_event_channel_failures(failed_channels);
 
                for (source, payment_hash, reason, destination) in timed_out_htlcs.drain(..) {
-                       self.fail_htlc_backwards_internal(source, &payment_hash, reason, destination);
+                       self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, destination);
                }
        }
 
@@ -6949,16 +6969,6 @@ impl Writeable for HTLCSource {
        }
 }
 
-impl_writeable_tlv_based_enum!(HTLCFailReason,
-       (0, LightningError) => {
-               (0, err, required),
-       },
-       (1, Reason) => {
-               (0, failure_code, required),
-               (2, data, vec_type),
-       },
-;);
-
 impl_writeable_tlv_based!(PendingAddHTLCInfo, {
        (0, forward_info, required),
        (1, prev_user_channel_id, (default_value, 0)),
@@ -7725,7 +7735,8 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
                for htlc_source in failed_htlcs.drain(..) {
                        let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
                        let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
-                       channel_manager.fail_htlc_backwards_internal(source, &payment_hash, HTLCFailReason::Reason { failure_code: 0x4000 | 8, data: Vec::new() }, receiver);
+                       let reason = HTLCFailReason::from_failure_code(0x4000 | 8);
+                       channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
                }
 
                //TODO: Broadcast channel update for closed channels, but only after we've made a