Merge pull request #97 from TheBlueMatt/2018-07-no-useless-preimages
src/ln/channelmanager.rs
index 1e3021bf09bc5da4ce2cdddf246b5db6ea3899b4..6c349fd0ebe2c031c0cbddfb8b3c41354b56778d 100644
@@ -137,13 +137,15 @@ impl ChannelHolder {
                        by_id: &mut self.by_id,
                        short_to_id: &mut self.short_to_id,
                        next_forward: &mut self.next_forward,
-                       /// short channel id -> forward infos. Key of 0 means payments received
                        forward_htlcs: &mut self.forward_htlcs,
                        claimable_htlcs: &mut self.claimable_htlcs,
                }
        }
 }
 
+#[cfg(not(any(target_pointer_width = "32", target_pointer_width = "64")))]
+const ERR: () = "You need at least 32 bit pointers (well, usize, but we'll assume they're the same) for ChannelManager::latest_block_height";
+
 /// Manager which keeps track of a number of channels and sends messages to the appropriate
 /// channel, also tracking HTLC preimages and forwarding onion packets appropriately.
 /// Implements ChannelMessageHandler, handling the multi-channel parts and passing things through
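[Editor's note: the cfg'd const added above is a compile-time assertion: a &str can never initialize a const of type (), so the crate fails to build, quoting the string in the error, exactly when usize is narrower than 32 bits. A minimal standalone sketch of the same idiom follows; the const name here is hypothetical, not the PR's.]

// Sketch of the compile-time assertion idiom used above (not the PR's code).
#[cfg(not(any(target_pointer_width = "32", target_pointer_width = "64")))]
const ASSERT_USIZE_IS_AT_LEAST_32_BITS: () =
    "usize must be at least 32 bits for AtomicUsize-based block heights";

fn main() {
    // On 32-/64-bit targets the const is cfg'd out entirely and this builds;
    // on narrower targets the &str-to-() type mismatch aborts compilation,
    // printing the message as part of the error.
}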
@@ -157,7 +159,7 @@ pub struct ChannelManager {
 
        announce_channels_publicly: bool,
        fee_proportional_millionths: u32,
-       latest_block_height: AtomicUsize, //TODO: Compile-time assert this is at least 32-bits long
+       latest_block_height: AtomicUsize,
        secp_ctx: Secp256k1,
 
        channel_state: Mutex<ChannelHolder>,
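[Editor's note: with that guard in place, the TODO on latest_block_height is discharged and the field stays a plain AtomicUsize, since a u32 block height always fits in a usize of at least 32 bits. A sketch of how such a field can be written and read; the BlockTracker/block_connected names and the Release/Acquire orderings are illustrative assumptions, not necessarily what ChannelManager does.]

use std::sync::atomic::{AtomicUsize, Ordering};

// Hypothetical stand-in for ChannelManager's latest_block_height field.
struct BlockTracker {
    latest_block_height: AtomicUsize,
}

impl BlockTracker {
    fn block_connected(&self, height: u32) {
        // usize is at least 32 bits (enforced by the cfg assertion above),
        // so a u32 block height always fits.
        self.latest_block_height.store(height as usize, Ordering::Release);
    }

    fn height(&self) -> u32 {
        self.latest_block_height.load(Ordering::Acquire) as u32
    }
}

fn main() {
    let tracker = BlockTracker { latest_block_height: AtomicUsize::new(0) };
    tracker.block_connected(500_123);
    assert_eq!(tracker.height(), 500_123);
}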
@@ -388,7 +390,7 @@ impl ChannelManager {
                let mut chan = {
                        let mut channel_state_lock = self.channel_state.lock().unwrap();
                        let channel_state = channel_state_lock.borrow_parts();
-                       if let Some(mut chan) = channel_state.by_id.remove(channel_id) {
+                       if let Some(chan) = channel_state.by_id.remove(channel_id) {
                                if let Some(short_id) = chan.get_short_channel_id() {
                                        channel_state.short_to_id.remove(&short_id);
                                }
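[Editor's note: the hunk above only drops a needless mut: HashMap::remove returns the channel by value, and it is merely read before being moved out. The surrounding pattern, keeping by_id and the short_to_id alias map in sync on removal, looks roughly like this sketch with simplified stand-in types.]

use std::collections::HashMap;

// Simplified stand-ins for ChannelHolder's maps (assumed shapes, not the real types).
struct Channel { short_channel_id: Option<u64> }

impl Channel {
    fn get_short_channel_id(&self) -> Option<u64> { self.short_channel_id }
}

fn remove_channel(
    by_id: &mut HashMap<[u8; 32], Channel>,
    short_to_id: &mut HashMap<u64, [u8; 32]>,
    channel_id: &[u8; 32],
) -> Option<Channel> {
    // `remove` hands back the owned Channel, so no `mut` binding is needed
    // just to read its short id; the alias map entry is dropped alongside it.
    if let Some(chan) = by_id.remove(channel_id) {
        if let Some(short_id) = chan.get_short_channel_id() {
            short_to_id.remove(&short_id);
        }
        Some(chan)
    } else {
        None
    }
}

fn main() {
    let mut by_id = HashMap::new();
    let mut short_to_id = HashMap::new();
    by_id.insert([0u8; 32], Channel { short_channel_id: Some(42) });
    short_to_id.insert(42, [0u8; 32]);
    assert!(remove_channel(&mut by_id, &mut short_to_id, &[0u8; 32]).is_some());
    assert!(short_to_id.is_empty());
}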
@@ -1622,20 +1624,14 @@ impl ChannelMessageHandler for ChannelManager {
                // destination. That's OK since those nodes are probably busted or trying to do network
                // mapping through repeated loops. In either case, we want them to stop talking to us, so
                // we send permanent_node_failure.
-               match &claimable_htlcs_entry {
-                       &hash_map::Entry::Occupied(ref e) => {
-                               let mut acceptable_cycle = false;
-                               match e.get() {
-                                       &PendingOutboundHTLC::OutboundRoute { .. } => {
-                                               acceptable_cycle = pending_forward_info.short_channel_id == 0;
-                                       },
-                                       _ => {},
-                               }
-                               if !acceptable_cycle {
-                                       return_err!("Payment looped through us twice", 0x4000 | 0x2000 | 2, &[0;0]);
-                               }
-                       },
-                       _ => {},
+               if let &hash_map::Entry::Occupied(ref e) = &claimable_htlcs_entry {
+                       let mut acceptable_cycle = false;
+                       if let &PendingOutboundHTLC::OutboundRoute { .. } = e.get() {
+                               acceptable_cycle = pending_forward_info.short_channel_id == 0;
+                       }
+                       if !acceptable_cycle {
+                               return_err!("Payment looped through us twice", 0x4000 | 0x2000 | 2, &[0;0]);
+                       }
                }
 
                let (source_short_channel_id, res) = match channel_state.by_id.get_mut(&msg.channel_id) {
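[Editor's note: the rewritten block above preserves the original logic: a duplicate payment hash is tolerated only when the existing entry is our own OutboundRoute and the incoming HTLC terminates at us (short_channel_id == 0 marks a received payment); any other repeat is treated as a loop and failed. The predicate in isolation, using simplified stand-in types.]

// Simplified stand-ins for the PendingOutboundHTLC cases involved here.
enum PendingOutboundHTLC {
    OutboundRoute,       // an HTLC we ourselves originated
    IntermediaryHopData, // an HTLC we are forwarding for someone else
}

/// Returns true iff a duplicate payment hash is an acceptable cycle:
/// the prior entry is our own outbound payment and the incoming HTLC
/// terminates at us (short_channel_id == 0 means "payment received").
fn acceptable_cycle(existing: &PendingOutboundHTLC, incoming_short_channel_id: u64) -> bool {
    match existing {
        PendingOutboundHTLC::OutboundRoute => incoming_short_channel_id == 0,
        _ => false,
    }
}

fn main() {
    // A payment we routed back to ourselves is fine...
    assert!(acceptable_cycle(&PendingOutboundHTLC::OutboundRoute, 0));
    // ...but a hash reappearing on a forward is a loop: fail with
    // permanent_node_failure, i.e. PERM | NODE | 2 = 0x4000 | 0x2000 | 2.
    assert!(!acceptable_cycle(&PendingOutboundHTLC::IntermediaryHopData, 7));
}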
@@ -1686,22 +1682,16 @@ impl ChannelMessageHandler for ChannelManager {
                // is broken, we may have enough info to get our own money!
                self.claim_funds_internal(msg.payment_preimage.clone(), false);
 
-               let monitor = {
-                       let mut channel_state = self.channel_state.lock().unwrap();
-                       match channel_state.by_id.get_mut(&msg.channel_id) {
-                               Some(chan) => {
-                                       if chan.get_their_node_id() != *their_node_id {
-                                               return Err(HandleError{err: "Got a message for a channel from the wrong node!", action: None})
-                                       }
-                                       chan.update_fulfill_htlc(&msg)?
-                               },
-                               None => return Err(HandleError{err: "Failed to find corresponding channel", action: None})
-                       }
-               };
-               if let Err(_e) = self.monitor.add_update_monitor(monitor.get_funding_txo().unwrap(), monitor) {
-                       unimplemented!();
+               let mut channel_state = self.channel_state.lock().unwrap();
+               match channel_state.by_id.get_mut(&msg.channel_id) {
+                       Some(chan) => {
+                               if chan.get_their_node_id() != *their_node_id {
+                                       return Err(HandleError{err: "Got a message for a channel from the wrong node!", action: None})
+                               }
+                               chan.update_fulfill_htlc(&msg)
+                       },
+                       None => return Err(HandleError{err: "Failed to find corresponding channel", action: None})
                }
-               Ok(())
        }
 
        fn handle_update_fail_htlc(&self, their_node_id: &PublicKey, msg: &msgs::UpdateFailHTLC) -> Result<Option<msgs::HTLCFailChannelUpdate>, HandleError> {
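[Editor's note: in the handle_update_fulfill_htlc hunk above, the preimage has already been handed along via claim_funds_internal a few lines earlier, so the extra add_update_monitor step only stored it a second time (the "useless preimages" of the branch name). Dropping it lets the handler return the channel's result directly instead of unwrapping with ? and re-wrapping in Ok(()). The control-flow change in miniature, with hypothetical names.]

fn inner_step(ok: bool) -> Result<(), &'static str> {
    if ok { Ok(()) } else { Err("channel error") }
}

fn before(ok: bool) -> Result<(), &'static str> {
    inner_step(ok)?; // unwrap the result...
    // ...previously: hand a monitor update to the ChannelMonitor here
    Ok(())           // ...then re-wrap it
}

fn after(ok: bool) -> Result<(), &'static str> {
    inner_step(ok)   // return the channel's result as-is
}

fn main() {
    assert_eq!(before(true), after(true));
    assert_eq!(before(false), after(false));
}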
@@ -2496,10 +2486,9 @@ mod tests {
                                        {
                                                let mut added_monitors = $node.chan_monitor.added_monitors.lock().unwrap();
                                                if $last_node {
-                                                       assert_eq!(added_monitors.len(), 1);
+                                                       assert_eq!(added_monitors.len(), 0);
                                                } else {
-                                                       assert_eq!(added_monitors.len(), 2);
-                                                       assert!(added_monitors[0].0 != added_monitors[1].0);
+                                                       assert_eq!(added_monitors.len(), 1);
                                                }
                                                added_monitors.clear();
                                        }