X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=blobdiff_plain;f=lightning%2Fsrc%2Fln%2Ffunctional_tests.rs;h=af4989583112bb9bf9be9b57944ff6b634e18ec0;hb=d2520f490897eab5eaa1300b42ac7ffa0951d115;hp=cfc29394f8b5e56acce9454b2b5d096b5af0e109;hpb=9098240e341cf09578819cdf1d2950303478b137;p=rust-lightning

diff --git a/lightning/src/ln/functional_tests.rs b/lightning/src/ln/functional_tests.rs
index cfc29394..af498958 100644
--- a/lightning/src/ln/functional_tests.rs
+++ b/lightning/src/ln/functional_tests.rs
@@ -64,7 +64,7 @@ fn test_insane_channel_opens() {
 	// Instantiate channel parameters where we push the maximum msats given our
 	// funding satoshis
 	let channel_value_sat = 31337; // same as funding satoshis
-	let channel_reserve_satoshis = Channel::::get_our_channel_reserve_satoshis(channel_value_sat);
+	let channel_reserve_satoshis = Channel::::get_remote_channel_reserve_satoshis(channel_value_sat);
 	let push_msat = (channel_value_sat - channel_reserve_satoshis) * 1000;
 
 	// Have node0 initiate a channel to node1 with aforementioned parameters
@@ -1578,9 +1578,9 @@ fn do_channel_reserve_test(test_recv: bool) {
 		// attempt to get channel_reserve violation
 		let (route, our_payment_hash, _) = get_route_and_payment_hash!(recv_value + 1);
 		unwrap_send_err!(nodes[0].node.send_payment(&route, our_payment_hash, &None), true, APIError::ChannelUnavailable { err },
-			assert_eq!(err, "Cannot send value that would put us over their reserve value"));
+			assert_eq!(err, "Cannot send value that would put us under local channel reserve value"));
 		assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-		nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us over their reserve value".to_string(), 1);
+		nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us under local channel reserve value".to_string(), 1);
 	}
 
 	// adding pending output
@@ -1603,9 +1603,9 @@ fn do_channel_reserve_test(test_recv: bool) {
 	{
 		let (route, our_payment_hash, _) = get_route_and_payment_hash!(recv_value_2 + 1);
 		unwrap_send_err!(nodes[0].node.send_payment(&route, our_payment_hash, &None), true, APIError::ChannelUnavailable { err },
-			assert_eq!(err, "Cannot send value that would put us over their reserve value"));
+			assert_eq!(err, "Cannot send value that would put us under local channel reserve value"));
 		assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-		nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us over their reserve value".to_string(), 2);
+		nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us under local channel reserve value".to_string(), 2);
 	}
 
 	{
@@ -1640,7 +1640,7 @@ fn do_channel_reserve_test(test_recv: bool) {
 		assert_eq!(nodes[1].node.list_channels().len(), 1);
 		assert_eq!(nodes[1].node.list_channels().len(), 1);
 		let err_msg = check_closed_broadcast!(nodes[1], true).unwrap();
-		assert_eq!(err_msg.data, "Remote HTLC add would put them over their reserve value");
+		assert_eq!(err_msg.data, "Remote HTLC add would put them under their reserve value");
 		check_added_monitors!(nodes[1], 1);
 		return;
 	}
@@ -1666,9 +1666,9 @@ fn do_channel_reserve_test(test_recv: bool) {
 	{
 		let (route, our_payment_hash, _) = get_route_and_payment_hash!(recv_value_22+1);
 		unwrap_send_err!(nodes[0].node.send_payment(&route, our_payment_hash, &None), true, APIError::ChannelUnavailable { err },
-			assert_eq!(err, "Cannot send value that would put us over their reserve value"));
+			assert_eq!(err, "Cannot send value that would put us under local channel reserve value"));
 		assert!(nodes[0].node.get_and_clear_pending_msg_events().is_empty());
-		nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us over their reserve value".to_string(), 3);
+		nodes[0].logger.assert_log("lightning::ln::channelmanager".to_string(), "Cannot send value that would put us under local channel reserve value".to_string(), 3);
 	}
 
 	let (route_22, our_payment_hash_22, our_payment_preimage_22) = get_route_and_payment_hash!(recv_value_22);
@@ -5775,7 +5775,7 @@ fn bolt2_open_channel_sending_node_checks_part2() {
 	assert!(PublicKey::from_slice(&node0_to_1_send_open_channel.funding_pubkey.serialize()).is_ok());
 	assert!(PublicKey::from_slice(&node0_to_1_send_open_channel.revocation_basepoint.serialize()).is_ok());
 	assert!(PublicKey::from_slice(&node0_to_1_send_open_channel.htlc_basepoint.serialize()).is_ok());
-	assert!(PublicKey::from_slice(&node0_to_1_send_open_channel.payment_basepoint.serialize()).is_ok());
+	assert!(PublicKey::from_slice(&node0_to_1_send_open_channel.payment_point.serialize()).is_ok());
 	assert!(PublicKey::from_slice(&node0_to_1_send_open_channel.delayed_payment_basepoint.serialize()).is_ok());
 }
 
@@ -5977,7 +5977,7 @@ fn test_update_add_htlc_bolt2_receiver_sender_can_afford_amount_sent() {
 
 	assert!(nodes[1].node.list_channels().is_empty());
 	let err_msg = check_closed_broadcast!(nodes[1], true).unwrap();
-	assert_eq!(err_msg.data, "Remote HTLC add would put them over their reserve value");
+	assert_eq!(err_msg.data, "Remote HTLC add would put them under their reserve value");
 	check_added_monitors!(nodes[1], 1);
 }
 
@@ -6807,7 +6807,7 @@ fn test_data_loss_protect() {
 	// We want to be sure that :
 	// * we don't broadcast our Local Commitment Tx in case of fallen behind
 	// * we close channel in case of detecting other being fallen behind
-	// * we are able to claim our own outputs thanks to remote my_current_per_commitment_point
+	// * we are able to claim our own outputs thanks to to_remote being static
 	let keys_manager;
 	let fee_estimator;
 	let tx_broadcaster;
@@ -6868,9 +6868,9 @@
 
 	let reestablish_0 = get_chan_reestablish_msgs!(nodes[1], nodes[0]);
 
-	// Check we update monitor following learning of per_commitment_point from B
+	// Check we don't broadcast any transactions following learning of per_commitment_point from B
 	nodes[0].node.handle_channel_reestablish(&nodes[1].node.get_our_node_id(), &reestablish_0[0]);
-	check_added_monitors!(nodes[0], 2);
+	check_added_monitors!(nodes[0], 1);
 	{
 		let node_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().clone();