Merge pull request #1434 from TheBlueMatt/2022-04-robust-payment-claims
Improve Robustness of Inbound MPP Claims Across Restart
valentinewallace committed May 30, 2022
2 parents ce7b0b4 + 531d6c8 commit a534a5e
Showing 14 changed files with 754 additions and 175 deletions.
5 changes: 3 additions & 2 deletions fuzz/src/chanmon_consistency.rs
@@ -840,13 +840,14 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out) {
events::Event::PaymentReceived { payment_hash, .. } => {
if claim_set.insert(payment_hash.0) {
if $fail {
assert!(nodes[$node].fail_htlc_backwards(&payment_hash));
nodes[$node].fail_htlc_backwards(&payment_hash);
} else {
assert!(nodes[$node].claim_funds(PaymentPreimage(payment_hash.0)));
nodes[$node].claim_funds(PaymentPreimage(payment_hash.0));
}
}
},
events::Event::PaymentSent { .. } => {},
events::Event::PaymentClaimed { .. } => {},
events::Event::PaymentPathSuccessful { .. } => {},
events::Event::PaymentPathFailed { .. } => {},
events::Event::PaymentForwarded { .. } if $node == 1 => {},
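With this change, `fail_htlc_backwards` and `claim_funds` are no longer wrapped in `assert!` (they stop returning a bool), and a successful claim instead surfaces as the new `Event::PaymentClaimed`, which the fuzz target now tolerates alongside `Event::PaymentReceived`. A hedged sketch of an event handler written against this API — the `lightning::util::events` module path and the exact variant fields are assumptions drawn from the diff, not a verbatim copy of it:

```rust
// Sketch only: module path and variant fields assumed from the diff above.
use lightning::util::events::{Event, PaymentPurpose};

fn handle_event(event: &Event) {
    match event {
        Event::PaymentReceived { payment_hash, amount_msat, purpose, .. } => {
            println!("received {} msat for {:?}", amount_msat, payment_hash);
            // For invoice payments the preimage may already be known. Claiming it via
            // ChannelManager::claim_funds no longer returns a bool -- wait for
            // Event::PaymentClaimed below to learn that the claim actually completed.
            if let PaymentPurpose::InvoicePayment { payment_preimage: Some(_preimage), .. } = purpose {
                // channel_manager.claim_funds(*_preimage);
            }
        },
        Event::PaymentClaimed { payment_hash, .. } => {
            // New in this PR: emitted once the payment (all of its MPP parts) is claimed.
            println!("claimed {:?}", payment_hash);
        },
        _ => {},
    }
}
```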
11 changes: 7 additions & 4 deletions lightning/src/chain/chainmonitor.rs
@@ -731,7 +731,7 @@ impl<ChannelSigner: Sign, C: Deref, T: Deref, F: Deref, L: Deref, P: Deref> even
mod tests {
use bitcoin::BlockHeader;
use ::{check_added_monitors, check_closed_broadcast, check_closed_event};
use ::{expect_payment_sent, expect_payment_sent_without_paths, expect_payment_path_successful, get_event_msg};
use ::{expect_payment_sent, expect_payment_claimed, expect_payment_sent_without_paths, expect_payment_path_successful, get_event_msg};
use ::{get_htlc_update_msgs, get_local_commitment_txn, get_revoke_commit_msgs, get_route_and_payment_hash, unwrap_send_err};
use chain::{ChannelMonitorUpdateErr, Confirm, Watch};
use chain::channelmonitor::LATENCY_GRACE_PERIOD_BLOCKS;
@@ -798,16 +798,18 @@ mod tests {
create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());

// Route two payments to be claimed at the same time.
let payment_preimage_1 = route_payment(&nodes[0], &[&nodes[1]], 1_000_000).0;
let payment_preimage_2 = route_payment(&nodes[0], &[&nodes[1]], 1_000_000).0;
let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
let (payment_preimage_2, payment_hash_2, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

chanmon_cfgs[1].persister.offchain_monitor_updates.lock().unwrap().clear();
chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));

nodes[1].node.claim_funds(payment_preimage_1);
check_added_monitors!(nodes[1], 1);
expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);
nodes[1].node.claim_funds(payment_preimage_2);
check_added_monitors!(nodes[1], 1);
expect_payment_claimed!(nodes[1], payment_hash_2, 1_000_000);

chanmon_cfgs[1].persister.set_update_ret(Ok(()));

@@ -877,8 +879,9 @@ mod tests {
let (route, second_payment_hash, _, second_payment_secret) = get_route_and_payment_hash!(nodes[0], nodes[1], 100_000);

// First route a payment that we will claim on chain and give the recipient the preimage.
let payment_preimage = route_payment(&nodes[0], &[&nodes[1]], 1_000_000).0;
let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
nodes[1].node.claim_funds(payment_preimage);
expect_payment_claimed!(nodes[1], payment_hash, 1_000_000);
nodes[1].node.get_and_clear_pending_msg_events();
check_added_monitors!(nodes[1], 1);
let remote_txn = get_local_commitment_txn!(nodes[1], channel.2);
11 changes: 10 additions & 1 deletion lightning/src/chain/channelmonitor.rs
@@ -655,6 +655,10 @@ pub(crate) struct ChannelMonitorImpl<Signer: Sign> {
// deserialization
current_holder_commitment_number: u64,

/// The set of payment hashes from inbound payments for which we know the preimage. Payment
/// preimages that are not included in any unrevoked local commitment transaction or unrevoked
/// remote commitment transactions are automatically removed when commitment transactions are
/// revoked.
payment_preimages: HashMap<PaymentHash, PaymentPreimage>,

// Note that `MonitorEvent`s MUST NOT be generated during update processing, only generated
@@ -1085,7 +1089,8 @@ impl<Signer: Sign> ChannelMonitor<Signer> {
self.inner.lock().unwrap().provide_latest_holder_commitment_tx(holder_commitment_tx, htlc_outputs).map_err(|_| ())
}

#[cfg(test)]
/// This is used to provide payment preimage(s) out-of-band during startup without updating the
/// off-chain state with a new commitment transaction.
pub(crate) fn provide_payment_preimage<B: Deref, F: Deref, L: Deref>(
&self,
payment_hash: &PaymentHash,
@@ -1632,6 +1637,10 @@ impl<Signer: Sign> ChannelMonitor<Signer> {

res
}

pub(crate) fn get_stored_preimages(&self) -> HashMap<PaymentHash, PaymentPreimage> {
self.inner.lock().unwrap().payment_preimages.clone()
}
}

/// Compares a broadcasted commitment transaction's HTLCs with those in the latest state,
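The newly documented `payment_preimages` map, the now non-test `provide_payment_preimage`, and the added `get_stored_preimages` together point at the restart flow: preimages for claimed inbound payments live in the `ChannelMonitor` (pruned once no unrevoked commitment transaction can need them) and are handed back at startup so interrupted claims — for example an MPP claim that only reached some channels before shutdown — can be replayed. Below is a minimal, self-contained sketch of that pattern using stand-in types; it is not LDK's internal code.

```rust
// Stand-in types illustrating the pattern, not the crate's real ChannelMonitor.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct PaymentHash([u8; 32]);
#[derive(Clone, Copy, Debug)]
struct PaymentPreimage([u8; 32]);

#[derive(Default)]
struct MonitorSketch {
    payment_preimages: HashMap<PaymentHash, PaymentPreimage>,
}

impl MonitorSketch {
    /// Record a preimage, e.g. one provided out-of-band during startup.
    fn provide_payment_preimage(&mut self, hash: PaymentHash, preimage: PaymentPreimage) {
        self.payment_preimages.insert(hash, preimage);
    }

    /// Drop preimages that no unrevoked commitment transaction still references.
    fn prune_on_revocation(&mut self, still_referenced: &dyn Fn(&PaymentHash) -> bool) {
        self.payment_preimages.retain(|hash, _| still_referenced(hash));
    }

    /// Mirror of `get_stored_preimages`: a snapshot for the manager to replay.
    fn get_stored_preimages(&self) -> HashMap<PaymentHash, PaymentPreimage> {
        self.payment_preimages.clone()
    }
}

/// On startup, re-claim any inbound payment whose claim was interrupted before restart.
fn replay_claims_on_startup(
    monitor: &MonitorSketch,
    claim: &mut dyn FnMut(PaymentHash, PaymentPreimage),
) {
    for (hash, preimage) in monitor.get_stored_preimages() {
        claim(hash, preimage);
    }
}
```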
83 changes: 50 additions & 33 deletions lightning/src/ln/chanmon_update_fail_tests.rs
@@ -89,7 +89,7 @@ fn test_monitor_and_persister_update_fail() {
send_payment(&nodes[0], &vec!(&nodes[1])[..], 10_000_000);

// Route an HTLC from node 0 to node 1 (but don't settle)
let preimage = route_payment(&nodes[0], &vec!(&nodes[1])[..], 9_000_000).0;
let (preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1]], 9_000_000);

// Make a copy of the ChainMonitor so we can capture the error it returns on a
// bogus update. Note that if instead we updated the nodes[0]'s ChainMonitor
@@ -123,8 +123,10 @@ fn test_monitor_and_persister_update_fail() {
persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));

// Try to update ChannelMonitor
assert!(nodes[1].node.claim_funds(preimage));
nodes[1].node.claim_funds(preimage);
expect_payment_claimed!(nodes[1], payment_hash, 9_000_000);
check_added_monitors!(nodes[1], 1);

let updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
assert_eq!(updates.update_fulfill_htlcs.len(), 1);
nodes[0].node.handle_update_fulfill_htlc(&nodes[1].node.get_our_node_id(), &updates.update_fulfill_htlcs[0]);
@@ -189,9 +191,9 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
let events_3 = nodes[1].node.get_and_clear_pending_events();
assert_eq!(events_3.len(), 1);
match events_3[0] {
Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
assert_eq!(payment_hash_1, *payment_hash);
assert_eq!(amt, 1000000);
assert_eq!(amount_msat, 1_000_000);
match &purpose {
PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
@@ -267,7 +269,7 @@ fn do_test_monitor_temporary_update_fail(disconnect_count: usize) {
let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
let channel_id = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).2;

let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

// Now try to send a second payment which will fail to send
let (route, payment_hash_2, payment_preimage_2, payment_secret_2) = get_route_and_payment_hash!(nodes[0], nodes[1], 1000000);
@@ -283,8 +285,10 @@ fn do_test_monitor_temporary_update_fail(disconnect_count: usize) {

// Claim the previous payment, which will result in a update_fulfill_htlc/CS from nodes[1]
// but nodes[0] won't respond since it is frozen.
assert!(nodes[1].node.claim_funds(payment_preimage_1));
nodes[1].node.claim_funds(payment_preimage_1);
check_added_monitors!(nodes[1], 1);
expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);

let events_2 = nodes[1].node.get_and_clear_pending_msg_events();
assert_eq!(events_2.len(), 1);
let (bs_initial_fulfill, bs_initial_commitment_signed) = match events_2[0] {
@@ -555,9 +559,9 @@ fn do_test_monitor_temporary_update_fail(disconnect_count: usize) {
let events_5 = nodes[1].node.get_and_clear_pending_events();
assert_eq!(events_5.len(), 1);
match events_5[0] {
Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
assert_eq!(payment_hash_2, *payment_hash);
assert_eq!(amt, 1000000);
assert_eq!(amount_msat, 1_000_000);
match &purpose {
PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
@@ -672,9 +676,9 @@ fn test_monitor_update_fail_cs() {
let events = nodes[1].node.get_and_clear_pending_events();
assert_eq!(events.len(), 1);
match events[0] {
Event::PaymentReceived { payment_hash, ref purpose, amt } => {
Event::PaymentReceived { payment_hash, ref purpose, amount_msat } => {
assert_eq!(payment_hash, our_payment_hash);
assert_eq!(amt, 1000000);
assert_eq!(amount_msat, 1_000_000);
match &purpose {
PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
@@ -827,7 +831,7 @@ fn do_test_monitor_update_fail_raa(test_ignore_second_cs: bool) {
let (_, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1000000);

// Fail the payment backwards, failing the monitor update on nodes[1]'s receipt of the RAA
assert!(nodes[2].node.fail_htlc_backwards(&payment_hash_1));
nodes[2].node.fail_htlc_backwards(&payment_hash_1);
expect_pending_htlcs_forwardable!(nodes[2]);
check_added_monitors!(nodes[2], 1);

@@ -1088,13 +1092,15 @@ fn test_monitor_update_fail_reestablish() {
let chan_1 = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
create_announced_chan_between_nodes(&nodes, 1, 2, InitFeatures::known(), InitFeatures::known());

let (payment_preimage, _, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1000000);
let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1_000_000);

nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);
nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);

assert!(nodes[2].node.claim_funds(payment_preimage));
nodes[2].node.claim_funds(payment_preimage);
check_added_monitors!(nodes[2], 1);
expect_payment_claimed!(nodes[2], payment_hash, 1_000_000);

let mut updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
assert!(updates.update_add_htlcs.is_empty());
assert!(updates.update_fail_htlcs.is_empty());
@@ -1292,13 +1298,14 @@ fn claim_while_disconnected_monitor_update_fail() {
let channel_id = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).2;

// Forward a payment for B to claim
let (payment_preimage_1, _, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id(), false);
nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id(), false);

assert!(nodes[1].node.claim_funds(payment_preimage_1));
nodes[1].node.claim_funds(payment_preimage_1);
check_added_monitors!(nodes[1], 1);
expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);

nodes[0].node.peer_connected(&nodes[1].node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
nodes[1].node.peer_connected(&nodes[0].node.get_our_node_id(), &msgs::Init { features: InitFeatures::empty(), remote_network_address: None });
@@ -1578,10 +1585,11 @@ fn test_monitor_update_fail_claim() {
// Rebalance a bit so that we can send backwards from 3 to 2.
send_payment(&nodes[0], &[&nodes[1], &nodes[2]], 5000000);

let (payment_preimage_1, _, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
assert!(nodes[1].node.claim_funds(payment_preimage_1));
nodes[1].node.claim_funds(payment_preimage_1);
expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);
nodes[1].logger.assert_log("lightning::ln::channelmanager".to_string(), "Temporary failure claiming HTLC, treating as success: Failed to update ChannelMonitor".to_string(), 1);
check_added_monitors!(nodes[1], 1);

@@ -1642,9 +1650,9 @@ fn test_monitor_update_fail_claim() {
let events = nodes[0].node.get_and_clear_pending_events();
assert_eq!(events.len(), 2);
match events[0] {
Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
assert_eq!(payment_hash_2, *payment_hash);
assert_eq!(1_000_000, amt);
assert_eq!(1_000_000, amount_msat);
match &purpose {
PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
@@ -1656,9 +1664,9 @@ fn test_monitor_update_fail_claim() {
_ => panic!("Unexpected event"),
}
match events[1] {
Event::PaymentReceived { ref payment_hash, ref purpose, amt } => {
Event::PaymentReceived { ref payment_hash, ref purpose, amount_msat } => {
assert_eq!(payment_hash_3, *payment_hash);
assert_eq!(1_000_000, amt);
assert_eq!(1_000_000, amount_msat);
match &purpose {
PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => {
assert!(payment_preimage.is_none());
@@ -1688,7 +1696,7 @@ fn test_monitor_update_on_pending_forwards() {
send_payment(&nodes[0], &[&nodes[1], &nodes[2]], 5000000);

let (_, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 1000000);
assert!(nodes[2].node.fail_htlc_backwards(&payment_hash_1));
nodes[2].node.fail_htlc_backwards(&payment_hash_1);
expect_pending_htlcs_forwardable!(nodes[2]);
check_added_monitors!(nodes[2], 1);

@@ -1754,7 +1762,7 @@ fn monitor_update_claim_fail_no_response() {
let channel_id = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known()).2;

// Forward a payment for B to claim
let (payment_preimage_1, _, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

// Now start forwarding a second payment, skipping the last RAA so B is in AwaitingRAA
let (route, payment_hash_2, payment_preimage_2, payment_secret_2) = get_route_and_payment_hash!(nodes[0], nodes[1], 1000000);
@@ -1770,8 +1778,10 @@ fn monitor_update_claim_fail_no_response() {
let as_raa = commitment_signed_dance!(nodes[1], nodes[0], payment_event.commitment_msg, false, true, false, true);

chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
assert!(nodes[1].node.claim_funds(payment_preimage_1));
nodes[1].node.claim_funds(payment_preimage_1);
expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);
check_added_monitors!(nodes[1], 1);

let events = nodes[1].node.get_and_clear_pending_msg_events();
assert_eq!(events.len(), 0);
nodes[1].logger.assert_log("lightning::ln::channelmanager".to_string(), "Temporary failure claiming HTLC, treating as success: Failed to update ChannelMonitor".to_string(), 1);
@@ -2076,13 +2086,15 @@ fn test_fail_htlc_on_broadcast_after_claim() {
create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());
let chan_id_2 = create_announced_chan_between_nodes(&nodes, 1, 2, InitFeatures::known(), InitFeatures::known()).2;

let payment_preimage = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 2000).0;
let (payment_preimage, payment_hash, _) = route_payment(&nodes[0], &[&nodes[1], &nodes[2]], 2000);

let bs_txn = get_local_commitment_txn!(nodes[2], chan_id_2);
assert_eq!(bs_txn.len(), 1);

nodes[2].node.claim_funds(payment_preimage);
check_added_monitors!(nodes[2], 1);
expect_payment_claimed!(nodes[2], payment_hash, 2000);

let cs_updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
nodes[1].node.handle_update_fulfill_htlc(&nodes[2].node.get_our_node_id(), &cs_updates.update_fulfill_htlcs[0]);
let bs_updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());
@@ -2235,7 +2247,7 @@ fn do_channel_holding_cell_serialize(disconnect: bool, reload_a: bool) {
//
// Note that because, at the end, MonitorUpdateFailed is still set, the HTLC generated in (c)
// will not be freed from the holding cell.
let (payment_preimage_0, _, _) = route_payment(&nodes[1], &[&nodes[0]], 100000);
let (payment_preimage_0, payment_hash_0, _) = route_payment(&nodes[1], &[&nodes[0]], 100_000);

nodes[0].node.send_payment(&route, payment_hash_1, &Some(payment_secret_1)).unwrap();
check_added_monitors!(nodes[0], 1);
@@ -2246,8 +2258,9 @@ fn do_channel_holding_cell_serialize(disconnect: bool, reload_a: bool) {
check_added_monitors!(nodes[0], 0);

chanmon_cfgs[0].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
assert!(nodes[0].node.claim_funds(payment_preimage_0));
nodes[0].node.claim_funds(payment_preimage_0);
check_added_monitors!(nodes[0], 1);
expect_payment_claimed!(nodes[0], payment_hash_0, 100_000);

nodes[1].node.handle_update_add_htlc(&nodes[0].node.get_our_node_id(), &send.msgs[0]);
nodes[1].node.handle_commitment_signed(&nodes[0].node.get_our_node_id(), &send.commitment_msg);
@@ -2455,13 +2468,15 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
payment_preimage,
};
if second_fails {
assert!(nodes[2].node.fail_htlc_backwards(&payment_hash));
nodes[2].node.fail_htlc_backwards(&payment_hash);
expect_pending_htlcs_forwardable!(nodes[2]);
check_added_monitors!(nodes[2], 1);
get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
} else {
assert!(nodes[2].node.claim_funds(payment_preimage));
nodes[2].node.claim_funds(payment_preimage);
check_added_monitors!(nodes[2], 1);
expect_payment_claimed!(nodes[2], payment_hash, 100_000);

let cs_updates = get_htlc_update_msgs!(nodes[2], nodes[1].node.get_our_node_id());
assert_eq!(cs_updates.update_fulfill_htlcs.len(), 1);
// Check that the message we're about to deliver matches the one generated:
@@ -2630,20 +2645,22 @@ fn double_temp_error() {

let (_, _, channel_id, _) = create_announced_chan_between_nodes(&nodes, 0, 1, InitFeatures::known(), InitFeatures::known());

let (payment_preimage_1, _, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
let (payment_preimage_2, _, _) = route_payment(&nodes[0], &[&nodes[1]], 1000000);
let (payment_preimage_1, payment_hash_1, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);
let (payment_preimage_2, payment_hash_2, _) = route_payment(&nodes[0], &[&nodes[1]], 1_000_000);

chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
// `claim_funds` results in a ChannelMonitorUpdate.
assert!(nodes[1].node.claim_funds(payment_preimage_1));
nodes[1].node.claim_funds(payment_preimage_1);
check_added_monitors!(nodes[1], 1);
expect_payment_claimed!(nodes[1], payment_hash_1, 1_000_000);
let (funding_tx, latest_update_1, _) = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap().get(&channel_id).unwrap().clone();

chanmon_cfgs[1].persister.set_update_ret(Err(ChannelMonitorUpdateErr::TemporaryFailure));
// Previously, this would've panicked due to a double-call to `Channel::monitor_update_failed`,
// which had some asserts that prevented it from being called twice.
assert!(nodes[1].node.claim_funds(payment_preimage_2));
nodes[1].node.claim_funds(payment_preimage_2);
check_added_monitors!(nodes[1], 1);
expect_payment_claimed!(nodes[1], payment_hash_2, 1_000_000);
chanmon_cfgs[1].persister.set_update_ret(Ok(()));

let (_, latest_update_2, _) = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap().get(&channel_id).unwrap().clone();
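Several of the tests above assert the log line "Temporary failure claiming HTLC, treating as success" and still expect a `PaymentClaimed` event while the persister is returning `ChannelMonitorUpdateErr::TemporaryFailure`. A hypothetical sketch of that policy, using stand-in types rather than `ChannelManager`'s actual code: the preimage is already recorded, so the claim is reported and the monitor update is retried later.

```rust
// Stand-in types, not LDK's real ChannelManager/ChannelMonitorUpdateErr handling.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct PaymentHash([u8; 32]);

enum MonitorUpdateErr { TemporaryFailure, PermanentFailure }

#[derive(Debug)]
enum SketchEvent { PaymentClaimed { payment_hash: PaymentHash, amount_msat: u64 } }

fn claim_funds(
    persist: impl FnOnce() -> Result<(), MonitorUpdateErr>,
    payment_hash: PaymentHash,
    amount_msat: u64,
    events: &mut Vec<SketchEvent>,
) {
    match persist() {
        Ok(()) | Err(MonitorUpdateErr::TemporaryFailure) => {
            // "Temporary failure claiming HTLC, treating as success": surface the
            // claim now; the persisted update is replayed once the persister recovers.
            events.push(SketchEvent::PaymentClaimed { payment_hash, amount_msat });
        },
        Err(MonitorUpdateErr::PermanentFailure) => {
            // A permanent failure would force-close the channel instead; out of scope here.
        },
    }
}
```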
[Diff for the remaining 10 changed files not rendered.]