Commit b8661ef

Pass info about claimed payments, incl HTLCs to ChannelMonitors
When we claim an MPP payment, then crash before persisting all the relevant `ChannelMonitor`s, we rely on the payment data being available in the `ChannelManager` on restart to re-claim any parts that haven't yet been claimed. This is fine as long as the `ChannelManager` was persisted before the `PaymentClaimable` event was processed, which is generally the case in our `lightning-background-processor`, but may not be in other cases or in a somewhat rare race.

In order to fix this, we need to track where all the MPP parts of a payment are in the `ChannelMonitor`, allowing us to re-claim any missing pieces without reference to any `ChannelManager` data.

Further, in order to properly generate a `PaymentClaimed` event against the re-started claim, we have to store various payment metadata with the HTLC list as well.

Here we take the first step, building a list of MPP parts and metadata in `ChannelManager` and passing it through to `ChannelMonitor` in the `ChannelMonitorUpdate`s.
1 parent d9175f4 commit b8661ef
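
In outline, the commit extends the preimage update step with optional claim metadata. Below is a minimal, self-contained sketch of the resulting data model, condensed from the diff that follows; the type aliases are simplified stand-ins for the real secp256k1/bitcoin/LDK types, and the TLV serialization macros are omitted.

// Sketch of the data model added by this commit (simplified; not the real
// LDK definitions). The stand-in type aliases below replace secp256k1,
// bitcoin and LDK newtypes so the sketch compiles on its own.
type PublicKey = [u8; 33];
type OutPoint = ([u8; 32], u16); // (funding txid, output index)
type ChannelId = [u8; 32];
type PaymentPreimage = [u8; 32];

/// Stand-in for the existing `ClaimingPayment` metadata (amount, purpose,
/// onion fields, ...) used to rebuild `Event::PaymentClaimed`.
#[derive(Clone)]
struct ClaimingPayment { amount_msat: u64 }

/// One previous-hop HTLC of an MPP claim (one entry per MPP part).
#[derive(Clone)]
struct MPPClaimHTLCSource {
    counterparty_node_id: PublicKey,
    funding_txo: OutPoint,
    channel_id: ChannelId,
    htlc_id: u64,
}

/// Everything a `ChannelMonitor` needs to replay the claim and regenerate
/// `Event::PaymentClaimed` without consulting `ChannelManager` state.
#[derive(Clone)]
struct PaymentClaimDetails {
    mpp_parts: Vec<MPPClaimHTLCSource>,
    claiming_payment: ClaimingPayment,
}

/// The preimage update step now optionally carries those claim details.
enum ChannelMonitorUpdateStep {
    PaymentPreimage {
        payment_preimage: PaymentPreimage,
        payment_info: Option<PaymentClaimDetails>,
    },
    // ... other variants unchanged
}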

File tree

4 files changed: +93 -33 lines changed

lightning/src/chain/channelmonitor.rs
lightning/src/ln/channel.rs
lightning/src/ln/channelmanager.rs
pending_changelog/3322-a.txt

lightning/src/chain/channelmonitor.rs

Lines changed: 20 additions & 7 deletions
@@ -38,7 +38,7 @@ use crate::types::payment::{PaymentHash, PaymentPreimage};
 use crate::ln::msgs::DecodeError;
 use crate::ln::channel_keys::{DelayedPaymentKey, DelayedPaymentBasepoint, HtlcBasepoint, HtlcKey, RevocationKey, RevocationBasepoint};
 use crate::ln::chan_utils::{self,CommitmentTransaction, CounterpartyCommitmentSecrets, HTLCOutputInCommitment, HTLCClaim, ChannelTransactionParameters, HolderCommitmentTransaction, TxCreationKeys};
-use crate::ln::channelmanager::{HTLCSource, SentHTLCId};
+use crate::ln::channelmanager::{HTLCSource, SentHTLCId, PaymentClaimDetails};
 use crate::chain;
 use crate::chain::{BestBlock, WatchedOutput};
 use crate::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator, LowerBoundedFeeEstimator};
@@ -546,6 +546,9 @@ pub(crate) enum ChannelMonitorUpdateStep {
     },
     PaymentPreimage {
         payment_preimage: PaymentPreimage,
+        /// If this preimage was from an inbound payment claim, information about the claim should
+        /// be included here to enable claim replay on startup.
+        payment_info: Option<PaymentClaimDetails>,
     },
     CommitmentSecret {
         idx: u64,
@@ -594,6 +597,7 @@ impl_writeable_tlv_based_enum_upgradable!(ChannelMonitorUpdateStep,
     },
     (2, PaymentPreimage) => {
         (0, payment_preimage, required),
+        (1, payment_info, option),
     },
     (3, CommitmentSecret) => {
         (0, idx, required),
@@ -1502,8 +1506,11 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitor<Signer> {
     {
         let mut inner = self.inner.lock().unwrap();
         let logger = WithChannelMonitor::from_impl(logger, &*inner, Some(*payment_hash));
+        // Note that we don't pass any MPP claim parts here. This is generally not okay but in this
+        // case is acceptable as we only call this method from `ChannelManager` deserialization in
+        // cases where we are replaying a claim started on a previous version of LDK.
         inner.provide_payment_preimage(
-            payment_hash, payment_preimage, broadcaster, fee_estimator, &logger)
+            payment_hash, payment_preimage, &None, broadcaster, fee_estimator, &logger)
     }

     /// Updates a ChannelMonitor on the basis of some new information provided by the Channel
@@ -2930,12 +2937,14 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
     /// Provides a payment_hash->payment_preimage mapping. Will be automatically pruned when all
     /// commitment_tx_infos which contain the payment hash have been revoked.
     fn provide_payment_preimage<B: Deref, F: Deref, L: Deref>(
-        &mut self, payment_hash: &PaymentHash, payment_preimage: &PaymentPreimage, broadcaster: &B,
+        &mut self, payment_hash: &PaymentHash, payment_preimage: &PaymentPreimage,
+        payment_info: &Option<PaymentClaimDetails>, broadcaster: &B,
         fee_estimator: &LowerBoundedFeeEstimator<F>, logger: &WithChannelMonitor<L>)
     where B::Target: BroadcasterInterface,
         F::Target: FeeEstimator,
        L::Target: Logger,
     {
+        // TODO: Store payment_info (but do not override any existing values)
         self.payment_preimages.insert(payment_hash.clone(), payment_preimage.clone());

         let confirmed_spend_txid = self.funding_spend_confirmed.or_else(|| {
@@ -3139,9 +3148,9 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
                 log_trace!(logger, "Updating ChannelMonitor with latest counterparty commitment transaction info");
                 self.provide_latest_counterparty_commitment_tx(*commitment_txid, htlc_outputs.clone(), *commitment_number, *their_per_commitment_point, logger)
             },
-            ChannelMonitorUpdateStep::PaymentPreimage { payment_preimage } => {
+            ChannelMonitorUpdateStep::PaymentPreimage { payment_preimage, payment_info } => {
                 log_trace!(logger, "Updating ChannelMonitor with payment preimage");
-                self.provide_payment_preimage(&PaymentHash(Sha256::hash(&payment_preimage.0[..]).to_byte_array()), &payment_preimage, broadcaster, &bounded_fee_estimator, logger)
+                self.provide_payment_preimage(&PaymentHash(Sha256::hash(&payment_preimage.0[..]).to_byte_array()), &payment_preimage, payment_info, broadcaster, &bounded_fee_estimator, logger)
             },
             ChannelMonitorUpdateStep::CommitmentSecret { idx, secret } => {
                 log_trace!(logger, "Updating ChannelMonitor with commitment secret");
@@ -5097,8 +5106,12 @@ mod tests {
         assert_eq!(replay_update.updates.len(), 1);
         if let ChannelMonitorUpdateStep::LatestCounterpartyCommitmentTXInfo { .. } = replay_update.updates[0] {
         } else { panic!(); }
-        replay_update.updates.push(ChannelMonitorUpdateStep::PaymentPreimage { payment_preimage: payment_preimage_1 });
-        replay_update.updates.push(ChannelMonitorUpdateStep::PaymentPreimage { payment_preimage: payment_preimage_2 });
+        replay_update.updates.push(ChannelMonitorUpdateStep::PaymentPreimage {
+            payment_preimage: payment_preimage_1, payment_info: None,
+        });
+        replay_update.updates.push(ChannelMonitorUpdateStep::PaymentPreimage {
+            payment_preimage: payment_preimage_2, payment_info: None,
+        });

         let broadcaster = TestBroadcaster::with_blocks(Arc::clone(&nodes[1].blocks));
         assert!(
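
Condensed, the monitor-side change above amounts to the following sketch. It is not the real `ChannelMonitorImpl`: the broadcaster, fee-estimator and logger parameters are dropped, the stand-in types are simplified, and the `sha2` crate is assumed for hashing where the real code uses the `Sha256` imported in channelmonitor.rs.

// Simplified sketch only; assumes the `sha2` crate as a dependency.
use sha2::{Digest, Sha256};
use std::collections::HashMap;

type PaymentHash = [u8; 32];
type PaymentPreimage = [u8; 32];
#[derive(Clone)]
struct PaymentClaimDetails; // placeholder for the struct added in channelmanager.rs

struct MonitorSketch {
    payment_preimages: HashMap<PaymentHash, PaymentPreimage>,
}

impl MonitorSketch {
    // Mirrors the new `provide_payment_preimage` signature: callers now pass
    // the optional claim details alongside the preimage.
    fn provide_payment_preimage(
        &mut self, payment_hash: &PaymentHash, payment_preimage: &PaymentPreimage,
        _payment_info: &Option<PaymentClaimDetails>,
    ) {
        // Per the TODO in the diff, a later change will also store
        // `payment_info` here (without overriding any existing value) so the
        // claim can be replayed after a restart.
        self.payment_preimages.insert(*payment_hash, *payment_preimage);
    }

    // Mirrors the `ChannelMonitorUpdateStep::PaymentPreimage` match arm: the
    // payment hash is always recomputed from the preimage.
    fn apply_payment_preimage_step(
        &mut self, payment_preimage: &PaymentPreimage,
        payment_info: &Option<PaymentClaimDetails>,
    ) {
        let payment_hash: PaymentHash = Sha256::digest(payment_preimage).into();
        self.provide_payment_preimage(&payment_hash, payment_preimage, payment_info);
    }
}

As the new comment in the diff notes, the legacy `ChannelMonitor::provide_payment_preimage` entry point, used only when replaying a claim started on an older LDK version during `ChannelManager` deserialization, simply passes `&None`.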

lightning/src/ln/channel.rs

Lines changed: 18 additions & 6 deletions
@@ -32,7 +32,7 @@ use crate::ln::msgs;
 use crate::ln::msgs::{ClosingSigned, ClosingSignedFeeRange, DecodeError};
 use crate::ln::script::{self, ShutdownScript};
 use crate::ln::channel_state::{ChannelShutdownState, CounterpartyForwardingInfo, InboundHTLCDetails, InboundHTLCStateDetails, OutboundHTLCDetails, OutboundHTLCStateDetails};
-use crate::ln::channelmanager::{self, PendingHTLCStatus, HTLCSource, SentHTLCId, HTLCFailureMsg, PendingHTLCInfo, RAACommitmentOrder, BREAKDOWN_TIMEOUT, MIN_CLTV_EXPIRY_DELTA, MAX_LOCAL_BREAKDOWN_TIMEOUT};
+use crate::ln::channelmanager::{self, PendingHTLCStatus, HTLCSource, SentHTLCId, HTLCFailureMsg, PendingHTLCInfo, RAACommitmentOrder, PaymentClaimDetails, BREAKDOWN_TIMEOUT, MIN_CLTV_EXPIRY_DELTA, MAX_LOCAL_BREAKDOWN_TIMEOUT};
 use crate::ln::chan_utils::{
     CounterpartyCommitmentSecrets, TxCreationKeys, HTLCOutputInCommitment, htlc_success_tx_weight,
     htlc_timeout_tx_weight, make_funding_redeemscript, ChannelPublicKeys, CommitmentTransaction,
@@ -4039,14 +4039,17 @@ impl<SP: Deref> Channel<SP> where
         // (see equivalent if condition there).
         assert!(!self.context.channel_state.can_generate_new_commitment());
         let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
-        let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, logger);
+        let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
         self.context.latest_monitor_update_id = mon_update_id;
         if let UpdateFulfillFetch::NewClaim { msg, .. } = fulfill_resp {
             assert!(msg.is_none()); // The HTLC must have ended up in the holding cell.
         }
     }

-    fn get_update_fulfill_htlc<L: Deref>(&mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage, logger: &L) -> UpdateFulfillFetch where L::Target: Logger {
+    fn get_update_fulfill_htlc<L: Deref>(
+        &mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
+        payment_info: Option<PaymentClaimDetails>, logger: &L,
+    ) -> UpdateFulfillFetch where L::Target: Logger {
         // Either ChannelReady got set (which means it won't be unset) or there is no way any
         // caller thought we could have something claimed (cause we wouldn't have accepted in an
         // incoming HTLC anyway). If we got to ShutdownComplete, callers aren't allowed to call us,
@@ -4104,6 +4107,7 @@ impl<SP: Deref> Channel<SP> where
             counterparty_node_id: Some(self.context.counterparty_node_id),
             updates: vec![ChannelMonitorUpdateStep::PaymentPreimage {
                 payment_preimage: payment_preimage_arg.clone(),
+                payment_info,
             }],
             channel_id: Some(self.context.channel_id()),
         };
@@ -4171,9 +4175,12 @@ impl<SP: Deref> Channel<SP> where
         }
     }

-    pub fn get_update_fulfill_htlc_and_commit<L: Deref>(&mut self, htlc_id: u64, payment_preimage: PaymentPreimage, logger: &L) -> UpdateFulfillCommitFetch where L::Target: Logger {
+    pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
+        &mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
+        payment_info: Option<PaymentClaimDetails>, logger: &L,
+    ) -> UpdateFulfillCommitFetch where L::Target: Logger {
         let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
-        match self.get_update_fulfill_htlc(htlc_id, payment_preimage, logger) {
+        match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
             UpdateFulfillFetch::NewClaim { mut monitor_update, htlc_value_msat, msg } => {
                 // Even if we aren't supposed to let new monitor updates with commitment state
                 // updates run, we still need to push the preimage ChannelMonitorUpdateStep no
@@ -4934,9 +4941,14 @@ impl<SP: Deref> Channel<SP> where
                 // not fail - any in between attempts to claim the HTLC will have resulted
                 // in it hitting the holding cell again and we cannot change the state of a
                 // holding cell HTLC from fulfill to anything else.
+                //
+                // Note that we should have already provided a preimage-containing
+                // `ChannelMonitorUpdate` to the user, making this one redundant, however
+                // there's no harm in including the extra `ChannelMonitorUpdateStep` here.
+                // We do not bother to track and include `payment_info` here, however.
                 let mut additional_monitor_update =
                     if let UpdateFulfillFetch::NewClaim { monitor_update, .. } =
-                        self.get_update_fulfill_htlc(htlc_id, *payment_preimage, logger)
+                        self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger)
                     { monitor_update } else { unreachable!() };
                 update_fulfill_count += 1;
                 monitor_update.updates.append(&mut additional_monitor_update.updates);
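
In short, `payment_info` now rides along from the public fulfill entry point down to the `ChannelMonitorUpdateStep`. A rough sketch of that threading, reusing the stand-in types from the sketch after the commit message (the real methods also build the commitment update, interact with the holding cell, and return the `UpdateFulfillFetch`/`UpdateFulfillCommitFetch` enums):

// Sketch only: shows how the claim details flow into the monitor update.
struct ChannelSketch; // placeholder; the real type is `Channel<SP>`

impl ChannelSketch {
    // Public entry point used by ChannelManager when claiming an inbound HTLC.
    pub fn get_update_fulfill_htlc_and_commit(
        &mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
        payment_info: Option<PaymentClaimDetails>,
    ) -> Vec<ChannelMonitorUpdateStep> {
        self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info)
    }

    fn get_update_fulfill_htlc(
        &mut self, _htlc_id: u64, payment_preimage: PaymentPreimage,
        payment_info: Option<PaymentClaimDetails>,
    ) -> Vec<ChannelMonitorUpdateStep> {
        // The claim details end up inside the monitor update itself, so they
        // are persisted together with the preimage.
        vec![ChannelMonitorUpdateStep::PaymentPreimage { payment_preimage, payment_info }]
    }
}

The internal re-fulfill paths pass `None`; as the new comment in the holding-cell case explains, a preimage-carrying `ChannelMonitorUpdate` has already been handed to the user by the time those run, so the extra step is redundant anyway.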

lightning/src/ln/channelmanager.rs

Lines changed: 49 additions & 20 deletions
@@ -801,6 +801,7 @@ pub(super) enum RAACommitmentOrder {
 }

 /// Information about a payment which is currently being claimed.
+#[derive(Clone, Debug, PartialEq, Eq)]
 struct ClaimingPayment {
     amount_msat: u64,
     payment_purpose: events::PaymentPurpose,
@@ -1072,12 +1073,36 @@ struct MPPClaimHTLCSource {
     htlc_id: u64,
 }

+impl_writeable_tlv_based!(MPPClaimHTLCSource, {
+    (0, counterparty_node_id, required),
+    (2, funding_txo, required),
+    (4, channel_id, required),
+    (6, htlc_id, required),
+});
+
 #[derive(Debug)]
 pub(crate) struct PendingMPPClaim {
     channels_without_preimage: Vec<MPPClaimHTLCSource>,
     channels_with_preimage: Vec<MPPClaimHTLCSource>,
 }

+#[derive(Clone, Debug, PartialEq, Eq)]
+/// When we're claiming a(n MPP) payment, we want to store information about that payment in the
+/// [`ChannelMonitor`] so that we can replay the claim without any information from the
+/// [`ChannelManager`] at all. This struct stores that information, with enough detail to replay
+/// the claim against all MPP parts as well as to generate an [`Event::PaymentClaimed`].
+pub(crate) struct PaymentClaimDetails {
+    mpp_parts: Vec<MPPClaimHTLCSource>,
+    /// Use [`ClaimingPayment`] as a stable source of all the fields we need to generate the
+    /// [`Event::PaymentClaimed`].
+    claiming_payment: ClaimingPayment,
+}
+
+impl_writeable_tlv_based!(PaymentClaimDetails, {
+    (0, mpp_parts, required_vec),
+    (2, claiming_payment, required),
+});
+
 #[derive(Clone)]
 pub(crate) struct PendingMPPClaimPointer(Arc<Mutex<PendingMPPClaim>>);

@@ -6675,6 +6700,7 @@ where

     let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);

+    let claiming_payment;
     let sources = {
         let mut claimable_payments = self.claimable_payments.lock().unwrap();
         if let Some(payment) = claimable_payments.claimable_payments.remove(&payment_hash) {
@@ -6689,7 +6715,7 @@ where
             }

             let payment_id = payment.inbound_payment_id(&self.inbound_payment_id_secret);
-            let claiming_payment = claimable_payments.pending_claiming_payments
+            claiming_payment = claimable_payments.pending_claiming_payments
                 .entry(payment_hash)
                 .and_modify(|_| {
                     debug_assert!(false, "Shouldn't get a duplicate pending claim event ever");
@@ -6708,7 +6734,7 @@ where
                     onion_fields: payment.onion_fields,
                     payment_id: Some(payment_id),
                 }
-            });
+            }).clone();

             if let Some(RecipientOnionFields { ref custom_tlvs, .. }) = claiming_payment.onion_fields {
                 if !custom_tlvs_known && custom_tlvs.iter().any(|(typ, _)| typ % 2 == 0) {
@@ -6772,26 +6798,27 @@ where
         return;
     }
     if valid_mpp {
+        let mpp_parts: Vec<_> = sources.iter().filter_map(|htlc| {
+            if let Some(cp_id) = htlc.prev_hop.counterparty_node_id {
+                Some(MPPClaimHTLCSource {
+                    counterparty_node_id: cp_id,
+                    funding_txo: htlc.prev_hop.outpoint,
+                    channel_id: htlc.prev_hop.channel_id,
+                    htlc_id: htlc.prev_hop.htlc_id,
+                })
+            } else {
+                None
+            }
+        }).collect();
         let pending_mpp_claim_ptr_opt = if sources.len() > 1 {
-            let channels_without_preimage = sources.iter().filter_map(|htlc| {
-                if let Some(cp_id) = htlc.prev_hop.counterparty_node_id {
-                    Some(MPPClaimHTLCSource {
-                        counterparty_node_id: cp_id,
-                        funding_txo: htlc.prev_hop.outpoint,
-                        channel_id: htlc.prev_hop.channel_id,
-                        htlc_id: htlc.prev_hop.htlc_id,
-                    })
-                } else {
-                    None
-                }
-            }).collect();
             Some(Arc::new(Mutex::new(PendingMPPClaim {
-                channels_without_preimage,
+                channels_without_preimage: mpp_parts.clone(),
                 channels_with_preimage: Vec::new(),
             })))
         } else {
             None
         };
+        let payment_info = Some(PaymentClaimDetails { mpp_parts, claiming_payment });
         for htlc in sources {
             let this_mpp_claim = pending_mpp_claim_ptr_opt.as_ref().and_then(|pending_mpp_claim|
                 if let Some(cp_id) = htlc.prev_hop.counterparty_node_id {
@@ -6807,7 +6834,7 @@ where
                 }
             });
             self.claim_funds_from_hop(
-                htlc.prev_hop, payment_preimage,
+                htlc.prev_hop, payment_preimage, payment_info.clone(),
                 |_, definitely_duplicate| {
                     debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
                     (Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
@@ -6837,7 +6864,7 @@ where
         ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
     >(
         &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-        completion_action: ComplFunc,
+        payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
     ) {
         //TODO: Delay the claimed_funds relaying just like we do outbound relay!

@@ -6871,7 +6898,8 @@ where
         if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
             let counterparty_node_id = chan.context.get_counterparty_node_id();
             let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-            let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &&logger);
+            let fulfill_res =
+                chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);

             match fulfill_res {
                 UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -6959,6 +6987,7 @@ where
             counterparty_node_id: None,
             updates: vec![ChannelMonitorUpdateStep::PaymentPreimage {
                 payment_preimage,
+                payment_info,
             }],
             channel_id: Some(prev_hop.channel_id),
         };
@@ -7069,7 +7098,7 @@ where
         let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
         #[cfg(debug_assertions)]
         let claiming_chan_funding_outpoint = hop_data.outpoint;
-        self.claim_funds_from_hop(hop_data, payment_preimage,
+        self.claim_funds_from_hop(hop_data, payment_preimage, None,
             |htlc_claim_value_msat, definitely_duplicate| {
                 let chan_to_release =
                     if let Some(node_id) = next_channel_counterparty_node_id {
@@ -7105,7 +7134,7 @@ where
             if *funding_txo == claiming_chan_funding_outpoint {
                 assert!(update.updates.iter().any(|upd|
                     if let ChannelMonitorUpdateStep::PaymentPreimage {
-                        payment_preimage: update_preimage
+                        payment_preimage: update_preimage, ..
                     } = upd {
                         payment_preimage == *update_preimage
                     } else { false }
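
Condensed, the claim path in `ChannelManager` now builds the MPP part list once, attaches the `ClaimingPayment` metadata, and hands a copy to every per-part claim. A rough sketch, again reusing the stand-in types from the first sketch (locking, event generation, deduplication and the RAA-blocker plumbing are omitted, and the helper names ending in `_sketch` are illustrative only):

// Stand-in for the previous-hop data carried by each claimable HTLC.
struct HtlcSourceSketch {
    counterparty_node_id: Option<PublicKey>,
    funding_txo: OutPoint,
    channel_id: ChannelId,
    htlc_id: u64,
}

fn claim_payment_sketch(
    sources: &[HtlcSourceSketch], claiming_payment: ClaimingPayment,
    payment_preimage: PaymentPreimage,
) {
    // Build one MPPClaimHTLCSource per part; parts without a known
    // counterparty node id are skipped, as in the diff.
    let mpp_parts: Vec<MPPClaimHTLCSource> = sources.iter().filter_map(|htlc| {
        htlc.counterparty_node_id.map(|cp_id| MPPClaimHTLCSource {
            counterparty_node_id: cp_id,
            funding_txo: htlc.funding_txo,
            channel_id: htlc.channel_id,
            htlc_id: htlc.htlc_id,
        })
    }).collect();

    // The same details are cloned into every per-part claim so that each
    // ChannelMonitor learns about all sibling parts.
    let payment_info = Some(PaymentClaimDetails { mpp_parts, claiming_payment });
    for htlc in sources {
        claim_funds_from_hop_sketch(htlc, payment_preimage, payment_info.clone());
    }
}

// Stand-in for `claim_funds_from_hop`: forwards the preimage and the optional
// claim details into the channel, which puts them in the ChannelMonitorUpdate.
fn claim_funds_from_hop_sketch(
    _htlc: &HtlcSourceSketch, _payment_preimage: PaymentPreimage,
    _payment_info: Option<PaymentClaimDetails>,
) {}

The forwarded-HTLC call site passes `None` for `payment_info`, consistent with the new field's documentation: claim details are only attached when the preimage comes from an inbound payment we are claiming ourselves.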

pending_changelog/3322-a.txt

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+API Changes
+===========
+
+Additional information is now stored in `ChannelMonitorUpdate`s which may increase the size of
+`ChannelMonitorUpdate`s claiming inbound payments substantially. The expected maximum size of
+`ChannelMonitorUpdate`s shouldn't change materially.
