Skip to content

Locktimed packages fixes #3923

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 32 additions & 19 deletions lightning/src/chain/channelmonitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3514,23 +3514,26 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
(payment_preimage.clone(), payment_info.clone().into_iter().collect())
});

let confirmed_spend_txid = self.funding_spend_confirmed.or_else(|| {
self.onchain_events_awaiting_threshold_conf.iter().find_map(|event| match event.event {
OnchainEvent::FundingSpendConfirmation { .. } => Some(event.txid),
_ => None,
})
});
let confirmed_spend_txid = if let Some(txid) = confirmed_spend_txid {
txid
} else {
return;
};
let confirmed_spend_info = self.funding_spend_confirmed
.map(|txid| (txid, None))
.or_else(|| {
self.onchain_events_awaiting_threshold_conf.iter().find_map(|event| match event.event {
OnchainEvent::FundingSpendConfirmation { .. } => Some((event.txid, Some(event.height))),
_ => None,
})
});
let (confirmed_spend_txid, confirmed_spend_height) =
if let Some((txid, height)) = confirmed_spend_info {
(txid, height)
} else {
return;
};

// If the channel is force closed, try to claim the output from this preimage.
// First check if a counterparty commitment transaction has been broadcasted:
macro_rules! claim_htlcs {
($commitment_number: expr, $txid: expr, $htlcs: expr) => {
let (htlc_claim_reqs, _) = self.get_counterparty_output_claim_info($commitment_number, $txid, None, $htlcs);
let (htlc_claim_reqs, _) = self.get_counterparty_output_claim_info($commitment_number, $txid, None, $htlcs, confirmed_spend_height);
let conf_target = self.closure_conf_target();
self.onchain_tx_handler.update_claims_view_from_requests(
htlc_claim_reqs, self.best_block.height, self.best_block.height, broadcaster,
Expand Down Expand Up @@ -4226,6 +4229,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
per_commitment_point, per_commitment_key, outp.value,
self.funding.channel_parameters.channel_type_features.supports_anchors_zero_fee_htlc_tx(),
self.funding.channel_parameters.clone(),
height,
);
let justice_package = PackageTemplate::build_package(
commitment_txid, idx as u32,
Expand All @@ -4250,6 +4254,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
let revk_htlc_outp = RevokedHTLCOutput::build(
per_commitment_point, per_commitment_key, htlc.clone(),
self.funding.channel_parameters.clone(),
height,
);
let counterparty_spendable_height = if htlc.offered {
htlc.cltv_expiry
Expand Down Expand Up @@ -4304,7 +4309,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
(htlc, htlc_source.as_ref().map(|htlc_source| htlc_source.as_ref()))
), logger);
let (htlc_claim_reqs, counterparty_output_info) =
self.get_counterparty_output_claim_info(commitment_number, commitment_txid, Some(tx), per_commitment_option);
self.get_counterparty_output_claim_info(commitment_number, commitment_txid, Some(tx), per_commitment_option, Some(height));
to_counterparty_output_info = counterparty_output_info;
for req in htlc_claim_reqs {
claimable_outpoints.push(req);
Expand All @@ -4316,8 +4321,11 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {

/// Returns the HTLC claim package templates and the counterparty output info
#[rustfmt::skip]
fn get_counterparty_output_claim_info(&self, commitment_number: u64, commitment_txid: Txid, tx: Option<&Transaction>, per_commitment_option: Option<&Vec<(HTLCOutputInCommitment, Option<Box<HTLCSource>>)>>)
-> (Vec<PackageTemplate>, CommitmentTxCounterpartyOutputInfo) {
fn get_counterparty_output_claim_info(
&self, commitment_number: u64, commitment_txid: Txid, tx: Option<&Transaction>,
per_commitment_option: Option<&Vec<(HTLCOutputInCommitment, Option<Box<HTLCSource>>)>>,
confirmation_height: Option<u32>,
) -> (Vec<PackageTemplate>, CommitmentTxCounterpartyOutputInfo) {
let mut claimable_outpoints = Vec::new();
let mut to_counterparty_output_info: CommitmentTxCounterpartyOutputInfo = None;

Expand Down Expand Up @@ -4374,15 +4382,19 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
let counterparty_htlc_outp = if htlc.offered {
PackageSolvingData::CounterpartyOfferedHTLCOutput(
CounterpartyOfferedHTLCOutput::build(
*per_commitment_point, preimage.unwrap(), htlc.clone(),
*per_commitment_point, preimage.unwrap(),
htlc.clone(),
self.funding.channel_parameters.clone(),
confirmation_height,
)
)
} else {
PackageSolvingData::CounterpartyReceivedHTLCOutput(
CounterpartyReceivedHTLCOutput::build(
*per_commitment_point, htlc.clone(),
*per_commitment_point,
htlc.clone(),
self.funding.channel_parameters.clone(),
confirmation_height,
)
)
};
Expand Down Expand Up @@ -4426,6 +4438,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
let revk_outp = RevokedOutput::build(
per_commitment_point, per_commitment_key, tx.output[idx].value, false,
self.funding.channel_parameters.clone(),
height,
);
let justice_package = PackageTemplate::build_package(
htlc_txid, idx as u32, PackageSolvingData::RevokedOutput(revk_outp),
Expand Down Expand Up @@ -4507,7 +4520,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
.expect("Expected transaction output index for non-dust HTLC");
PackageTemplate::build_package(
tx.txid(), transaction_output_index,
PackageSolvingData::HolderHTLCOutput(HolderHTLCOutput::build(htlc_descriptor)),
PackageSolvingData::HolderHTLCOutput(HolderHTLCOutput::build(htlc_descriptor, conf_height)),
counterparty_spendable_height,
)
})
Expand Down Expand Up @@ -4687,7 +4700,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
let txid = self.funding.current_holder_commitment_tx.trust().txid();
let vout = htlc_descriptor.htlc.transaction_output_index
.expect("Expected transaction output index for non-dust HTLC");
let htlc_output = HolderHTLCOutput::build(htlc_descriptor);
let htlc_output = HolderHTLCOutput::build(htlc_descriptor, 0);
if let Some(htlc_tx) = htlc_output.get_maybe_signed_htlc_tx(
&mut self.onchain_tx_handler, &::bitcoin::OutPoint { txid, vout },
) {
Expand Down
54 changes: 39 additions & 15 deletions lightning/src/chain/onchaintx.rs
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,9 @@ pub struct OnchainTxHandler<ChannelSigner: EcdsaChannelSigner> {
#[cfg(not(any(test, feature = "_test_utils")))]
claimable_outpoints: HashMap<BitcoinOutPoint, (ClaimId, u32)>,

#[cfg(any(test, feature = "_test_utils"))]
pub(crate) locktimed_packages: BTreeMap<u32, Vec<PackageTemplate>>,
#[cfg(not(any(test, feature = "_test_utils")))]
locktimed_packages: BTreeMap<u32, Vec<PackageTemplate>>,

onchain_events_awaiting_threshold_conf: Vec<OnchainEventEntry>,
Expand Down Expand Up @@ -886,9 +889,10 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
// Because fuzzing can cause hash collisions, we can end up with conflicting claim
// ids here, so we only assert when not fuzzing.
debug_assert!(cfg!(fuzzing) || self.pending_claim_requests.get(&claim_id).is_none());
for k in req.outpoints() {
log_info!(logger, "Registering claiming request for {}:{}", k.txid, k.vout);
self.claimable_outpoints.insert(k.clone(), (claim_id, conf_height));
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks like we can get rid of conf_height now

for (k, outpoint_confirmation_height) in req.outpoints_and_creation_heights() {
let creation_height = outpoint_confirmation_height.unwrap_or(conf_height);
log_info!(logger, "Registering claiming request for {}:{}, which exists as of height {creation_height}", k.txid, k.vout);
self.claimable_outpoints.insert(k.clone(), (claim_id, creation_height));
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hm, shouldn't we prefer signed_locktime over creation_height for outpoints that have one though? While the outpoint hasn't been reorged out, claiming it is no longer possible once the block at signed_locktime is disconnected.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In practice I think this would just result in us broadcasting things that cannot enter the mempool until we get back to the expected height. If we have any other claims that were merged into the same package for whatever reason, and they are still valid at the disconnected block height, then this would be a greater issue.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The problem is that when things time out via claimable_outpoints, we don't stop claiming them, we remove them. We don't get them back after a reorg at that point. We could move to pushing things into the locked-packages vec after a block-disconnect, but that seems like a bigger change?

}
self.pending_claim_requests.insert(claim_id, req);
}
Expand Down Expand Up @@ -994,6 +998,17 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
panic!("Inconsistencies between pending_claim_requests map and claimable_outpoints map");
}
}

// Also remove/split any locktimed packages whose inputs have been spent by this transaction.
self.locktimed_packages.retain(|_locktime, packages|{
packages.retain_mut(|package| {
if let Some(p) = package.split_package(&inp.previous_output) {
claimed_outputs_material.push(p);
}
!package.outpoints().is_empty()
});
!packages.is_empty()
});
}
for package in claimed_outputs_material.drain(..) {
let entry = OnchainEventEntry {
Expand Down Expand Up @@ -1135,6 +1150,13 @@ impl<ChannelSigner: EcdsaChannelSigner> OnchainTxHandler<ChannelSigner> {
//- resurrect outpoint back in its claimable set and regenerate tx
match entry.event {
OnchainEvent::ContentiousOutpoint { package } => {
// We pass 0 to `package_locktime` to get the actual required locktime.
let package_locktime = package.package_locktime(0);
if package_locktime >= height {
self.locktimed_packages.entry(package_locktime).or_default().push(package);
continue;
}

if let Some(pending_claim) = self.claimable_outpoints.get(package.outpoints()[0]) {
if let Some(request) = self.pending_claim_requests.get_mut(&pending_claim.0) {
assert!(request.merge_package(package, height).is_ok());
Expand Down Expand Up @@ -1358,19 +1380,21 @@ mod tests {
holder_commit_txid,
htlc.transaction_output_index.unwrap(),
PackageSolvingData::HolderHTLCOutput(HolderHTLCOutput::build(HTLCDescriptor {
channel_derivation_parameters: ChannelDerivationParameters {
value_satoshis: tx_handler.channel_value_satoshis,
keys_id: tx_handler.channel_keys_id,
transaction_parameters: tx_handler.channel_transaction_parameters.clone(),
channel_derivation_parameters: ChannelDerivationParameters {
value_satoshis: tx_handler.channel_value_satoshis,
keys_id: tx_handler.channel_keys_id,
transaction_parameters: tx_handler.channel_transaction_parameters.clone(),
},
commitment_txid: holder_commit_txid,
per_commitment_number: holder_commit.commitment_number(),
per_commitment_point: holder_commit.per_commitment_point(),
feerate_per_kw: holder_commit.feerate_per_kw(),
htlc: htlc.clone(),
preimage: None,
counterparty_sig: *counterparty_sig,
},
commitment_txid: holder_commit_txid,
per_commitment_number: holder_commit.commitment_number(),
per_commitment_point: holder_commit.per_commitment_point(),
feerate_per_kw: holder_commit.feerate_per_kw(),
htlc: htlc.clone(),
preimage: None,
counterparty_sig: *counterparty_sig,
})),
0
)),
0,
));
}
Expand Down
Loading
Loading