Channel splitting #13

Draft · wants to merge 9 commits into base: main
4 changes: 2 additions & 2 deletions .github/workflows/build.yml
@@ -57,7 +57,7 @@ jobs:
- name: Run CI script
if: "!matrix.coverage"
shell: bash # Default on Winblows is powershell
run: ./ci/ci-tests.sh
run: CI_MINIMIZE_DISK_USAGE=1 ./ci/ci-tests.sh
- name: Install deps for kcov
if: matrix.coverage
run: |
@@ -159,7 +159,7 @@ jobs:
rustup override set ${{ env.TOOLCHAIN }}
- name: Fetch full tree and rebase on upstream
run: |
git remote add upstream https://github.com/lightningdevkit/rust-lightning
git remote add upstream https://github.com/p2pderivatives/rust-lightning
git fetch upstream
export GIT_COMMITTER_EMAIL="[email protected]"
export GIT_COMMITTER_NAME="RL CI"
16 changes: 16 additions & 0 deletions ci/ci-tests.sh
@@ -29,11 +29,20 @@ PIN_RELEASE_DEPS # pin the release dependencies in our main workspace
# The quote crate switched to Rust edition 2021 starting with v1.0.31, i.e., has MSRV of 1.56
[ "$RUSTC_MINOR_VERSION" -lt 56 ] && cargo update -p quote --precise "1.0.30" --verbose

# The syn crate depends on too-new proc-macro2 starting with v2.0.33, i.e., has MSRV of 1.56
if [ "$RUSTC_MINOR_VERSION" -lt 56 ]; then
SYN_2_DEP=$(grep -o '"syn 2.*' Cargo.lock | tr -d '",' | tr ' ' ':')
cargo update -p "$SYN_2_DEP" --precise "2.0.32" --verbose
fi

# The proc-macro2 crate switched to Rust edition 2021 starting with v1.0.66, i.e., has MSRV of 1.56
[ "$RUSTC_MINOR_VERSION" -lt 56 ] && cargo update -p proc-macro2 --precise "1.0.65" --verbose

[ "$LDK_COVERAGE_BUILD" != "" ] && export RUSTFLAGS="-C link-dead-code"

# The memchr crate switched to an MSRV of 1.60 starting with v2.6.0
[ "$RUSTC_MINOR_VERSION" -lt 60 ] && cargo update -p memchr --precise "2.5.0" --verbose

export RUST_BACKTRACE=1

echo -e "\n\nBuilding and testing all workspace crates..."
@@ -81,13 +90,20 @@ if [[ $RUSTC_MINOR_VERSION -gt 67 ]]; then
# lightning-transaction-sync's MSRV is 1.67
cargo check --verbose --color always --features lightning-transaction-sync
else
# The memchr crate switched to an MSRV of 1.60 starting with v2.6.0
# This is currently only a release dependency via core2, which we intend to work with
# rust-bitcoin to remove soon.
[ "$RUSTC_MINOR_VERSION" -lt 60 ] && cargo update -p memchr --precise "2.5.0" --verbose
cargo check --verbose --color always
fi
[ "$CI_MINIMIZE_DISK_USAGE" != "" ] && cargo clean
popd

# Test that we can build downstream code with only the "release pins".
pushd msrv-no-dev-deps-check
PIN_RELEASE_DEPS
# The memchr crate switched to an MSRV of 1.60 starting with v2.6.0
[ "$RUSTC_MINOR_VERSION" -lt 60 ] && cargo update -p memchr --precise "2.5.0" --verbose
cargo check
popd

5 changes: 5 additions & 0 deletions fuzz/src/chanmon_consistency.rs
@@ -166,6 +166,10 @@ impl chain::Watch<EnforcingSigner> for TestChainMonitor {
fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>, Option<PublicKey>)> {
return self.chain_monitor.release_pending_monitor_events();
}

fn update_channel_funding_txo(&self, old_funding_txo: OutPoint, new_funding_txo: OutPoint, channel_value_satoshis: u64) -> ChannelMonitorUpdateStatus {
todo!()
}
}

struct KeyProvider {
@@ -306,6 +310,7 @@ fn check_api_err(api_err: APIError, sendable_bounds_violated: bool) {
// We can (obviously) temp-fail a monitor update
},
APIError::IncompatibleShutdownScript { .. } => panic!("Cannot send an incompatible shutdown script"),
APIError::ExternalError { .. } => panic!("We don't produce external errors in fuzz!"),
}
}
#[inline]
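The new `chain::Watch` method is stubbed with `todo!()` above; if the fuzz target ever exercises that path, one option would be to forward the call like the neighbouring `release_pending_monitor_events`. A sketch only, assuming the wrapped `chain_monitor` exposes the same trait method:

```rust
// Hypothetical replacement for the todo!() stub in TestChainMonitor: delegate to the
// wrapped chain monitor, mirroring release_pending_monitor_events() above.
fn update_channel_funding_txo(&self, old_funding_txo: OutPoint, new_funding_txo: OutPoint,
	channel_value_satoshis: u64) -> ChannelMonitorUpdateStatus {
	self.chain_monitor.update_channel_funding_txo(old_funding_txo, new_funding_txo, channel_value_satoshis)
}
```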
5 changes: 5 additions & 0 deletions fuzz/src/router.rs
@@ -271,6 +271,11 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
config: None,
feerate_sat_per_1000_weight: None,
channel_shutdown_state: Some(channelmanager::ChannelShutdownState::NotShuttingDown),
funding_redeemscript: None,
holder_funding_pubkey: get_pubkey!(),
counter_funding_pubkey: None,
original_funding_outpoint: None,
channel_keys_id: [0u8; 32],
});
}
Some(&first_hops_vec[..])
39 changes: 20 additions & 19 deletions lightning-block-sync/src/poll.rs
@@ -120,7 +120,7 @@ impl std::ops::Deref for ValidatedBlockHeader {
impl ValidatedBlockHeader {
/// Checks that the header correctly builds on previous_header: the claimed work differential
/// matches the actual PoW and the difficulty transition is possible, i.e., within 4x.
fn check_builds_on(&self, previous_header: &ValidatedBlockHeader, network: Network) -> BlockSourceResult<()> {
fn check_builds_on(&self, previous_header: &ValidatedBlockHeader, _network: Network) -> BlockSourceResult<()> {
if self.header.prev_blockhash != previous_header.block_hash {
return Err(BlockSourceError::persistent("invalid previous block hash"));
}
@@ -129,24 +129,25 @@ impl ValidatedBlockHeader {
return Err(BlockSourceError::persistent("invalid block height"));
}

let work = self.header.work();
if self.chainwork != previous_header.chainwork + work {
return Err(BlockSourceError::persistent("invalid chainwork"));
}

if let Network::Bitcoin = network {
if self.height % 2016 == 0 {
let target = self.header.target();
let previous_target = previous_header.header.target();
let min_target = previous_target >> 2;
let max_target = previous_target << 2;
if target > max_target || target < min_target {
return Err(BlockSourceError::persistent("invalid difficulty transition"))
}
} else if self.header.bits != previous_header.header.bits {
return Err(BlockSourceError::persistent("invalid difficulty"))
}
}
// let work = self.header.work();
// if self.chainwork != previous_header.chainwork + work {
// return Err(BlockSourceError::persistent("invalid chainwork"));
// }

// TODO(Tibo): This causes issues with Esplora, temporary fix.
// if let Network::Bitcoin = network {
// if self.height % 2016 == 0 {
// let target = self.header.target();
// let previous_target = previous_header.header.target();
// let min_target = previous_target >> 2;
// let max_target = previous_target << 2;
// if target > max_target || target < min_target {
// return Err(BlockSourceError::persistent("invalid difficulty transition"))
// }
// } else if self.header.bits != previous_header.header.bits {
// return Err(BlockSourceError::persistent("invalid difficulty"))
// }
// }

Ok(())
}
4 changes: 2 additions & 2 deletions lightning-invoice/src/lib.rs
@@ -503,7 +503,7 @@ pub struct Bolt11InvoiceSignature(pub RecoverableSignature);

impl PartialOrd for Bolt11InvoiceSignature {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.0.serialize_compact().1.partial_cmp(&other.0.serialize_compact().1)
Some(self.cmp(other))
}
}
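The replacement body is the canonical way to implement `PartialOrd` for a type that already has a total order: since the new code calls `self.cmp`, `Bolt11InvoiceSignature` also implements `Ord`, and delegating keeps the two orderings consistent (Clippy warns when `partial_cmp` does not simply delegate to an existing `cmp`). A minimal, self-contained sketch of the pattern using a stand-in type rather than the real `Bolt11InvoiceSignature`:

```rust
use std::cmp::Ordering;

// Stand-in for a wrapper type whose ordering is defined by its bytes.
#[derive(PartialEq, Eq)]
struct Sig([u8; 64]);

impl Ord for Sig {
	fn cmp(&self, other: &Self) -> Ordering {
		self.0.cmp(&other.0)
	}
}

impl PartialOrd for Sig {
	// Delegate to Ord so partial_cmp and cmp always agree.
	fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
		Some(self.cmp(other))
	}
}

fn main() {
	let a = Sig([0u8; 64]);
	let b = Sig([1u8; 64]);
	assert_eq!(a.partial_cmp(&b), Some(Ordering::Less));
}
```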

@@ -1866,7 +1866,7 @@ mod test {
use lightning::ln::features::Bolt11InvoiceFeatures;
use secp256k1::Secp256k1;
use secp256k1::SecretKey;
use crate::{Bolt11Invoice, RawBolt11Invoice, RawHrp, RawDataPart, Currency, Sha256, PositiveTimestamp,
use crate::{Bolt11Invoice, RawBolt11Invoice, RawHrp, RawDataPart, Currency, Sha256, PositiveTimestamp,
Bolt11SemanticError};

let private_key = SecretKey::from_slice(&[42; 32]).unwrap();
2 changes: 1 addition & 1 deletion lightning-persister/src/lib.rs
@@ -106,7 +106,7 @@ impl FilesystemPersister {
let mut buffer = Cursor::new(&contents);
match <(BlockHash, ChannelMonitor<<SP::Target as SignerProvider>::Signer>)>::read(&mut buffer, (&*entropy_source, &*signer_provider)) {
Ok((blockhash, channel_monitor)) => {
if channel_monitor.get_funding_txo().0.txid != txid || channel_monitor.get_funding_txo().0.index != index {
if channel_monitor.get_original_funding_txo().0.txid != txid || channel_monitor.get_original_funding_txo().0.index != index {
return Err(std::io::Error::new(std::io::ErrorKind::InvalidData,
"ChannelMonitor was stored in the wrong file"));
}
1 change: 1 addition & 0 deletions lightning/rustfmt.toml
@@ -0,0 +1 @@
disable_all_formatting = true
38 changes: 36 additions & 2 deletions lightning/src/chain/chainmonitor.rs
@@ -374,7 +374,7 @@ where C::Target: chain::Filter,
let monitor_states = self.monitors.read().unwrap();
for (_, monitor_state) in monitor_states.iter().filter(|(funding_outpoint, _)| {
for chan in ignored_channels {
if chan.funding_txo.as_ref() == Some(funding_outpoint) {
if chan.funding_txo.as_ref() == Some(funding_outpoint) || chan.original_funding_outpoint.as_ref() == Some(funding_outpoint) {
return false;
}
}
@@ -555,6 +555,15 @@ where C::Target: chain::Filter,
)
}
}

/// Retrieves the latest holder commitment transaction (and possibly HTLC transactions) for
/// the channel identified by the given `funding_txo`. Errors if no monitor is registered
/// for that `funding_txo`.
pub fn get_latest_holder_commitment_txn(&self, funding_txo: &OutPoint) -> Result<Vec<bitcoin::Transaction>, ()> {
let monitors = self.monitors.read().unwrap();
let monitor = monitors.get(funding_txo).ok_or(())?;
Ok(monitor.monitor.get_latest_holder_commitment_txn_internal(&self.logger))
}
}
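A sketch of how a caller could use this new accessor to fetch a channel's force-closing transactions; `chain_monitor` and `funding_txo` are assumed to exist in the surrounding code, and the returned transactions would then be handed to whatever broadcaster the node uses:

```rust
// `chain_monitor` is the ChainMonitor defined in this file; `funding_txo` is the
// OutPoint the monitor is currently keyed under.
match chain_monitor.get_latest_holder_commitment_txn(&funding_txo) {
	Ok(txs) => {
		// txs[0] is the signed holder commitment transaction; any further entries
		// spend its HTLC outputs.
		println!("{} transaction(s) ready to broadcast", txs.len());
	},
	Err(()) => {
		// No monitor is registered for this funding outpoint.
		eprintln!("unknown channel");
	},
}
```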

impl<ChannelSigner: WriteableEcdsaChannelSigner, C: Deref, T: Deref, F: Deref, L: Deref, P: Deref>
@@ -685,6 +694,31 @@ where C::Target: chain::Filter,
persist_res
}

fn update_channel_funding_txo(&self, old_funding_txo: OutPoint, new_funding_txo: OutPoint, channel_value_satoshis: u64) -> ChannelMonitorUpdateStatus {
let mut monitors = self.monitors.write().unwrap();
let monitor_opt = monitors.get_mut(&old_funding_txo);
match monitor_opt {
None => {
log_error!(self.logger, "Failed to update channel monitor funding txo: no such monitor registered");

// We should never ever trigger this from within ChannelManager. Technically a
// user could use this object with some proxying in between which makes this
// possible, but in tests and fuzzing, this should be a panic.
#[cfg(any(test, fuzzing))]
panic!("ChannelManager generated a channel update for a channel that was not yet registered!");
#[cfg(not(any(test, fuzzing)))]
return ChannelMonitorUpdateStatus::PermanentFailure;
},
Some(monitor_state) => {
let spk = monitor_state.monitor.update_funding_info(new_funding_txo, channel_value_satoshis);
if let Some(filter) = &self.chain_source {
filter.register_output(WatchedOutput { block_hash: None, outpoint: new_funding_txo, script_pubkey: spk });
}
return ChannelMonitorUpdateStatus::Completed;
}
}
}

/// Note that we persist the given `ChannelMonitor` update while holding the
/// `ChainMonitor` monitors lock.
fn update_channel(&self, funding_txo: OutPoint, update: &ChannelMonitorUpdate) -> ChannelMonitorUpdateStatus {
@@ -766,7 +800,7 @@ where C::Target: chain::Filter,
}
let monitor_events = monitor_state.monitor.get_and_clear_pending_monitor_events();
if monitor_events.len() > 0 {
let monitor_outpoint = monitor_state.monitor.get_funding_txo().0;
let monitor_outpoint = monitor_state.monitor.get_original_funding_txo().0;
let counterparty_node_id = monitor_state.monitor.get_counterparty_node_id();
pending_monitor_events.push((monitor_outpoint, monitor_events, counterparty_node_id));
}
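A sketch of the call flow this new `chain::Watch` method enables when a channel's funding output is replaced; the names are illustrative and error handling is trimmed:

```rust
// `watch` is a chain::Watch implementation such as the ChainMonitor above;
// `old_funding_txo` is the outpoint the monitor is currently registered under and
// `new_funding_txo`/`new_value_sats` describe the replacement funding output.
match watch.update_channel_funding_txo(old_funding_txo, new_funding_txo, new_value_sats) {
	// The monitor now watches spends of the new outpoint. Per the matching logic in
	// channelmonitor.rs, a spend of the original outpoint other than the split
	// transaction itself is still treated as a channel close.
	ChannelMonitorUpdateStatus::Completed => {},
	// Returned when no monitor was registered under `old_funding_txo`.
	ChannelMonitorUpdateStatus::PermanentFailure => panic!("monitor not found"),
	_ => {},
}
```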
69 changes: 66 additions & 3 deletions lightning/src/chain/channelmonitor.rs
@@ -763,6 +763,7 @@ pub(crate) struct ChannelMonitorImpl<Signer: WriteableEcdsaChannelSigner> {
channel_keys_id: [u8; 32],
holder_revocation_basepoint: PublicKey,
funding_info: (OutPoint, Script),
original_funding_info: Option<(OutPoint, Script)>,
current_counterparty_commitment_txid: Option<Txid>,
prev_counterparty_commitment_txid: Option<Txid>,

@@ -945,6 +946,13 @@ impl<Signer: WriteableEcdsaChannelSigner> Writeable for ChannelMonitorImpl<Signe
writer.write_all(&self.funding_info.0.txid[..])?;
writer.write_all(&self.funding_info.0.index.to_be_bytes())?;
self.funding_info.1.write(writer)?;
if let Some(ref original_funding_info) = self.original_funding_info {
writer.write_all(&[0; 1])?;
original_funding_info.0.write(writer)?;
original_funding_info.1.write(writer)?;
} else {
writer.write_all(&[1; 1])?;
}
self.current_counterparty_commitment_txid.write(writer)?;
self.prev_counterparty_commitment_txid.write(writer)?;

@@ -1187,6 +1195,7 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
channel_keys_id,
holder_revocation_basepoint,
funding_info,
original_funding_info: None,
current_counterparty_commitment_txid: None,
prev_counterparty_commitment_txid: None,

@@ -1252,6 +1261,23 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
txid, htlc_outputs, commitment_number, their_per_commitment_point, logger)
}

pub(crate) fn update_funding_info(&self, fund_outpoint: OutPoint, channel_value_satoshis: u64) -> Script {
let mut inner = self.inner.lock().unwrap();
let script = inner.funding_info.1.clone();
if let Some(original) = inner.original_funding_info.as_ref() {
if fund_outpoint == original.0 {
inner.original_funding_info = None;
}
} else {
inner.original_funding_info = Some((inner.funding_info.0.clone(), inner.funding_info.1.clone()));
}
inner.outputs_to_watch.insert(fund_outpoint.txid, vec![(fund_outpoint.index as u32, script.clone())]);
inner.funding_info = (fund_outpoint, script.clone());
inner.channel_value_satoshis = channel_value_satoshis;
inner.onchain_tx_handler.signer.set_channel_value_satoshis(channel_value_satoshis);
script
}

#[cfg(test)]
fn provide_latest_holder_commitment_tx(
&self, holder_commitment_tx: HolderCommitmentTransaction,
@@ -1308,6 +1334,11 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
self.inner.lock().unwrap().get_funding_txo().clone()
}

/// Gets the funding transaction outpoint and script the channel was originally created with,
/// falling back to the current funding info if the funding output has never been updated.
pub fn get_original_funding_txo(&self) -> (OutPoint, Script) {
self.inner.lock().unwrap().get_original_funding_txo().clone()
}

/// Gets a list of txids, with their output scripts (in the order they appear in the
/// transaction), which we must learn about spends of via block_connected().
pub fn get_outputs_to_watch(&self) -> Vec<(Txid, Vec<(u32, Script)>)> {
@@ -1416,6 +1447,11 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitor<Signer> {
self.inner.lock().unwrap().get_latest_holder_commitment_txn(logger)
}

pub(crate) fn get_latest_holder_commitment_txn_internal<L: Deref>(&self, logger: &L) -> Vec<Transaction>
where L::Target: Logger {
self.inner.lock().unwrap().get_latest_holder_commitment_txn_internal(logger)
}

/// Unsafe test-only version of get_latest_holder_commitment_txn used by our test framework
/// to bypass HolderCommitmentTransaction state update lockdown after signature and generate
/// revoked commitment transaction.
@@ -2573,6 +2609,10 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
&self.funding_info
}

pub fn get_original_funding_txo(&self) -> &(OutPoint, Script) {
&self.original_funding_info.as_ref().unwrap_or(&self.funding_info)
}

pub fn get_outputs_to_watch(&self) -> &HashMap<Txid, Vec<(u32, Script)>> {
// If we've detected a counterparty commitment tx on chain, we must include it in the set
// of outputs to watch for spends of, otherwise we're likely to lose user funds. Because
@@ -3018,8 +3058,12 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
}

pub fn get_latest_holder_commitment_txn<L: Deref>(&mut self, logger: &L) -> Vec<Transaction> where L::Target: Logger {
log_debug!(logger, "Getting signed latest holder commitment transaction!");
self.holder_tx_signed = true;
self.get_latest_holder_commitment_txn_internal(logger)
}

pub(crate) fn get_latest_holder_commitment_txn_internal<L: Deref>(&mut self, logger: &L) -> Vec<Transaction> where L::Target: Logger {
log_debug!(logger, "Getting signed latest holder commitment transaction!");
let commitment_tx = self.onchain_tx_handler.get_fully_signed_holder_tx(&self.funding_redeemscript);
let txid = commitment_tx.txid();
let mut holder_transactions = vec![commitment_tx];
@@ -3186,7 +3230,14 @@ impl<Signer: WriteableEcdsaChannelSigner> ChannelMonitorImpl<Signer> {
// (except for HTLC transactions for channels with anchor outputs), which is an easy
// way to filter out any potential non-matching txn for lazy filters.
let prevout = &tx.input[0].previous_output;
if prevout.txid == self.funding_info.0.txid && prevout.vout == self.funding_info.0.index as u32 {
let match_prevout = |outpoint: &OutPoint| {
prevout.txid == outpoint.txid && prevout.vout == outpoint.index as u32
};
let is_split = tx.output.len() == 2 && tx.output[0].script_pubkey == tx.output[1].script_pubkey;
let is_match = match_prevout(&self.funding_info.0) ||
(self.original_funding_info.is_some() && match_prevout(&self.original_funding_info.as_ref().unwrap().0) && !is_split);

if is_match {
let mut balance_spendable_csv = None;
log_info!(logger, "Channel {} closed by funding output spend in txid {}.",
log_bytes!(self.funding_info.0.to_channel_id()), txid);
@@ -3945,6 +3996,16 @@ impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP
index: Readable::read(reader)?,
};
let funding_info = (outpoint, Readable::read(reader)?);
let original_funding_info = match <u8 as Readable>::read(reader)? {
0 => {
let outpoint = Readable::read(reader)?;
let script = Readable::read(reader)?;
Some((outpoint, script))
},
1 => { None },
_ => return Err(DecodeError::InvalidValue),
};

let current_counterparty_commitment_txid = Readable::read(reader)?;
let prev_counterparty_commitment_txid = Readable::read(reader)?;

@@ -4141,6 +4202,7 @@ impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP
channel_keys_id,
holder_revocation_basepoint,
funding_info,
original_funding_info,
current_counterparty_commitment_txid,
prev_counterparty_commitment_txid,

@@ -4399,7 +4461,8 @@ mod tests {
selected_contest_delay: 67,
}),
funding_outpoint: Some(funding_outpoint),
channel_type_features: ChannelTypeFeatures::only_static_remote_key()
channel_type_features: ChannelTypeFeatures::only_static_remote_key(),
original_funding_outpoint: None,
};
// Prune with one old state and a holder commitment tx holding a few overlaps with the
// old state.
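For reference, a self-contained sketch of the optional-field encoding the monitor (de)serialization adopts above: a one-byte marker, 0 for present and 1 for absent, followed by the payload. Simplified stand-in types are used instead of the real `OutPoint` and `Script`:

```rust
use std::io::{self, Cursor, Read, Write};

// Simplified stand-in for the (OutPoint, Script) pair kept in original_funding_info.
type FundingInfo = (u32, Vec<u8>);

fn write_optional(w: &mut impl Write, v: &Option<FundingInfo>) -> io::Result<()> {
	match v {
		Some((index, script)) => {
			w.write_all(&[0u8])?;                       // marker byte: value present
			w.write_all(&index.to_be_bytes())?;
			w.write_all(&(script.len() as u16).to_be_bytes())?;
			w.write_all(script)
		},
		None => w.write_all(&[1u8]),                    // marker byte: value absent
	}
}

fn read_optional(r: &mut impl Read) -> io::Result<Option<FundingInfo>> {
	let mut marker = [0u8; 1];
	r.read_exact(&mut marker)?;
	match marker[0] {
		0 => {
			let mut idx = [0u8; 4];
			r.read_exact(&mut idx)?;
			let mut len = [0u8; 2];
			r.read_exact(&mut len)?;
			let mut script = vec![0u8; u16::from_be_bytes(len) as usize];
			r.read_exact(&mut script)?;
			Ok(Some((u32::from_be_bytes(idx), script)))
		},
		1 => Ok(None),
		// Anything else is a corrupt stream, analogous to DecodeError::InvalidValue above.
		_ => Err(io::Error::new(io::ErrorKind::InvalidData, "bad marker byte")),
	}
}

fn main() -> io::Result<()> {
	let original = Some((1u32, vec![0xaa, 0xbb, 0xcc]));
	let mut buf = Vec::new();
	write_optional(&mut buf, &original)?;
	assert_eq!(read_optional(&mut Cursor::new(buf))?, original);
	Ok(())
}
```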