From f777814a7a265f2bc0f4b3d743ad9e758c144d51 Mon Sep 17 00:00:00 2001 From: t-bast Date: Tue, 3 Feb 2026 11:41:01 +0100 Subject: [PATCH 1/7] Improve channel and payment events We improve the following events: - `TransactionPublished` includes more details about mining fees and an optional liquidity purchase - all channel events include the latest `channel_type` - `PaymentRelayed` exposes the `relayFee` earned --- .../acinq/eclair/channel/ChannelEvents.scala | 18 +++++++-- .../fr/acinq/eclair/channel/Helpers.scala | 7 ++-- .../channel/fsm/DualFundingHandlers.scala | 2 +- .../eclair/channel/fsm/ErrorHandlers.scala | 15 +++++--- .../channel/fsm/SingleFundingHandlers.scala | 2 +- .../channel/publish/MempoolTxMonitor.scala | 4 +- .../fr/acinq/eclair/db/DbEventHandler.scala | 26 ++++++------- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 19 +++------- .../eclair/db/sqlite/SqliteAuditDb.scala | 19 +++------- .../acinq/eclair/payment/PaymentEvents.scala | 8 ++-- .../eclair/payment/relay/ChannelRelay.scala | 2 +- .../relay/PostRestartHtlcCleaner.scala | 2 +- .../eclair/wire/protocol/LiquidityAds.scala | 4 +- .../publish/MempoolTxMonitorSpec.scala | 7 +++- .../fr/acinq/eclair/db/AuditDbSpec.scala | 37 +++++++++---------- .../fr/acinq/eclair/db/PgUtilsSpec.scala | 2 +- .../eclair/json/JsonSerializersSpec.scala | 2 +- .../payment/relay/ChannelRelayerSpec.scala | 8 ++-- .../fr/acinq/eclair/api/ApiServiceSpec.scala | 4 +- 19 files changed, 95 insertions(+), 93 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelEvents.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelEvents.scala index 890b93e322..cee5e99f21 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelEvents.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelEvents.scala @@ -21,8 +21,9 @@ import fr.acinq.bitcoin.scalacompat.Crypto.PublicKey import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, Transaction, TxId} import 
fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel.Helpers.Closing.ClosingType +import fr.acinq.eclair.transactions.Transactions import fr.acinq.eclair.wire.protocol._ -import fr.acinq.eclair.{BlockHeight, CltvExpiry, Features, MilliSatoshi, RealShortChannelId, ShortChannelId} +import fr.acinq.eclair.{BlockHeight, CltvExpiry, Features, MilliSatoshi, RealShortChannelId, ShortChannelId, TimestampMilli} /** * Created by PM on 17/08/2016. @@ -92,10 +93,19 @@ case class ChannelLiquidityPurchased(channel: ActorRef, channelId: ByteVector32, case class ChannelErrorOccurred(channel: ActorRef, channelId: ByteVector32, remoteNodeId: PublicKey, error: ChannelError, isFatal: Boolean) extends ChannelEvent -// NB: the fee should be set to 0 when we're not paying it. -case class TransactionPublished(channelId: ByteVector32, remoteNodeId: PublicKey, tx: Transaction, miningFee: Satoshi, desc: String) extends ChannelEvent +/** + * We published a transaction related to the given [[channelId]]. + * + * @param localMiningFee mining fee paid by us in the given [[tx]]. + * @param remoteMiningFee mining fee paid by our channel peer in the given [[tx]]. + * @param liquidityPurchase_opt optional liquidity purchase included in this transaction. 
+ */ +case class TransactionPublished(channelId: ByteVector32, remoteNodeId: PublicKey, tx: Transaction, localMiningFee: Satoshi, remoteMiningFee: Satoshi, desc: String, liquidityPurchase_opt: Option[LiquidityAds.PurchaseBasicInfo], timestamp: TimestampMilli = TimestampMilli.now()) extends ChannelEvent { + val miningFee: Satoshi = localMiningFee + remoteMiningFee + val feerate: FeeratePerKw = Transactions.fee2rate(miningFee, tx.weight()) +} -case class TransactionConfirmed(channelId: ByteVector32, remoteNodeId: PublicKey, tx: Transaction) extends ChannelEvent +case class TransactionConfirmed(channelId: ByteVector32, remoteNodeId: PublicKey, tx: Transaction, timestamp: TimestampMilli = TimestampMilli.now()) extends ChannelEvent // NB: this event is only sent when the channel is available. case class AvailableBalanceChanged(channel: ActorRef, channelId: ByteVector32, aliases: ShortIdAliases, commitments: Commitments, lastAnnouncement_opt: Option[ChannelAnnouncement]) extends ChannelEvent diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala index b50b4a15f6..e09f4221a4 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala @@ -971,11 +971,12 @@ object Helpers { } } - /** Compute the fee paid by a commitment transaction. */ - def commitTxFee(commitInput: InputInfo, commitTx: Transaction, localPaysCommitTxFees: Boolean): Satoshi = { + /** Compute the fee paid by a commitment transaction. The first result is the fee paid by us, the second one is the fee paid by our peer. 
*/ + def commitTxFee(commitInput: InputInfo, commitTx: Transaction, localPaysCommitTxFees: Boolean): (Satoshi, Satoshi) = { require(commitTx.txIn.size == 1, "transaction must have only one input") require(commitTx.txIn.exists(txIn => txIn.outPoint == commitInput.outPoint), "transaction must spend the funding output") - if (localPaysCommitTxFees) commitInput.txOut.amount - commitTx.txOut.map(_.amount).sum else 0 sat + val commitFee = commitInput.txOut.amount - commitTx.txOut.map(_.amount).sum + if (localPaysCommitTxFees) (commitFee, 0 sat) else (0 sat, commitFee) } /** Return the confirmation target that should be used for our local commitment. */ diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/DualFundingHandlers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/DualFundingHandlers.scala index 0f891bea1a..75c188f413 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/DualFundingHandlers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/DualFundingHandlers.scala @@ -51,7 +51,7 @@ trait DualFundingHandlers extends CommonFundingHandlers { // to publish and we may be able to RBF. wallet.publishTransaction(fundingTx.signedTx).onComplete { case Success(_) => - context.system.eventStream.publish(TransactionPublished(dualFundedTx.fundingParams.channelId, remoteNodeId, fundingTx.signedTx, fundingTx.tx.localFees.truncateToSatoshi, "funding")) + context.system.eventStream.publish(TransactionPublished(dualFundedTx.fundingParams.channelId, remoteNodeId, fundingTx.signedTx, localMiningFee = fundingTx.tx.localFees.truncateToSatoshi, remoteMiningFee = fundingTx.tx.remoteFees.truncateToSatoshi, "funding", dualFundedTx.liquidityPurchase_opt)) // We rely on Bitcoin Core ZMQ notifications to learn about transactions that appear in our mempool, but // it doesn't provide strong guarantees that we'll always receive an event. 
This can be an issue for 0-conf // funding transactions, where we end up delaying our channel_ready or splice_locked. diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala index 6807f366ff..96a1f2a516 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala @@ -232,7 +232,8 @@ trait ErrorHandlers extends CommonHandlers { /** Publish 2nd-stage transactions for our local commitment. */ def doPublish(lcp: LocalCommitPublished, txs: Closing.LocalClose.SecondStageTransactions, commitment: FullCommitment): Unit = { - val publishCommitTx = PublishFinalTx(lcp.commitTx, commitment.fundingInput, "commit-tx", Closing.commitTxFee(commitment.commitInput(channelKeys), lcp.commitTx, commitment.localChannelParams.paysCommitTxFees), None) + val (localCommitFee, _) = Closing.commitTxFee(commitment.commitInput(channelKeys), lcp.commitTx, commitment.localChannelParams.paysCommitTxFees) + val publishCommitTx = PublishFinalTx(lcp.commitTx, commitment.fundingInput, "commit-tx", localCommitFee, None) val publishAnchorTx_opt = txs.anchorTx_opt match { case Some(anchorTx) if !lcp.isConfirmed => val confirmationTarget = Closing.confirmationTarget(commitment.localCommit, commitment.localCommitParams.dustLimit, commitment.commitmentFormat, nodeParams.onChainFeeConf) @@ -274,7 +275,8 @@ trait ErrorHandlers extends CommonHandlers { case closing: DATA_CLOSING => nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates, closing.maxClosingFeerate_opt) case _ => nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates, maxClosingFeerateOverride_opt = None) } - context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, commitTx, Closing.commitTxFee(commitments.commitInput(channelKeys), commitTx, 
d.commitments.localChannelParams.paysCommitTxFees), "remote-commit")) + val (localCommitFee, remoteCommitFee) = Closing.commitTxFee(commitments.commitInput(channelKeys), commitTx, d.commitments.localChannelParams.paysCommitTxFees) + context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, commitTx, localCommitFee, remoteCommitFee, "remote-commit", None)) val (remoteCommitPublished, closingTxs) = Closing.RemoteClose.claimCommitTxOutputs(channelKeys, commitments, commitments.remoteCommit, commitTx, closingFeerate, finalScriptPubKey, nodeParams.onChainFeeConf.spendAnchorWithoutHtlcs) val nextData = d match { case closing: DATA_CLOSING => closing.copy(remoteCommitPublished = Some(remoteCommitPublished)) @@ -296,7 +298,8 @@ trait ErrorHandlers extends CommonHandlers { case closing: DATA_CLOSING => nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates, closing.maxClosingFeerate_opt) case _ => nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates, maxClosingFeerateOverride_opt = None) } - context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, commitTx, Closing.commitTxFee(commitment.commitInput(channelKeys), commitTx, d.commitments.localChannelParams.paysCommitTxFees), "next-remote-commit")) + val (localCommitFee, remoteCommitFee) = Closing.commitTxFee(commitment.commitInput(channelKeys), commitTx, d.commitments.localChannelParams.paysCommitTxFees) + context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, commitTx, localCommitFee, remoteCommitFee, "next-remote-commit", None)) val (remoteCommitPublished, closingTxs) = Closing.RemoteClose.claimCommitTxOutputs(channelKeys, commitment, remoteCommit, commitTx, closingFeerate, finalScriptPubKey, nodeParams.onChainFeeConf.spendAnchorWithoutHtlcs) val nextData = d match { case closing: DATA_CLOSING => closing.copy(nextRemoteCommitPublished = Some(remoteCommitPublished)) @@ -350,7 +353,8 @@ trait 
ErrorHandlers extends CommonHandlers { val dustLimit = commitment.localCommitParams.dustLimit val (revokedCommitPublished, closingTxs) = Closing.RevokedClose.claimCommitTxOutputs(d.commitments.channelParams, channelKeys, tx, commitmentNumber, remotePerCommitmentSecret, toSelfDelay, commitmentFormat, nodeParams.db.channels, dustLimit, nodeParams.currentBitcoinCoreFeerates, nodeParams.onChainFeeConf, finalScriptPubKey) log.warning("txid={} was a revoked commitment, publishing the penalty tx", tx.txid) - context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, tx, Closing.commitTxFee(commitment.commitInput(channelKeys), tx, d.commitments.localChannelParams.paysCommitTxFees), "revoked-commit")) + val (localCommitFee, remoteCommitFee) = Closing.commitTxFee(commitment.commitInput(channelKeys), tx, d.commitments.localChannelParams.paysCommitTxFees) + context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, tx, localCommitFee, remoteCommitFee, "revoked-commit", None)) val exc = FundingTxSpent(d.channelId, tx.txid) val error = Error(d.channelId, exc.getMessage) val nextData = d match { @@ -364,7 +368,8 @@ trait ErrorHandlers extends CommonHandlers { case None => d match { case d: DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT => log.warning("they published a future commit (because we asked them to) in txid={}", tx.txid) - context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, tx, Closing.commitTxFee(d.commitments.latest.commitInput(channelKeys), tx, d.commitments.localChannelParams.paysCommitTxFees), "future-remote-commit")) + val (localCommitFee, remoteCommitFee) = Closing.commitTxFee(d.commitments.latest.commitInput(channelKeys), tx, d.commitments.localChannelParams.paysCommitTxFees) + context.system.eventStream.publish(TransactionPublished(d.channelId, remoteNodeId, tx, localCommitFee, remoteCommitFee, "future-remote-commit", None)) val remotePerCommitmentPoint = 
d.remoteChannelReestablish.myCurrentPerCommitmentPoint val commitKeys = d.commitments.latest.remoteKeys(channelKeys, remotePerCommitmentPoint) val closingFeerate = nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates, maxClosingFeerateOverride_opt = None) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/SingleFundingHandlers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/SingleFundingHandlers.scala index 95a1192493..27994b16f4 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/SingleFundingHandlers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/SingleFundingHandlers.scala @@ -43,7 +43,7 @@ trait SingleFundingHandlers extends CommonFundingHandlers { def publishFundingTx(channelId: ByteVector32, fundingTx: Transaction, fundingTxFee: Satoshi, replyTo: akka.actor.typed.ActorRef[OpenChannelResponse]): Unit = { wallet.commit(fundingTx).onComplete { case Success(true) => - context.system.eventStream.publish(TransactionPublished(channelId, remoteNodeId, fundingTx, fundingTxFee, "funding")) + context.system.eventStream.publish(TransactionPublished(channelId, remoteNodeId, fundingTx, localMiningFee = fundingTxFee, remoteMiningFee = 0 sat, "funding", None)) replyTo ! OpenChannelResponse.Created(channelId, fundingTxId = fundingTx.txid, fundingTxFee) case Success(false) => replyTo ! 
OpenChannelResponse.Rejected("couldn't publish funding tx") diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitor.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitor.scala index 0470229637..df7c334063 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitor.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitor.scala @@ -19,7 +19,7 @@ package fr.acinq.eclair.channel.publish import akka.actor.typed.eventstream.EventStream import akka.actor.typed.scaladsl.{ActorContext, Behaviors, TimerScheduler} import akka.actor.typed.{ActorRef, Behavior} -import fr.acinq.bitcoin.scalacompat.{ByteVector32, OutPoint, Satoshi, Transaction, TxId} +import fr.acinq.bitcoin.scalacompat.{ByteVector32, OutPoint, Satoshi, SatoshiLong, Transaction, TxId} import fr.acinq.eclair.blockchain.CurrentBlockHeight import fr.acinq.eclair.blockchain.bitcoind.rpc.BitcoinCoreClient import fr.acinq.eclair.channel.publish.TxPublisher.{TxPublishContext, TxRejectedReason} @@ -136,7 +136,7 @@ private class MempoolTxMonitor(nodeParams: NodeParams, private def waitForConfirmation(): Behavior[Command] = { context.system.eventStream ! EventStream.Subscribe(context.messageAdapter[CurrentBlockHeight](cbc => WrappedCurrentBlockHeight(cbc.blockHeight))) - context.system.eventStream ! EventStream.Publish(TransactionPublished(txPublishContext.channelId_opt.getOrElse(ByteVector32.Zeroes), txPublishContext.remoteNodeId, cmd.tx, cmd.fee, cmd.desc)) + context.system.eventStream ! 
EventStream.Publish(TransactionPublished(txPublishContext.channelId_opt.getOrElse(ByteVector32.Zeroes), txPublishContext.remoteNodeId, cmd.tx, localMiningFee = cmd.fee, remoteMiningFee = 0 sat, cmd.desc, None)) Behaviors.receiveMessagePartial { case WrappedCurrentBlockHeight(currentBlockHeight) => timers.startSingleTimer(CheckTxConfirmationsKey, CheckTxConfirmations(currentBlockHeight), (1 + Random.nextLong(nodeParams.channelConf.maxTxPublishRetryDelay.toMillis)).millis) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/DbEventHandler.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/DbEventHandler.scala index ba249a831d..9275ade6f4 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/DbEventHandler.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/DbEventHandler.scala @@ -29,7 +29,7 @@ import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.payment.Monitoring.{Metrics => PaymentMetrics, Tags => PaymentTags} import fr.acinq.eclair.payment._ -import fr.acinq.eclair.{Logs, NodeParams} +import fr.acinq.eclair.{Logs, NodeParams, TimestampMilli} /** * This actor sits at the interface between our event stream and the database. 
@@ -90,8 +90,8 @@ class DbEventHandler(nodeParams: NodeParams) extends Actor with DiagnosticActorL incoming.foreach(p => channelsDb.updateChannelMeta(p.channelId, ChannelEvent.EventType.PaymentReceived)) outgoing.foreach(p => channelsDb.updateChannelMeta(p.channelId, ChannelEvent.EventType.PaymentSent)) case ChannelPaymentRelayed(_, incoming, outgoing) => - channelsDb.updateChannelMeta(incoming.channelId, ChannelEvent.EventType.PaymentReceived) - channelsDb.updateChannelMeta(outgoing.channelId, ChannelEvent.EventType.PaymentSent) + incoming.foreach(i => channelsDb.updateChannelMeta(i.channelId, ChannelEvent.EventType.PaymentReceived)) + outgoing.foreach(o => channelsDb.updateChannelMeta(o.channelId, ChannelEvent.EventType.PaymentSent)) case OnTheFlyFundingPaymentRelayed(_, incoming, outgoing) => incoming.foreach(p => channelsDb.updateChannelMeta(p.channelId, ChannelEvent.EventType.PaymentReceived)) outgoing.foreach(p => channelsDb.updateChannelMeta(p.channelId, ChannelEvent.EventType.PaymentSent)) @@ -124,7 +124,7 @@ class DbEventHandler(nodeParams: NodeParams) extends Actor with DiagnosticActorL case ChannelStateChanged(_, channelId, _, remoteNodeId, WAIT_FOR_CHANNEL_READY | WAIT_FOR_DUAL_FUNDING_READY, NORMAL, Some(commitments)) => ChannelMetrics.ChannelLifecycleEvents.withTag(ChannelTags.Event, ChannelTags.Events.Created).increment() val event = ChannelEvent.EventType.Created - auditDb.add(ChannelEvent(channelId, remoteNodeId, commitments.latest.fundingTxId, commitments.latest.capacity, commitments.localChannelParams.isChannelOpener, !commitments.announceChannel, event)) + auditDb.add(ChannelEvent(channelId, remoteNodeId, commitments.latest.fundingTxId, commitments.latest.commitmentFormat.toString, commitments.latest.capacity, commitments.localChannelParams.isChannelOpener, !commitments.announceChannel, event.label)) channelsDb.updateChannelMeta(channelId, event) case ChannelStateChanged(_, channelId, _, _, OFFLINE, SYNCING, _) => 
channelsDb.updateChannelMeta(channelId, ChannelEvent.EventType.Connected) @@ -141,7 +141,7 @@ class DbEventHandler(nodeParams: NodeParams) extends Actor with DiagnosticActorL case 0 => ChannelEvent.EventType.Confirmed case _ => ChannelEvent.EventType.Spliced } - auditDb.add(ChannelEvent(e.channelId, e.remoteNodeId, e.fundingTxId, e.commitments.latest.capacity, e.commitments.localChannelParams.isChannelOpener, !e.commitments.announceChannel, event)) + auditDb.add(ChannelEvent(e.channelId, e.remoteNodeId, e.fundingTxId, e.commitments.latest.commitmentFormat.toString, e.commitments.latest.capacity, e.commitments.localChannelParams.isChannelOpener, !e.commitments.announceChannel, event.label)) case e: ChannelClosed => ChannelMetrics.ChannelLifecycleEvents.withTag(ChannelTags.Event, ChannelTags.Events.Closed).increment() @@ -150,7 +150,7 @@ class DbEventHandler(nodeParams: NodeParams) extends Actor with DiagnosticActorL // spent by the closing transaction. val capacity = e.commitments.latest.capacity val fundingTxId = e.commitments.latest.fundingTxId - auditDb.add(ChannelEvent(e.channelId, e.commitments.remoteNodeId, fundingTxId, capacity, e.commitments.localChannelParams.isChannelOpener, !e.commitments.announceChannel, event)) + auditDb.add(ChannelEvent(e.channelId, e.commitments.remoteNodeId, fundingTxId, e.commitments.latest.commitmentFormat.toString, capacity, e.commitments.localChannelParams.isChannelOpener, !e.commitments.announceChannel, event.label)) channelsDb.updateChannelMeta(e.channelId, event) case u: ChannelUpdateParametersChanged => @@ -178,7 +178,7 @@ object DbEventHandler { def props(nodeParams: NodeParams): Props = Props(new DbEventHandler(nodeParams)) // @formatter:off - case class ChannelEvent(channelId: ByteVector32, remoteNodeId: PublicKey, fundingTxId: TxId, capacity: Satoshi, isChannelOpener: Boolean, isPrivate: Boolean, event: ChannelEvent.EventType) + case class ChannelEvent(channelId: ByteVector32, remoteNodeId: PublicKey, fundingTxId: TxId, 
channelType: String, capacity: Satoshi, isChannelOpener: Boolean, isPrivate: Boolean, event: String, timestamp: TimestampMilli = TimestampMilli.now()) object ChannelEvent { sealed trait EventType { def label: String } object EventType { @@ -190,12 +190,12 @@ object DbEventHandler { object PaymentReceived extends EventType { override def label: String = "received" } case class Closed(closingType: ClosingType) extends EventType { override def label: String = closingType match { - case _: MutualClose => "mutual" - case _: LocalClose => "local" - case _: CurrentRemoteClose => "remote" - case _: NextRemoteClose => "remote" - case _: RecoveryClose => "recovery" - case _: RevokedClose => "revoked" + case _: MutualClose => "mutual-close" + case _: LocalClose => "local-close" + case _: CurrentRemoteClose => "remote-close" + case _: NextRemoteClose => "remote-close" + case _: RecoveryClose => "recovery-close" + case _: RevokedClose => "revoked-close" } } } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index 4caa243a72..e3e7a83e7f 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -195,7 +195,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.setLong(3, e.capacity.toLong) statement.setBoolean(4, e.isChannelOpener) statement.setBoolean(5, e.isPrivate) - statement.setString(6, e.event.label) + statement.setString(6, e.event) statement.setTimestamp(7, Timestamp.from(Instant.now())) statement.executeUpdate() } @@ -243,8 +243,8 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { val payments = e match { case e: ChannelPaymentRelayed => // non-trampoline relayed payments have one input and one output - val in = Seq(RelayedPart(e.paymentIn.channelId, e.paymentIn.amount, "IN", "channel", e.startedAt)) - val out = 
Seq(RelayedPart(e.paymentOut.channelId, e.paymentOut.amount, "OUT", "channel", e.settledAt)) + val in = e.incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "channel", i.receivedAt)) + val out = e.outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "channel", o.settledAt)) in ++ out case TrampolinePaymentRelayed(_, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) => using(pg.prepareStatement("INSERT INTO audit.relayed_trampoline VALUES (?, ?, ?, ?)")) { statement => @@ -450,7 +450,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { val outgoing = parts.filter(_.direction == "OUT").map(p => PaymentEvent.OutgoingPayment(p.channelId, PrivateKey(ByteVector32.One).publicKey, p.amount, p.timestamp)).sortBy(_.amount) parts.headOption match { case Some(RelayedPart(_, _, _, "channel", _)) => incoming.zip(outgoing).map { - case (in, out) => ChannelPaymentRelayed(paymentHash, in, out) + case (in, out) => ChannelPaymentRelayed(paymentHash, Seq(in), Seq(out)) } case Some(RelayedPart(_, _, _, "trampoline", _)) => trampolineByHash.get(paymentHash) match { case Some((nextTrampolineAmount, nextTrampolineNodeId)) => TrampolinePaymentRelayed(paymentHash, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) :: Nil @@ -502,16 +502,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { val relayed = listRelayed(from, to).foldLeft(Map.empty[ByteVector32, Seq[Relayed]]) { (previous, e) => // NB: we must avoid counting the fee twice: we associate it to the outgoing channels rather than the incoming ones. 
- val current = e match { - case c: ChannelPaymentRelayed => Map( - c.paymentIn.channelId -> (Relayed(c.amountIn, 0 msat, "IN") +: previous.getOrElse(c.paymentIn.channelId, Nil)), - c.paymentOut.channelId -> (Relayed(c.amountOut, c.amountIn - c.amountOut, "OUT") +: previous.getOrElse(c.paymentOut.channelId, Nil)), - ) - case t: TrampolinePaymentRelayed => - aggregateRelayStats(previous, t.incoming, t.outgoing) - case f: OnTheFlyFundingPaymentRelayed => - aggregateRelayStats(previous, f.incoming, f.outgoing) - } + val current = aggregateRelayStats(previous, e.incoming, e.outgoing) previous ++ current } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index 464d9daed4..d753208fcb 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -189,7 +189,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.setLong(3, e.capacity.toLong) statement.setBoolean(4, e.isChannelOpener) statement.setBoolean(5, e.isPrivate) - statement.setString(6, e.event.label) + statement.setString(6, e.event) statement.setLong(7, TimestampMilli.now().toLong) statement.executeUpdate() } @@ -231,8 +231,8 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { val payments = e match { case e: ChannelPaymentRelayed => // non-trampoline relayed payments have one input and one output - val in = Seq(RelayedPart(e.paymentIn.channelId, e.paymentIn.amount, "IN", "channel", e.startedAt)) - val out = Seq(RelayedPart(e.paymentOut.channelId, e.paymentOut.amount, "OUT", "channel", e.settledAt)) + val in = e.incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "channel", i.receivedAt)) + val out = e.outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "channel", o.settledAt)) in ++ out case TrampolinePaymentRelayed(_, 
incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) => using(sqlite.prepareStatement("INSERT INTO relayed_trampoline VALUES (?, ?, ?, ?)")) { statement => @@ -421,7 +421,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { val outgoing = parts.filter(_.direction == "OUT").map(p => PaymentEvent.OutgoingPayment(p.channelId, PrivateKey(ByteVector32.One).publicKey, p.amount, p.timestamp)).sortBy(_.amount) parts.headOption match { case Some(RelayedPart(_, _, _, "channel", _)) => incoming.zip(outgoing).map { - case (in, out) => ChannelPaymentRelayed(paymentHash, in, out) + case (in, out) => ChannelPaymentRelayed(paymentHash, Seq(in), Seq(out)) } case Some(RelayedPart(_, _, _, "trampoline", _)) => trampolineByHash.get(paymentHash) match { case Some((nextTrampolineAmount, nextTrampolineNodeId)) => TrampolinePaymentRelayed(paymentHash, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) :: Nil @@ -472,16 +472,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { val relayed = listRelayed(from, to).foldLeft(Map.empty[ByteVector32, Seq[Relayed]]) { (previous, e) => // NB: we must avoid counting the fee twice: we associate it to the outgoing channels rather than the incoming ones. 
- val current = e match { - case c: ChannelPaymentRelayed => Map( - c.paymentIn.channelId -> (Relayed(c.amountIn, 0 msat, "IN") +: previous.getOrElse(c.paymentIn.channelId, Nil)), - c.paymentOut.channelId -> (Relayed(c.amountOut, c.amountIn - c.amountOut, "OUT") +: previous.getOrElse(c.paymentOut.channelId, Nil)), - ) - case t: TrampolinePaymentRelayed => - aggregateRelayStats(previous, t.incoming, t.outgoing) - case f: OnTheFlyFundingPaymentRelayed => - aggregateRelayStats(previous, f.incoming, f.outgoing) - } + val current = aggregateRelayStats(previous, e.incoming, e.outgoing) previous ++ current } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/payment/PaymentEvents.scala b/eclair-core/src/main/scala/fr/acinq/eclair/payment/PaymentEvents.scala index 7874753217..f8553a811a 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/payment/PaymentEvents.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/payment/PaymentEvents.scala @@ -120,16 +120,14 @@ sealed trait PaymentRelayed extends PaymentEvent { def outgoing: Seq[PaymentEvent.OutgoingPayment] def amountIn: MilliSatoshi = incoming.map(_.amount).sum def amountOut: MilliSatoshi = outgoing.map(_.amount).sum + def relayFee: MilliSatoshi = amountIn - amountOut override def startedAt: TimestampMilli = incoming.map(_.receivedAt).minOption.getOrElse(TimestampMilli.now()) override def settledAt: TimestampMilli = outgoing.map(_.settledAt).maxOption.getOrElse(TimestampMilli.now()) // @formatter:on } -/** A payment was successfully relayed from a single incoming channel to a single outgoing channel. */ -case class ChannelPaymentRelayed(paymentHash: ByteVector32, paymentIn: PaymentEvent.IncomingPayment, paymentOut: PaymentEvent.OutgoingPayment) extends PaymentRelayed { - override val incoming: Seq[PaymentEvent.IncomingPayment] = Seq(paymentIn) - override val outgoing: Seq[PaymentEvent.OutgoingPayment] = Seq(paymentOut) -} +/** A payment was successfully relayed from incoming channels to outgoing channels. 
*/ +case class ChannelPaymentRelayed(paymentHash: ByteVector32, incoming: Seq[PaymentEvent.IncomingPayment], outgoing: Seq[PaymentEvent.OutgoingPayment]) extends PaymentRelayed /** A trampoline payment was successfully relayed, using potentially multiple incoming and outgoing channels. */ case class TrampolinePaymentRelayed(paymentHash: ByteVector32, incoming: Seq[PaymentEvent.IncomingPayment], outgoing: Seq[PaymentEvent.OutgoingPayment], nextTrampolineNodeId: PublicKey, nextTrampolineAmount: MilliSatoshi) extends PaymentRelayed diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/ChannelRelay.scala b/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/ChannelRelay.scala index 97f987787b..2911c4dd33 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/ChannelRelay.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/ChannelRelay.scala @@ -258,7 +258,7 @@ class ChannelRelay private(nodeParams: NodeParams, val cmd = CMD_FULFILL_HTLC(upstream.add.id, fulfill.paymentPreimage, Some(attribution), commit = true) val incoming = PaymentEvent.IncomingPayment(upstream.add.channelId, upstream.receivedFrom, upstream.amountIn, upstream.receivedAt) val outgoing = PaymentEvent.OutgoingPayment(htlc.channelId, remoteNodeId, htlc.amountMsat, now) - context.system.eventStream ! EventStream.Publish(ChannelPaymentRelayed(htlc.paymentHash, incoming, outgoing)) + context.system.eventStream ! 
EventStream.Publish(ChannelPaymentRelayed(htlc.paymentHash, Seq(incoming), Seq(outgoing))) recordRelayDuration(isSuccess = true) safeSendAndStop(upstream.add.channelId, cmd) case WrappedAddResponse(RES_ADD_SETTLED(_, _, htlc, fail: HtlcResult.Fail)) => diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/PostRestartHtlcCleaner.scala b/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/PostRestartHtlcCleaner.scala index 7952335af7..9236f0430b 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/PostRestartHtlcCleaner.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/payment/relay/PostRestartHtlcCleaner.scala @@ -211,7 +211,7 @@ class PostRestartHtlcCleaner(nodeParams: NodeParams, register: ActorRef, initial } PendingCommandsDb.safeSend(register, nodeParams.db.pendingCommands, u.originChannelId, CMD_FULFILL_HTLC(u.originHtlcId, paymentPreimage, None, commit = true)) // We don't know when we received this HTLC so we just pretend that we received it just now. 
- context.system.eventStream.publish(ChannelPaymentRelayed(fulfilledHtlc.paymentHash, PaymentEvent.IncomingPayment(u.originChannelId, u.originNodeId, u.amountIn, TimestampMilli.now()), PaymentEvent.OutgoingPayment(fulfilledHtlc.channelId, downstreamNodeId, fulfilledHtlc.amountMsat, TimestampMilli.now()))) + context.system.eventStream.publish(ChannelPaymentRelayed(fulfilledHtlc.paymentHash, Seq(PaymentEvent.IncomingPayment(u.originChannelId, u.originNodeId, u.amountIn, TimestampMilli.now())), Seq(PaymentEvent.OutgoingPayment(fulfilledHtlc.channelId, downstreamNodeId, fulfilledHtlc.amountMsat, TimestampMilli.now())))) Metrics.PendingRelayedOut.decrement() context become main(brokenHtlcs.copy(relayedOut = brokenHtlcs.relayedOut - origin)) case u: Upstream.Cold.Trampoline => diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/wire/protocol/LiquidityAds.scala b/eclair-core/src/main/scala/fr/acinq/eclair/wire/protocol/LiquidityAds.scala index fbf983bad9..79c5fd660b 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/wire/protocol/LiquidityAds.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/wire/protocol/LiquidityAds.scala @@ -249,7 +249,9 @@ object LiquidityAds { case class WillFundPurchase(willFund: WillFund, purchase: Purchase) /** Minimal information about a liquidity purchase, useful for example when RBF-ing transactions. 
*/ - case class PurchaseBasicInfo(isBuyer: Boolean, amount: Satoshi, fees: Fees) + case class PurchaseBasicInfo(isBuyer: Boolean, amount: Satoshi, fees: Fees) { + val isSeller: Boolean = !isBuyer + } object Codecs { val fundingRate: Codec[FundingRate] = ( diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitorSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitorSpec.scala index 2f02d3239d..331de836cc 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitorSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/channel/publish/MempoolTxMonitorSpec.scala @@ -32,6 +32,7 @@ import fr.acinq.eclair.channel.publish.TxPublisher.TxRejectedReason._ import fr.acinq.eclair.channel.{TransactionConfirmed, TransactionPublished} import fr.acinq.eclair.{TestConstants, TestKitBaseClass, randomKey} import org.scalatest.BeforeAndAfterAll +import org.scalatest.Inside.inside import org.scalatest.funsuite.AnyFunSuiteLike import java.util.UUID @@ -279,7 +280,11 @@ class MempoolTxMonitorSpec extends TestKitBaseClass with AnyFunSuiteLike with Bi generateBlocks(2) monitor ! 
WrappedCurrentBlockHeight(currentBlockHeight()) - eventListener.expectMsg(TransactionConfirmed(txPublished.channelId, txPublished.remoteNodeId, tx)) + inside(eventListener.expectMsgType[TransactionConfirmed]) { e => + assert(e.channelId == txPublished.channelId) + assert(e.remoteNodeId == txPublished.remoteNodeId) + assert(e.tx == tx) + } } } diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index 8f2b4ae9b9..9a07b6f51d 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -21,7 +21,6 @@ import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, SatoshiLong, Script, T import fr.acinq.eclair.TestDatabases.{TestPgDatabases, TestSqliteDatabases} import fr.acinq.eclair.TestUtils.randomTxId import fr.acinq.eclair._ -import fr.acinq.eclair.channel.Helpers.Closing.MutualClose import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.AuditDb.Stats import fr.acinq.eclair.db.DbEventHandler.ChannelEvent @@ -70,8 +69,8 @@ class AuditDbSpec extends AnyFunSuite { val pp2a = PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now) val pp2b = PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42100 msat, now) val e2 = PaymentReceived(randomBytes32(), pp2a :: pp2b :: Nil) - val e3 = ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now - 3.seconds), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 1000 msat, now)) - val e4a = TransactionPublished(randomBytes32(), randomKey().publicKey, Transaction(0, Seq.empty, Seq.empty, 0), 42 sat, "mutual") + val e3 = ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now - 3.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 1000 msat, now))) + 
val e4a = TransactionPublished(randomBytes32(), randomKey().publicKey, Transaction(0, Seq.empty, Seq.empty, 0), 42 sat, 0 sat, "mutual", None) val e4b = TransactionConfirmed(e4a.channelId, e4a.remoteNodeId, e4a.tx) val e4c = TransactionConfirmed(randomBytes32(), randomKey().publicKey, Transaction(2, Nil, TxOut(500 sat, hex"1234") :: Nil, 0)) val pp5a = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, 0 unixms), 1000 msat, None, startedAt = 0 unixms) @@ -79,7 +78,7 @@ class AuditDbSpec extends AnyFunSuite { val e5 = PaymentSent(UUID.randomUUID(), randomBytes32(), 84100 msat, randomKey().publicKey, pp5a :: pp5b :: Nil, None, startedAt = 0 unixms) val pp6 = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, settledAt = now + 10.minutes), 1000 msat, None, startedAt = now + 10.minutes) val e6 = PaymentSent(UUID.randomUUID(), randomBytes32(), 42000 msat, randomKey().publicKey, pp6 :: Nil, None, startedAt = now + 10.minutes) - val e7 = ChannelEvent(randomBytes32(), randomKey().publicKey, randomTxId(), 456123000 sat, isChannelOpener = true, isPrivate = false, ChannelEvent.EventType.Closed(MutualClose(null))) + val e7 = ChannelEvent(randomBytes32(), randomKey().publicKey, randomTxId(), "anchor_outputs", 456123000 sat, isChannelOpener = true, isPrivate = false, "mutual-close") val e10 = TrampolinePaymentRelayed(randomBytes32(), Seq( PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 20000 msat, now - 7.seconds), @@ -92,8 +91,8 @@ class AuditDbSpec extends AnyFunSuite { ), randomKey().publicKey, 30000 msat) val multiPartPaymentHash = randomBytes32() - val e11 = ChannelPaymentRelayed(multiPartPaymentHash, PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 13000 msat, now - 5.seconds), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 11000 msat, now + 4.milli)) - val e12 = 
ChannelPaymentRelayed(multiPartPaymentHash, PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 15000 msat, now - 4.seconds), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 12500 msat, now + 5.milli)) + val e11 = ChannelPaymentRelayed(multiPartPaymentHash, Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 13000 msat, now - 5.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 11000 msat, now + 4.milli))) + val e12 = ChannelPaymentRelayed(multiPartPaymentHash, Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 15000 msat, now - 4.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 12500 msat, now + 5.milli))) db.add(e1) db.add(e2) @@ -144,26 +143,26 @@ class AuditDbSpec extends AnyFunSuite { val c5 = c1.copy(bytes = 0x05b +: c1.tail) val c6 = c1.copy(bytes = 0x06b +: c1.tail) - db.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 46000 msat, 1000 unixms), PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 44000 msat, 1001 unixms))) - db.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 41000 msat, 1002 unixms), PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 40000 msat, 1003 unixms))) - db.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 43000 msat, 1004 unixms), PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 42000 msat, 1005 unixms))) - db.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 42000 msat, 1006 unixms), PaymentEvent.OutgoingPayment(c2, randomKey().publicKey, 40000 msat, 1007 unixms))) - db.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 45000 msat, 1008 unixms), PaymentEvent.OutgoingPayment(c6, randomKey().publicKey, 40000 msat, 1009 unixms))) + 
db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 46000 msat, 1000 unixms)), Seq(PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 44000 msat, 1001 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 41000 msat, 1002 unixms)), Seq(PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 40000 msat, 1003 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 43000 msat, 1004 unixms)), Seq(PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 42000 msat, 1005 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 42000 msat, 1006 unixms)), Seq(PaymentEvent.OutgoingPayment(c2, randomKey().publicKey, 40000 msat, 1007 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 45000 msat, 1008 unixms)), Seq(PaymentEvent.OutgoingPayment(c6, randomKey().publicKey, 40000 msat, 1009 unixms)))) db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 25000 msat, 1010 unixms)), Seq(PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 20000 msat, 1011 unixms)), randomKey().publicKey, 15000 msat)) db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 46000 msat, 1012 unixms)), Seq(PaymentEvent.OutgoingPayment(c2, randomKey().publicKey, 16000 msat, 1013 unixms), PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 10000 msat, 1014 unixms), PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 14000 msat, 1015 unixms)), randomKey().publicKey, 37000 msat)) // The following confirmed txs will be taken into account. 
- db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0), 200 sat, "funding")) + db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0), 200 sat, 0 sat, "funding", None)) db.add(TransactionConfirmed(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0))) - db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(4000 sat, hex"00112233")), 0), 300 sat, "mutual")) + db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(4000 sat, hex"00112233")), 0), 300 sat, 0 sat, "mutual", None)) db.add(TransactionConfirmed(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(4000 sat, hex"00112233")), 0))) - db.add(TransactionPublished(c3, n3, Transaction(0, Seq.empty, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 400 sat, "funding")) + db.add(TransactionPublished(c3, n3, Transaction(0, Seq.empty, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 400 sat, 0 sat, "funding", None)) db.add(TransactionConfirmed(c3, n3, Transaction(0, Seq.empty, Seq(TxOut(8000 sat, hex"deadbeef")), 0))) - db.add(TransactionPublished(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(6000 sat, hex"0000000000")), 0), 500 sat, "funding")) + db.add(TransactionPublished(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(6000 sat, hex"0000000000")), 0), 500 sat, 0 sat, "funding", None)) db.add(TransactionConfirmed(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(6000 sat, hex"0000000000")), 0))) // The following txs will not be taken into account. 
- db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0), 1000 sat, "funding")) // duplicate - db.add(TransactionPublished(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(4500 sat, hex"1111222233")), 0), 500 sat, "funding")) // unconfirmed + db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0), 1000 sat, 0 sat, "funding", None)) // duplicate + db.add(TransactionPublished(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(4500 sat, hex"1111222233")), 0), 500 sat, 0 sat, "funding", None)) // unconfirmed db.add(TransactionConfirmed(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(2500 sat, hex"ffffff")), 0))) // doesn't match a published tx assert(db.listPublished(randomBytes32()).isEmpty) @@ -204,7 +203,7 @@ class AuditDbSpec extends AnyFunSuite { channelIds.foreach(channelId => { val nodeId = nodeIds(Random.nextInt(nodeCount)) val fundingTx = Transaction(0, Seq.empty, Seq(TxOut(5000 sat, Script.pay2wpkh(nodeId))), 0) - db.add(TransactionPublished(channelId, nodeId, fundingTx, 100 sat, "funding")) + db.add(TransactionPublished(channelId, nodeId, fundingTx, 100 sat, 0 sat, "funding", None)) db.add(TransactionConfirmed(channelId, nodeId, fundingTx)) }) // Add relay events. 
@@ -217,7 +216,7 @@ class AuditDbSpec extends AnyFunSuite { db.add(TrampolinePaymentRelayed(randomBytes32(), incoming, outgoing, randomKey().publicKey, 5000 msat)) } else { val toChannelId = channelIds(Random.nextInt(channelCount)) - db.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 10000 msat, TimestampMilli.now() - 2.seconds), PaymentEvent.OutgoingPayment(toChannelId, randomKey().publicKey, Random.nextInt(10000).msat, TimestampMilli.now()))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 10000 msat, TimestampMilli.now() - 2.seconds)), Seq(PaymentEvent.OutgoingPayment(toChannelId, randomKey().publicKey, Random.nextInt(10000).msat, TimestampMilli.now())))) } }) // Test starts here. diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/PgUtilsSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/PgUtilsSpec.scala index 89bfd76c5b..7e97f96d1a 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/PgUtilsSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/PgUtilsSpec.scala @@ -173,7 +173,7 @@ class PgUtilsSpec extends TestKitBaseClass with AnyFunSuiteLike with Eventually db.network.addNode(Announcements.makeNodeAnnouncement(randomKey(), "node-A", Color(50, 99, -80), Nil, Features.empty, TimestampSecond.now() - 45.days)) db.network.addNode(Announcements.makeNodeAnnouncement(randomKey(), "node-B", Color(50, 99, -80), Nil, Features.empty, TimestampSecond.now() - 3.days)) db.network.addNode(Announcements.makeNodeAnnouncement(randomKey(), "node-C", Color(50, 99, -80), Nil, Features.empty, TimestampSecond.now() - 7.minutes)) - db.audit.add(ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 421 msat, TimestampMilli.now() - 5.seconds), PaymentEvent.OutgoingPayment(randomBytes32(), randomKey().publicKey, 400 msat, TimestampMilli.now() - 3.seconds))) + 
db.audit.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 421 msat, TimestampMilli.now() - 5.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), randomKey().publicKey, 400 msat, TimestampMilli.now() - 3.seconds)))) db.dataSource.close() } diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/json/JsonSerializersSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/json/JsonSerializersSpec.scala index ffdd099ba7..241bad4b98 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/json/JsonSerializersSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/json/JsonSerializersSpec.scala @@ -404,7 +404,7 @@ class JsonSerializersSpec extends TestKitBaseClass with AnyFunSuiteLike with Mat } test("type hints") { - val e1 = ChannelPaymentRelayed(randomBytes32(), PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 110 msat, 100 unixms), PaymentEvent.OutgoingPayment(randomBytes32(), randomKey().publicKey, 100 msat, 150 unixms)) + val e1 = ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 110 msat, 100 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), randomKey().publicKey, 100 msat, 150 unixms))) assert(JsonSerializers.serialization.writePretty(e1)(JsonSerializers.formats).contains("\"type\" : \"payment-relayed\"")) } diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/payment/relay/ChannelRelayerSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/payment/relay/ChannelRelayerSpec.scala index 8f93d78191..7e9037af2a 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/payment/relay/ChannelRelayerSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/payment/relay/ChannelRelayerSpec.scala @@ -767,11 +767,11 @@ class ChannelRelayerSpec extends ScalaTestWithActorTestKit(ConfigFactory.load("a val paymentRelayed = eventListener.expectMessageType[ChannelPaymentRelayed] 
assert(paymentRelayed.paymentHash == r.add.paymentHash) assert(paymentRelayed.amountIn == r.add.amountMsat) - assert(paymentRelayed.paymentIn.channelId == r.add.channelId) - assert(paymentRelayed.paymentIn.remoteNodeId == TestConstants.Alice.nodeParams.nodeId) + assert(paymentRelayed.incoming.map(_.channelId) == Seq(r.add.channelId)) + assert(paymentRelayed.incoming.map(_.remoteNodeId) == Seq(TestConstants.Alice.nodeParams.nodeId)) assert(paymentRelayed.amountOut == r.amountToForward) - assert(paymentRelayed.paymentOut.channelId == channelId1) - assert(paymentRelayed.paymentOut.remoteNodeId == remoteNodeId2) + assert(paymentRelayed.outgoing.map(_.channelId) == Seq(channelId1)) + assert(paymentRelayed.outgoing.map(_.remoteNodeId) == Seq(remoteNodeId2)) assert(paymentRelayed.startedAt == r.receivedAt) assert(paymentRelayed.settledAt >= now) } diff --git a/eclair-node/src/test/scala/fr/acinq/eclair/api/ApiServiceSpec.scala b/eclair-node/src/test/scala/fr/acinq/eclair/api/ApiServiceSpec.scala index 71529e795c..fb1a12cb2d 100644 --- a/eclair-node/src/test/scala/fr/acinq/eclair/api/ApiServiceSpec.scala +++ b/eclair-node/src/test/scala/fr/acinq/eclair/api/ApiServiceSpec.scala @@ -1136,8 +1136,8 @@ class ApiServiceSpec extends AnyFunSuite with ScalatestRouteTest with IdiomaticM system.eventStream.publish(ps) wsClient.expectMessage(expectedSerializedPs) - val prel = ChannelPaymentRelayed(ByteVector32.Zeroes, PaymentEvent.IncomingPayment(ByteVector32.Zeroes, previousNodeId, 21 msat, TimestampMilli(1553784961048L)), PaymentEvent.OutgoingPayment(ByteVector32.One, nextNodeId, 20 msat, TimestampMilli(1553784963659L))) - val expectedSerializedPrel = 
"""{"type":"payment-relayed","paymentHash":"0000000000000000000000000000000000000000000000000000000000000000","paymentIn":{"channelId":"0000000000000000000000000000000000000000000000000000000000000000","remoteNodeId":"02e899d99662f2e64ea0eeaecb53c4628fa40a22d7185076e42e8a3d67fcb7b8e6","amount":21,"receivedAt":{"iso":"2019-03-28T14:56:01.048Z","unix":1553784961}},"paymentOut":{"channelId":"0100000000000000000000000000000000000000000000000000000000000000","remoteNodeId":"030bb6a5e0c6b203c7e2180fb78c7ba4bdce46126761d8201b91ddac089cdecc87","amount":20,"settledAt":{"iso":"2019-03-28T14:56:03.659Z","unix":1553784963}}}""" + val prel = ChannelPaymentRelayed(ByteVector32.Zeroes, Seq(PaymentEvent.IncomingPayment(ByteVector32.Zeroes, previousNodeId, 21 msat, TimestampMilli(1553784961048L))), Seq(PaymentEvent.OutgoingPayment(ByteVector32.One, nextNodeId, 20 msat, TimestampMilli(1553784963659L)))) + val expectedSerializedPrel = """{"type":"payment-relayed","paymentHash":"0000000000000000000000000000000000000000000000000000000000000000","incoming":[{"channelId":"0000000000000000000000000000000000000000000000000000000000000000","remoteNodeId":"02e899d99662f2e64ea0eeaecb53c4628fa40a22d7185076e42e8a3d67fcb7b8e6","amount":21,"receivedAt":{"iso":"2019-03-28T14:56:01.048Z","unix":1553784961}}],"outgoing":[{"channelId":"0100000000000000000000000000000000000000000000000000000000000000","remoteNodeId":"030bb6a5e0c6b203c7e2180fb78c7ba4bdce46126761d8201b91ddac089cdecc87","amount":20,"settledAt":{"iso":"2019-03-28T14:56:03.659Z","unix":1553784963}}]}""" assert(serialization.write(prel) == expectedSerializedPrel) system.eventStream.publish(prel) wsClient.expectMessage(expectedSerializedPrel) From 43c8f6077e9a42b644eee1adb6ad25373b07ad77 Mon Sep 17 00:00:00 2001 From: t-bast Date: Tue, 27 Jan 2026 17:30:25 +0100 Subject: [PATCH 2/7] Improve `channel_events` in the `AuditDb` We improve the `channel_events` table in the `AuditDb` by: - adding the `funding_txid` from each event - adding the 
`channel_type` from each event - using hex instead of blobs in sqlite - providing APIs to list these events - adding indexes on `channel_id` and `remote_node_id` The added data isn't available in past events. We decide to simply rename the older tables and start fresh: previous data will not be available from the API, but can still be queried directly in SQL if necessary. --- .../scala/fr/acinq/eclair/db/AuditDb.scala | 4 + .../scala/fr/acinq/eclair/db/Databases.scala | 3 +- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 84 ++++++++++++-- .../eclair/db/sqlite/SqliteAuditDb.scala | 93 +++++++++++++-- .../fr/acinq/eclair/db/AuditDbSpec.scala | 106 +++++++++++++++++- 5 files changed, 265 insertions(+), 25 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala index b708a8c1cc..f4f34601b1 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala @@ -44,6 +44,10 @@ trait AuditDb { def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] + def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] + + def listChannelEvents(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] + def listSent(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentSent] def listReceived(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentReceived] diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/Databases.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/Databases.scala index e47b9fdb7e..685e319ab5 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/Databases.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/Databases.scala @@ -271,8 +271,9 @@ object Databases extends Logging { if (urlFile.exists()) { val oldUrl = 
readString(urlFile.toPath) - if (oldUrl != url) + if (url != null && oldUrl != null && oldUrl != url) { throw JdbcUrlChanged(oldUrl, url) + } } else { writeString(urlFile.toPath, url) } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index e3e7a83e7f..411e2c8c78 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -35,7 +35,7 @@ import javax.sql.DataSource object PgAuditDb { val DB_NAME = "audit" - val CURRENT_VERSION = 13 + val CURRENT_VERSION = 14 } class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { @@ -117,6 +117,17 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX IF NOT EXISTS relayed_channel_id_idx ON audit.relayed(channel_id)") } + def migration1314(statement: Statement): Unit = { + // We add the funding_txid and channel_type fields to channel_events. + statement.executeUpdate("ALTER TABLE audit.channel_events RENAME TO channel_events_before_v14") + statement.executeUpdate("DROP INDEX audit.channel_events_timestamp_idx") + statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + // We recreate indexes for updated tables. 
+ statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON audit.channel_events(channel_id)") + statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON audit.channel_events(node_id)") + statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") + } + getVersion(statement, DB_NAME) match { case None => statement.executeUpdate("CREATE SCHEMA audit") @@ -125,7 +136,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE audit.received (amount_msat BIGINT NOT NULL, payment_hash TEXT NOT NULL, from_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.relayed (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, channel_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.relayed_trampoline (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, next_node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") - statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat BIGINT NOT NULL, fee_proportional_millionths BIGINT NOT NULL, cltv_expiry_delta BIGINT NOT NULL, htlc_minimum_msat BIGINT NOT NULL, htlc_maximum_msat BIGINT NOT NULL, 
timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.path_finding_metrics (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, status TEXT NOT NULL, duration_ms BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL, is_mpp BOOLEAN NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id TEXT NOT NULL, payment_hash TEXT, routing_hints JSONB)") statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, mining_fee_sat BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") @@ -138,6 +149,8 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON audit.relayed_trampoline(timestamp)") statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON audit.relayed_trampoline(payment_hash)") statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON audit.relayed(channel_id)") + statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON audit.channel_events(channel_id)") + statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON audit.channel_events(node_id)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON audit.channel_updates(channel_id)") statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON audit.channel_updates(node_id)") @@ -151,7 +164,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON audit.transactions_published(timestamp)") statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON 
audit.transactions_confirmed(timestamp)") - case Some(v@(4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12)) => + case Some(v@(4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13)) => logger.warn(s"migrating db $DB_NAME, found version=$v current=$CURRENT_VERSION") if (v < 5) { migration45(statement) @@ -180,6 +193,9 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { if (v < 13) { migration1213(statement) } + if (v < 14) { + migration1314(statement) + } case Some(CURRENT_VERSION) => () // table is up-to-date, nothing to do case Some(unknownVersion) => throw new RuntimeException(s"Unknown version of DB $DB_NAME found, version=$unknownVersion") } @@ -189,14 +205,16 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: ChannelEvent): Unit = withMetrics("audit/add-channel-lifecycle", DbBackends.Postgres) { inTransaction { pg => - using(pg.prepareStatement("INSERT INTO audit.channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + using(pg.prepareStatement("INSERT INTO audit.channel_events VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => statement.setString(1, e.channelId.toHex) - statement.setString(2, e.remoteNodeId.value.toHex) - statement.setLong(3, e.capacity.toLong) - statement.setBoolean(4, e.isChannelOpener) - statement.setBoolean(5, e.isPrivate) - statement.setString(6, e.event) - statement.setTimestamp(7, Timestamp.from(Instant.now())) + statement.setString(2, e.remoteNodeId.toHex) + statement.setString(3, e.fundingTxId.value.toHex) + statement.setString(4, e.channelType) + statement.setLong(5, e.capacity.toLong) + statement.setBoolean(6, e.isChannelOpener) + statement.setBoolean(7, e.isPrivate) + statement.setString(8, e.event) + statement.setTimestamp(9, e.timestamp.toSqlTimestamp) statement.executeUpdate() } } @@ -348,6 +366,52 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { } } + override def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): 
Seq[ChannelEvent] = withMetrics("audit/list-channel-events-by-channel-id", DbBackends.Postgres) { + inTransaction { pg => + using(pg.prepareStatement("SELECT * FROM audit.channel_events WHERE channel_id = ? AND timestamp BETWEEN ? AND ?")) { statement => + statement.setString(1, channelId.toHex) + statement.setTimestamp(2, from.toSqlTimestamp) + statement.setTimestamp(3, to.toSqlTimestamp) + statement.executeQuery().map { rs => + ChannelEvent( + channelId = channelId, + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + fundingTxId = TxId(rs.getByteVector32FromHex("funding_txid")), + channelType = rs.getString("channel_type"), + capacity = Satoshi(rs.getLong("capacity_sat")), + isChannelOpener = rs.getBoolean("is_opener"), + isPrivate = rs.getBoolean("is_private"), + event = rs.getString("event"), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")), + ) + }.toSeq + } + } + } + + override def listChannelEvents(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] = withMetrics("audit/list-channel-events-by-node-id", DbBackends.Postgres) { + inTransaction { pg => + using(pg.prepareStatement("SELECT * FROM audit.channel_events WHERE node_id = ? AND timestamp BETWEEN ? 
AND ?")) { statement => + statement.setString(1, remoteNodeId.toHex) + statement.setTimestamp(2, from.toSqlTimestamp) + statement.setTimestamp(3, to.toSqlTimestamp) + statement.executeQuery().map { rs => + ChannelEvent( + channelId = rs.getByteVector32FromHex("channel_id"), + remoteNodeId = remoteNodeId, + fundingTxId = TxId(rs.getByteVector32FromHex("funding_txid")), + channelType = rs.getString("channel_type"), + capacity = Satoshi(rs.getLong("capacity_sat")), + isChannelOpener = rs.getBoolean("is_opener"), + isPrivate = rs.getBoolean("is_private"), + event = rs.getString("event"), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")), + ) + }.toSeq + } + } + } + override def listSent(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentSent] = inTransaction { pg => using(pg.prepareStatement("SELECT * FROM audit.sent WHERE timestamp BETWEEN ? AND ?")) { statement => diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index d753208fcb..6420cc3143 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -33,7 +33,7 @@ import java.util.UUID object SqliteAuditDb { val DB_NAME = "audit" - val CURRENT_VERSION = 10 + val CURRENT_VERSION = 11 } class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { @@ -117,14 +117,34 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON relayed(channel_id)") } + def migration1011(statement: Statement): Unit = { + // We add the funding_txid and channel_type fields to channel_events and use TEXT instead of BLOBs. 
+ statement.executeUpdate("ALTER TABLE channel_events RENAME TO channel_events_before_v14") + statement.executeUpdate("DROP INDEX channel_events_timestamp_idx") + statement.executeUpdate("CREATE TABLE channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat INTEGER NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") + // We update the channel_updates table to use TEXT instead of BLOBs. + statement.executeUpdate("ALTER TABLE channel_updates RENAME TO channel_updates_before_v14") + statement.executeUpdate("DROP INDEX channel_updates_cid_idx") + statement.executeUpdate("DROP INDEX channel_updates_nid_idx") + statement.executeUpdate("DROP INDEX channel_updates_timestamp_idx") + statement.executeUpdate("CREATE TABLE channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") + // We recreate indexes for updated tables. 
+ statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON channel_events(channel_id)") + statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON channel_events(node_id)") + statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") + statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") + statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") + statement.executeUpdate("CREATE INDEX channel_updates_timestamp_idx ON channel_updates(timestamp)") + } + getVersion(statement, DB_NAME) match { case None => statement.executeUpdate("CREATE TABLE sent (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, recipient_amount_msat INTEGER NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash BLOB NOT NULL, payment_preimage BLOB NOT NULL, recipient_node_id BLOB NOT NULL, to_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE received (amount_msat INTEGER NOT NULL, payment_hash BLOB NOT NULL, from_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE relayed (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, channel_id BLOB NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE relayed_trampoline (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, next_node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") - statement.executeUpdate("CREATE TABLE channel_events (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, capacity_sat INTEGER NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") - statement.executeUpdate("CREATE TABLE channel_updates (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER 
NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat INTEGER NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE path_finding_metrics (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, status TEXT NOT NULL, duration_ms INTEGER NOT NULL, timestamp INTEGER NOT NULL, is_mpp INTEGER NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id BLOB NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_published (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, mining_fee_sat INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") @@ -136,6 +156,8 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON relayed(channel_id)") statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON relayed_trampoline(timestamp)") statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON relayed_trampoline(payment_hash)") + statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON channel_events(channel_id)") + statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON 
channel_events(node_id)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") @@ -147,7 +169,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON transactions_published(channel_id)") statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON transactions_published(timestamp)") statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON transactions_confirmed(timestamp)") - case Some(v@(1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9)) => + case Some(v@(1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10)) => logger.warn(s"migrating db $DB_NAME, found version=$v current=$CURRENT_VERSION") if (v < 2) { migration12(statement) @@ -176,6 +198,9 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { if (v < 10) { migration910(statement) } + if (v < 11) { + migration1011(statement) + } case Some(CURRENT_VERSION) => () // table is up-to-date, nothing to do case Some(unknownVersion) => throw new RuntimeException(s"Unknown version of DB $DB_NAME found, version=$unknownVersion") } @@ -183,14 +208,16 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def add(e: ChannelEvent): Unit = withMetrics("audit/add-channel-lifecycle", DbBackends.Sqlite) { - using(sqlite.prepareStatement("INSERT INTO channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => - statement.setBytes(1, e.channelId.toArray) - statement.setBytes(2, e.remoteNodeId.value.toArray) - statement.setLong(3, e.capacity.toLong) - statement.setBoolean(4, e.isChannelOpener) - statement.setBoolean(5, e.isPrivate) - statement.setString(6, e.event) - statement.setLong(7, TimestampMilli.now().toLong) + 
using(sqlite.prepareStatement("INSERT INTO channel_events VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.channelId.toHex) + statement.setString(2, e.remoteNodeId.toHex) + statement.setString(3, e.fundingTxId.value.toHex) + statement.setString(4, e.channelType) + statement.setLong(5, e.capacity.toLong) + statement.setBoolean(6, e.isChannelOpener) + statement.setBoolean(7, e.isPrivate) + statement.setString(8, e.event) + statement.setLong(9, e.timestamp.toLong) statement.executeUpdate() } } @@ -323,6 +350,48 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } } + override def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] = withMetrics("audit/list-channel-events-by-channel-id", DbBackends.Sqlite) { + using(sqlite.prepareStatement("SELECT * FROM channel_events WHERE channel_id = ? AND timestamp >= ? AND timestamp < ?")) { statement => + statement.setString(1, channelId.toHex) + statement.setLong(2, from.toLong) + statement.setLong(3, to.toLong) + statement.executeQuery().map { rs => + ChannelEvent( + channelId = channelId, + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + fundingTxId = TxId(rs.getByteVector32FromHex("funding_txid")), + channelType = rs.getString("channel_type"), + capacity = Satoshi(rs.getLong("capacity_sat")), + isChannelOpener = rs.getBoolean("is_opener"), + isPrivate = rs.getBoolean("is_private"), + event = rs.getString("event"), + timestamp = TimestampMilli(rs.getLong("timestamp")), + ) + }.toSeq + } + } + + override def listChannelEvents(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] = withMetrics("audit/list-channel-events-by-node-id", DbBackends.Sqlite) { + using(sqlite.prepareStatement("SELECT * FROM channel_events WHERE node_id = ? AND timestamp >= ? 
AND timestamp < ?")) { statement => + statement.setString(1, remoteNodeId.toHex) + statement.setLong(2, from.toLong) + statement.setLong(3, to.toLong) + statement.executeQuery().map { rs => + ChannelEvent( + channelId = rs.getByteVector32FromHex("channel_id"), + remoteNodeId = remoteNodeId, + fundingTxId = TxId(rs.getByteVector32FromHex("funding_txid")), + channelType = rs.getString("channel_type"), + capacity = Satoshi(rs.getLong("capacity_sat")), + isChannelOpener = rs.getBoolean("is_opener"), + isPrivate = rs.getBoolean("is_private"), + event = rs.getString("event"), + timestamp = TimestampMilli(rs.getLong("timestamp")), + ) + }.toSeq + } + } + override def listSent(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentSent] = using(sqlite.prepareStatement("SELECT * FROM sent WHERE timestamp >= ? AND timestamp < ?")) { statement => statement.setLong(1, from.toLong) diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index 9a07b6f51d..742ed5c16f 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -18,7 +18,7 @@ package fr.acinq.eclair.db import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, SatoshiLong, Script, Transaction, TxOut} -import fr.acinq.eclair.TestDatabases.{TestPgDatabases, TestSqliteDatabases} +import fr.acinq.eclair.TestDatabases.{TestPgDatabases, TestSqliteDatabases, migrationCheck} import fr.acinq.eclair.TestUtils.randomTxId import fr.acinq.eclair._ import fr.acinq.eclair.channel._ @@ -26,6 +26,7 @@ import fr.acinq.eclair.db.AuditDb.Stats import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.db.jdbc.JdbcUtils.using import fr.acinq.eclair.db.pg.PgAuditDb +import fr.acinq.eclair.db.pg.PgUtils.{getVersion, setVersion} import 
fr.acinq.eclair.db.sqlite.SqliteAuditDb import fr.acinq.eclair.payment.Bolt11Invoice.ExtraHop import fr.acinq.eclair.payment._ @@ -78,7 +79,7 @@ class AuditDbSpec extends AnyFunSuite { val e5 = PaymentSent(UUID.randomUUID(), randomBytes32(), 84100 msat, randomKey().publicKey, pp5a :: pp5b :: Nil, None, startedAt = 0 unixms) val pp6 = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, settledAt = now + 10.minutes), 1000 msat, None, startedAt = now + 10.minutes) val e6 = PaymentSent(UUID.randomUUID(), randomBytes32(), 42000 msat, randomKey().publicKey, pp6 :: Nil, None, startedAt = now + 10.minutes) - val e7 = ChannelEvent(randomBytes32(), randomKey().publicKey, randomTxId(), "anchor_outputs", 456123000 sat, isChannelOpener = true, isPrivate = false, "mutual-close") + val e7 = ChannelEvent(randomBytes32(), randomKey().publicKey, randomTxId(), "anchor_outputs", 456123000 sat, isChannelOpener = true, isPrivate = false, "mutual-close", now) val e10 = TrampolinePaymentRelayed(randomBytes32(), Seq( PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 20000 msat, now - 7.seconds), @@ -125,6 +126,12 @@ class AuditDbSpec extends AnyFunSuite { assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 4))).toList == List()) assert(db.listNetworkFees(from = TimestampMilli(0L), to = now + 1.minute).size == 1) assert(db.listNetworkFees(from = TimestampMilli(0L), to = now + 1.minute).head.txType == "mutual") + assert(db.listChannelEvents(randomBytes32(), from = TimestampMilli(0L), to = now + 1.minute).isEmpty) + assert(db.listChannelEvents(e7.channelId, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e7)) + assert(db.listChannelEvents(e7.channelId, from = TimestampMilli(0L), to = now - 1.minute).isEmpty) + assert(db.listChannelEvents(randomKey().publicKey, from = TimestampMilli(0L), to = now + 1.minute).isEmpty) + 
assert(db.listChannelEvents(e7.remoteNodeId, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e7)) + assert(db.listChannelEvents(e7.remoteNodeId, from = TimestampMilli(0L), to = now - 1.minute).isEmpty) } } @@ -326,4 +333,99 @@ class AuditDbSpec extends AnyFunSuite { } } + test("migrate audit db to v14") { + val channelId = randomBytes32() + val remoteNodeId = randomKey().publicKey + val fundingTxId = randomTxId() + val now = TimestampMilli.now() + val channelCreated = ChannelEvent(channelId, remoteNodeId, fundingTxId, "anchor_outputs", 100_000 sat, isChannelOpener = true, isPrivate = false, "created", now) + forAllDbs { + case dbs: TestPgDatabases => + migrationCheck( + dbs = dbs, + initializeTables = connection => { + // We simulate the DB as it was before eclair v14. + using(connection.createStatement()) { statement => + statement.executeUpdate("CREATE SCHEMA audit") + statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") + setVersion(statement, "audit", 13) + } + // We insert some data into the tables we'll modify. 
+ using(connection.prepareStatement("INSERT INTO audit.channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, channelId.toHex) + statement.setString(2, remoteNodeId.toHex) + statement.setLong(3, 100_000) + statement.setBoolean(4, true) + statement.setBoolean(5, false) + statement.setString(6, "mutual") + statement.setTimestamp(7, now.toSqlTimestamp) + statement.executeUpdate() + } + }, + dbName = PgAuditDb.DB_NAME, + targetVersion = PgAuditDb.CURRENT_VERSION, + postCheck = connection => { + val migratedDb = dbs.audit + using(connection.createStatement()) { statement => assert(getVersion(statement, "audit").contains(PgAuditDb.CURRENT_VERSION)) } + // We've created new tables: previous data from the existing tables isn't available anymore through the API. + assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listChannelEvents(remoteNodeId, 0 unixms, now + 1.minute).isEmpty) + // But the data is still available in the database. + using(connection.prepareStatement("SELECT * FROM audit.channel_events_before_v14")) { statement => + val result = statement.executeQuery() + assert(result.next()) + } + // We can use the new tables immediately. + migratedDb.add(channelCreated) + assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute) == Seq(channelCreated)) + } + ) + case dbs: TestSqliteDatabases => + migrationCheck( + dbs = dbs, + initializeTables = connection => { + // We simulate the DB as it was before eclair v14. 
+ using(connection.createStatement()) { statement => + statement.executeUpdate("CREATE TABLE channel_events (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, capacity_sat INTEGER NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE channel_updates (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") + statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") + statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") + statement.executeUpdate("CREATE INDEX channel_updates_timestamp_idx ON channel_updates(timestamp)") + setVersion(statement, "audit", 10) + } + // We insert some data into the tables we'll modify. + using(connection.prepareStatement("INSERT INTO channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setBytes(1, channelId.toArray) + statement.setBytes(2, remoteNodeId.value.toArray) + statement.setLong(3, 100_000) + statement.setBoolean(4, true) + statement.setBoolean(5, false) + statement.setString(6, "mutual") + statement.setLong(7, now.toLong) + statement.executeUpdate() + } + }, + dbName = SqliteAuditDb.DB_NAME, + targetVersion = SqliteAuditDb.CURRENT_VERSION, + postCheck = connection => { + val migratedDb = dbs.audit + using(connection.createStatement()) { statement => assert(getVersion(statement, "audit").contains(SqliteAuditDb.CURRENT_VERSION)) } + // We've created new tables: previous data from the existing tables isn't available anymore through the API. 
+ assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listChannelEvents(remoteNodeId, 0 unixms, now + 1.minute).isEmpty) + // But the data is still available in the database. + using(connection.prepareStatement("SELECT * FROM channel_events_before_v14")) { statement => + val result = statement.executeQuery() + assert(result.next()) + } + // We can use the new tables immediately. + migratedDb.add(channelCreated) + assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute) == Seq(channelCreated)) + } + ) + } + } + } From d4f9b1265db6b766b7bfdea6bd86c18c81000950 Mon Sep 17 00:00:00 2001 From: t-bast Date: Wed, 28 Jan 2026 15:41:49 +0100 Subject: [PATCH 3/7] Improve transaction events in the `AuditDb` We improve the `transactions_published` and `transactions_confirmed` tables in the `AuditDb` by: - adding more details about mining fees and feerates - adding input and output counts - using hex instead of blobs in sqlite - providing APIs to list transactions by `remote_node_id` - adding indexes for listing APIs The added data isn't available in past events. We decide to simply rename the older tables and start fresh: previous data will not be available from the API, but can still be queried directly in SQL if necessary. 
--- .../scala/fr/acinq/eclair/db/AuditDb.scala | 9 +- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 80 +++++++-- .../eclair/db/sqlite/SqliteAuditDb.scala | 94 ++++++++--- .../fr/acinq/eclair/db/AuditDbSpec.scala | 156 +++++++++++++----- 4 files changed, 262 insertions(+), 77 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala index f4f34601b1..6095ba051e 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala @@ -18,6 +18,7 @@ package fr.acinq.eclair.db import fr.acinq.bitcoin.scalacompat.Crypto.PublicKey import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, TxId} +import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent @@ -44,6 +45,8 @@ trait AuditDb { def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] + def listPublished(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[PublishedTransaction] + def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] def listChannelEvents(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] @@ -62,7 +65,11 @@ trait AuditDb { object AuditDb { - case class PublishedTransaction(txId: TxId, desc: String, miningFee: Satoshi) + case class PublishedTransaction(txId: TxId, desc: String, localMiningFee: Satoshi, remoteMiningFee: Satoshi, feerate: FeeratePerKw, timestamp: TimestampMilli) + + object PublishedTransaction { + def apply(tx: TransactionPublished): PublishedTransaction = PublishedTransaction(tx.tx.txid, tx.desc, tx.localMiningFee, tx.remoteMiningFee, tx.feerate, tx.timestamp) + } case class NetworkFee(remoteNodeId: PublicKey, channelId: ByteVector32, txId: ByteVector32, 
fee: Satoshi, txType: String, timestamp: TimestampMilli) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index 411e2c8c78..f8a36a6baf 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -18,6 +18,7 @@ package fr.acinq.eclair.db.pg import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, SatoshiLong, TxId} +import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent @@ -122,10 +123,25 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("ALTER TABLE audit.channel_events RENAME TO channel_events_before_v14") statement.executeUpdate("DROP INDEX audit.channel_events_timestamp_idx") statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") - // We recreate indexes for updated tables. + // We recreate indexes for updated channel tables. statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON audit.channel_events(channel_id)") statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON audit.channel_events(node_id)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") + // We add mining fee details, input and output counts to the transaction tables. 
+ statement.executeUpdate("ALTER TABLE audit.transactions_published RENAME TO transactions_published_before_v14") + statement.executeUpdate("ALTER TABLE audit.transactions_confirmed RENAME TO transactions_confirmed_before_v14") + statement.executeUpdate("DROP INDEX audit.transactions_published_channel_id_idx") + statement.executeUpdate("DROP INDEX audit.transactions_published_timestamp_idx") + statement.executeUpdate("DROP INDEX audit.transactions_confirmed_timestamp_idx") + statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + // We recreate indexes for the updated transaction tables. 
+ statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_published_node_id_idx ON audit.transactions_published(node_id)") + statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON audit.transactions_published(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_channel_id_idx ON audit.transactions_confirmed(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_node_id_idx ON audit.transactions_confirmed(node_id)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON audit.transactions_confirmed(timestamp)") } getVersion(statement, DB_NAME) match { @@ -139,8 +155,8 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat BIGINT NOT NULL, fee_proportional_millionths BIGINT NOT NULL, cltv_expiry_delta BIGINT NOT NULL, htlc_minimum_msat BIGINT NOT NULL, htlc_maximum_msat BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.path_finding_metrics (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, status TEXT NOT NULL, duration_ms BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL, is_mpp BOOLEAN NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id TEXT NOT NULL, payment_hash TEXT, routing_hints JSONB)") - statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, 
node_id TEXT NOT NULL, mining_fee_sat BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") - statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON audit.sent(timestamp)") statement.executeUpdate("CREATE INDEX received_timestamp_idx ON audit.received(timestamp)") @@ -162,7 +178,10 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX metrics_recipient_idx ON audit.path_finding_metrics(recipient_node_id)") statement.executeUpdate("CREATE INDEX metrics_hash_idx ON audit.path_finding_metrics(payment_hash)") statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_published_node_id_idx ON audit.transactions_published(node_id)") statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON audit.transactions_published(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_channel_id_idx ON audit.transactions_confirmed(channel_id)") + statement.executeUpdate("CREATE INDEX 
transactions_confirmed_node_id_idx ON audit.transactions_confirmed(node_id)") statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON audit.transactions_confirmed(timestamp)") case Some(v@(4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13)) => logger.warn(s"migrating db $DB_NAME, found version=$v current=$CURRENT_VERSION") @@ -297,13 +316,17 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: TransactionPublished): Unit = withMetrics("audit/add-transaction-published", DbBackends.Postgres) { inTransaction { pg => - using(pg.prepareStatement("INSERT INTO audit.transactions_published VALUES (?, ?, ?, ?, ?, ?) ON CONFLICT DO NOTHING")) { statement => + using(pg.prepareStatement("INSERT INTO audit.transactions_published VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT DO NOTHING")) { statement => statement.setString(1, e.tx.txid.value.toHex) statement.setString(2, e.channelId.toHex) - statement.setString(3, e.remoteNodeId.value.toHex) - statement.setLong(4, e.miningFee.toLong) - statement.setString(5, e.desc) - statement.setTimestamp(6, Timestamp.from(Instant.now())) + statement.setString(3, e.remoteNodeId.toHex) + statement.setLong(4, e.localMiningFee.toLong) + statement.setLong(5, e.remoteMiningFee.toLong) + statement.setLong(6, e.feerate.toLong) + statement.setLong(7, e.tx.txIn.size) + statement.setLong(8, e.tx.txOut.size) + statement.setString(9, e.desc) + statement.setTimestamp(10, e.timestamp.toSqlTimestamp) statement.executeUpdate() } } @@ -311,11 +334,13 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: TransactionConfirmed): Unit = withMetrics("audit/add-transaction-confirmed", DbBackends.Postgres) { inTransaction { pg => - using(pg.prepareStatement("INSERT INTO audit.transactions_confirmed VALUES (?, ?, ?, ?) ON CONFLICT DO NOTHING")) { statement => + using(pg.prepareStatement("INSERT INTO audit.transactions_confirmed VALUES (?, ?, ?, ?, ?, ?) 
ON CONFLICT DO NOTHING")) { statement => statement.setString(1, e.tx.txid.value.toHex) statement.setString(2, e.channelId.toHex) - statement.setString(3, e.remoteNodeId.value.toHex) - statement.setTimestamp(4, Timestamp.from(Instant.now())) + statement.setString(3, e.remoteNodeId.toHex) + statement.setLong(4, e.tx.txIn.size) + statement.setLong(5, e.tx.txOut.size) + statement.setTimestamp(6, e.timestamp.toSqlTimestamp) statement.executeUpdate() } } @@ -355,12 +380,39 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { } } - override def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] = withMetrics("audit/list-published", DbBackends.Postgres) { + override def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] = withMetrics("audit/list-published-by-channel-id", DbBackends.Postgres) { inTransaction { pg => using(pg.prepareStatement("SELECT * FROM audit.transactions_published WHERE channel_id = ?")) { statement => statement.setString(1, channelId.toHex) statement.executeQuery().map { rs => - PublishedTransaction(TxId.fromValidHex(rs.getString("tx_id")), rs.getString("tx_type"), rs.getLong("mining_fee_sat").sat) + PublishedTransaction( + txId = TxId(rs.getByteVector32FromHex("tx_id")), + desc = rs.getString("tx_type"), + localMiningFee = rs.getLong("local_mining_fee_sat").sat, + remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, + feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")) + ) + }.toSeq + } + } + } + + override def listPublished(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[PublishedTransaction] = withMetrics("audit/list-published-by-node-id", DbBackends.Postgres) { + inTransaction { pg => + using(pg.prepareStatement("SELECT * FROM audit.transactions_published WHERE node_id = ? AND timestamp BETWEEN ? 
AND ?")) { statement => + statement.setString(1, remoteNodeId.toHex) + statement.setTimestamp(2, from.toSqlTimestamp) + statement.setTimestamp(3, to.toSqlTimestamp) + statement.executeQuery().map { rs => + PublishedTransaction( + txId = TxId(rs.getByteVector32FromHex("tx_id")), + desc = rs.getString("tx_type"), + localMiningFee = rs.getLong("local_mining_fee_sat").sat, + remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, + feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")) + ) }.toSeq } } @@ -541,7 +593,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), channelId = rs.getByteVector32FromHex("channel_id"), txId = rs.getByteVector32FromHex("tx_id"), - fee = Satoshi(rs.getLong("mining_fee_sat")), + fee = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) }.toSeq diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index 6420cc3143..3e7daae4ad 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -18,6 +18,7 @@ package fr.acinq.eclair.db.sqlite import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, SatoshiLong, TxId} +import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent @@ -128,13 +129,28 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("DROP INDEX channel_updates_nid_idx") 
statement.executeUpdate("DROP INDEX channel_updates_timestamp_idx") statement.executeUpdate("CREATE TABLE channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") - // We recreate indexes for updated tables. + // We recreate indexes for the updated channel tables. statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON channel_events(channel_id)") statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON channel_events(node_id)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") statement.executeUpdate("CREATE INDEX channel_updates_timestamp_idx ON channel_updates(timestamp)") + // We add mining fee details, input and output counts to the transaction tables, and use TEXT instead of BLOBs. 
+ statement.executeUpdate("ALTER TABLE transactions_published RENAME TO transactions_published_before_v14") + statement.executeUpdate("ALTER TABLE transactions_confirmed RENAME TO transactions_confirmed_before_v14") + statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, feerate_sat_per_kw INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("DROP INDEX transactions_published_channel_id_idx") + statement.executeUpdate("DROP INDEX transactions_published_timestamp_idx") + statement.executeUpdate("DROP INDEX transactions_confirmed_timestamp_idx") + // We recreate indexes for the updated transaction tables. 
+ statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON transactions_published(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_published_node_id_idx ON transactions_published(node_id)") + statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON transactions_published(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_channel_id_idx ON transactions_confirmed(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_node_id_idx ON transactions_confirmed(node_id)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON transactions_confirmed(timestamp)") } getVersion(statement, DB_NAME) match { @@ -146,8 +162,8 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat INTEGER NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE path_finding_metrics (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, status TEXT NOT NULL, duration_ms INTEGER NOT NULL, timestamp INTEGER NOT NULL, is_mpp INTEGER NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id BLOB NOT NULL)") - statement.executeUpdate("CREATE TABLE transactions_published (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, mining_fee_sat INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") - 
statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, feerate_sat_per_kw INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON sent(timestamp)") statement.executeUpdate("CREATE INDEX received_timestamp_idx ON received(timestamp)") @@ -167,7 +183,10 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX metrics_mpp_idx ON path_finding_metrics(is_mpp)") statement.executeUpdate("CREATE INDEX metrics_name_idx ON path_finding_metrics(experiment_name)") statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON transactions_published(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_published_node_id_idx ON transactions_published(node_id)") statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON transactions_published(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_channel_id_idx ON transactions_confirmed(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_node_id_idx ON transactions_confirmed(node_id)") statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON transactions_confirmed(timestamp)") case Some(v@(1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10)) => logger.warn(s"migrating 
db $DB_NAME, found version=$v current=$CURRENT_VERSION") @@ -292,23 +311,29 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def add(e: TransactionPublished): Unit = withMetrics("audit/add-transaction-published", DbBackends.Sqlite) { - using(sqlite.prepareStatement("INSERT OR IGNORE INTO transactions_published VALUES (?, ?, ?, ?, ?, ?)")) { statement => - statement.setBytes(1, e.tx.txid.value.toArray) - statement.setBytes(2, e.channelId.toArray) - statement.setBytes(3, e.remoteNodeId.value.toArray) - statement.setLong(4, e.miningFee.toLong) - statement.setString(5, e.desc) - statement.setLong(6, TimestampMilli.now().toLong) + using(sqlite.prepareStatement("INSERT OR IGNORE INTO transactions_published VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.tx.txid.value.toHex) + statement.setString(2, e.channelId.toHex) + statement.setString(3, e.remoteNodeId.toHex) + statement.setLong(4, e.localMiningFee.toLong) + statement.setLong(5, e.remoteMiningFee.toLong) + statement.setLong(6, e.feerate.toLong) + statement.setLong(7, e.tx.txIn.size) + statement.setLong(8, e.tx.txOut.size) + statement.setString(9, e.desc) + statement.setLong(10, e.timestamp.toLong) statement.executeUpdate() } } override def add(e: TransactionConfirmed): Unit = withMetrics("audit/add-transaction-confirmed", DbBackends.Sqlite) { - using(sqlite.prepareStatement("INSERT OR IGNORE INTO transactions_confirmed VALUES (?, ?, ?, ?)")) { statement => - statement.setBytes(1, e.tx.txid.value.toArray) - statement.setBytes(2, e.channelId.toArray) - statement.setBytes(3, e.remoteNodeId.value.toArray) - statement.setLong(4, TimestampMilli.now().toLong) + using(sqlite.prepareStatement("INSERT OR IGNORE INTO transactions_confirmed VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.tx.txid.value.toHex) + statement.setString(2, e.channelId.toHex) + statement.setString(3, e.remoteNodeId.toHex) + statement.setLong(4, 
e.tx.txIn.size) + statement.setLong(5, e.tx.txOut.size) + statement.setLong(6, e.timestamp.toLong) statement.executeUpdate() } } @@ -341,11 +366,36 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } } - override def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] = withMetrics("audit/list-published", DbBackends.Sqlite) { + override def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] = withMetrics("audit/list-published-by-channel-id", DbBackends.Sqlite) { using(sqlite.prepareStatement("SELECT * FROM transactions_published WHERE channel_id = ?")) { statement => - statement.setBytes(1, channelId.toArray) + statement.setString(1, channelId.toHex) + statement.executeQuery().map { rs => + PublishedTransaction( + txId = TxId(rs.getByteVector32FromHex("tx_id")), + desc = rs.getString("tx_type"), + localMiningFee = rs.getLong("local_mining_fee_sat").sat, + remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, + feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + timestamp = TimestampMilli(rs.getLong("timestamp")) + ) + }.toSeq + } + } + + override def listPublished(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[PublishedTransaction] = withMetrics("audit/list-published-by-node-id", DbBackends.Sqlite) { + using(sqlite.prepareStatement("SELECT * FROM transactions_published WHERE node_id = ? AND timestamp >= ? 
AND timestamp < ?")) { statement => + statement.setString(1, remoteNodeId.toHex) + statement.setLong(2, from.toLong) + statement.setLong(3, to.toLong) statement.executeQuery().map { rs => - PublishedTransaction(TxId(rs.getByteVector32("tx_id")), rs.getString("tx_type"), rs.getLong("mining_fee_sat").sat) + PublishedTransaction( + txId = TxId(rs.getByteVector32FromHex("tx_id")), + desc = rs.getString("tx_type"), + localMiningFee = rs.getLong("local_mining_fee_sat").sat, + remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, + feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + timestamp = TimestampMilli(rs.getLong("timestamp")) + ) }.toSeq } } @@ -514,10 +564,10 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeQuery() .map { rs => NetworkFee( - remoteNodeId = PublicKey(rs.getByteVector("node_id")), - channelId = rs.getByteVector32("channel_id"), - txId = rs.getByteVector32("tx_id"), - fee = Satoshi(rs.getLong("mining_fee_sat")), + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = rs.getByteVector32FromHex("tx_id"), + fee = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), timestamp = TimestampMilli(rs.getLong("timestamp"))) }.toSeq diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index 742ed5c16f..2ce984e54c 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -17,12 +17,12 @@ package fr.acinq.eclair.db import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} -import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, SatoshiLong, Script, Transaction, TxOut} +import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, OutPoint, SatoshiLong, Script, Transaction, TxIn, TxOut} import 
fr.acinq.eclair.TestDatabases.{TestPgDatabases, TestSqliteDatabases, migrationCheck} import fr.acinq.eclair.TestUtils.randomTxId import fr.acinq.eclair._ import fr.acinq.eclair.channel._ -import fr.acinq.eclair.db.AuditDb.Stats +import fr.acinq.eclair.db.AuditDb.{PublishedTransaction, Stats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.db.jdbc.JdbcUtils.using import fr.acinq.eclair.db.pg.PgAuditDb @@ -58,7 +58,57 @@ class AuditDbSpec extends AnyFunSuite { } } - test("add/list events") { + test("add/list channel events") { + forAllDbs { dbs => + val db = dbs.audit + val now = TimestampMilli.now() + val channelId1 = randomBytes32() + val channelId2 = randomBytes32() + val remoteNodeId = randomKey().publicKey + val e1 = ChannelEvent(channelId1, remoteNodeId, randomTxId(), "anchor_outputs", 100_000 sat, isChannelOpener = true, isPrivate = false, "mutual-close", now - 1.minute) + val e2 = ChannelEvent(channelId2, remoteNodeId, randomTxId(), "taproot", 150_000 sat, isChannelOpener = false, isPrivate = true, "funding", now) + + db.add(e1) + db.add(e2) + + assert(db.listChannelEvents(randomBytes32(), from = TimestampMilli(0L), to = now + 1.minute).isEmpty) + assert(db.listChannelEvents(channelId1, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e1)) + assert(db.listChannelEvents(channelId1, from = TimestampMilli(0L), to = now - 10.minute).isEmpty) + assert(db.listChannelEvents(randomKey().publicKey, from = TimestampMilli(0L), to = now + 1.minute).isEmpty) + assert(db.listChannelEvents(remoteNodeId, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e1, e2)) + assert(db.listChannelEvents(remoteNodeId, from = TimestampMilli(0L), to = now - 30.seconds) == Seq(e1)) + } + } + + test("add/list transaction events") { + forAllDbs { dbs => + val db = dbs.audit + val now = TimestampMilli.now() + val channelId1 = randomBytes32() + val channelId2 = randomBytes32() + val remoteNodeId = randomKey().publicKey + val p1a = 
TransactionPublished(channelId1, remoteNodeId, Transaction(2, Nil, Seq(TxOut(50_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 50 sat, 0 sat, "funding", None, now - 10.seconds) + val p1b = TransactionPublished(channelId1, remoteNodeId, Transaction(2, Nil, Seq(TxOut(100_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 75 sat, 25 sat, "splice", None, now - 5.seconds) + val p2 = TransactionPublished(channelId2, remoteNodeId, Transaction(2, Nil, Seq(TxOut(200_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 0 sat, 0 sat, "local-close", None, now - 1.seconds) + val c1 = TransactionConfirmed(channelId1, remoteNodeId, p1a.tx, now) + val c2 = TransactionConfirmed(channelId2, remoteNodeId, Transaction(2, Nil, Seq(TxOut(150_000 sat, hex"1234")), 0), now) + + db.add(p1a) + db.add(p1b) + db.add(p2) + db.add(c1) + db.add(c2) + + assert(db.listPublished(randomBytes32()).isEmpty) + assert(db.listPublished(randomKey().publicKey, from = TimestampMilli(0L), to = now + 1.seconds).isEmpty) + assert(db.listPublished(channelId1) == Seq(PublishedTransaction(p1a), PublishedTransaction(p1b))) + assert(db.listPublished(channelId2) == Seq(PublishedTransaction(p2))) + assert(db.listPublished(remoteNodeId, from = now - 1.minute, to = now) == Seq(PublishedTransaction(p1a), PublishedTransaction(p1b), PublishedTransaction(p2))) + assert(db.listPublished(remoteNodeId, from = now - 6.seconds, to = now) == Seq(PublishedTransaction(p1b), PublishedTransaction(p2))) + } + } + + test("add/list payment events") { forAllDbs { dbs => val db = dbs.audit // We don't yet store the remote node_id in our DB: we use this placeholder instead. 
@@ -71,15 +121,11 @@ class AuditDbSpec extends AnyFunSuite { val pp2b = PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42100 msat, now) val e2 = PaymentReceived(randomBytes32(), pp2a :: pp2b :: Nil) val e3 = ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now - 3.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 1000 msat, now))) - val e4a = TransactionPublished(randomBytes32(), randomKey().publicKey, Transaction(0, Seq.empty, Seq.empty, 0), 42 sat, 0 sat, "mutual", None) - val e4b = TransactionConfirmed(e4a.channelId, e4a.remoteNodeId, e4a.tx) - val e4c = TransactionConfirmed(randomBytes32(), randomKey().publicKey, Transaction(2, Nil, TxOut(500 sat, hex"1234") :: Nil, 0)) val pp5a = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, 0 unixms), 1000 msat, None, startedAt = 0 unixms) val pp5b = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42100 msat, 1 unixms), 900 msat, None, startedAt = 1 unixms) val e5 = PaymentSent(UUID.randomUUID(), randomBytes32(), 84100 msat, randomKey().publicKey, pp5a :: pp5b :: Nil, None, startedAt = 0 unixms) val pp6 = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, settledAt = now + 10.minutes), 1000 msat, None, startedAt = now + 10.minutes) val e6 = PaymentSent(UUID.randomUUID(), randomBytes32(), 42000 msat, randomKey().publicKey, pp6 :: Nil, None, startedAt = now + 10.minutes) - val e7 = ChannelEvent(randomBytes32(), randomKey().publicKey, randomTxId(), "anchor_outputs", 456123000 sat, isChannelOpener = true, isPrivate = false, "mutual-close", now) val e10 = TrampolinePaymentRelayed(randomBytes32(), Seq( PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 20000 msat, now - 7.seconds), @@ -98,12 +144,8 @@ 
class AuditDbSpec extends AnyFunSuite { db.add(e1) db.add(e2) db.add(e3) - db.add(e4a) - db.add(e4b) - db.add(e4c) db.add(e5) db.add(e6) - db.add(e7) db.add(e10) db.add(e11) db.add(e12) @@ -124,14 +166,6 @@ class AuditDbSpec extends AnyFunSuite { assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 0))).toList == List(e3, e10)) assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e10, e11)) assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 4))).toList == List()) - assert(db.listNetworkFees(from = TimestampMilli(0L), to = now + 1.minute).size == 1) - assert(db.listNetworkFees(from = TimestampMilli(0L), to = now + 1.minute).head.txType == "mutual") - assert(db.listChannelEvents(randomBytes32(), from = TimestampMilli(0L), to = now + 1.minute).isEmpty) - assert(db.listChannelEvents(e7.channelId, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e7)) - assert(db.listChannelEvents(e7.channelId, from = TimestampMilli(0L), to = now - 1.minute).isEmpty) - assert(db.listChannelEvents(randomKey().publicKey, from = TimestampMilli(0L), to = now + 1.minute).isEmpty) - assert(db.listChannelEvents(e7.remoteNodeId, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e7)) - assert(db.listChannelEvents(e7.remoteNodeId, from = TimestampMilli(0L), to = now - 1.minute).isEmpty) } } @@ -159,18 +193,18 @@ class AuditDbSpec extends AnyFunSuite { db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 46000 msat, 1012 unixms)), Seq(PaymentEvent.OutgoingPayment(c2, randomKey().publicKey, 16000 msat, 1013 unixms), PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 10000 msat, 1014 unixms), PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 14000 msat, 1015 unixms)), randomKey().publicKey, 37000 msat)) // The following confirmed txs will be taken into 
account. - db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0), 200 sat, 0 sat, "funding", None)) - db.add(TransactionConfirmed(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0))) - db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(4000 sat, hex"00112233")), 0), 300 sat, 0 sat, "mutual", None)) - db.add(TransactionConfirmed(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(4000 sat, hex"00112233")), 0))) - db.add(TransactionPublished(c3, n3, Transaction(0, Seq.empty, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 400 sat, 0 sat, "funding", None)) - db.add(TransactionConfirmed(c3, n3, Transaction(0, Seq.empty, Seq(TxOut(8000 sat, hex"deadbeef")), 0))) - db.add(TransactionPublished(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(6000 sat, hex"0000000000")), 0), 500 sat, 0 sat, "funding", None)) - db.add(TransactionConfirmed(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(6000 sat, hex"0000000000")), 0))) + db.add(TransactionPublished(c2, n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 200 sat, 100 sat, "funding", None)) + db.add(TransactionConfirmed(c2, n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0))) + db.add(TransactionPublished(c2, n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0), 300 sat, 200 sat, "mutual", None)) + db.add(TransactionConfirmed(c2, n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0))) + db.add(TransactionPublished(c3, n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 400 sat, 50 sat, "funding", None)) + db.add(TransactionConfirmed(c3, n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0))) + db.add(TransactionPublished(c4, n4, Transaction(2, Nil, Seq(TxOut(6000 sat, hex"0000000000")), 0), 500 sat, 0 sat, "funding", None)) + db.add(TransactionConfirmed(c4, n4, Transaction(2, Nil, Seq(TxOut(6000 sat, hex"0000000000")), 0))) // The following txs will not be taken into account. 
- db.add(TransactionPublished(c2, n2, Transaction(0, Seq.empty, Seq(TxOut(5000 sat, hex"12345")), 0), 1000 sat, 0 sat, "funding", None)) // duplicate - db.add(TransactionPublished(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(4500 sat, hex"1111222233")), 0), 500 sat, 0 sat, "funding", None)) // unconfirmed - db.add(TransactionConfirmed(c4, n4, Transaction(0, Seq.empty, Seq(TxOut(2500 sat, hex"ffffff")), 0))) // doesn't match a published tx + db.add(TransactionPublished(c2, n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 1000 sat, 0 sat, "funding", None)) // duplicate + db.add(TransactionPublished(c4, n4, Transaction(2, Nil, Seq(TxOut(4500 sat, hex"1111222233")), 0), 500 sat, 150 sat, "funding", None)) // unconfirmed + db.add(TransactionConfirmed(c4, n4, Transaction(2, Nil, Seq(TxOut(2500 sat, hex"ffffff")), 0))) // doesn't match a published tx assert(db.listPublished(randomBytes32()).isEmpty) assert(db.listPublished(c4).map(_.txId).toSet.size == 2) @@ -336,9 +370,11 @@ class AuditDbSpec extends AnyFunSuite { test("migrate audit db to v14") { val channelId = randomBytes32() val remoteNodeId = randomKey().publicKey - val fundingTxId = randomTxId() + val fundingTx = Transaction(2, Seq(TxIn(OutPoint(randomTxId(), 2), Nil, 0)), Seq(TxOut(150_000 sat, Script.pay2wpkh(randomKey().publicKey))), 0) val now = TimestampMilli.now() - val channelCreated = ChannelEvent(channelId, remoteNodeId, fundingTxId, "anchor_outputs", 100_000 sat, isChannelOpener = true, isPrivate = false, "created", now) + val channelCreated = ChannelEvent(channelId, remoteNodeId, fundingTx.txid, "anchor_outputs", 100_000 sat, isChannelOpener = true, isPrivate = false, "created", now) + val txPublished = TransactionPublished(channelId, remoteNodeId, fundingTx, 200 sat, 100 sat, "funding", None) + val txConfirmed = TransactionConfirmed(channelId, remoteNodeId, fundingTx) forAllDbs { case dbs: TestPgDatabases => migrationCheck( @@ -348,7 +384,12 @@ class AuditDbSpec extends AnyFunSuite { 
using(connection.createStatement()) { statement => statement.executeUpdate("CREATE SCHEMA audit") statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, mining_fee_sat BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON audit.transactions_published(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON audit.transactions_confirmed(timestamp)") setVersion(statement, "audit", 13) } // We insert some data into the tables we'll modify. 
@@ -362,6 +403,15 @@ class AuditDbSpec extends AnyFunSuite { statement.setTimestamp(7, now.toSqlTimestamp) statement.executeUpdate() } + using(connection.prepareStatement("INSERT INTO audit.transactions_published VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, fundingTx.txid.value.toHex) + statement.setString(2, channelId.toHex) + statement.setString(3, remoteNodeId.toHex) + statement.setLong(4, txPublished.localMiningFee.toLong) + statement.setString(5, txPublished.desc) + statement.setTimestamp(6, txPublished.timestamp.toSqlTimestamp) + statement.executeUpdate() + } }, dbName = PgAuditDb.DB_NAME, targetVersion = PgAuditDb.CURRENT_VERSION, @@ -371,14 +421,20 @@ class AuditDbSpec extends AnyFunSuite { // We've created new tables: previous data from the existing tables isn't available anymore through the API. assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listChannelEvents(remoteNodeId, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listPublished(channelId).isEmpty) // But the data is still available in the database. - using(connection.prepareStatement("SELECT * FROM audit.channel_events_before_v14")) { statement => - val result = statement.executeQuery() - assert(result.next()) - } + Seq("audit.channel_events_before_v14", "audit.transactions_published_before_v14").foreach(table => { + using(connection.prepareStatement(s"SELECT * FROM $table")) { statement => + val result = statement.executeQuery() + assert(result.next()) + } + }) // We can use the new tables immediately. 
migratedDb.add(channelCreated) assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute) == Seq(channelCreated)) + migratedDb.add(txPublished) + migratedDb.add(txConfirmed) + assert(migratedDb.listPublished(channelId) == Seq(PublishedTransaction(txPublished))) } ) case dbs: TestSqliteDatabases => @@ -389,10 +445,15 @@ class AuditDbSpec extends AnyFunSuite { using(connection.createStatement()) { statement => statement.executeUpdate("CREATE TABLE channel_events (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, capacity_sat INTEGER NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_updates (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_published (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, mining_fee_sat INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") statement.executeUpdate("CREATE INDEX channel_updates_timestamp_idx ON channel_updates(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON transactions_published(channel_id)") + statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON 
transactions_published(timestamp)") + statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON transactions_confirmed(timestamp)") setVersion(statement, "audit", 10) } // We insert some data into the tables we'll modify. @@ -406,6 +467,15 @@ class AuditDbSpec extends AnyFunSuite { statement.setLong(7, now.toLong) statement.executeUpdate() } + using(connection.prepareStatement("INSERT INTO transactions_published VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setBytes(1, fundingTx.txid.value.toArray) + statement.setBytes(2, channelId.toArray) + statement.setBytes(3, remoteNodeId.value.toArray) + statement.setLong(4, txPublished.localMiningFee.toLong) + statement.setString(5, txPublished.desc) + statement.setLong(6, txPublished.timestamp.toLong) + statement.executeUpdate() + } }, dbName = SqliteAuditDb.DB_NAME, targetVersion = SqliteAuditDb.CURRENT_VERSION, @@ -415,14 +485,20 @@ class AuditDbSpec extends AnyFunSuite { // We've created new tables: previous data from the existing tables isn't available anymore through the API. assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listChannelEvents(remoteNodeId, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listPublished(channelId).isEmpty) // But the data is still available in the database. - using(connection.prepareStatement("SELECT * FROM channel_events_before_v14")) { statement => - val result = statement.executeQuery() - assert(result.next()) - } + Seq("channel_events_before_v14", "transactions_published_before_v14").foreach(table => { + using(connection.prepareStatement(s"SELECT * FROM $table")) { statement => + val result = statement.executeQuery() + assert(result.next()) + } + }) // We can use the new tables immediately. 
 migratedDb.add(channelCreated) assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute) == Seq(channelCreated)) + migratedDb.add(txPublished) + migratedDb.add(txConfirmed) + assert(migratedDb.listPublished(channelId) == Seq(PublishedTransaction(txPublished))) } ) } From e290f5d8c0338f42019f7de3f45c294d72be692f Mon Sep 17 00:00:00 2001 From: t-bast Date: Wed, 28 Jan 2026 17:17:34 +0100 Subject: [PATCH 4/7] Improve payment sent and received events in the `AuditDb` We improve the `sent` and `received` payment tables in the `AuditDb` by: - adding `started_at` / `settled_at` timestamps to measure duration - adding the remote `node_id` for every channel used - using hex instead of blobs in sqlite - reordering columns and renaming them wherever useful The added data isn't available in past events. We decide to simply rename the older tables and start fresh: previous data will not be available from the API, but can still be queried directly in SQL if necessary. --- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 72 +++--- .../eclair/db/sqlite/SqliteAuditDb.scala | 84 ++++--- .../fr/acinq/eclair/db/AuditDbSpec.scala | 206 ++++++++++++------ .../integration/PaymentIntegrationSpec.scala | 10 +- 4 files changed, 232 insertions(+), 140 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index f8a36a6baf..0aee67ed0f 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -142,14 +142,24 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX transactions_confirmed_channel_id_idx ON audit.transactions_confirmed(channel_id)") statement.executeUpdate("CREATE INDEX transactions_confirmed_node_id_idx ON audit.transactions_confirmed(node_id)") statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON 
audit.transactions_confirmed(timestamp)") + // We update the sent payment table to include outgoing_node_id and started_at, and rename columns for clarity. + statement.executeUpdate("ALTER TABLE audit.sent RENAME TO sent_before_v14") + statement.executeUpdate("DROP INDEX audit.sent_timestamp_idx") + statement.executeUpdate("CREATE TABLE audit.sent (payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, amount_with_fees_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, recipient_total_amount_msat BIGINT NOT NULL, recipient_node_id TEXT NOT NULL, outgoing_channel_id TEXT NOT NULL, outgoing_node_id TEXT NOT NULL, started_at TIMESTAMP WITH TIME ZONE NOT NULL, settled_at TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE INDEX sent_settled_at_idx ON audit.sent(settled_at)") + // We update the received payment table to include the incoming_node_id, and rename columns for clarity. + statement.executeUpdate("ALTER TABLE audit.received RENAME TO received_before_v14") + statement.executeUpdate("DROP INDEX audit.received_timestamp_idx") + statement.executeUpdate("CREATE TABLE audit.received (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE INDEX received_at_idx ON audit.received(received_at)") } getVersion(statement, DB_NAME) match { case None => statement.executeUpdate("CREATE SCHEMA audit") - statement.executeUpdate("CREATE TABLE audit.sent (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, recipient_amount_msat BIGINT NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, recipient_node_id TEXT NOT NULL, to_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") - statement.executeUpdate("CREATE TABLE audit.received (amount_msat BIGINT NOT 
NULL, payment_hash TEXT NOT NULL, from_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.sent (payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, amount_with_fees_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, recipient_total_amount_msat BIGINT NOT NULL, recipient_node_id TEXT NOT NULL, outgoing_channel_id TEXT NOT NULL, outgoing_node_id TEXT NOT NULL, started_at TIMESTAMP WITH TIME ZONE NOT NULL, settled_at TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.received (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.relayed (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, channel_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.relayed_trampoline (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, next_node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") @@ -158,8 +168,8 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, tx_type TEXT NOT 
NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") - statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON audit.sent(timestamp)") - statement.executeUpdate("CREATE INDEX received_timestamp_idx ON audit.received(timestamp)") + statement.executeUpdate("CREATE INDEX sent_settled_at_idx ON audit.sent(settled_at)") + statement.executeUpdate("CREATE INDEX received_at_idx ON audit.received(received_at)") statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON audit.relayed(timestamp)") statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON audit.relayed(payment_hash)") statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON audit.relayed_trampoline(timestamp)") @@ -241,18 +251,20 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: PaymentSent): Unit = withMetrics("audit/add-payment-sent", DbBackends.Postgres) { inTransaction { pg => - using(pg.prepareStatement("INSERT INTO audit.sent VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + using(pg.prepareStatement("INSERT INTO audit.sent VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => e.parts.foreach(p => { - statement.setLong(1, p.amountWithFees.toLong) - statement.setLong(2, p.feesPaid.toLong) - statement.setLong(3, e.recipientAmount.toLong) - statement.setString(4, p.id.toString) - statement.setString(5, e.id.toString) - statement.setString(6, e.paymentHash.toHex) - statement.setString(7, e.paymentPreimage.toHex) + statement.setString(1, p.id.toString) + statement.setString(2, e.id.toString) + statement.setString(3, e.paymentHash.toHex) + statement.setString(4, e.paymentPreimage.toHex) + statement.setLong(5, p.amountWithFees.toLong) + statement.setLong(6, 
p.feesPaid.toLong) + statement.setLong(7, e.recipientAmount.toLong) statement.setString(8, e.recipientNodeId.value.toHex) statement.setString(9, p.payment.channelId.toHex) - statement.setTimestamp(10, p.settledAt.toSqlTimestamp) + statement.setString(10, p.payment.remoteNodeId.toHex) + statement.setTimestamp(11, p.startedAt.toSqlTimestamp) + statement.setTimestamp(12, p.settledAt.toSqlTimestamp) statement.addBatch() }) statement.executeBatch() @@ -262,12 +274,13 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: PaymentReceived): Unit = withMetrics("audit/add-payment-received", DbBackends.Postgres) { inTransaction { pg => - using(pg.prepareStatement("INSERT INTO audit.received VALUES (?, ?, ?, ?)")) { statement => + using(pg.prepareStatement("INSERT INTO audit.received VALUES (?, ?, ?, ?, ?)")) { statement => e.parts.foreach(p => { - statement.setLong(1, p.amount.toLong) - statement.setString(2, e.paymentHash.toHex) + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, p.amount.toLong) statement.setString(3, p.channelId.toHex) - statement.setTimestamp(4, p.receivedAt.toSqlTimestamp) + statement.setString(4, p.remoteNodeId.toHex) + statement.setTimestamp(5, p.receivedAt.toSqlTimestamp) statement.addBatch() }) statement.executeBatch() @@ -466,7 +479,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def listSent(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentSent] = inTransaction { pg => - using(pg.prepareStatement("SELECT * FROM audit.sent WHERE timestamp BETWEEN ? AND ?")) { statement => + using(pg.prepareStatement("SELECT * FROM audit.sent WHERE settled_at BETWEEN ? 
AND ?")) { statement => statement.setTimestamp(1, from.toSqlTimestamp) statement.setTimestamp(2, to.toSqlTimestamp) val result = statement.executeQuery() @@ -475,21 +488,20 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { val part = PaymentSent.PaymentPart( id = UUID.fromString(rs.getString("payment_id")), payment = PaymentEvent.OutgoingPayment( - channelId = rs.getByteVector32FromHex("to_channel_id"), - remoteNodeId = PrivateKey(ByteVector32.One).publicKey, // we're not storing the remote node_id yet - amount = MilliSatoshi(rs.getLong("amount_msat")), - settledAt = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")) + channelId = rs.getByteVector32FromHex("outgoing_channel_id"), + remoteNodeId = PublicKey(rs.getByteVectorFromHex("outgoing_node_id")), + amount = MilliSatoshi(rs.getLong("amount_with_fees_msat")), + settledAt = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("settled_at")) ), feesPaid = MilliSatoshi(rs.getLong("fees_msat")), route = None, // we don't store the route in the audit DB - // TODO: store startedAt when updating the DB schema instead of duplicating settledAt. 
- startedAt = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) + startedAt = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("started_at"))) val sent = sentByParentId.get(parentId) match { - case Some(s) => s.copy(parts = s.parts :+ part) + case Some(s) => s.copy(parts = s.parts :+ part, startedAt = Seq(s.startedAt, part.startedAt).min) case None => PaymentSent( parentId, rs.getByteVector32FromHex("payment_preimage"), - MilliSatoshi(rs.getLong("recipient_amount_msat")), + MilliSatoshi(rs.getLong("recipient_total_amount_msat")), PublicKey(rs.getByteVectorFromHex("recipient_node_id")), Seq(part), None, @@ -506,17 +518,17 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def listReceived(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentReceived] = inTransaction { pg => - using(pg.prepareStatement("SELECT * FROM audit.received WHERE timestamp BETWEEN ? AND ?")) { statement => + using(pg.prepareStatement("SELECT * FROM audit.received WHERE received_at BETWEEN ? 
AND ?")) { statement => statement.setTimestamp(1, from.toSqlTimestamp) statement.setTimestamp(2, to.toSqlTimestamp) val result = statement.executeQuery() .foldLeft(Map.empty[ByteVector32, PaymentReceived]) { (receivedByHash, rs) => val paymentHash = rs.getByteVector32FromHex("payment_hash") val part = PaymentEvent.IncomingPayment( - channelId = rs.getByteVector32FromHex("from_channel_id"), - remoteNodeId = PrivateKey(ByteVector32.One).publicKey, // we're not storing the remote node_id yet + channelId = rs.getByteVector32FromHex("incoming_channel_id"), + remoteNodeId = PublicKey(rs.getByteVectorFromHex("incoming_node_id")), amount = MilliSatoshi(rs.getLong("amount_msat")), - receivedAt = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) + receivedAt = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("received_at"))) val received = receivedByHash.get(paymentHash) match { case Some(r) => r.copy(parts = r.parts :+ part) case None => PaymentReceived(paymentHash, Seq(part)) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index 3e7daae4ad..2713072f9d 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -151,12 +151,22 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX transactions_confirmed_channel_id_idx ON transactions_confirmed(channel_id)") statement.executeUpdate("CREATE INDEX transactions_confirmed_node_id_idx ON transactions_confirmed(node_id)") statement.executeUpdate("CREATE INDEX transactions_confirmed_timestamp_idx ON transactions_confirmed(timestamp)") + // We update the sent payment table to include outgoing_node_id and started_at, rename columns for clarity and use TEXT instead of BLOBs. 
+ statement.executeUpdate("ALTER TABLE sent RENAME TO sent_before_v14") + statement.executeUpdate("DROP INDEX sent_timestamp_idx") + statement.executeUpdate("CREATE TABLE sent (payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, amount_with_fees_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, recipient_total_amount_msat INTEGER NOT NULL, recipient_node_id TEXT NOT NULL, outgoing_channel_id TEXT NOT NULL, outgoing_node_id TEXT NOT NULL, started_at INTEGER NOT NULL, settled_at INTEGER NOT NULL)") + statement.executeUpdate("CREATE INDEX sent_settled_at_idx ON sent(settled_at)") + // We update the received payment table to include the incoming_node_id, rename columns for clarity and use TEXT instead of BLOBs. + statement.executeUpdate("ALTER TABLE received RENAME TO received_before_v14") + statement.executeUpdate("DROP INDEX received_timestamp_idx") + statement.executeUpdate("CREATE TABLE received (payment_hash TEXT NOT NULL, amount_msat INTEGER NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at INTEGER NOT NULL)") + statement.executeUpdate("CREATE INDEX received_at_idx ON received(received_at)") } getVersion(statement, DB_NAME) match { case None => - statement.executeUpdate("CREATE TABLE sent (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, recipient_amount_msat INTEGER NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash BLOB NOT NULL, payment_preimage BLOB NOT NULL, recipient_node_id BLOB NOT NULL, to_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") - statement.executeUpdate("CREATE TABLE received (amount_msat INTEGER NOT NULL, payment_hash BLOB NOT NULL, from_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE sent (payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, amount_with_fees_msat INTEGER NOT 
NULL, fees_msat INTEGER NOT NULL, recipient_total_amount_msat INTEGER NOT NULL, recipient_node_id TEXT NOT NULL, outgoing_channel_id TEXT NOT NULL, outgoing_node_id TEXT NOT NULL, started_at INTEGER NOT NULL, settled_at INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE received (payment_hash TEXT NOT NULL, amount_msat INTEGER NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE relayed (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, channel_id BLOB NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE relayed_trampoline (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, next_node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat INTEGER NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") @@ -165,8 +175,8 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, feerate_sat_per_kw INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, timestamp INTEGER NOT NULL)") - statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON sent(timestamp)") - statement.executeUpdate("CREATE INDEX received_timestamp_idx ON 
received(timestamp)") + statement.executeUpdate("CREATE INDEX sent_settled_at_idx ON sent(settled_at)") + statement.executeUpdate("CREATE INDEX received_at_idx ON received(received_at)") statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON relayed(timestamp)") statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON relayed(payment_hash)") statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON relayed(channel_id)") @@ -242,18 +252,20 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def add(e: PaymentSent): Unit = withMetrics("audit/add-payment-sent", DbBackends.Sqlite) { - using(sqlite.prepareStatement("INSERT INTO sent VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + using(sqlite.prepareStatement("INSERT INTO sent VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => e.parts.foreach(p => { - statement.setLong(1, p.amountWithFees.toLong) - statement.setLong(2, p.feesPaid.toLong) - statement.setLong(3, e.recipientAmount.toLong) - statement.setString(4, p.id.toString) - statement.setString(5, e.id.toString) - statement.setBytes(6, e.paymentHash.toArray) - statement.setBytes(7, e.paymentPreimage.toArray) - statement.setBytes(8, e.recipientNodeId.value.toArray) - statement.setBytes(9, p.channelId.toArray) - statement.setLong(10, p.settledAt.toLong) + statement.setString(1, p.id.toString) + statement.setString(2, e.id.toString) + statement.setString(3, e.paymentHash.toHex) + statement.setString(4, e.paymentPreimage.toHex) + statement.setLong(5, p.amountWithFees.toLong) + statement.setLong(6, p.feesPaid.toLong) + statement.setLong(7, e.recipientAmount.toLong) + statement.setString(8, e.recipientNodeId.toHex) + statement.setString(9, p.channelId.toHex) + statement.setString(10, p.remoteNodeId.toHex) + statement.setLong(11, p.startedAt.toLong) + statement.setLong(12, p.settledAt.toLong) statement.addBatch() }) statement.executeBatch() @@ -261,12 +273,13 @@ class SqliteAuditDb(val sqlite: 
Connection) extends AuditDb with Logging { } override def add(e: PaymentReceived): Unit = withMetrics("audit/add-payment-received", DbBackends.Sqlite) { - using(sqlite.prepareStatement("INSERT INTO received VALUES (?, ?, ?, ?)")) { statement => + using(sqlite.prepareStatement("INSERT INTO received VALUES (?, ?, ?, ?, ?)")) { statement => e.parts.foreach(p => { - statement.setLong(1, p.amount.toLong) - statement.setBytes(2, e.paymentHash.toArray) - statement.setBytes(3, p.channelId.toArray) - statement.setLong(4, p.receivedAt.toLong) + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, p.amount.toLong) + statement.setString(3, p.channelId.toHex) + statement.setString(4, p.remoteNodeId.toHex) + statement.setLong(5, p.receivedAt.toLong) statement.addBatch() }) statement.executeBatch() @@ -443,7 +456,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def listSent(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentSent] = - using(sqlite.prepareStatement("SELECT * FROM sent WHERE timestamp >= ? AND timestamp < ?")) { statement => + using(sqlite.prepareStatement("SELECT * FROM sent WHERE settled_at >= ? 
AND settled_at < ?")) { statement => statement.setLong(1, from.toLong) statement.setLong(2, to.toLong) val result = statement.executeQuery() @@ -452,22 +465,21 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { val part = PaymentSent.PaymentPart( id = UUID.fromString(rs.getString("payment_id")), payment = PaymentEvent.OutgoingPayment( - channelId = rs.getByteVector32("to_channel_id"), - remoteNodeId = PrivateKey(ByteVector32.One).publicKey, // we're not storing the remote node_id yet - amount = MilliSatoshi(rs.getLong("amount_msat")), - settledAt = TimestampMilli(rs.getLong("timestamp")) + channelId = rs.getByteVector32FromHex("outgoing_channel_id"), + remoteNodeId = PublicKey(rs.getByteVectorFromHex("outgoing_node_id")), + amount = MilliSatoshi(rs.getLong("amount_with_fees_msat")), + settledAt = TimestampMilli(rs.getLong("settled_at")) ), feesPaid = MilliSatoshi(rs.getLong("fees_msat")), route = None, // we don't store the route in the audit DB - // TODO: store startedAt when updating the DB schema instead of duplicating settledAt. 
- startedAt = TimestampMilli(rs.getLong("timestamp"))) + startedAt = TimestampMilli(rs.getLong("started_at"))) val sent = sentByParentId.get(parentId) match { - case Some(s) => s.copy(parts = s.parts :+ part) + case Some(s) => s.copy(parts = s.parts :+ part, startedAt = Seq(s.startedAt, part.startedAt).min) case None => PaymentSent( parentId, - rs.getByteVector32("payment_preimage"), - MilliSatoshi(rs.getLong("recipient_amount_msat")), - PublicKey(rs.getByteVector("recipient_node_id")), + rs.getByteVector32FromHex("payment_preimage"), + MilliSatoshi(rs.getLong("recipient_total_amount_msat")), + PublicKey(rs.getByteVectorFromHex("recipient_node_id")), Seq(part), None, part.startedAt) @@ -481,17 +493,17 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def listReceived(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentReceived] = - using(sqlite.prepareStatement("SELECT * FROM received WHERE timestamp >= ? AND timestamp < ?")) { statement => + using(sqlite.prepareStatement("SELECT * FROM received WHERE received_at >= ? 
AND received_at < ?")) { statement => statement.setLong(1, from.toLong) statement.setLong(2, to.toLong) val result = statement.executeQuery() .foldLeft(Map.empty[ByteVector32, PaymentReceived]) { (receivedByHash, rs) => - val paymentHash = rs.getByteVector32("payment_hash") + val paymentHash = rs.getByteVector32FromHex("payment_hash") val part = PaymentEvent.IncomingPayment( - channelId = rs.getByteVector32("from_channel_id"), - remoteNodeId = PrivateKey(ByteVector32.One).publicKey, // we're not storing the remote node_id yet + channelId = rs.getByteVector32FromHex("incoming_channel_id"), + remoteNodeId = PublicKey(rs.getByteVectorFromHex("incoming_node_id")), amount = MilliSatoshi(rs.getLong("amount_msat")), - receivedAt = TimestampMilli(rs.getLong("timestamp"))) + receivedAt = TimestampMilli(rs.getLong("received_at"))) val received = receivedByHash.get(paymentHash) match { case Some(r) => r.copy(parts = r.parts :+ part) case None => PaymentReceived(paymentHash, Seq(part)) diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index 2ce984e54c..5d9a0e4f40 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -16,8 +16,8 @@ package fr.acinq.eclair.db -import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} -import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, OutPoint, SatoshiLong, Script, Transaction, TxIn, TxOut} +import fr.acinq.bitcoin.scalacompat.Crypto.PublicKey +import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, Crypto, OutPoint, SatoshiLong, Script, Transaction, TxIn, TxOut} import fr.acinq.eclair.TestDatabases.{TestPgDatabases, TestSqliteDatabases, migrationCheck} import fr.acinq.eclair.TestUtils.randomTxId import fr.acinq.eclair._ @@ -111,61 +111,69 @@ class AuditDbSpec extends AnyFunSuite { test("add/list payment events") { forAllDbs { dbs => val db = 
dbs.audit - // We don't yet store the remote node_id in our DB: we use this placeholder instead. - // TODO: update this test once we store the remote node_id for incoming/outgoing payments. - val dummyRemoteNodeId = PrivateKey(ByteVector32.One).publicKey val now = TimestampMilli.now() - val e1 = PaymentSent(ZERO_UUID, randomBytes32(), 40000 msat, randomKey().publicKey, PaymentSent.PaymentPart(ZERO_UUID, PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now), 1000 msat, None, now) :: Nil, None, now) - val pp2a = PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now) - val pp2b = PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42100 msat, now) - val e2 = PaymentReceived(randomBytes32(), pp2a :: pp2b :: Nil) - val e3 = ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, now - 3.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 1000 msat, now))) - val pp5a = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, 0 unixms), 1000 msat, None, startedAt = 0 unixms) - val pp5b = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42100 msat, 1 unixms), 900 msat, None, startedAt = 1 unixms) - val e5 = PaymentSent(UUID.randomUUID(), randomBytes32(), 84100 msat, randomKey().publicKey, pp5a :: pp5b :: Nil, None, startedAt = 0 unixms) - val pp6 = PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 42000 msat, settledAt = now + 10.minutes), 1000 msat, None, startedAt = now + 10.minutes) - val e6 = PaymentSent(UUID.randomUUID(), randomBytes32(), 42000 msat, randomKey().publicKey, pp6 :: Nil, None, startedAt = now + 10.minutes) - val e10 = TrampolinePaymentRelayed(randomBytes32(), + val uuid1 = UUID.randomUUID() + val uuid2 = 
UUID.randomUUID() + val uuid3 = UUID.randomUUID() + val remoteNodeId1 = randomKey().publicKey + val remoteNodeId2 = randomKey().publicKey + val channelId1 = randomBytes32() + val channelId2 = randomBytes32() + val preimage1 = randomBytes32() + val paymentHash1 = Crypto.sha256(preimage1) + val preimage2 = randomBytes32() + val paymentHash2 = Crypto.sha256(preimage2) + + val e1 = PaymentSent(ZERO_UUID, preimage1, 40000 msat, remoteNodeId2, PaymentSent.PaymentPart(ZERO_UUID, PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 42000 msat, now - 75.seconds), 1000 msat, None, now - 100.seconds) :: Nil, None, now - 100.seconds) + val pp2a = PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 42000 msat, now - 1.seconds) + val pp2b = PaymentEvent.IncomingPayment(channelId2, remoteNodeId2, 42100 msat, now) + val e2 = PaymentReceived(paymentHash1, pp2a :: pp2b :: Nil) + val e3 = ChannelPaymentRelayed(paymentHash1, Seq(PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 42000 msat, now - 3.seconds)), Seq(PaymentEvent.OutgoingPayment(channelId2, remoteNodeId2, 1000 msat, now))) + val pp4a = PaymentSent.PaymentPart(uuid1, PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 42000 msat, now - 15.seconds), 1000 msat, None, startedAt = now - 30.seconds) + val pp4b = PaymentSent.PaymentPart(uuid2, PaymentEvent.OutgoingPayment(channelId2, remoteNodeId2, 42100 msat, now - 10.seconds), 900 msat, None, startedAt = now - 25.seconds) + val e4 = PaymentSent(uuid3, preimage1, 84100 msat, remoteNodeId2, pp4a :: pp4b :: Nil, None, startedAt = now - 30.seconds) + val pp5 = PaymentSent.PaymentPart(uuid2, PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 42000 msat, settledAt = now + 10.minutes), 1000 msat, None, startedAt = now + 9.minutes) + val e5 = PaymentSent(uuid2, preimage1, 42000 msat, remoteNodeId1, pp5 :: Nil, None, startedAt = now + 9.minutes) + val e6 = TrampolinePaymentRelayed(randomBytes32(), Seq( - PaymentEvent.IncomingPayment(randomBytes32(), 
dummyRemoteNodeId, 20000 msat, now - 7.seconds), - PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 22000 msat, now - 5.seconds) + PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 20000 msat, now - 7.seconds), + PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 22000 msat, now - 5.seconds) ), Seq( - PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 10000 msat, now + 1.milli), - PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 12000 msat, now + 2.milli), - PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 15000 msat, now + 3.milli) + PaymentEvent.OutgoingPayment(channelId2, remoteNodeId2, 10000 msat, now + 1.milli), + PaymentEvent.OutgoingPayment(channelId2, remoteNodeId2, 12000 msat, now + 2.milli), + PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 15000 msat, now + 3.milli) ), randomKey().publicKey, 30000 msat) - val multiPartPaymentHash = randomBytes32() - val e11 = ChannelPaymentRelayed(multiPartPaymentHash, Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 13000 msat, now - 5.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 11000 msat, now + 4.milli))) - val e12 = ChannelPaymentRelayed(multiPartPaymentHash, Seq(PaymentEvent.IncomingPayment(randomBytes32(), dummyRemoteNodeId, 15000 msat, now - 4.seconds)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), dummyRemoteNodeId, 12500 msat, now + 5.milli))) + val e7 = ChannelPaymentRelayed(paymentHash2, Seq(PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 13000 msat, now - 5.seconds)), Seq(PaymentEvent.OutgoingPayment(channelId2, remoteNodeId2, 11000 msat, now + 4.milli))) + val e8 = ChannelPaymentRelayed(paymentHash2, Seq(PaymentEvent.IncomingPayment(channelId2, remoteNodeId2, 15000 msat, now - 4.seconds)), Seq(PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 12500 msat, now + 5.milli))) db.add(e1) db.add(e2) db.add(e3) + db.add(e4) db.add(e5) db.add(e6) - db.add(e10) - 
db.add(e11) - db.add(e12) - - assert(db.listSent(from = TimestampMilli(0L), to = now + 15.minute).toList == List(e5, e1, e6)) - assert(db.listSent(from = TimestampMilli(100000L), to = now + 1.minute).toList == List(e1)) - assert(db.listSent(from = TimestampMilli(0L), to = now + 15.minute, Some(Paginated(count = 0, skip = 0))).toList == List()) - assert(db.listSent(from = TimestampMilli(0L), to = now + 15.minute, Some(Paginated(count = 2, skip = 0))).toList == List(e5, e1)) - assert(db.listSent(from = TimestampMilli(0L), to = now + 15.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e1, e6)) - assert(db.listSent(from = TimestampMilli(0L), to = now + 15.minute, Some(Paginated(count = 2, skip = 2))).toList == List(e6)) - assert(db.listSent(from = TimestampMilli(0L), to = now + 15.minute, Some(Paginated(count = 2, skip = 3))).toList == List()) - assert(db.listReceived(from = TimestampMilli(0L), to = now + 1.minute).toList == List(e2)) - assert(db.listReceived(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 0, skip = 0))).toList == List()) - assert(db.listReceived(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 0))).toList == List(e2)) - assert(db.listReceived(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 1))).toList == List()) - assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute).toList == List(e3, e10, e11, e12)) - assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 0, skip = 0))).toList == List()) - assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 0))).toList == List(e3, e10)) - assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e10, e11)) - assert(db.listRelayed(from = TimestampMilli(0L), to = now + 1.minute, Some(Paginated(count = 2, skip = 4))).toList == List()) + db.add(e7) + 
db.add(e8) + + assert(db.listSent(from = now - 15.minutes, to = now + 15.minute).toList == List(e1, e4, e5)) + assert(db.listSent(from = now - 80.seconds, to = now - 70.seconds).toList == List(e1)) + assert(db.listSent(from = now - 15.minutes, to = now + 15.minute, Some(Paginated(count = 0, skip = 0))).toList == List()) + assert(db.listSent(from = now - 15.minutes, to = now + 15.minute, Some(Paginated(count = 2, skip = 0))).toList == List(e1, e4)) + assert(db.listSent(from = now - 15.minutes, to = now + 15.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e4, e5)) + assert(db.listSent(from = now - 15.minutes, to = now + 15.minute, Some(Paginated(count = 2, skip = 2))).toList == List(e5)) + assert(db.listSent(from = now - 15.minutes, to = now + 15.minute, Some(Paginated(count = 2, skip = 3))).toList == List()) + assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds).toList == List(e2)) + assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds, Some(Paginated(count = 0, skip = 0))).toList == List()) + assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds, Some(Paginated(count = 2, skip = 0))).toList == List(e2)) + assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds, Some(Paginated(count = 2, skip = 1))).toList == List()) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute).toList == List(e3, e6, e7, e8)) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 0, skip = 0))).toList == List()) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 2, skip = 0))).toList == List(e3, e6)) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e6, e7)) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 2, skip = 4))).toList == List()) } } @@ -368,13 +376,17 @@ class AuditDbSpec extends AnyFunSuite { } 
test("migrate audit db to v14") { - val channelId = randomBytes32() - val remoteNodeId = randomKey().publicKey + val channelId1 = randomBytes32() + val channelId2 = randomBytes32() + val remoteNodeId1 = randomKey().publicKey + val remoteNodeId2 = randomKey().publicKey val fundingTx = Transaction(2, Seq(TxIn(OutPoint(randomTxId(), 2), Nil, 0)), Seq(TxOut(150_000 sat, Script.pay2wpkh(randomKey().publicKey))), 0) val now = TimestampMilli.now() - val channelCreated = ChannelEvent(channelId, remoteNodeId, fundingTx.txid, "anchor_outputs", 100_000 sat, isChannelOpener = true, isPrivate = false, "created", now) - val txPublished = TransactionPublished(channelId, remoteNodeId, fundingTx, 200 sat, 100 sat, "funding", None) - val txConfirmed = TransactionConfirmed(channelId, remoteNodeId, fundingTx) + val channelCreated = ChannelEvent(channelId1, remoteNodeId1, fundingTx.txid, "anchor_outputs", 100_000 sat, isChannelOpener = true, isPrivate = false, "created", now) + val txPublished = TransactionPublished(channelId1, remoteNodeId1, fundingTx, 200 sat, 100 sat, "funding", None) + val txConfirmed = TransactionConfirmed(channelId1, remoteNodeId1, fundingTx) + val paymentSent = PaymentSent(UUID.randomUUID(), randomBytes32(), 25_000_000 msat, remoteNodeId2, Seq(PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 24_999_999 msat, now), 561 msat, None, now - 10.seconds)), None, now - 10.seconds) + val paymentReceived = PaymentReceived(randomBytes32(), Seq(PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 15_350 msat, now - 1.seconds))) forAllDbs { case dbs: TestPgDatabases => migrationCheck( @@ -383,9 +395,13 @@ class AuditDbSpec extends AnyFunSuite { // We simulate the DB as it was before eclair v14. 
using(connection.createStatement()) { statement => statement.executeUpdate("CREATE SCHEMA audit") + statement.executeUpdate("CREATE TABLE audit.sent (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, recipient_amount_msat BIGINT NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, recipient_node_id TEXT NOT NULL, to_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.received (amount_msat BIGINT NOT NULL, payment_hash TEXT NOT NULL, from_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, mining_fee_sat BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON audit.sent(timestamp)") + statement.executeUpdate("CREATE INDEX received_timestamp_idx ON audit.received(timestamp)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON audit.transactions_published(timestamp)") @@ -393,9 +409,29 @@ class AuditDbSpec extends AnyFunSuite { setVersion(statement, "audit", 13) 
} // We insert some data into the tables we'll modify. + using(connection.prepareStatement("INSERT INTO audit.sent VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setLong(1, paymentSent.parts.head.amountWithFees.toLong) + statement.setLong(2, paymentSent.parts.head.feesPaid.toLong) + statement.setLong(3, paymentSent.recipientAmount.toLong) + statement.setString(4, paymentSent.parts.head.id.toString) + statement.setString(5, paymentSent.id.toString) + statement.setString(6, paymentSent.paymentHash.toHex) + statement.setString(7, paymentSent.paymentPreimage.toHex) + statement.setString(8, paymentSent.recipientNodeId.value.toHex) + statement.setString(9, paymentSent.parts.head.channelId.toHex) + statement.setTimestamp(10, paymentSent.parts.head.settledAt.toSqlTimestamp) + statement.executeUpdate() + } + using(connection.prepareStatement("INSERT INTO audit.received VALUES (?, ?, ?, ?)")) { statement => + statement.setLong(1, paymentReceived.parts.head.amount.toLong) + statement.setString(2, paymentReceived.paymentHash.toHex) + statement.setString(3, paymentReceived.parts.head.channelId.toHex) + statement.setTimestamp(4, paymentReceived.parts.head.receivedAt.toSqlTimestamp) + statement.executeUpdate() + } using(connection.prepareStatement("INSERT INTO audit.channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => - statement.setString(1, channelId.toHex) - statement.setString(2, remoteNodeId.toHex) + statement.setString(1, channelId1.toHex) + statement.setString(2, remoteNodeId1.toHex) statement.setLong(3, 100_000) statement.setBoolean(4, true) statement.setBoolean(5, false) @@ -405,8 +441,8 @@ class AuditDbSpec extends AnyFunSuite { } using(connection.prepareStatement("INSERT INTO audit.transactions_published VALUES (?, ?, ?, ?, ?, ?)")) { statement => statement.setString(1, fundingTx.txid.value.toHex) - statement.setString(2, channelId.toHex) - statement.setString(3, remoteNodeId.toHex) + statement.setString(2, channelId1.toHex) + 
statement.setString(3, remoteNodeId1.toHex) statement.setLong(4, txPublished.localMiningFee.toLong) statement.setString(5, txPublished.desc) statement.setTimestamp(6, txPublished.timestamp.toSqlTimestamp) @@ -419,22 +455,26 @@ class AuditDbSpec extends AnyFunSuite { val migratedDb = dbs.audit using(connection.createStatement()) { statement => assert(getVersion(statement, "audit").contains(PgAuditDb.CURRENT_VERSION)) } // We've created new tables: previous data from the existing tables isn't available anymore through the API. - assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute).isEmpty) - assert(migratedDb.listChannelEvents(remoteNodeId, 0 unixms, now + 1.minute).isEmpty) - assert(migratedDb.listPublished(channelId).isEmpty) + assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listChannelEvents(remoteNodeId1, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listPublished(channelId1).isEmpty) // But the data is still available in the database. - Seq("audit.channel_events_before_v14", "audit.transactions_published_before_v14").foreach(table => { + Seq("audit.sent_before_v14", "audit.received_before_v14", "audit.channel_events_before_v14", "audit.transactions_published_before_v14").foreach(table => { using(connection.prepareStatement(s"SELECT * FROM $table")) { statement => val result = statement.executeQuery() assert(result.next()) } }) // We can use the new tables immediately. 
+ migratedDb.add(paymentSent) + assert(migratedDb.listSent(0 unixms, now + 1.minute) == Seq(paymentSent)) + migratedDb.add(paymentReceived) + assert(migratedDb.listReceived(0 unixms, now + 1.minute) == Seq(paymentReceived)) migratedDb.add(channelCreated) - assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute) == Seq(channelCreated)) + assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute) == Seq(channelCreated)) migratedDb.add(txPublished) migratedDb.add(txConfirmed) - assert(migratedDb.listPublished(channelId) == Seq(PublishedTransaction(txPublished))) + assert(migratedDb.listPublished(channelId1) == Seq(PublishedTransaction(txPublished))) } ) case dbs: TestSqliteDatabases => @@ -443,10 +483,14 @@ class AuditDbSpec extends AnyFunSuite { initializeTables = connection => { // We simulate the DB as it was before eclair v14. using(connection.createStatement()) { statement => + statement.executeUpdate("CREATE TABLE sent (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, recipient_amount_msat INTEGER NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash BLOB NOT NULL, payment_preimage BLOB NOT NULL, recipient_node_id BLOB NOT NULL, to_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE received (amount_msat INTEGER NOT NULL, payment_hash BLOB NOT NULL, from_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_events (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, capacity_sat INTEGER NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_updates (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER 
NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_published (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, mining_fee_sat INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON sent(timestamp)") + statement.executeUpdate("CREATE INDEX received_timestamp_idx ON received(timestamp)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") @@ -457,9 +501,29 @@ class AuditDbSpec extends AnyFunSuite { setVersion(statement, "audit", 10) } // We insert some data into the tables we'll modify. 
+ using(connection.prepareStatement("INSERT INTO sent VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setLong(1, paymentSent.parts.head.amountWithFees.toLong) + statement.setLong(2, paymentSent.parts.head.feesPaid.toLong) + statement.setLong(3, paymentSent.recipientAmount.toLong) + statement.setString(4, paymentSent.parts.head.id.toString) + statement.setString(5, paymentSent.id.toString) + statement.setBytes(6, paymentSent.paymentHash.toArray) + statement.setBytes(7, paymentSent.paymentPreimage.toArray) + statement.setBytes(8, paymentSent.recipientNodeId.value.toArray) + statement.setBytes(9, paymentSent.parts.head.channelId.toArray) + statement.setLong(10, paymentSent.parts.head.settledAt.toLong) + statement.executeUpdate() + } + using(connection.prepareStatement("INSERT INTO received VALUES (?, ?, ?, ?)")) { statement => + statement.setLong(1, paymentReceived.parts.head.amount.toLong) + statement.setBytes(2, paymentReceived.paymentHash.toArray) + statement.setBytes(3, paymentReceived.parts.head.channelId.toArray) + statement.setLong(4, paymentReceived.parts.head.receivedAt.toLong) + statement.executeUpdate() + } using(connection.prepareStatement("INSERT INTO channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => - statement.setBytes(1, channelId.toArray) - statement.setBytes(2, remoteNodeId.value.toArray) + statement.setBytes(1, channelId1.toArray) + statement.setBytes(2, remoteNodeId1.value.toArray) statement.setLong(3, 100_000) statement.setBoolean(4, true) statement.setBoolean(5, false) @@ -469,8 +533,8 @@ class AuditDbSpec extends AnyFunSuite { } using(connection.prepareStatement("INSERT INTO transactions_published VALUES (?, ?, ?, ?, ?, ?)")) { statement => statement.setBytes(1, fundingTx.txid.value.toArray) - statement.setBytes(2, channelId.toArray) - statement.setBytes(3, remoteNodeId.value.toArray) + statement.setBytes(2, channelId1.toArray) + statement.setBytes(3, remoteNodeId1.value.toArray) statement.setLong(4, 
txPublished.localMiningFee.toLong) statement.setString(5, txPublished.desc) statement.setLong(6, txPublished.timestamp.toLong) @@ -483,22 +547,26 @@ class AuditDbSpec extends AnyFunSuite { val migratedDb = dbs.audit using(connection.createStatement()) { statement => assert(getVersion(statement, "audit").contains(SqliteAuditDb.CURRENT_VERSION)) } // We've created new tables: previous data from the existing tables isn't available anymore through the API. - assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute).isEmpty) - assert(migratedDb.listChannelEvents(remoteNodeId, 0 unixms, now + 1.minute).isEmpty) - assert(migratedDb.listPublished(channelId).isEmpty) + assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listChannelEvents(remoteNodeId1, 0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listPublished(channelId1).isEmpty) // But the data is still available in the database. - Seq("channel_events_before_v14", "transactions_published_before_v14").foreach(table => { + Seq("sent_before_v14", "received_before_v14", "channel_events_before_v14", "transactions_published_before_v14").foreach(table => { using(connection.prepareStatement(s"SELECT * FROM $table")) { statement => val result = statement.executeQuery() assert(result.next()) } }) // We can use the new tables immediately. 
+ migratedDb.add(paymentSent) + assert(migratedDb.listSent(0 unixms, now + 1.minute) == Seq(paymentSent)) + migratedDb.add(paymentReceived) + assert(migratedDb.listReceived(0 unixms, now + 1.minute) == Seq(paymentReceived)) migratedDb.add(channelCreated) - assert(migratedDb.listChannelEvents(channelId, 0 unixms, now + 1.minute) == Seq(channelCreated)) + assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute) == Seq(channelCreated)) migratedDb.add(txPublished) migratedDb.add(txConfirmed) - assert(migratedDb.listPublished(channelId) == Seq(PublishedTransaction(txPublished))) + assert(migratedDb.listPublished(channelId1) == Seq(PublishedTransaction(txPublished))) } ) } diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/integration/PaymentIntegrationSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/integration/PaymentIntegrationSpec.scala index 3e57e05b27..dea2ee93be 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/integration/PaymentIntegrationSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/integration/PaymentIntegrationSpec.scala @@ -48,7 +48,7 @@ import fr.acinq.eclair.router.Router.{ChannelHop, GossipDecision, PublicChannel} import fr.acinq.eclair.router.{Announcements, AnnouncementsBatchValidationSpec, Router} import fr.acinq.eclair.wire.protocol.OfferTypes.{Offer, OfferPaths} import fr.acinq.eclair.wire.protocol.{ChannelAnnouncement, ChannelUpdate, IncorrectOrUnknownPaymentDetails} -import fr.acinq.eclair.{CltvExpiryDelta, EclairImpl, EncodedNodeId, Features, Kit, MilliSatoshiLong, ShortChannelId, TimestampMilli,TimestampMilliLong, randomBytes32, randomKey} +import fr.acinq.eclair.{CltvExpiryDelta, EclairImpl, EncodedNodeId, Features, Kit, MilliSatoshiLong, ShortChannelId, TimestampMilli, randomBytes32, randomKey} import org.json4s.JsonAST.{JString, JValue} import scodec.bits.{ByteVector, HexStringSyntax} @@ -382,13 +382,13 @@ class PaymentIntegrationSpec extends IntegrationSpec { val sent = 
nodes("B").nodeParams.db.audit.listSent(start, TimestampMilli.now()) assert(sent.length == 1, sent) val paymentSent1 = paymentSent.copy( - // We don't store the route in the DB, and don't store the startedAt timestamp yet (we set it to the same value as settledAt). - parts = paymentSent.parts.map(p => p.copy(payment = p.payment.copy(remoteNodeId = PrivateKey(ByteVector32.One).publicKey), route = None, startedAt = p.settledAt)).sortBy(_.settledAt), + // We don't store the route in the DB. + parts = paymentSent.parts.map(p => p.copy(route = None)).sortBy(_.settledAt), // We don't store attribution data in the DB. remainingAttribution_opt = None, - startedAt = 0 unixms, + startedAt = paymentSent.startedAt, ) - assert(sent.head.copy(parts = sent.head.parts.sortBy(_.settledAt), startedAt = 0 unixms) == paymentSent1) + assert(sent.head.copy(parts = sent.head.parts.sortBy(_.settledAt), startedAt = paymentSent.startedAt) == paymentSent1) awaitCond(nodes("D").nodeParams.db.payments.getIncomingPayment(invoice.paymentHash).exists(_.status.isInstanceOf[IncomingPaymentStatus.Received])) val Some(IncomingStandardPayment(_, _, _, _, IncomingPaymentStatus.Received(receivedAmount, _))) = nodes("D").nodeParams.db.payments.getIncomingPayment(invoice.paymentHash) From 22fede9339f24f860157b8a16393fb14e84408bd Mon Sep 17 00:00:00 2001 From: t-bast Date: Thu, 29 Jan 2026 17:24:51 +0100 Subject: [PATCH 5/7] Improve payment relayed events in the `AuditDb` We improve the `relayed` and `relayed_trampoline` payment tables in the `AuditDb` by: - unifying the way we handle multi-part payments, by having one row per payment, whether incoming or outgoing, and reconciling based on type and direction when listing events - adding `remote_node_id` for every channel used - using hex instead of blobs in sqlite - renaming columns for better clarity The added data isn't available in past events. 
We decide to simply rename the older tables and start fresh: previous data will not be available from the API, but can still be queried directly in SQL if necessary. --- .../scala/fr/acinq/eclair/db/AuditDb.scala | 54 ++++++- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 151 +++++++++-------- .../eclair/db/sqlite/SqliteAuditDb.scala | 153 +++++++++--------- .../fr/acinq/eclair/db/AuditDbSpec.scala | 111 ++++++++++--- 4 files changed, 285 insertions(+), 184 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala index 6095ba051e..27292f684d 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala @@ -22,8 +22,8 @@ import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent -import fr.acinq.eclair.payment.{PathFindingExperimentMetrics, PaymentReceived, PaymentRelayed, PaymentSent} -import fr.acinq.eclair.{Paginated, TimestampMilli} +import fr.acinq.eclair.payment._ +import fr.acinq.eclair.{MilliSatoshi, Paginated, TimestampMilli} trait AuditDb { @@ -75,4 +75,54 @@ object AuditDb { case class Stats(channelId: ByteVector32, direction: String, avgPaymentAmount: Satoshi, paymentCount: Int, relayFee: Satoshi, networkFee: Satoshi) + case class RelayedPart(channelId: ByteVector32, remoteNodeId: PublicKey, amount: MilliSatoshi, direction: String, relayType: String, timestamp: TimestampMilli) + + def relayType(e: PaymentRelayed): String = e match { + case _: ChannelPaymentRelayed => "channel" + case _: TrampolinePaymentRelayed => "trampoline" + case _: OnTheFlyFundingPaymentRelayed => "on-the-fly-funding" + } + + private def incomingParts(parts: Seq[RelayedPart]): Seq[PaymentEvent.IncomingPayment] = { + parts.filter(_.direction == "IN").map(p => 
PaymentEvent.IncomingPayment(p.channelId, p.remoteNodeId, p.amount, p.timestamp)).sortBy(_.receivedAt) + } + + private def outgoingParts(parts: Seq[RelayedPart]): Seq[PaymentEvent.OutgoingPayment] = { + parts.filter(_.direction == "OUT").map(p => PaymentEvent.OutgoingPayment(p.channelId, p.remoteNodeId, p.amount, p.timestamp)).sortBy(_.settledAt) + } + + private def verifyInAndOut(parts: Seq[RelayedPart]): Boolean = { + parts.exists(_.direction == "IN") && parts.exists(_.direction == "OUT") + } + + def listRelayedInternal(relayedByHash: Map[ByteVector32, Seq[RelayedPart]], trampolineDetails: Map[ByteVector32, (PublicKey, MilliSatoshi)], paginated_opt: Option[Paginated]): Seq[PaymentRelayed] = { + val result = relayedByHash.flatMap { + case (paymentHash, parts) => + // We may have been routing multiple payments for the same payment_hash with different relay types. + // That's fine, we simply separate each part into the correct event. + val channelParts = parts.filter(_.relayType == "channel") + val trampolineParts = parts.filter(_.relayType == "trampoline") + val onTheFlyParts = parts.filter(_.relayType == "on-the-fly-funding") + val channelRelayed_opt = if (verifyInAndOut(channelParts)) { + Some(ChannelPaymentRelayed(paymentHash, incomingParts(channelParts), outgoingParts(channelParts))) + } else { + None + } + val trampolineRelayed_opt = trampolineDetails.get(paymentHash) match { + case Some((nextTrampolineNode, nextTrampolineAmount)) if verifyInAndOut(trampolineParts) => Some(TrampolinePaymentRelayed(paymentHash, incomingParts(trampolineParts), outgoingParts(trampolineParts), nextTrampolineNode, nextTrampolineAmount)) + case _ => None + } + val onTheFlyRelayed_opt = if (verifyInAndOut(onTheFlyParts)) { + Some(OnTheFlyFundingPaymentRelayed(paymentHash, incomingParts(onTheFlyParts), outgoingParts(onTheFlyParts))) + } else { + None + } + channelRelayed_opt.toSeq ++ trampolineRelayed_opt.toSeq ++ onTheFlyRelayed_opt.toSeq + }.toSeq.sortBy(_.settledAt) + paginated_opt 
match { + case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) + case None => result + } + } + } \ No newline at end of file diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index 0aee67ed0f..cb174182f0 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -16,11 +16,11 @@ package fr.acinq.eclair.db.pg -import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} +import fr.acinq.bitcoin.scalacompat.Crypto.PublicKey import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, SatoshiLong, TxId} import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ -import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} +import fr.acinq.eclair.db.AuditDb._ import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.db.Monitoring.Metrics.withMetrics import fr.acinq.eclair.db.Monitoring.Tags.DbBackends @@ -46,8 +46,6 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { import PgAuditDb._ import fr.acinq.eclair.json.JsonSerializers.{formats, serialization} - case class RelayedPart(channelId: ByteVector32, amount: MilliSatoshi, direction: String, relayType: String, timestamp: TimestampMilli) - inTransaction { pg => using(pg.createStatement()) { statement => def migration45(statement: Statement): Unit = { @@ -152,6 +150,21 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("DROP INDEX audit.received_timestamp_idx") statement.executeUpdate("CREATE TABLE audit.received (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE INDEX received_at_idx ON audit.received(received_at)") 
+ // We update the relayed payment table to include our channel peer's node_id, rename columns for clarity. + statement.executeUpdate("ALTER TABLE audit.relayed RENAME TO relayed_before_v14") + statement.executeUpdate("ALTER TABLE audit.relayed_trampoline RENAME TO relayed_trampoline_before_v14") + statement.executeUpdate("DROP INDEX audit.relayed_timestamp_idx") + statement.executeUpdate("DROP INDEX audit.relayed_payment_hash_idx") + statement.executeUpdate("DROP INDEX audit.relayed_channel_id_idx") + statement.executeUpdate("DROP INDEX audit.relayed_trampoline_timestamp_idx") + statement.executeUpdate("DROP INDEX audit.relayed_trampoline_payment_hash_idx") + statement.executeUpdate("CREATE TABLE audit.relayed (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.relayed_trampoline (payment_hash TEXT NOT NULL, next_trampoline_amount_msat BIGINT NOT NULL, next_trampoline_node_id TEXT NOT NULL, settled_at TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON audit.relayed(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON audit.relayed(payment_hash)") + statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON audit.relayed(channel_id)") + statement.executeUpdate("CREATE INDEX relayed_node_id_idx ON audit.relayed(node_id)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON audit.relayed_trampoline(payment_hash)") } getVersion(statement, DB_NAME) match { @@ -160,8 +173,8 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE audit.sent (payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, amount_with_fees_msat BIGINT NOT NULL, fees_msat 
BIGINT NOT NULL, recipient_total_amount_msat BIGINT NOT NULL, recipient_node_id TEXT NOT NULL, outgoing_channel_id TEXT NOT NULL, outgoing_node_id TEXT NOT NULL, started_at TIMESTAMP WITH TIME ZONE NOT NULL, settled_at TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.received (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at TIMESTAMP WITH TIME ZONE NOT NULL)") - statement.executeUpdate("CREATE TABLE audit.relayed (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, channel_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") - statement.executeUpdate("CREATE TABLE audit.relayed_trampoline (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, next_node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.relayed (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.relayed_trampoline (payment_hash TEXT NOT NULL, next_trampoline_amount_msat BIGINT NOT NULL, next_trampoline_node_id TEXT NOT NULL, settled_at TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat BIGINT NOT NULL, fee_proportional_millionths BIGINT NOT NULL, cltv_expiry_delta BIGINT NOT NULL, htlc_minimum_msat BIGINT NOT NULL, htlc_maximum_msat 
BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.path_finding_metrics (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, status TEXT NOT NULL, duration_ms BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL, is_mpp BOOLEAN NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id TEXT NOT NULL, payment_hash TEXT, routing_hints JSONB)") @@ -172,9 +185,9 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX received_at_idx ON audit.received(received_at)") statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON audit.relayed(timestamp)") statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON audit.relayed(payment_hash)") - statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON audit.relayed_trampoline(timestamp)") - statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON audit.relayed_trampoline(payment_hash)") statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON audit.relayed(channel_id)") + statement.executeUpdate("CREATE INDEX relayed_node_id_idx ON audit.relayed(node_id)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON audit.relayed_trampoline(payment_hash)") statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON audit.channel_events(channel_id)") statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON audit.channel_events(node_id)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") @@ -290,40 +303,39 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: PaymentRelayed): Unit = withMetrics("audit/add-payment-relayed", DbBackends.Postgres) { inTransaction { pg => - val payments = e match { - case e: ChannelPaymentRelayed => - // non-trampoline relayed payments have one input and one output - val in = e.incoming.map(i => 
RelayedPart(i.channelId, i.amount, "IN", "channel", i.receivedAt)) - val out = e.outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "channel", o.settledAt)) - in ++ out - case TrampolinePaymentRelayed(_, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) => + e match { + case e: TrampolinePaymentRelayed => + // For trampoline payments, we store additional metadata about the payment in a dedicated table. using(pg.prepareStatement("INSERT INTO audit.relayed_trampoline VALUES (?, ?, ?, ?)")) { statement => statement.setString(1, e.paymentHash.toHex) - statement.setLong(2, nextTrampolineAmount.toLong) - statement.setString(3, nextTrampolineNodeId.value.toHex) + statement.setLong(2, e.nextTrampolineAmount.toLong) + statement.setString(3, e.nextTrampolineNodeId.toHex) statement.setTimestamp(4, e.settledAt.toSqlTimestamp) statement.executeUpdate() } - // trampoline relayed payments do MPP aggregation and may have M inputs and N outputs - val in = incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "trampoline", i.receivedAt)) - val out = outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "trampoline", o.settledAt)) - in ++ out - case OnTheFlyFundingPaymentRelayed(_, incoming, outgoing) => - val in = incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "on-the-fly-funding", i.receivedAt)) - val out = outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "on-the-fly-funding", o.settledAt)) - in ++ out - } - for (p <- payments) { - using(pg.prepareStatement("INSERT INTO audit.relayed VALUES (?, ?, ?, ?, ?, ?)")) { statement => - statement.setString(1, e.paymentHash.toHex) - statement.setLong(2, p.amount.toLong) - statement.setString(3, p.channelId.toHex) - statement.setString(4, p.direction) - statement.setString(5, p.relayType) - statement.setTimestamp(6, p.timestamp.toSqlTimestamp) - statement.executeUpdate() - } + case _ => () } + // We store each incoming and outgoing part in a dedicated row, to support multi-part 
payments. + e.incoming.foreach(i => using(pg.prepareStatement("INSERT INTO audit.relayed VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, i.amount.toLong) + statement.setString(3, i.channelId.toHex) + statement.setString(4, i.remoteNodeId.toHex) + statement.setString(5, "IN") + statement.setString(6, relayType(e)) + statement.setTimestamp(7, i.receivedAt.toSqlTimestamp) + statement.executeUpdate() + }) + e.outgoing.foreach(o => using(pg.prepareStatement("INSERT INTO audit.relayed VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, o.amount.toLong) + statement.setString(3, o.channelId.toHex) + statement.setString(4, o.remoteNodeId.toHex) + statement.setString(5, "OUT") + statement.setString(6, relayType(e)) + statement.setTimestamp(7, o.settledAt.toSqlTimestamp) + statement.executeUpdate() + }) } } @@ -544,55 +556,36 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def listRelayed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentRelayed] = inTransaction { pg => - val trampolineByHash = using(pg.prepareStatement("SELECT * FROM audit.relayed_trampoline WHERE timestamp BETWEEN ? and ?")) { statement => - statement.setTimestamp(1, from.toSqlTimestamp) - statement.setTimestamp(2, to.toSqlTimestamp) - statement.executeQuery() - .foldLeft(Map.empty[ByteVector32, (MilliSatoshi, PublicKey)]) { (trampolineByHash, rs) => - val paymentHash = rs.getByteVector32FromHex("payment_hash") - val amount = MilliSatoshi(rs.getLong("amount_msat")) - val nodeId = PublicKey(rs.getByteVectorFromHex("next_node_id")) - trampolineByHash + (paymentHash -> (amount, nodeId)) - } - } val relayedByHash = using(pg.prepareStatement("SELECT * FROM audit.relayed WHERE timestamp BETWEEN ? 
and ?")) { statement => statement.setTimestamp(1, from.toSqlTimestamp) statement.setTimestamp(2, to.toSqlTimestamp) - statement.executeQuery() - .foldLeft(Map.empty[ByteVector32, Seq[RelayedPart]]) { (relayedByHash, rs) => - val paymentHash = rs.getByteVector32FromHex("payment_hash") - val part = RelayedPart( - rs.getByteVector32FromHex("channel_id"), - MilliSatoshi(rs.getLong("amount_msat")), - rs.getString("direction"), - rs.getString("relay_type"), - TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) - relayedByHash + (paymentHash -> (relayedByHash.getOrElse(paymentHash, Nil) :+ part)) - } + statement.executeQuery().foldLeft(Map.empty[ByteVector32, Seq[RelayedPart]]) { (relayedByHash, rs) => + val paymentHash = rs.getByteVector32FromHex("payment_hash") + val part = RelayedPart( + rs.getByteVector32FromHex("channel_id"), + PublicKey(rs.getByteVectorFromHex("node_id")), + MilliSatoshi(rs.getLong("amount_msat")), + rs.getString("direction"), + rs.getString("relay_type"), + TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) + relayedByHash + (paymentHash -> (relayedByHash.getOrElse(paymentHash, Nil) :+ part)) + } } - val result = relayedByHash.flatMap { - case (paymentHash, parts) => - // We may have been routing multiple payments for the same payment_hash (MPP) in both cases (trampoline and channel). - // NB: we may link the wrong in-out parts, but the overall sum will be correct: we sort by amounts to minimize the risk of mismatch. 
- val incoming = parts.filter(_.direction == "IN").map(p => PaymentEvent.IncomingPayment(p.channelId, PrivateKey(ByteVector32.One).publicKey, p.amount, p.timestamp)).sortBy(_.amount) - val outgoing = parts.filter(_.direction == "OUT").map(p => PaymentEvent.OutgoingPayment(p.channelId, PrivateKey(ByteVector32.One).publicKey, p.amount, p.timestamp)).sortBy(_.amount) - parts.headOption match { - case Some(RelayedPart(_, _, _, "channel", _)) => incoming.zip(outgoing).map { - case (in, out) => ChannelPaymentRelayed(paymentHash, Seq(in), Seq(out)) + val trampolineDetails = relayedByHash + .filter { case (_, parts) => parts.exists(_.relayType == "trampoline") } + .map { + case (paymentHash, _) => using(pg.prepareStatement("SELECT * FROM audit.relayed_trampoline WHERE payment_hash = ?")) { statement => + statement.setString(1, paymentHash.toHex) + statement.executeQuery().headOption match { + case Some(rs) => + val nextTrampolineNode = PublicKey(rs.getByteVectorFromHex("next_trampoline_node_id")) + val nextTrampolineAmount = MilliSatoshi(rs.getLong("next_trampoline_amount_msat")) + Some(paymentHash -> (nextTrampolineNode, nextTrampolineAmount)) + case None => None } - case Some(RelayedPart(_, _, _, "trampoline", _)) => trampolineByHash.get(paymentHash) match { - case Some((nextTrampolineAmount, nextTrampolineNodeId)) => TrampolinePaymentRelayed(paymentHash, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) :: Nil - case None => Nil - } - case Some(RelayedPart(_, _, _, "on-the-fly-funding", _)) => - Seq(OnTheFlyFundingPaymentRelayed(paymentHash, incoming, outgoing)) - case _ => Nil } - }.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.flatten.toMap + listRelayedInternal(relayedByHash, trampolineDetails, paginated_opt) } override def listNetworkFees(from: TimestampMilli, to: TimestampMilli): Seq[NetworkFee] = diff --git 
a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index 2713072f9d..5fb26e4cee 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -20,7 +20,7 @@ import fr.acinq.bitcoin.scalacompat.Crypto.{PrivateKey, PublicKey} import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, SatoshiLong, TxId} import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ -import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} +import fr.acinq.eclair.db.AuditDb._ import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.db.Monitoring.Metrics.withMetrics import fr.acinq.eclair.db.Monitoring.Tags.DbBackends @@ -43,8 +43,6 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { import ExtendedResultSet._ import SqliteAuditDb._ - case class RelayedPart(channelId: ByteVector32, amount: MilliSatoshi, direction: String, relayType: String, timestamp: TimestampMilli) - using(sqlite.createStatement(), inTransaction = true) { statement => def migration12(statement: Statement): Unit = { @@ -161,14 +159,29 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("DROP INDEX received_timestamp_idx") statement.executeUpdate("CREATE TABLE received (payment_hash TEXT NOT NULL, amount_msat INTEGER NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at INTEGER NOT NULL)") statement.executeUpdate("CREATE INDEX received_at_idx ON received(received_at)") + // We update the relayed payment table to include our channel peer's node_id, rename columns for clarity and use TEXT instead of BLOBs. 
+ statement.executeUpdate("ALTER TABLE relayed RENAME TO relayed_before_v14") + statement.executeUpdate("ALTER TABLE relayed_trampoline RENAME TO relayed_trampoline_before_v14") + statement.executeUpdate("DROP INDEX relayed_timestamp_idx") + statement.executeUpdate("DROP INDEX relayed_payment_hash_idx") + statement.executeUpdate("DROP INDEX relayed_channel_id_idx") + statement.executeUpdate("DROP INDEX relayed_trampoline_timestamp_idx") + statement.executeUpdate("DROP INDEX relayed_trampoline_payment_hash_idx") + statement.executeUpdate("CREATE TABLE relayed (payment_hash TEXT NOT NULL, amount_msat INTEGER NOT NULL, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE relayed_trampoline (payment_hash TEXT NOT NULL, next_trampoline_amount_msat INTEGER NOT NULL, next_trampoline_node_id TEXT NOT NULL, settled_at INTEGER NOT NULL)") + statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON relayed(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON relayed(payment_hash)") + statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON relayed(channel_id)") + statement.executeUpdate("CREATE INDEX relayed_node_id_idx ON relayed(node_id)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON relayed_trampoline(payment_hash)") } getVersion(statement, DB_NAME) match { case None => statement.executeUpdate("CREATE TABLE sent (payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, amount_with_fees_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, recipient_total_amount_msat INTEGER NOT NULL, recipient_node_id TEXT NOT NULL, outgoing_channel_id TEXT NOT NULL, outgoing_node_id TEXT NOT NULL, started_at INTEGER NOT NULL, settled_at INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE received (payment_hash TEXT NOT NULL, amount_msat 
INTEGER NOT NULL, incoming_channel_id TEXT NOT NULL, incoming_node_id TEXT NOT NULL, received_at INTEGER NOT NULL)") - statement.executeUpdate("CREATE TABLE relayed (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, channel_id BLOB NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") - statement.executeUpdate("CREATE TABLE relayed_trampoline (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, next_node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE relayed (payment_hash TEXT NOT NULL, amount_msat INTEGER NOT NULL, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE relayed_trampoline (payment_hash TEXT NOT NULL, next_trampoline_amount_msat INTEGER NOT NULL, next_trampoline_node_id TEXT NOT NULL, settled_at INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat INTEGER NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE path_finding_metrics (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, status TEXT NOT NULL, duration_ms INTEGER NOT NULL, timestamp INTEGER NOT NULL, is_mpp INTEGER NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id BLOB NOT NULL)") @@ -180,7 +193,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE INDEX 
relayed_timestamp_idx ON relayed(timestamp)") statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON relayed(payment_hash)") statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON relayed(channel_id)") - statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON relayed_trampoline(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_node_id_idx ON relayed(node_id)") statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON relayed_trampoline(payment_hash)") statement.executeUpdate("CREATE INDEX channel_events_cid_idx ON channel_events(channel_id)") statement.executeUpdate("CREATE INDEX channel_events_nid_idx ON channel_events(node_id)") @@ -287,40 +300,39 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def add(e: PaymentRelayed): Unit = withMetrics("audit/add-payment-relayed", DbBackends.Sqlite) { - val payments = e match { - case e: ChannelPaymentRelayed => - // non-trampoline relayed payments have one input and one output - val in = e.incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "channel", i.receivedAt)) - val out = e.outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "channel", o.settledAt)) - in ++ out - case TrampolinePaymentRelayed(_, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) => + e match { + case e: TrampolinePaymentRelayed => + // For trampoline payments, we store additional metadata about the payment in a dedicated table. 
using(sqlite.prepareStatement("INSERT INTO relayed_trampoline VALUES (?, ?, ?, ?)")) { statement => - statement.setBytes(1, e.paymentHash.toArray) - statement.setLong(2, nextTrampolineAmount.toLong) - statement.setBytes(3, nextTrampolineNodeId.value.toArray) + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, e.nextTrampolineAmount.toLong) + statement.setString(3, e.nextTrampolineNodeId.toHex) statement.setLong(4, e.settledAt.toLong) statement.executeUpdate() } - // trampoline relayed payments do MPP aggregation and may have M inputs and N outputs - val in = incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "trampoline", i.receivedAt)) - val out = outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "trampoline", o.settledAt)) - in ++ out - case OnTheFlyFundingPaymentRelayed(_, incoming, outgoing) => - val in = incoming.map(i => RelayedPart(i.channelId, i.amount, "IN", "on-the-fly-funding", i.receivedAt)) - val out = outgoing.map(o => RelayedPart(o.channelId, o.amount, "OUT", "on-the-fly-funding", o.settledAt)) - in ++ out - } - for (p <- payments) { - using(sqlite.prepareStatement("INSERT INTO relayed VALUES (?, ?, ?, ?, ?, ?)")) { statement => - statement.setBytes(1, e.paymentHash.toArray) - statement.setLong(2, p.amount.toLong) - statement.setBytes(3, p.channelId.toArray) - statement.setString(4, p.direction) - statement.setString(5, p.relayType) - statement.setLong(6, p.timestamp.toLong) - statement.executeUpdate() - } - } + case _ => () + } + // We store each incoming and outgoing part in a dedicated row, to support multi-part payments. 
+ e.incoming.foreach(i => using(sqlite.prepareStatement("INSERT INTO relayed VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, i.amount.toLong) + statement.setString(3, i.channelId.toHex) + statement.setString(4, i.remoteNodeId.toHex) + statement.setString(5, "IN") + statement.setString(6, relayType(e)) + statement.setLong(7, i.receivedAt.toLong) + statement.executeUpdate() + }) + e.outgoing.foreach(o => using(sqlite.prepareStatement("INSERT INTO relayed VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, e.paymentHash.toHex) + statement.setLong(2, o.amount.toLong) + statement.setString(3, o.channelId.toHex) + statement.setString(4, o.remoteNodeId.toHex) + statement.setString(5, "OUT") + statement.setString(6, relayType(e)) + statement.setLong(7, o.settledAt.toLong) + statement.executeUpdate() + }) } override def add(e: TransactionPublished): Unit = withMetrics("audit/add-transaction-published", DbBackends.Sqlite) { @@ -517,56 +529,37 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def listRelayed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentRelayed] = { - val trampolineByHash = using(sqlite.prepareStatement("SELECT * FROM relayed_trampoline WHERE timestamp >= ? AND timestamp < ?")) { statement => - statement.setLong(1, from.toLong) - statement.setLong(2, to.toLong) - statement.executeQuery() - .map { rs => - val paymentHash = rs.getByteVector32("payment_hash") - val amount = MilliSatoshi(rs.getLong("amount_msat")) - val nodeId = PublicKey(rs.getByteVector("next_node_id")) - paymentHash -> (amount, nodeId) - } - .toMap - } val relayedByHash = using(sqlite.prepareStatement("SELECT * FROM relayed WHERE timestamp >= ? 
AND timestamp < ?")) { statement => statement.setLong(1, from.toLong) statement.setLong(2, to.toLong) - statement.executeQuery() - .foldLeft(Map.empty[ByteVector32, Seq[RelayedPart]]) { (relayedByHash, rs) => - val paymentHash = rs.getByteVector32("payment_hash") - val part = RelayedPart( - rs.getByteVector32("channel_id"), - MilliSatoshi(rs.getLong("amount_msat")), - rs.getString("direction"), - rs.getString("relay_type"), - TimestampMilli(rs.getLong("timestamp"))) - relayedByHash + (paymentHash -> (relayedByHash.getOrElse(paymentHash, Nil) :+ part)) - } + statement.executeQuery().foldLeft(Map.empty[ByteVector32, Seq[RelayedPart]]) { (relayedByHash, rs) => + val paymentHash = rs.getByteVector32FromHex("payment_hash") + val part = RelayedPart( + rs.getByteVector32FromHex("channel_id"), + PublicKey(rs.getByteVectorFromHex("node_id")), + MilliSatoshi(rs.getLong("amount_msat")), + rs.getString("direction"), + rs.getString("relay_type"), + TimestampMilli(rs.getLong("timestamp")) + ) + relayedByHash + (paymentHash -> (relayedByHash.getOrElse(paymentHash, Nil) :+ part)) + } } - val result = relayedByHash.flatMap { - case (paymentHash, parts) => - // We may have been routing multiple payments for the same payment_hash (MPP) in both cases (trampoline and channel). - // NB: we may link the wrong in-out parts, but the overall sum will be correct: we sort by amounts to minimize the risk of mismatch. 
- val incoming = parts.filter(_.direction == "IN").map(p => PaymentEvent.IncomingPayment(p.channelId, PrivateKey(ByteVector32.One).publicKey, p.amount, p.timestamp)).sortBy(_.amount) - val outgoing = parts.filter(_.direction == "OUT").map(p => PaymentEvent.OutgoingPayment(p.channelId, PrivateKey(ByteVector32.One).publicKey, p.amount, p.timestamp)).sortBy(_.amount) - parts.headOption match { - case Some(RelayedPart(_, _, _, "channel", _)) => incoming.zip(outgoing).map { - case (in, out) => ChannelPaymentRelayed(paymentHash, Seq(in), Seq(out)) - } - case Some(RelayedPart(_, _, _, "trampoline", _)) => trampolineByHash.get(paymentHash) match { - case Some((nextTrampolineAmount, nextTrampolineNodeId)) => TrampolinePaymentRelayed(paymentHash, incoming, outgoing, nextTrampolineNodeId, nextTrampolineAmount) :: Nil - case None => Nil + val trampolineDetails = relayedByHash + .filter { case (_, parts) => parts.exists(_.relayType == "trampoline") } + .map { + case (paymentHash, _) => using(sqlite.prepareStatement("SELECT * FROM relayed_trampoline WHERE payment_hash = ?")) { statement => + statement.setString(1, paymentHash.toHex) + statement.executeQuery().headOption match { + case Some(rs) => + val nextTrampolineNode = PublicKey(rs.getByteVectorFromHex("next_trampoline_node_id")) + val nextTrampolineAmount = MilliSatoshi(rs.getLong("next_trampoline_amount_msat")) + Some(paymentHash -> (nextTrampolineNode, nextTrampolineAmount)) + case None => None } - case Some(RelayedPart(_, _, _, "on-the-fly-funding", _)) => - Seq(OnTheFlyFundingPaymentRelayed(paymentHash, incoming, outgoing)) - case _ => Nil } - }.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.flatten.toMap + listRelayedInternal(relayedByHash, trampolineDetails, paginated_opt) } override def listNetworkFees(from: TimestampMilli, to: TimestampMilli): Seq[NetworkFee] = diff --git 
a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index 5d9a0e4f40..e760be662e 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -135,7 +135,7 @@ class AuditDbSpec extends AnyFunSuite { val e4 = PaymentSent(uuid3, preimage1, 84100 msat, remoteNodeId2, pp4a :: pp4b :: Nil, None, startedAt = now - 30.seconds) val pp5 = PaymentSent.PaymentPart(uuid2, PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 42000 msat, settledAt = now + 10.minutes), 1000 msat, None, startedAt = now + 9.minutes) val e5 = PaymentSent(uuid2, preimage1, 42000 msat, remoteNodeId1, pp5 :: Nil, None, startedAt = now + 9.minutes) - val e6 = TrampolinePaymentRelayed(randomBytes32(), + val e6 = TrampolinePaymentRelayed(paymentHash1, Seq( PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 20000 msat, now - 7.seconds), PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 22000 msat, now - 5.seconds) @@ -169,10 +169,10 @@ class AuditDbSpec extends AnyFunSuite { assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds, Some(Paginated(count = 0, skip = 0))).toList == List()) assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds, Some(Paginated(count = 2, skip = 0))).toList == List(e2)) assert(db.listReceived(from = now - 5.seconds, to = now + 5.seconds, Some(Paginated(count = 2, skip = 1))).toList == List()) - assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute).toList == List(e3, e6, e7, e8)) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute).toList == List(e3, e6, ChannelPaymentRelayed(paymentHash2, e7.incoming ++ e8.incoming, e7.outgoing ++ e8.outgoing))) assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 0, skip = 0))).toList == List()) assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, 
Some(Paginated(count = 2, skip = 0))).toList == List(e3, e6)) - assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e6, e7)) + assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 2, skip = 1))).toList == List(e6, ChannelPaymentRelayed(paymentHash2, e7.incoming ++ e8.incoming, e7.outgoing ++ e8.outgoing))) assert(db.listRelayed(from = now - 10.seconds, to = now + 1.minute, Some(Paginated(count = 2, skip = 4))).toList == List()) } } @@ -283,36 +283,40 @@ class AuditDbSpec extends AnyFunSuite { val isPg = dbs.isInstanceOf[TestPgDatabases] val table = if (isPg) "audit.relayed" else "relayed" - using(sqlite.prepareStatement(s"INSERT INTO $table (payment_hash, amount_msat, channel_id, direction, relay_type, timestamp) VALUES (?, ?, ?, ?, ?, ?)")) { statement => - if (isPg) statement.setString(1, randomBytes32().toHex) else statement.setBytes(1, randomBytes32().toArray) + using(sqlite.prepareStatement(s"INSERT INTO $table (payment_hash, amount_msat, channel_id, node_id, direction, relay_type, timestamp) VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, randomBytes32().toHex) statement.setLong(2, 42) - if (isPg) statement.setString(3, randomBytes32().toHex) else statement.setBytes(3, randomBytes32().toArray) - statement.setString(4, "IN") - statement.setString(5, "unknown") // invalid relay type - if (isPg) statement.setTimestamp(6, Timestamp.from(Instant.ofEpochMilli(10))) else statement.setLong(6, 10) + statement.setString(3, randomBytes32().toHex) + statement.setString(4, randomKey().publicKey.toHex) + statement.setString(5, "IN") + statement.setString(6, "unknown") // invalid relay type + if (isPg) statement.setTimestamp(7, Timestamp.from(Instant.ofEpochMilli(10))) else statement.setLong(7, 10) statement.executeUpdate() } - using(sqlite.prepareStatement(s"INSERT INTO $table (payment_hash, amount_msat, channel_id, direction, relay_type, 
timestamp) VALUES (?, ?, ?, ?, ?, ?)")) { statement => - if (isPg) statement.setString(1, randomBytes32().toHex) else statement.setBytes(1, randomBytes32().toArray) + using(sqlite.prepareStatement(s"INSERT INTO $table (payment_hash, amount_msat, channel_id, node_id, direction, relay_type, timestamp) VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, randomBytes32().toHex) statement.setLong(2, 51) - if (isPg) statement.setString(3, randomBytes32().toHex) else statement.setBytes(3, randomBytes32().toArray) - statement.setString(4, "UP") // invalid direction - statement.setString(5, "channel") - if (isPg) statement.setTimestamp(6, Timestamp.from(Instant.ofEpochMilli(20))) else statement.setLong(6, 20) + statement.setString(3, randomBytes32().toHex) + statement.setString(4, randomKey().publicKey.toHex) + statement.setString(5, "UP") // invalid direction + statement.setString(6, "channel") + if (isPg) statement.setTimestamp(7, Timestamp.from(Instant.ofEpochMilli(20))) else statement.setLong(7, 20) statement.executeUpdate() } val paymentHash = randomBytes32() val channelId = randomBytes32() + val nodeId = randomKey().publicKey - using(sqlite.prepareStatement(s"INSERT INTO $table (payment_hash, amount_msat, channel_id, direction, relay_type, timestamp) VALUES (?, ?, ?, ?, ?, ?)")) { statement => - if (isPg) statement.setString(1, paymentHash.toHex) else statement.setBytes(1, paymentHash.toArray) + using(sqlite.prepareStatement(s"INSERT INTO $table (payment_hash, amount_msat, channel_id, node_id, direction, relay_type, timestamp) VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, paymentHash.toHex) statement.setLong(2, 65) - if (isPg) statement.setString(3, channelId.toHex) else statement.setBytes(3, channelId.toArray) - statement.setString(4, "IN") // missing a corresponding OUT - statement.setString(5, "channel") - if (isPg) statement.setTimestamp(6, Timestamp.from(Instant.ofEpochMilli(30))) else statement.setLong(6, 30) + 
statement.setString(3, channelId.toHex) + statement.setString(4, nodeId.toHex) + statement.setString(5, "IN") // missing a corresponding OUT + statement.setString(6, "channel") + if (isPg) statement.setTimestamp(7, Timestamp.from(Instant.ofEpochMilli(30))) else statement.setLong(7, 30) statement.executeUpdate() } @@ -387,6 +391,7 @@ class AuditDbSpec extends AnyFunSuite { val txConfirmed = TransactionConfirmed(channelId1, remoteNodeId1, fundingTx) val paymentSent = PaymentSent(UUID.randomUUID(), randomBytes32(), 25_000_000 msat, remoteNodeId2, Seq(PaymentSent.PaymentPart(UUID.randomUUID(), PaymentEvent.OutgoingPayment(channelId1, remoteNodeId1, 24_999_999 msat, now), 561 msat, None, now - 10.seconds)), None, now - 10.seconds) val paymentReceived = PaymentReceived(randomBytes32(), Seq(PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 15_350 msat, now - 1.seconds))) + val paymentRelayed = ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(channelId1, remoteNodeId1, 1100 msat, now - 1.seconds)), Seq(PaymentEvent.OutgoingPayment(channelId2, remoteNodeId2, 1000 msat, now))) forAllDbs { case dbs: TestPgDatabases => migrationCheck( @@ -397,11 +402,18 @@ class AuditDbSpec extends AnyFunSuite { statement.executeUpdate("CREATE SCHEMA audit") statement.executeUpdate("CREATE TABLE audit.sent (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, recipient_amount_msat BIGINT NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash TEXT NOT NULL, payment_preimage TEXT NOT NULL, recipient_node_id TEXT NOT NULL, to_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.received (amount_msat BIGINT NOT NULL, payment_hash TEXT NOT NULL, from_channel_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.relayed (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, channel_id TEXT NOT NULL, direction TEXT NOT 
NULL, relay_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.relayed_trampoline (payment_hash TEXT NOT NULL, amount_msat BIGINT NOT NULL, next_node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, mining_fee_sat BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON audit.sent(timestamp)") statement.executeUpdate("CREATE INDEX received_timestamp_idx ON audit.received(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON audit.relayed(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON audit.relayed(payment_hash)") + statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON audit.relayed(channel_id)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON audit.relayed_trampoline(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON audit.relayed_trampoline(payment_hash)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON audit.channel_events(timestamp)") statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") statement.executeUpdate("CREATE INDEX transactions_published_timestamp_idx ON 
audit.transactions_published(timestamp)") @@ -429,6 +441,24 @@ class AuditDbSpec extends AnyFunSuite { statement.setTimestamp(4, paymentReceived.parts.head.receivedAt.toSqlTimestamp) statement.executeUpdate() } + using(connection.prepareStatement("INSERT INTO audit.relayed VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, paymentRelayed.paymentHash.toHex) + statement.setLong(2, paymentRelayed.incoming.head.amount.toLong) + statement.setString(3, paymentRelayed.incoming.head.channelId.toHex) + statement.setString(4, "IN") + statement.setString(5, "channel") + statement.setTimestamp(6, paymentRelayed.incoming.head.receivedAt.toSqlTimestamp) + statement.executeUpdate() + } + using(connection.prepareStatement("INSERT INTO audit.relayed VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setString(1, paymentRelayed.paymentHash.toHex) + statement.setLong(2, paymentRelayed.outgoing.head.amount.toLong) + statement.setString(3, paymentRelayed.outgoing.head.channelId.toHex) + statement.setString(4, "OUT") + statement.setString(5, "channel") + statement.setTimestamp(6, paymentRelayed.outgoing.head.settledAt.toSqlTimestamp) + statement.executeUpdate() + } using(connection.prepareStatement("INSERT INTO audit.channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => statement.setString(1, channelId1.toHex) statement.setString(2, remoteNodeId1.toHex) @@ -455,11 +485,14 @@ class AuditDbSpec extends AnyFunSuite { val migratedDb = dbs.audit using(connection.createStatement()) { statement => assert(getVersion(statement, "audit").contains(PgAuditDb.CURRENT_VERSION)) } // We've created new tables: previous data from the existing tables isn't available anymore through the API. 
+ assert(migratedDb.listSent(0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listReceived(0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listRelayed(0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listChannelEvents(remoteNodeId1, 0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listPublished(channelId1).isEmpty) // But the data is still available in the database. - Seq("audit.sent_before_v14", "audit.received_before_v14", "audit.channel_events_before_v14", "audit.transactions_published_before_v14").foreach(table => { + Seq("audit.sent_before_v14", "audit.received_before_v14", "audit.relayed_before_v14", "audit.channel_events_before_v14", "audit.transactions_published_before_v14").foreach(table => { using(connection.prepareStatement(s"SELECT * FROM $table")) { statement => val result = statement.executeQuery() assert(result.next()) @@ -470,6 +503,8 @@ class AuditDbSpec extends AnyFunSuite { assert(migratedDb.listSent(0 unixms, now + 1.minute) == Seq(paymentSent)) migratedDb.add(paymentReceived) assert(migratedDb.listReceived(0 unixms, now + 1.minute) == Seq(paymentReceived)) + migratedDb.add(paymentRelayed) + assert(migratedDb.listRelayed(0 unixms, now + 1.minute) == Seq(paymentRelayed)) migratedDb.add(channelCreated) assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute) == Seq(channelCreated)) migratedDb.add(txPublished) @@ -485,12 +520,19 @@ class AuditDbSpec extends AnyFunSuite { using(connection.createStatement()) { statement => statement.executeUpdate("CREATE TABLE sent (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, recipient_amount_msat INTEGER NOT NULL, payment_id TEXT NOT NULL, parent_payment_id TEXT NOT NULL, payment_hash BLOB NOT NULL, payment_preimage BLOB NOT NULL, recipient_node_id BLOB NOT NULL, to_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE received (amount_msat 
INTEGER NOT NULL, payment_hash BLOB NOT NULL, from_channel_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE relayed (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, channel_id BLOB NOT NULL, direction TEXT NOT NULL, relay_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE relayed_trampoline (payment_hash BLOB NOT NULL, amount_msat INTEGER NOT NULL, next_node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_events (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, capacity_sat INTEGER NOT NULL, is_funder BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_updates (channel_id BLOB NOT NULL, node_id BLOB NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_published (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, mining_fee_sat INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id BLOB NOT NULL PRIMARY KEY, channel_id BLOB NOT NULL, node_id BLOB NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE INDEX sent_timestamp_idx ON sent(timestamp)") statement.executeUpdate("CREATE INDEX received_timestamp_idx ON received(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_timestamp_idx ON relayed(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_payment_hash_idx ON relayed(payment_hash)") + statement.executeUpdate("CREATE INDEX relayed_channel_id_idx ON relayed(channel_id)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_timestamp_idx ON 
relayed_trampoline(timestamp)") + statement.executeUpdate("CREATE INDEX relayed_trampoline_payment_hash_idx ON relayed_trampoline(payment_hash)") statement.executeUpdate("CREATE INDEX channel_events_timestamp_idx ON channel_events(timestamp)") statement.executeUpdate("CREATE INDEX channel_updates_cid_idx ON channel_updates(channel_id)") statement.executeUpdate("CREATE INDEX channel_updates_nid_idx ON channel_updates(node_id)") @@ -521,6 +563,24 @@ class AuditDbSpec extends AnyFunSuite { statement.setLong(4, paymentReceived.parts.head.receivedAt.toLong) statement.executeUpdate() } + using(connection.prepareStatement("INSERT INTO relayed VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setBytes(1, paymentRelayed.paymentHash.toArray) + statement.setLong(2, paymentRelayed.incoming.head.amount.toLong) + statement.setBytes(3, paymentRelayed.incoming.head.channelId.toArray) + statement.setString(4, "IN") + statement.setString(5, "channel") + statement.setLong(6, paymentRelayed.incoming.head.receivedAt.toLong) + statement.executeUpdate() + } + using(connection.prepareStatement("INSERT INTO relayed VALUES (?, ?, ?, ?, ?, ?)")) { statement => + statement.setBytes(1, paymentRelayed.paymentHash.toArray) + statement.setLong(2, paymentRelayed.outgoing.head.amount.toLong) + statement.setBytes(3, paymentRelayed.outgoing.head.channelId.toArray) + statement.setString(4, "OUT") + statement.setString(5, "channel") + statement.setLong(6, paymentRelayed.outgoing.head.settledAt.toLong) + statement.executeUpdate() + } using(connection.prepareStatement("INSERT INTO channel_events VALUES (?, ?, ?, ?, ?, ?, ?)")) { statement => statement.setBytes(1, channelId1.toArray) statement.setBytes(2, remoteNodeId1.value.toArray) @@ -547,11 +607,14 @@ class AuditDbSpec extends AnyFunSuite { val migratedDb = dbs.audit using(connection.createStatement()) { statement => assert(getVersion(statement, "audit").contains(SqliteAuditDb.CURRENT_VERSION)) } // We've created new tables: previous data from 
the existing tables isn't available anymore through the API. + assert(migratedDb.listSent(0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listReceived(0 unixms, now + 1.minute).isEmpty) + assert(migratedDb.listRelayed(0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listChannelEvents(remoteNodeId1, 0 unixms, now + 1.minute).isEmpty) assert(migratedDb.listPublished(channelId1).isEmpty) // But the data is still available in the database. - Seq("sent_before_v14", "received_before_v14", "channel_events_before_v14", "transactions_published_before_v14").foreach(table => { + Seq("sent_before_v14", "received_before_v14", "relayed_before_v14", "channel_events_before_v14", "transactions_published_before_v14").foreach(table => { using(connection.prepareStatement(s"SELECT * FROM $table")) { statement => val result = statement.executeQuery() assert(result.next()) @@ -562,6 +625,8 @@ class AuditDbSpec extends AnyFunSuite { assert(migratedDb.listSent(0 unixms, now + 1.minute) == Seq(paymentSent)) migratedDb.add(paymentReceived) assert(migratedDb.listReceived(0 unixms, now + 1.minute) == Seq(paymentReceived)) + migratedDb.add(paymentRelayed) + assert(migratedDb.listRelayed(0 unixms, now + 1.minute) == Seq(paymentRelayed)) migratedDb.add(channelCreated) assert(migratedDb.listChannelEvents(channelId1, 0 unixms, now + 1.minute) == Seq(channelCreated)) migratedDb.add(txPublished) From a2673ecbc6d6386c27630f29fd9602235f874e40 Mon Sep 17 00:00:00 2001 From: t-bast Date: Fri, 30 Jan 2026 12:10:55 +0100 Subject: [PATCH 6/7] Improve relay statistics computation We change the relay statistics we compute, to more accurately reflect nodes that are good or bad peers and inform liquidity allocation decisions. 
--- docs/release-notes/eclair-vnext.md | 14 ++ .../main/scala/fr/acinq/eclair/Eclair.scala | 14 +- .../scala/fr/acinq/eclair/Paginated.scala | 7 + .../scala/fr/acinq/eclair/db/AuditDb.scala | 73 +++++++-- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 137 +++++++--------- .../eclair/db/sqlite/SqliteAuditDb.scala | 131 ++++++--------- .../fr/acinq/eclair/db/AuditDbSpec.scala | 152 +++++++----------- .../acinq/eclair/api/handlers/Channel.scala | 10 +- .../fr/acinq/eclair/api/handlers/Fees.scala | 8 +- .../fr/acinq/eclair/api/handlers/Node.scala | 14 +- 10 files changed, 270 insertions(+), 290 deletions(-) diff --git a/docs/release-notes/eclair-vnext.md b/docs/release-notes/eclair-vnext.md index 8abe2c1514..f22d7aa7c4 100644 --- a/docs/release-notes/eclair-vnext.md +++ b/docs/release-notes/eclair-vnext.md @@ -26,6 +26,19 @@ However, when using zero-conf, this event may be emitted before the `channel-con See #3237 for more details. +### Major changes to the AuditDb + +We make a collection of backwards-incompatible changes to all tables of the `audit` database. +The main change is that it is way more relevant to track statistics for peer nodes instead of individual channels, so we want to track the `node_id` associated with each event. +We also track more data about transactions we make and relayed payments, to more easily score peers based on the fees we're earning vs the fees we're paying (for on-chain transactions or for liquidity purchases). + +Note that we cannot migrate existing data (since it is lacking information that we now need), so we simply rename older tables with a `_before_v14` suffix and create new ones. +Past data will thus not be accessible through the APIs, but can be queried directly using SQL if necessary. 
+It should be acceptable, since liquidity decisions should be taken based on relatively recent data (a few weeks) in order to be economically relevant (nodes that generated fees months ago but aren't generating any new fees since then are probably not good peers). + +We expose a new `relaystats` API that ranks peers based on the routing fees they're generating. +See #3245 for more details. + ### Channel jamming accountability We update our channel jamming mitigation to match the latest draft of the [spec](https://github.com/lightning/bolts/pull/1280). @@ -47,6 +60,7 @@ eclair.relay.reserved-for-accountable = 0.0 - `findroute`, `findroutetonode` and `findroutebetweennodes` now include a `maxCltvExpiryDelta` parameter (#3234) - `channel-opened` was removed from the websocket in favor of `channel-confirmed` and `channel-ready` (#3237) +- `networkfees` and `channelstats` are removed in favor of `relaystats` (#3245) ### Miscellaneous improvements and bug fixes diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/Eclair.scala b/eclair-core/src/main/scala/fr/acinq/eclair/Eclair.scala index 9b953754fc..78c06f758a 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/Eclair.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/Eclair.scala @@ -35,7 +35,7 @@ import fr.acinq.eclair.blockchain.bitcoind.rpc.BitcoinCoreClient.{AddressType, D import fr.acinq.eclair.blockchain.fee.{ConfirmationTarget, FeeratePerByte, FeeratePerKw} import fr.acinq.eclair.channel._ import fr.acinq.eclair.crypto.Sphinx -import fr.acinq.eclair.db.AuditDb.{NetworkFee, Stats} +import fr.acinq.eclair.db.AuditDb.RelayStats import fr.acinq.eclair.db.{IncomingPayment, OfferData, OutgoingPayment, OutgoingPaymentStatus} import fr.acinq.eclair.io.Peer.{GetPeerInfo, OpenChannelResponse, PeerInfo} import fr.acinq.eclair.io._ @@ -159,9 +159,9 @@ trait Eclair { def audit(from: TimestampSecond, to: TimestampSecond, paginated_opt: Option[Paginated])(implicit timeout: Timeout): Future[AuditResponse] - def 
networkFees(from: TimestampSecond, to: TimestampSecond)(implicit timeout: Timeout): Future[Seq[NetworkFee]] + def relayStats(remoteNodeId: PublicKey, from: TimestampSecond, to: TimestampSecond)(implicit timeout: Timeout): Future[RelayStats] - def channelStats(from: TimestampSecond, to: TimestampSecond, paginated_opt: Option[Paginated])(implicit timeout: Timeout): Future[Seq[Stats]] + def relayStats(from: TimestampSecond, to: TimestampSecond, paginated_opt: Option[Paginated])(implicit timeout: Timeout): Future[Seq[RelayStats]] def getInvoice(paymentHash: ByteVector32)(implicit timeout: Timeout): Future[Option[Invoice]] @@ -596,12 +596,12 @@ class EclairImpl(val appKit: Kit) extends Eclair with Logging with SpendFromChan )) } - override def networkFees(from: TimestampSecond, to: TimestampSecond)(implicit timeout: Timeout): Future[Seq[NetworkFee]] = { - Future(appKit.nodeParams.db.audit.listNetworkFees(from.toTimestampMilli, to.toTimestampMilli)) + override def relayStats(remoteNodeId: PublicKey, from: TimestampSecond, to: TimestampSecond)(implicit timeout: Timeout): Future[RelayStats] = { + Future(appKit.nodeParams.db.audit.relayStats(remoteNodeId, from.toTimestampMilli, to.toTimestampMilli)) } - override def channelStats(from: TimestampSecond, to: TimestampSecond, paginated_opt: Option[Paginated])(implicit timeout: Timeout): Future[Seq[Stats]] = { - Future(appKit.nodeParams.db.audit.stats(from.toTimestampMilli, to.toTimestampMilli, paginated_opt)) + override def relayStats(from: TimestampSecond, to: TimestampSecond, paginated_opt: Option[Paginated])(implicit timeout: Timeout): Future[Seq[RelayStats]] = { + Future(appKit.nodeParams.db.audit.relayStats(from.toTimestampMilli, to.toTimestampMilli, paginated_opt)) } override def allInvoices(from: TimestampSecond, to: TimestampSecond, paginated_opt: Option[Paginated])(implicit timeout: Timeout): Future[Seq[Invoice]] = Future { diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/Paginated.scala 
b/eclair-core/src/main/scala/fr/acinq/eclair/Paginated.scala index a21f9b341c..20aef2692e 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/Paginated.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/Paginated.scala @@ -27,3 +27,10 @@ case class Paginated(count: Int, skip: Int) { require(count >= 0, "count must be a positive number") require(skip >= 0, "skip must be a positive number") } + +object Paginated { + def paginate[T](results: Seq[T], paginated_opt: Option[Paginated]): Seq[T] = paginated_opt match { + case Some(paginated) => results.slice(paginated.skip, paginated.skip + paginated.count) + case None => results + } +} \ No newline at end of file diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala index 27292f684d..986029e9db 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala @@ -17,13 +17,13 @@ package fr.acinq.eclair.db import fr.acinq.bitcoin.scalacompat.Crypto.PublicKey -import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, TxId} +import fr.acinq.bitcoin.scalacompat.{ByteVector32, Satoshi, SatoshiLong, TxId} import fr.acinq.eclair.blockchain.fee.FeeratePerKw import fr.acinq.eclair.channel._ -import fr.acinq.eclair.db.AuditDb.{NetworkFee, PublishedTransaction, Stats} +import fr.acinq.eclair.db.AuditDb.{ConfirmedTransaction, PublishedTransaction, RelayStats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.payment._ -import fr.acinq.eclair.{MilliSatoshi, Paginated, TimestampMilli} +import fr.acinq.eclair.{MilliSatoshi, MilliSatoshiLong, Paginated, TimestampMilli} trait AuditDb { @@ -47,6 +47,12 @@ trait AuditDb { def listPublished(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[PublishedTransaction] + def listConfirmed(channelId: ByteVector32): Seq[ConfirmedTransaction] + + def listConfirmed(remoteNodeId: PublicKey, 
from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[ConfirmedTransaction] + + def listConfirmed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[ConfirmedTransaction] + def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] def listChannelEvents(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] @@ -57,9 +63,50 @@ trait AuditDb { def listRelayed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentRelayed] - def listNetworkFees(from: TimestampMilli, to: TimestampMilli): Seq[NetworkFee] + def relayStats(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): RelayStats = { + val relayed = listRelayed(from, to).filter(e => e.incoming.exists(_.remoteNodeId == remoteNodeId) || e.outgoing.exists(_.remoteNodeId == remoteNodeId)) + val relayFeeEarned = relayed.map(e => { + // When using MPP and trampoline, payments can be relayed through multiple nodes at once. + // We split the fee according to the proportional amount relayed through the requested node. 
+ e.relayFee * (e.outgoing.filter(_.remoteNodeId == remoteNodeId).map(_.amount).sum.toLong.toDouble / e.amountOut.toLong) + }).sum + val incomingPayments = relayed.flatMap(_.incoming).filter(_.remoteNodeId == remoteNodeId) + val outgoingPayments = relayed.flatMap(_.outgoing).filter(_.remoteNodeId == remoteNodeId) + val onChainFeePaid = listConfirmed(remoteNodeId, from, to, None).map(_.onChainFeePaid).sum + RelayStats(remoteNodeId, incomingPayments.size, incomingPayments.map(_.amount).sum, outgoingPayments.size, outgoingPayments.map(_.amount).sum, relayFeeEarned, onChainFeePaid, from, to) + } - def stats(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[Stats] + def relayStats(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[RelayStats] = { + // We fill payment data from all relayed payments. + val perNodeStats = listRelayed(from, to).foldLeft(Map.empty[PublicKey, RelayStats]) { + case (perNodeStats, e) => + val withIncoming = e.incoming.foldLeft(perNodeStats) { + case (perNodeStats, i) => + val current = perNodeStats.getOrElse(i.remoteNodeId, RelayStats(i.remoteNodeId, from, to)) + val updated = current.copy(incomingPaymentCount = current.incomingPaymentCount + 1, totalAmountIn = current.totalAmountIn + i.amount) + perNodeStats + (i.remoteNodeId -> updated) + } + val withOutgoing = e.outgoing.foldLeft(withIncoming) { + case (perNodeStats, o) => + val current = perNodeStats.getOrElse(o.remoteNodeId, RelayStats(o.remoteNodeId, from, to)) + val updated = current.copy(outgoingPaymentCount = current.outgoingPaymentCount + 1, totalAmountOut = current.totalAmountOut + o.amount) + perNodeStats + (o.remoteNodeId -> updated) + } + val withRelayFee = e.outgoing.map(_.remoteNodeId).toSet.foldLeft(withOutgoing) { + case (perNodeStats, remoteNodeId) => + val current = perNodeStats.getOrElse(remoteNodeId, RelayStats(remoteNodeId, from, to)) + val updated = current.copy(relayFeeEarned = 
current.relayFeeEarned + e.relayFee * (e.outgoing.filter(_.remoteNodeId == remoteNodeId).map(_.amount).sum.toLong.toDouble / e.amountOut.toLong)) + perNodeStats + (remoteNodeId -> updated) + } + withRelayFee + }.values.toSeq.sortBy(_.relayFeeEarned)(Ordering[MilliSatoshi].reverse) + // We add on-chain fees paid for each node. + val confirmedTransactions = listConfirmed(from, to) + Paginated.paginate(perNodeStats.map(stats => { + val onChainFeePaid = confirmedTransactions.filter(_.remoteNodeId == stats.remoteNodeId).map(_.onChainFeePaid).sum + stats.copy(onChainFeePaid = onChainFeePaid) + }), paginated_opt) + } } @@ -71,9 +118,13 @@ object AuditDb { def apply(tx: TransactionPublished): PublishedTransaction = PublishedTransaction(tx.tx.txid, tx.desc, tx.localMiningFee, tx.remoteMiningFee, tx.feerate, tx.timestamp) } - case class NetworkFee(remoteNodeId: PublicKey, channelId: ByteVector32, txId: ByteVector32, fee: Satoshi, txType: String, timestamp: TimestampMilli) + case class ConfirmedTransaction(remoteNodeId: PublicKey, channelId: ByteVector32, txId: TxId, onChainFeePaid: Satoshi, txType: String, timestamp: TimestampMilli) - case class Stats(channelId: ByteVector32, direction: String, avgPaymentAmount: Satoshi, paymentCount: Int, relayFee: Satoshi, networkFee: Satoshi) + case class RelayStats(remoteNodeId: PublicKey, incomingPaymentCount: Int, totalAmountIn: MilliSatoshi, outgoingPaymentCount: Int, totalAmountOut: MilliSatoshi, relayFeeEarned: MilliSatoshi, onChainFeePaid: Satoshi, from: TimestampMilli, to: TimestampMilli) + + object RelayStats { + def apply(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): RelayStats = RelayStats(remoteNodeId, 0, 0 msat, 0, 0 msat, 0 msat, 0 sat, from, to) + } case class RelayedPart(channelId: ByteVector32, remoteNodeId: PublicKey, amount: MilliSatoshi, direction: String, relayType: String, timestamp: TimestampMilli) @@ -96,7 +147,7 @@ object AuditDb { } def listRelayedInternal(relayedByHash: Map[ByteVector32, 
Seq[RelayedPart]], trampolineDetails: Map[ByteVector32, (PublicKey, MilliSatoshi)], paginated_opt: Option[Paginated]): Seq[PaymentRelayed] = { - val result = relayedByHash.flatMap { + Paginated.paginate(relayedByHash.flatMap { case (paymentHash, parts) => // We may have been routing multiple payments for the same payment_hash with different relay types. // That's fine, we simply separate each part into the correct event. @@ -118,11 +169,7 @@ object AuditDb { None } channelRelayed_opt.toSeq ++ trampolineRelayed_opt.toSeq ++ onTheFlyRelayed_opt.toSeq - }.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.toSeq.sortBy(_.settledAt), paginated_opt) } } \ No newline at end of file diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index cb174182f0..9dda0976e9 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -26,7 +26,7 @@ import fr.acinq.eclair.db.Monitoring.Metrics.withMetrics import fr.acinq.eclair.db.Monitoring.Tags.DbBackends import fr.acinq.eclair.db._ import fr.acinq.eclair.payment._ -import fr.acinq.eclair.{MilliSatoshi, MilliSatoshiLong, Paginated, TimestampMilli} +import fr.acinq.eclair.{MilliSatoshi, Paginated, TimestampMilli} import grizzled.slf4j.Logging import java.sql.{Statement, Timestamp} @@ -443,6 +443,60 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { } } + override def listConfirmed(channelId: ByteVector32): Seq[ConfirmedTransaction] = withMetrics("audit/list-confirmed-by-channel-id", DbBackends.Postgres) { + inTransaction { pg => + using(pg.prepareStatement("SELECT * FROM audit.transactions_confirmed INNER JOIN audit.transactions_published ON audit.transactions_published.tx_id = audit.transactions_confirmed.tx_id 
WHERE audit.transactions_confirmed.channel_id = ? ORDER BY audit.transactions_confirmed.timestamp")) { statement => + statement.setString(1, channelId.toHex) + statement.executeQuery().map { rs => + ConfirmedTransaction( + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = TxId(rs.getByteVector32FromHex("tx_id")), + onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), + txType = rs.getString("tx_type"), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) + }.toSeq + } + } + } + + override def listConfirmed(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[ConfirmedTransaction] = withMetrics("audit/list-confirmed-by-node-id", DbBackends.Postgres) { + inTransaction { pg => + using(pg.prepareStatement("SELECT * FROM audit.transactions_confirmed INNER JOIN audit.transactions_published ON audit.transactions_published.tx_id = audit.transactions_confirmed.tx_id WHERE audit.transactions_confirmed.node_id = ? AND audit.transactions_confirmed.timestamp BETWEEN ? and ? 
ORDER BY audit.transactions_confirmed.timestamp")) { statement => + statement.setString(1, remoteNodeId.toHex) + statement.setTimestamp(2, from.toSqlTimestamp) + statement.setTimestamp(3, to.toSqlTimestamp) + Paginated.paginate(statement.executeQuery().map { rs => + ConfirmedTransaction( + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = TxId(rs.getByteVector32FromHex("tx_id")), + onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), + txType = rs.getString("tx_type"), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) + }.toSeq, paginated_opt) + } + } + } + + override def listConfirmed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[ConfirmedTransaction] = withMetrics("audit/list-confirmed", DbBackends.Postgres) { + inTransaction { pg => + using(pg.prepareStatement("SELECT * FROM audit.transactions_confirmed INNER JOIN audit.transactions_published ON audit.transactions_published.tx_id = audit.transactions_confirmed.tx_id WHERE audit.transactions_confirmed.timestamp BETWEEN ? and ? 
ORDER BY audit.transactions_confirmed.timestamp")) { statement => + statement.setTimestamp(1, from.toSqlTimestamp) + statement.setTimestamp(2, to.toSqlTimestamp) + Paginated.paginate(statement.executeQuery().map { rs => + ConfirmedTransaction( + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = TxId(rs.getByteVector32FromHex("tx_id")), + onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), + txType = rs.getString("tx_type"), + timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) + }.toSeq, paginated_opt) + } + } + } + override def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] = withMetrics("audit/list-channel-events-by-channel-id", DbBackends.Postgres) { inTransaction { pg => using(pg.prepareStatement("SELECT * FROM audit.channel_events WHERE channel_id = ? AND timestamp BETWEEN ? AND ?")) { statement => @@ -494,7 +548,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { using(pg.prepareStatement("SELECT * FROM audit.sent WHERE settled_at BETWEEN ? 
AND ?")) { statement => statement.setTimestamp(1, from.toSqlTimestamp) statement.setTimestamp(2, to.toSqlTimestamp) - val result = statement.executeQuery() + Paginated.paginate(statement.executeQuery() .foldLeft(Map.empty[UUID, PaymentSent]) { (sentByParentId, rs) => val parentId = UUID.fromString(rs.getString("parent_payment_id")) val part = PaymentSent.PaymentPart( @@ -520,11 +574,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { part.startedAt) } sentByParentId + (parentId -> sent) - }.values.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.values.toSeq.sortBy(_.settledAt), paginated_opt) } } @@ -533,7 +583,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { using(pg.prepareStatement("SELECT * FROM audit.received WHERE received_at BETWEEN ? AND ?")) { statement => statement.setTimestamp(1, from.toSqlTimestamp) statement.setTimestamp(2, to.toSqlTimestamp) - val result = statement.executeQuery() + Paginated.paginate(statement.executeQuery() .foldLeft(Map.empty[ByteVector32, PaymentReceived]) { (receivedByHash, rs) => val paymentHash = rs.getByteVector32FromHex("payment_hash") val part = PaymentEvent.IncomingPayment( @@ -546,11 +596,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { case None => PaymentReceived(paymentHash, Seq(part)) } receivedByHash + (paymentHash -> received) - }.values.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.values.toSeq.sortBy(_.settledAt), paginated_opt) } } @@ -587,69 +633,4 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { }.flatten.toMap listRelayedInternal(relayedByHash, trampolineDetails, paginated_opt) } - - override def listNetworkFees(from: TimestampMilli, to: TimestampMilli): 
Seq[NetworkFee] = - inTransaction { pg => - using(pg.prepareStatement("SELECT * FROM audit.transactions_confirmed INNER JOIN audit.transactions_published ON audit.transactions_published.tx_id = audit.transactions_confirmed.tx_id WHERE audit.transactions_confirmed.timestamp BETWEEN ? and ? ORDER BY audit.transactions_confirmed.timestamp")) { statement => - statement.setTimestamp(1, from.toSqlTimestamp) - statement.setTimestamp(2, to.toSqlTimestamp) - statement.executeQuery().map { rs => - NetworkFee( - remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), - channelId = rs.getByteVector32FromHex("channel_id"), - txId = rs.getByteVector32FromHex("tx_id"), - fee = Satoshi(rs.getLong("local_mining_fee_sat")), - txType = rs.getString("tx_type"), - timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) - }.toSeq - } - } - - override def stats(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[Stats] = { - case class Relayed(amount: MilliSatoshi, fee: MilliSatoshi, direction: String) - - def aggregateRelayStats(previous: Map[ByteVector32, Seq[Relayed]], incoming: Seq[PaymentEvent.IncomingPayment], outgoing: Seq[PaymentEvent.OutgoingPayment]): Map[ByteVector32, Seq[Relayed]] = { - // We ensure trampoline payments are counted only once per channel and per direction (if multiple HTLCs were sent - // from/to the same channel, we group them). 
- val amountIn = incoming.map(_.amount).sum - val amountOut = outgoing.map(_.amount).sum - val in = incoming.groupBy(_.channelId).map { case (channelId, parts) => (channelId, Relayed(parts.map(_.amount).sum, 0 msat, "IN")) }.toSeq - val out = outgoing.groupBy(_.channelId).map { case (channelId, parts) => - val fee = (amountIn - amountOut) * parts.length / outgoing.length // we split the fee among outgoing channels - (channelId, Relayed(parts.map(_.amount).sum, fee, "OUT")) - }.toSeq - (in ++ out).groupBy(_._1).map { case (channelId, payments) => (channelId, payments.map(_._2) ++ previous.getOrElse(channelId, Nil)) } - } - - val relayed = listRelayed(from, to).foldLeft(Map.empty[ByteVector32, Seq[Relayed]]) { (previous, e) => - // NB: we must avoid counting the fee twice: we associate it to the outgoing channels rather than the incoming ones. - val current = aggregateRelayStats(previous, e.incoming, e.outgoing) - previous ++ current - } - - val networkFees = listNetworkFees(from, to).foldLeft(Map.empty[ByteVector32, Satoshi]) { (feeByChannelId, f) => - feeByChannelId + (f.channelId -> (feeByChannelId.getOrElse(f.channelId, 0 sat) + f.fee)) - } - - // Channels opened by our peers won't have any network fees paid by us, but we still want to compute stats for them. 
- val allChannels = networkFees.keySet ++ relayed.keySet - val result = allChannels.toSeq.flatMap(channelId => { - val networkFee = networkFees.getOrElse(channelId, 0 sat) - val (in, out) = relayed.getOrElse(channelId, Nil).partition(_.direction == "IN") - ((in, "IN") :: (out, "OUT") :: Nil).map { case (r, direction) => - val paymentCount = r.length - if (paymentCount == 0) { - Stats(channelId, direction, 0 sat, 0, 0 sat, networkFee) - } else { - val avgPaymentAmount = r.map(_.amount).sum / paymentCount - val relayFee = r.map(_.fee).sum - Stats(channelId, direction, avgPaymentAmount.truncateToSatoshi, paymentCount, relayFee.truncateToSatoshi, networkFee) - } - } - }).sortBy(s => s.channelId.toHex + s.direction) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } - } } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index 5fb26e4cee..f27dd40d19 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -26,7 +26,7 @@ import fr.acinq.eclair.db.Monitoring.Metrics.withMetrics import fr.acinq.eclair.db.Monitoring.Tags.DbBackends import fr.acinq.eclair.db._ import fr.acinq.eclair.payment._ -import fr.acinq.eclair.{MilliSatoshi, MilliSatoshiLong, Paginated, TimestampMilli} +import fr.acinq.eclair.{MilliSatoshi, Paginated, TimestampMilli} import grizzled.slf4j.Logging import java.sql.{Connection, Statement} @@ -425,6 +425,54 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } } + override def listConfirmed(channelId: ByteVector32): Seq[ConfirmedTransaction] = withMetrics("audit/list-confirmed-by-channel-id", DbBackends.Sqlite) { + using(sqlite.prepareStatement("SELECT * FROM transactions_confirmed INNER JOIN transactions_published ON 
transactions_published.tx_id = transactions_confirmed.tx_id WHERE transactions_confirmed.channel_id = ? ORDER BY transactions_confirmed.timestamp")) { statement => + statement.setString(1, channelId.toHex) + statement.executeQuery().map { rs => + ConfirmedTransaction( + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = TxId(rs.getByteVector32FromHex("tx_id")), + onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), + txType = rs.getString("tx_type"), + timestamp = TimestampMilli(rs.getLong("timestamp"))) + }.toSeq + } + } + + override def listConfirmed(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[ConfirmedTransaction] = withMetrics("audit/list-confirmed-by-node-id", DbBackends.Sqlite) { + using(sqlite.prepareStatement("SELECT * FROM transactions_confirmed INNER JOIN transactions_published ON transactions_published.tx_id = transactions_confirmed.tx_id WHERE transactions_confirmed.node_id = ? AND transactions_confirmed.timestamp >= ? AND transactions_confirmed.timestamp < ? 
ORDER BY transactions_confirmed.timestamp")) { statement => + statement.setString(1, remoteNodeId.toHex) + statement.setLong(2, from.toLong) + statement.setLong(3, to.toLong) + Paginated.paginate(statement.executeQuery().map { rs => + ConfirmedTransaction( + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = TxId(rs.getByteVector32FromHex("tx_id")), + onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), + txType = rs.getString("tx_type"), + timestamp = TimestampMilli(rs.getLong("timestamp"))) + }.toSeq, paginated_opt) + } + } + + override def listConfirmed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[ConfirmedTransaction] = withMetrics("audit/list-confirmed", DbBackends.Sqlite) { + using(sqlite.prepareStatement("SELECT * FROM transactions_confirmed INNER JOIN transactions_published ON transactions_published.tx_id = transactions_confirmed.tx_id WHERE transactions_confirmed.timestamp >= ? AND transactions_confirmed.timestamp < ? ORDER BY transactions_confirmed.timestamp")) { statement => + statement.setLong(1, from.toLong) + statement.setLong(2, to.toLong) + Paginated.paginate(statement.executeQuery().map { rs => + ConfirmedTransaction( + remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), + channelId = rs.getByteVector32FromHex("channel_id"), + txId = TxId(rs.getByteVector32FromHex("tx_id")), + onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), + txType = rs.getString("tx_type"), + timestamp = TimestampMilli(rs.getLong("timestamp"))) + }.toSeq, paginated_opt) + } + } + override def listChannelEvents(channelId: ByteVector32, from: TimestampMilli, to: TimestampMilli): Seq[ChannelEvent] = withMetrics("audit/list-channel-events-by-channel-id", DbBackends.Sqlite) { using(sqlite.prepareStatement("SELECT * FROM channel_events WHERE channel_id = ? AND timestamp >= ? 
AND timestamp < ?")) { statement => statement.setString(1, channelId.toHex) @@ -471,7 +519,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { using(sqlite.prepareStatement("SELECT * FROM sent WHERE settled_at >= ? AND settled_at < ?")) { statement => statement.setLong(1, from.toLong) statement.setLong(2, to.toLong) - val result = statement.executeQuery() + Paginated.paginate(statement.executeQuery() .foldLeft(Map.empty[UUID, PaymentSent]) { (sentByParentId, rs) => val parentId = UUID.fromString(rs.getString("parent_payment_id")) val part = PaymentSent.PaymentPart( @@ -497,18 +545,14 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { part.startedAt) } sentByParentId + (parentId -> sent) - }.values.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.values.toSeq.sortBy(_.settledAt), paginated_opt) } override def listReceived(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentReceived] = using(sqlite.prepareStatement("SELECT * FROM received WHERE received_at >= ? 
AND received_at < ?")) { statement => statement.setLong(1, from.toLong) statement.setLong(2, to.toLong) - val result = statement.executeQuery() + Paginated.paginate(statement.executeQuery() .foldLeft(Map.empty[ByteVector32, PaymentReceived]) { (receivedByHash, rs) => val paymentHash = rs.getByteVector32FromHex("payment_hash") val part = PaymentEvent.IncomingPayment( @@ -521,11 +565,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { case None => PaymentReceived(paymentHash, Seq(part)) } receivedByHash + (paymentHash -> received) - }.values.toSeq.sortBy(_.settledAt) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } + }.values.toSeq.sortBy(_.settledAt), paginated_opt) } override def listRelayed(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[PaymentRelayed] = { @@ -561,69 +601,4 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { }.flatten.toMap listRelayedInternal(relayedByHash, trampolineDetails, paginated_opt) } - - override def listNetworkFees(from: TimestampMilli, to: TimestampMilli): Seq[NetworkFee] = - using(sqlite.prepareStatement("SELECT * FROM transactions_confirmed INNER JOIN transactions_published ON transactions_published.tx_id = transactions_confirmed.tx_id WHERE transactions_confirmed.timestamp >= ? AND transactions_confirmed.timestamp < ? 
ORDER BY transactions_confirmed.timestamp")) { statement => - statement.setLong(1, from.toLong) - statement.setLong(2, to.toLong) - statement.executeQuery() - .map { rs => - NetworkFee( - remoteNodeId = PublicKey(rs.getByteVectorFromHex("node_id")), - channelId = rs.getByteVector32FromHex("channel_id"), - txId = rs.getByteVector32FromHex("tx_id"), - fee = Satoshi(rs.getLong("local_mining_fee_sat")), - txType = rs.getString("tx_type"), - timestamp = TimestampMilli(rs.getLong("timestamp"))) - }.toSeq - } - - override def stats(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated]): Seq[Stats] = { - case class Relayed(amount: MilliSatoshi, fee: MilliSatoshi, direction: String) - - def aggregateRelayStats(previous: Map[ByteVector32, Seq[Relayed]], incoming: Seq[PaymentEvent.IncomingPayment], outgoing: Seq[PaymentEvent.OutgoingPayment]): Map[ByteVector32, Seq[Relayed]] = { - // We ensure trampoline payments are counted only once per channel and per direction (if multiple HTLCs were sent - // from/to the same channel, we group them). - val amountIn = incoming.map(_.amount).sum - val amountOut = outgoing.map(_.amount).sum - val in = incoming.groupBy(_.channelId).map { case (channelId, parts) => (channelId, Relayed(parts.map(_.amount).sum, 0 msat, "IN")) }.toSeq - val out = outgoing.groupBy(_.channelId).map { case (channelId, parts) => - val fee = (amountIn - amountOut) * parts.length / outgoing.length // we split the fee among outgoing channels - (channelId, Relayed(parts.map(_.amount).sum, fee, "OUT")) - }.toSeq - (in ++ out).groupBy(_._1).map { case (channelId, payments) => (channelId, payments.map(_._2) ++ previous.getOrElse(channelId, Nil)) } - } - - val relayed = listRelayed(from, to).foldLeft(Map.empty[ByteVector32, Seq[Relayed]]) { (previous, e) => - // NB: we must avoid counting the fee twice: we associate it to the outgoing channels rather than the incoming ones. 
- val current = aggregateRelayStats(previous, e.incoming, e.outgoing) - previous ++ current - } - - val networkFees = listNetworkFees(from, to).foldLeft(Map.empty[ByteVector32, Satoshi]) { (feeByChannelId, f) => - feeByChannelId + (f.channelId -> (feeByChannelId.getOrElse(f.channelId, 0 sat) + f.fee)) - } - - // Channels opened by our peers won't have any network fees paid by us, but we still want to compute stats for them. - val allChannels = networkFees.keySet ++ relayed.keySet - val result = allChannels.toSeq.flatMap(channelId => { - val networkFee = networkFees.getOrElse(channelId, 0 sat) - val (in, out) = relayed.getOrElse(channelId, Nil).partition(_.direction == "IN") - ((in, "IN") :: (out, "OUT") :: Nil).map { case (r, direction) => - val paymentCount = r.length - if (paymentCount == 0) { - Stats(channelId, direction, 0 sat, 0, 0 sat, networkFee) - } else { - val avgPaymentAmount = r.map(_.amount).sum / paymentCount - val relayFee = r.map(_.fee).sum - Stats(channelId, direction, avgPaymentAmount.truncateToSatoshi, paymentCount, relayFee.truncateToSatoshi, networkFee) - } - } - }).sortBy(s => s.channelId.toHex + s.direction) - paginated_opt match { - case Some(paginated) => result.slice(paginated.skip, paginated.skip + paginated.count) - case None => result - } - - } } diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index e760be662e..85465e798b 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -17,12 +17,12 @@ package fr.acinq.eclair.db import fr.acinq.bitcoin.scalacompat.Crypto.PublicKey -import fr.acinq.bitcoin.scalacompat.{Block, ByteVector32, Crypto, OutPoint, SatoshiLong, Script, Transaction, TxIn, TxOut} +import fr.acinq.bitcoin.scalacompat.{Block, Crypto, OutPoint, SatoshiLong, Script, Transaction, TxIn, TxOut} import 
fr.acinq.eclair.TestDatabases.{TestPgDatabases, TestSqliteDatabases, migrationCheck} import fr.acinq.eclair.TestUtils.randomTxId import fr.acinq.eclair._ import fr.acinq.eclair.channel._ -import fr.acinq.eclair.db.AuditDb.{PublishedTransaction, Stats} +import fr.acinq.eclair.db.AuditDb.{ConfirmedTransaction, PublishedTransaction, RelayStats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.db.jdbc.JdbcUtils.using import fr.acinq.eclair.db.pg.PgAuditDb @@ -31,7 +31,6 @@ import fr.acinq.eclair.db.sqlite.SqliteAuditDb import fr.acinq.eclair.payment.Bolt11Invoice.ExtraHop import fr.acinq.eclair.payment._ import fr.acinq.eclair.router.Announcements -import org.scalatest.Tag import org.scalatest.funsuite.AnyFunSuite import scodec.bits.HexStringSyntax @@ -39,7 +38,6 @@ import java.sql.Timestamp import java.time.Instant import java.util.UUID import scala.concurrent.duration._ -import scala.util.Random class AuditDbSpec extends AnyFunSuite { @@ -71,12 +69,12 @@ class AuditDbSpec extends AnyFunSuite { db.add(e1) db.add(e2) - assert(db.listChannelEvents(randomBytes32(), from = TimestampMilli(0L), to = now + 1.minute).isEmpty) - assert(db.listChannelEvents(channelId1, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e1)) - assert(db.listChannelEvents(channelId1, from = TimestampMilli(0L), to = now - 10.minute).isEmpty) - assert(db.listChannelEvents(randomKey().publicKey, from = TimestampMilli(0L), to = now + 1.minute).isEmpty) - assert(db.listChannelEvents(remoteNodeId, from = TimestampMilli(0L), to = now + 1.minute) == Seq(e1, e2)) - assert(db.listChannelEvents(remoteNodeId, from = TimestampMilli(0L), to = now - 30.seconds) == Seq(e1)) + assert(db.listChannelEvents(randomBytes32(), from = 0 unixms, to = now + 1.minute).isEmpty) + assert(db.listChannelEvents(channelId1, from = 0 unixms, to = now + 1.minute) == Seq(e1)) + assert(db.listChannelEvents(channelId1, from = 0 unixms, to = now - 10.minute).isEmpty) + 
assert(db.listChannelEvents(randomKey().publicKey, from = 0 unixms, to = now + 1.minute).isEmpty) + assert(db.listChannelEvents(remoteNodeId, from = 0 unixms, to = now + 1.minute) == Seq(e1, e2)) + assert(db.listChannelEvents(remoteNodeId, from = 0 unixms, to = now - 30.seconds) == Seq(e1)) } } @@ -91,7 +89,7 @@ class AuditDbSpec extends AnyFunSuite { val p1b = TransactionPublished(channelId1, remoteNodeId, Transaction(2, Nil, Seq(TxOut(100_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 75 sat, 25 sat, "splice", None, now - 5.seconds) val p2 = TransactionPublished(channelId2, remoteNodeId, Transaction(2, Nil, Seq(TxOut(200_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 0 sat, 0 sat, "local-close", None, now - 1.seconds) val c1 = TransactionConfirmed(channelId1, remoteNodeId, p1a.tx, now) - val c2 = TransactionConfirmed(channelId2, remoteNodeId, Transaction(2, Nil, Seq(TxOut(150_000 sat, hex"1234")), 0), now) + val c2 = TransactionConfirmed(channelId2, remoteNodeId, Transaction(2, Nil, Seq(TxOut(150_000 sat, hex"1234")), 0), now + 100.millis) db.add(p1a) db.add(p1b) @@ -100,11 +98,16 @@ class AuditDbSpec extends AnyFunSuite { db.add(c2) assert(db.listPublished(randomBytes32()).isEmpty) - assert(db.listPublished(randomKey().publicKey, from = TimestampMilli(0L), to = now + 1.seconds).isEmpty) + assert(db.listPublished(randomKey().publicKey, from = 0 unixms, to = now + 1.seconds).isEmpty) assert(db.listPublished(channelId1) == Seq(PublishedTransaction(p1a), PublishedTransaction(p1b))) assert(db.listPublished(channelId2) == Seq(PublishedTransaction(p2))) assert(db.listPublished(remoteNodeId, from = now - 1.minute, to = now) == Seq(PublishedTransaction(p1a), PublishedTransaction(p1b), PublishedTransaction(p2))) assert(db.listPublished(remoteNodeId, from = now - 6.seconds, to = now) == Seq(PublishedTransaction(p1b), PublishedTransaction(p2))) + assert(db.listConfirmed(randomBytes32()).isEmpty) + assert(db.listConfirmed(randomKey().publicKey, from = 0 unixms, to = now + 
1.seconds, None).isEmpty) + assert(db.listConfirmed(channelId1) == Seq(ConfirmedTransaction(remoteNodeId, channelId1, p1a.tx.txid, 50 sat, "funding", now))) + assert(db.listConfirmed(channelId2).isEmpty) // this isn't a transaction we published ourselves, so we're not paying any fees for it + assert(db.listConfirmed(remoteNodeId, from = 0 unixms, to = now + 1.seconds, None) == Seq(ConfirmedTransaction(remoteNodeId, channelId1, p1a.tx.txid, 50 sat, "funding", now))) } } @@ -181,101 +184,56 @@ class AuditDbSpec extends AnyFunSuite { forAllDbs { dbs => val db = dbs.audit + val n1 = randomKey().publicKey val n2 = randomKey().publicKey val n3 = randomKey().publicKey val n4 = randomKey().publicKey - val c1 = ByteVector32.One - val c2 = c1.copy(bytes = 0x02b +: c1.tail) - val c3 = c1.copy(bytes = 0x03b +: c1.tail) - val c4 = c1.copy(bytes = 0x04b +: c1.tail) - val c5 = c1.copy(bytes = 0x05b +: c1.tail) - val c6 = c1.copy(bytes = 0x06b +: c1.tail) - - db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 46000 msat, 1000 unixms)), Seq(PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 44000 msat, 1001 unixms)))) - db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 41000 msat, 1002 unixms)), Seq(PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 40000 msat, 1003 unixms)))) - db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 43000 msat, 1004 unixms)), Seq(PaymentEvent.OutgoingPayment(c1, randomKey().publicKey, 42000 msat, 1005 unixms)))) - db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 42000 msat, 1006 unixms)), Seq(PaymentEvent.OutgoingPayment(c2, randomKey().publicKey, 40000 msat, 1007 unixms)))) - db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c5, randomKey().publicKey, 45000 msat, 1008 unixms)), 
Seq(PaymentEvent.OutgoingPayment(c6, randomKey().publicKey, 40000 msat, 1009 unixms)))) - db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 25000 msat, 1010 unixms)), Seq(PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 20000 msat, 1011 unixms)), randomKey().publicKey, 15000 msat)) - db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(c6, randomKey().publicKey, 46000 msat, 1012 unixms)), Seq(PaymentEvent.OutgoingPayment(c2, randomKey().publicKey, 16000 msat, 1013 unixms), PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 10000 msat, 1014 unixms), PaymentEvent.OutgoingPayment(c4, randomKey().publicKey, 14000 msat, 1015 unixms)), randomKey().publicKey, 37000 msat)) - - // The following confirmed txs will be taken into account. - db.add(TransactionPublished(c2, n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 200 sat, 100 sat, "funding", None)) - db.add(TransactionConfirmed(c2, n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0))) - db.add(TransactionPublished(c2, n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0), 300 sat, 200 sat, "mutual", None)) - db.add(TransactionConfirmed(c2, n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0))) - db.add(TransactionPublished(c3, n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 400 sat, 50 sat, "funding", None)) - db.add(TransactionConfirmed(c3, n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0))) - db.add(TransactionPublished(c4, n4, Transaction(2, Nil, Seq(TxOut(6000 sat, hex"0000000000")), 0), 500 sat, 0 sat, "funding", None)) - db.add(TransactionConfirmed(c4, n4, Transaction(2, Nil, Seq(TxOut(6000 sat, hex"0000000000")), 0))) + // We create some channel relay events where: + // - n1 generated 10 msat of routing fees + // - n2 generated 50_000 msat of routing fees + // - n3 generated 500 msat of routing fees + // - n4 generated 5_000 msat of routing 
fees + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n1, 16_500 msat, 1000 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n3, 16_000 msat, 1001 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n2, 20_000 msat, 1002 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n4, 15_000 msat, 1003 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n3, 50_010 msat, 1004 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n1, 50_000 msat, 1005 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n3, 100_000 msat, 1006 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n2, 50_000 msat, 1007 unixms)))) + db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n4, 40_000 msat, 1008 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n1, 40_000 msat, 1009 unixms)))) + // We create some trampoline relay events where: + // - n2 generated 5_000 msat of routing fees + // - 10_000 msat split between n2 (40%) and n3 (60%) + db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n1, 25_000 msat, 1010 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n2, 20_000 msat, 1011 unixms)), randomKey().publicKey, 15000 msat)) + db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n1, 110_000 msat, 1012 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n2, 25_000 msat, 1013 unixms), PaymentEvent.OutgoingPayment(randomBytes32(), n2, 15_000 msat, 1014 unixms), PaymentEvent.OutgoingPayment(randomBytes32(), n3, 60_000 msat, 1015 unixms)), randomKey().publicKey, 37000 msat)) + + // The following confirmed txs will be taken into account: + // - n2 paid 100 sat of on-chain fees + // - n3 paid 
5 sat of on-chain fees + db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 30 sat, 110 sat, "funding", None)) + db.add(TransactionConfirmed(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0))) + db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0), 70 sat, 80 sat, "mutual", None)) + db.add(TransactionConfirmed(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0))) + db.add(TransactionPublished(randomBytes32(), n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 5 sat, 50 sat, "funding", None)) + db.add(TransactionConfirmed(randomBytes32(), n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0))) // The following txs will not be taken into account. - db.add(TransactionPublished(c2, n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 1000 sat, 0 sat, "funding", None)) // duplicate - db.add(TransactionPublished(c4, n4, Transaction(2, Nil, Seq(TxOut(4500 sat, hex"1111222233")), 0), 500 sat, 150 sat, "funding", None)) // unconfirmed - db.add(TransactionConfirmed(c4, n4, Transaction(2, Nil, Seq(TxOut(2500 sat, hex"ffffff")), 0))) // doesn't match a published tx - - assert(db.listPublished(randomBytes32()).isEmpty) - assert(db.listPublished(c4).map(_.txId).toSet.size == 2) - assert(db.listPublished(c4).map(_.desc) == Seq("funding", "funding")) - - // NB: we only count a relay fee for the outgoing channel, no the incoming one. 
- assert(db.stats(0 unixms, TimestampMilli.now() + 1.milli) == Seq( - Stats(channelId = c1, direction = "IN", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 0 sat), - Stats(channelId = c1, direction = "OUT", avgPaymentAmount = 42 sat, paymentCount = 3, relayFee = 4 sat, networkFee = 0 sat), - Stats(channelId = c2, direction = "IN", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 500 sat), - Stats(channelId = c2, direction = "OUT", avgPaymentAmount = 28 sat, paymentCount = 2, relayFee = 4 sat, networkFee = 500 sat), - Stats(channelId = c3, direction = "IN", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 400 sat), - Stats(channelId = c3, direction = "OUT", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 400 sat), - Stats(channelId = c4, direction = "IN", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 500 sat), - Stats(channelId = c4, direction = "OUT", avgPaymentAmount = 22 sat, paymentCount = 2, relayFee = 9 sat, networkFee = 500 sat), - Stats(channelId = c5, direction = "IN", avgPaymentAmount = 43 sat, paymentCount = 3, relayFee = 0 sat, networkFee = 0 sat), - Stats(channelId = c5, direction = "OUT", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 0 sat), - Stats(channelId = c6, direction = "IN", avgPaymentAmount = 39 sat, paymentCount = 4, relayFee = 0 sat, networkFee = 0 sat), - Stats(channelId = c6, direction = "OUT", avgPaymentAmount = 40 sat, paymentCount = 1, relayFee = 5 sat, networkFee = 0 sat), + db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 1000 sat, 0 sat, "funding", None)) // duplicate + db.add(TransactionPublished(randomBytes32(), n4, Transaction(2, Nil, Seq(TxOut(4500 sat, hex"1111222233")), 0), 500 sat, 150 sat, "funding", None)) // unconfirmed + db.add(TransactionConfirmed(randomBytes32(), n4, Transaction(2, Nil, Seq(TxOut(2500 
sat, hex"ffffff")), 0))) // doesn't match a published tx + + // We list nodes with the highest fee earners first. + val (from, to) = (0 unixms, TimestampMilli.now() + 1.milli) + assert(db.relayStats(from, to) == Seq( + RelayStats(n2, incomingPaymentCount = 1, totalAmountIn = 20_000 msat, outgoingPaymentCount = 4, totalAmountOut = 110_000 msat, relayFeeEarned = 59_000 msat, onChainFeePaid = 100 sat, from, to), + RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainFeePaid = 5 sat, from, to), + RelayStats(n4, incomingPaymentCount = 1, totalAmountIn = 40_000 msat, outgoingPaymentCount = 1, totalAmountOut = 15_000 msat, relayFeeEarned = 5_000 msat, onChainFeePaid = 0 sat, from, to), + RelayStats(n1, incomingPaymentCount = 3, totalAmountIn = 151_500 msat, outgoingPaymentCount = 2, totalAmountOut = 90_000 msat, relayFeeEarned = 10 msat, onChainFeePaid = 0 sat, from, to), )) - assert(db.stats(0 unixms, TimestampMilli.now() + 1.milli, Some(Paginated(2, 3))) == Seq( - Stats(channelId = c2, direction = "OUT", avgPaymentAmount = 28 sat, paymentCount = 2, relayFee = 4 sat, networkFee = 500 sat), - Stats(channelId = c3, direction = "IN", avgPaymentAmount = 0 sat, paymentCount = 0, relayFee = 0 sat, networkFee = 400 sat), + assert(db.relayStats(from, to, Some(Paginated(count = 2, skip = 1))) == Seq( + RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainFeePaid = 5 sat, from, to), + RelayStats(n4, incomingPaymentCount = 1, totalAmountIn = 40_000 msat, outgoingPaymentCount = 1, totalAmountOut = 15_000 msat, relayFeeEarned = 5_000 msat, onChainFeePaid = 0 sat, from, to), )) } } - ignore("relay stats performance", Tag("perf")) { - forAllDbs { dbs => - val db = dbs.audit - val nodeCount = 100 - val channelCount = 1000 - val eventCount = 100000 - val nodeIds = (1 to 
nodeCount).map(_ => randomKey().publicKey) - val channelIds = (1 to channelCount).map(_ => randomBytes32()) - // Fund channels. - channelIds.foreach(channelId => { - val nodeId = nodeIds(Random.nextInt(nodeCount)) - val fundingTx = Transaction(0, Seq.empty, Seq(TxOut(5000 sat, Script.pay2wpkh(nodeId))), 0) - db.add(TransactionPublished(channelId, nodeId, fundingTx, 100 sat, 0 sat, "funding", None)) - db.add(TransactionConfirmed(channelId, nodeId, fundingTx)) - }) - // Add relay events. - (1 to eventCount).foreach(_ => { - // 25% trampoline relays. - if (Random.nextInt(4) == 0) { - val outgoingCount = 1 + Random.nextInt(4) - val incoming = Seq(PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 10000 msat, TimestampMilli.now() - 3.seconds)) - val outgoing = (1 to outgoingCount).map(_ => PaymentEvent.OutgoingPayment(channelIds(Random.nextInt(channelCount)), randomKey().publicKey, Random.nextInt(2000).msat, TimestampMilli.now())) - db.add(TrampolinePaymentRelayed(randomBytes32(), incoming, outgoing, randomKey().publicKey, 5000 msat)) - } else { - val toChannelId = channelIds(Random.nextInt(channelCount)) - db.add(ChannelPaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), randomKey().publicKey, 10000 msat, TimestampMilli.now() - 2.seconds)), Seq(PaymentEvent.OutgoingPayment(toChannelId, randomKey().publicKey, Random.nextInt(10000).msat, TimestampMilli.now())))) - } - }) - // Test starts here. 
- val start = TimestampMilli.now() - assert(db.stats(0 unixms, start + 1.milli).nonEmpty) - val end = TimestampMilli.now() - fail(s"took ${end - start}ms") - } - } - test("ignore invalid values in the DB") { forAllDbs { dbs => val db = dbs.audit diff --git a/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Channel.scala b/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Channel.scala index d598dc2f97..3a41b1fd5b 100644 --- a/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Channel.scala +++ b/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Channel.scala @@ -153,18 +153,10 @@ trait Channel { } } - val channelStats: Route = postRequest("channelstats") { implicit t => - withPaginated { paginated_opt => - formFields(fromFormParam(), toFormParam()) { (from, to) => - complete(eclairApi.channelStats(from, to, paginated_opt.orElse(Some(Paginated(count = 10, skip = 0))))) - } - } - } - val channelBalances: Route = postRequest("channelbalances") { implicit t => complete(eclairApi.channelBalances()) } - val channelRoutes: Route = open ~ rbfOpen ~ spliceIn ~ spliceOut ~ rbfSplice ~ close ~ forceClose ~ bumpForceClose ~ channel ~ channels ~ closedChannels ~ allChannels ~ allUpdates ~ channelStats ~ channelBalances + val channelRoutes: Route = open ~ rbfOpen ~ spliceIn ~ spliceOut ~ rbfSplice ~ close ~ forceClose ~ bumpForceClose ~ channel ~ channels ~ closedChannels ~ allChannels ~ allUpdates ~ channelBalances } diff --git a/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Fees.scala b/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Fees.scala index 684c4a7d9c..931533308f 100644 --- a/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Fees.scala +++ b/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Fees.scala @@ -27,12 +27,6 @@ trait Fees { import fr.acinq.eclair.api.serde.JsonSupport.{formats, marshaller, serialization} - val networkFees: Route = postRequest("networkfees") { implicit t => - formFields(fromFormParam(), 
toFormParam()) { (from, to) => - complete(eclairApi.networkFees(from, to)) - } - } - val updateRelayFee: Route = postRequest("updaterelayfee") { implicit t => withNodesIdentifier { nodes => formFields("feeBaseMsat".as[MilliSatoshi], "feeProportionalMillionths".as[Long]) { (feeBase, feeProportional) => @@ -45,6 +39,6 @@ trait Fees { } } - val feeRoutes: Route = networkFees ~ updateRelayFee + val feeRoutes: Route = updateRelayFee } diff --git a/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Node.scala b/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Node.scala index 72a7bed7f3..498480023c 100644 --- a/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Node.scala +++ b/eclair-node/src/main/scala/fr/acinq/eclair/api/handlers/Node.scala @@ -17,6 +17,7 @@ package fr.acinq.eclair.api.handlers import akka.http.scaladsl.server.Route +import fr.acinq.eclair.Paginated import fr.acinq.eclair.api.Service import fr.acinq.eclair.api.directives.EclairDirectives import fr.acinq.eclair.api.serde.FormParamExtractors._ @@ -56,6 +57,17 @@ trait Node { complete(eclairApi.peers()) } + val relayStats: Route = postRequest("relaystats") { implicit t => + withPaginated { paginated_opt => + formFields(nodeIdFormParam.?, fromFormParam(), toFormParam()) { (remoteNodeId_opt, from, to) => + remoteNodeId_opt match { + case Some(remoteNodeId) => complete(eclairApi.relayStats(remoteNodeId, from, to)) + case None => complete(eclairApi.relayStats(from, to, paginated_opt.orElse(Some(Paginated(count = 10, skip = 0))))) + } + } + } + } + val audit: Route = postRequest("audit") { implicit t => withPaginated { paginated_opt => formFields(fromFormParam(), toFormParam()) { (from, to) => @@ -68,5 +80,5 @@ trait Node { complete(eclairApi.stop()) } - val nodeRoutes: Route = getInfo ~ connect ~ disconnect ~ peers ~ audit ~ stop + val nodeRoutes: Route = getInfo ~ connect ~ disconnect ~ peers ~ relayStats ~ audit ~ stop } From a50fbbb734ee1aecce84c66d60294002edab7e02 Mon Sep 17 00:00:00 
2001 From: t-bast Date: Fri, 30 Jan 2026 15:33:40 +0100 Subject: [PATCH 7/7] Add liquidity purchase fees in transaction events We now track liquidity purchases as well in transaction events, as their fees must be taken into account when evaluating the overall fees earned from a node. We also add the number of on-chain transactions made with a given peer to its relay stats. --- .../scala/fr/acinq/eclair/db/AuditDb.scala | 25 ++++++++----- .../fr/acinq/eclair/db/pg/PgAuditDb.scala | 37 +++++++++++++++---- .../eclair/db/sqlite/SqliteAuditDb.scala | 37 +++++++++++++++---- .../fr/acinq/eclair/db/AuditDbSpec.scala | 28 ++++++++------ 4 files changed, 90 insertions(+), 37 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala index 986029e9db..252d6a5c59 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/AuditDb.scala @@ -23,6 +23,7 @@ import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.AuditDb.{ConfirmedTransaction, PublishedTransaction, RelayStats} import fr.acinq.eclair.db.DbEventHandler.ChannelEvent import fr.acinq.eclair.payment._ +import fr.acinq.eclair.wire.protocol.LiquidityAds import fr.acinq.eclair.{MilliSatoshi, MilliSatoshiLong, Paginated, TimestampMilli} trait AuditDb { @@ -72,8 +73,11 @@ trait AuditDb { }).sum val incomingPayments = relayed.flatMap(_.incoming).filter(_.remoteNodeId == remoteNodeId) val outgoingPayments = relayed.flatMap(_.outgoing).filter(_.remoteNodeId == remoteNodeId) - val onChainFeePaid = listConfirmed(remoteNodeId, from, to, None).map(_.onChainFeePaid).sum - RelayStats(remoteNodeId, incomingPayments.size, incomingPayments.map(_.amount).sum, outgoingPayments.size, outgoingPayments.map(_.amount).sum, relayFeeEarned, onChainFeePaid, from, to) + val confirmedTransactions = listConfirmed(remoteNodeId, from, to, None) + val onChainFeePaid = 
confirmedTransactions.map(_.onChainFeePaid).sum + val liquidityFeeEarned = confirmedTransactions.flatMap(_.liquidityPurchase_opt).filter(_.isSeller).map(_.fees.total).sum + val liquidityFeePaid = confirmedTransactions.flatMap(_.liquidityPurchase_opt).filter(_.isBuyer).map(_.fees.total).sum + RelayStats(remoteNodeId, incomingPayments.size, incomingPayments.map(_.amount).sum, outgoingPayments.size, outgoingPayments.map(_.amount).sum, relayFeeEarned, confirmedTransactions.size, onChainFeePaid, liquidityFeeEarned, liquidityFeePaid, from, to) } def relayStats(from: TimestampMilli, to: TimestampMilli, paginated_opt: Option[Paginated] = None): Seq[RelayStats] = { @@ -103,8 +107,11 @@ trait AuditDb { // We add on-chain fees paid for each node. val confirmedTransactions = listConfirmed(from, to) Paginated.paginate(perNodeStats.map(stats => { - val onChainFeePaid = confirmedTransactions.filter(_.remoteNodeId == stats.remoteNodeId).map(_.onChainFeePaid).sum - stats.copy(onChainFeePaid = onChainFeePaid) + val transactionsWithPeer = confirmedTransactions.filter(_.remoteNodeId == stats.remoteNodeId) + val onChainFeePaid = transactionsWithPeer.map(_.onChainFeePaid).sum + val liquidityFeeEarned = transactionsWithPeer.flatMap(_.liquidityPurchase_opt).filter(_.isSeller).map(_.fees.total).sum + val liquidityFeePaid = transactionsWithPeer.flatMap(_.liquidityPurchase_opt).filter(_.isBuyer).map(_.fees.total).sum + stats.copy(onChainTransactionsCount = transactionsWithPeer.size, onChainFeePaid = onChainFeePaid, liquidityFeeEarned = liquidityFeeEarned, liquidityFeePaid = liquidityFeePaid) }), paginated_opt) } @@ -112,18 +119,18 @@ trait AuditDb { object AuditDb { - case class PublishedTransaction(txId: TxId, desc: String, localMiningFee: Satoshi, remoteMiningFee: Satoshi, feerate: FeeratePerKw, timestamp: TimestampMilli) + case class PublishedTransaction(txId: TxId, desc: String, localMiningFee: Satoshi, remoteMiningFee: Satoshi, feerate: FeeratePerKw, liquidityPurchase_opt: 
Option[LiquidityAds.PurchaseBasicInfo], timestamp: TimestampMilli) object PublishedTransaction { - def apply(tx: TransactionPublished): PublishedTransaction = PublishedTransaction(tx.tx.txid, tx.desc, tx.localMiningFee, tx.remoteMiningFee, tx.feerate, tx.timestamp) + def apply(tx: TransactionPublished): PublishedTransaction = PublishedTransaction(tx.tx.txid, tx.desc, tx.localMiningFee, tx.remoteMiningFee, tx.feerate, tx.liquidityPurchase_opt, tx.timestamp) } - case class ConfirmedTransaction(remoteNodeId: PublicKey, channelId: ByteVector32, txId: TxId, onChainFeePaid: Satoshi, txType: String, timestamp: TimestampMilli) + case class ConfirmedTransaction(remoteNodeId: PublicKey, channelId: ByteVector32, txId: TxId, onChainFeePaid: Satoshi, txType: String, liquidityPurchase_opt: Option[LiquidityAds.PurchaseBasicInfo], timestamp: TimestampMilli) - case class RelayStats(remoteNodeId: PublicKey, incomingPaymentCount: Int, totalAmountIn: MilliSatoshi, outgoingPaymentCount: Int, totalAmountOut: MilliSatoshi, relayFeeEarned: MilliSatoshi, onChainFeePaid: Satoshi, from: TimestampMilli, to: TimestampMilli) + case class RelayStats(remoteNodeId: PublicKey, incomingPaymentCount: Int, totalAmountIn: MilliSatoshi, outgoingPaymentCount: Int, totalAmountOut: MilliSatoshi, relayFeeEarned: MilliSatoshi, onChainTransactionsCount: Int, onChainFeePaid: Satoshi, liquidityFeeEarned: Satoshi, liquidityFeePaid: Satoshi, from: TimestampMilli, to: TimestampMilli) object RelayStats { - def apply(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): RelayStats = RelayStats(remoteNodeId, 0, 0 msat, 0, 0 msat, 0 msat, 0 sat, from, to) + def apply(remoteNodeId: PublicKey, from: TimestampMilli, to: TimestampMilli): RelayStats = RelayStats(remoteNodeId, 0, 0 msat, 0, 0 msat, 0 msat, 0, 0 sat, 0 sat, 0 sat, from, to) } case class RelayedPart(channelId: ByteVector32, remoteNodeId: PublicKey, amount: MilliSatoshi, direction: String, relayType: String, timestamp: TimestampMilli) diff --git 
a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala index 9dda0976e9..83172bd56b 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/pg/PgAuditDb.scala @@ -26,10 +26,11 @@ import fr.acinq.eclair.db.Monitoring.Metrics.withMetrics import fr.acinq.eclair.db.Monitoring.Tags.DbBackends import fr.acinq.eclair.db._ import fr.acinq.eclair.payment._ +import fr.acinq.eclair.wire.protocol.LiquidityAds import fr.acinq.eclair.{MilliSatoshi, Paginated, TimestampMilli} import grizzled.slf4j.Logging -import java.sql.{Statement, Timestamp} +import java.sql.{ResultSet, Statement, Timestamp} import java.time.Instant import java.util.UUID import javax.sql.DataSource @@ -131,7 +132,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("DROP INDEX audit.transactions_published_channel_id_idx") statement.executeUpdate("DROP INDEX audit.transactions_published_timestamp_idx") statement.executeUpdate("DROP INDEX audit.transactions_confirmed_timestamp_idx") - statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, is_buying_liquidity BOOLEAN NOT NULL, liquidity_amount_sat BIGINT NOT NULL, liquidity_mining_fee_sat BIGINT NOT NULL, liquidity_service_fee_sat BIGINT NOT NULL, input_count BIGINT NOT NULL, 
output_count BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") // We recreate indexes for the updated transaction tables. statement.executeUpdate("CREATE INDEX transactions_published_channel_id_idx ON audit.transactions_published(channel_id)") @@ -178,7 +179,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE audit.channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat BIGINT NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat BIGINT NOT NULL, fee_proportional_millionths BIGINT NOT NULL, cltv_expiry_delta BIGINT NOT NULL, htlc_minimum_msat BIGINT NOT NULL, htlc_maximum_msat BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.path_finding_metrics (amount_msat BIGINT NOT NULL, fees_msat BIGINT NOT NULL, status TEXT NOT NULL, duration_ms BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL, is_mpp BOOLEAN NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id TEXT NOT NULL, payment_hash TEXT, routing_hints JSONB)") - statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp 
TIMESTAMP WITH TIME ZONE NOT NULL)") + statement.executeUpdate("CREATE TABLE audit.transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat BIGINT NOT NULL, remote_mining_fee_sat BIGINT NOT NULL, feerate_sat_per_kw BIGINT NOT NULL, is_buying_liquidity BOOLEAN NOT NULL, liquidity_amount_sat BIGINT NOT NULL, liquidity_mining_fee_sat BIGINT NOT NULL, liquidity_service_fee_sat BIGINT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, tx_type TEXT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE TABLE audit.transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count BIGINT NOT NULL, output_count BIGINT NOT NULL, timestamp TIMESTAMP WITH TIME ZONE NOT NULL)") statement.executeUpdate("CREATE INDEX sent_settled_at_idx ON audit.sent(settled_at)") @@ -341,17 +342,21 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { override def add(e: TransactionPublished): Unit = withMetrics("audit/add-transaction-published", DbBackends.Postgres) { inTransaction { pg => - using(pg.prepareStatement("INSERT INTO audit.transactions_published VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT DO NOTHING")) { statement => + using(pg.prepareStatement("INSERT INTO audit.transactions_published VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
ON CONFLICT DO NOTHING")) { statement => statement.setString(1, e.tx.txid.value.toHex) statement.setString(2, e.channelId.toHex) statement.setString(3, e.remoteNodeId.toHex) statement.setLong(4, e.localMiningFee.toLong) statement.setLong(5, e.remoteMiningFee.toLong) statement.setLong(6, e.feerate.toLong) - statement.setLong(7, e.tx.txIn.size) - statement.setLong(8, e.tx.txOut.size) - statement.setString(9, e.desc) - statement.setTimestamp(10, e.timestamp.toSqlTimestamp) + statement.setBoolean(7, e.liquidityPurchase_opt.exists(_.isBuyer)) + statement.setLong(8, e.liquidityPurchase_opt.map(_.amount.toLong).getOrElse(0)) + statement.setLong(9, e.liquidityPurchase_opt.map(_.fees.miningFee.toLong).getOrElse(0)) + statement.setLong(10, e.liquidityPurchase_opt.map(_.fees.serviceFee.toLong).getOrElse(0)) + statement.setLong(11, e.tx.txIn.size) + statement.setLong(12, e.tx.txOut.size) + statement.setString(13, e.desc) + statement.setTimestamp(14, e.timestamp.toSqlTimestamp) statement.executeUpdate() } } @@ -405,6 +410,17 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { } } + private def readLiquidityPurchase(rs: ResultSet): Option[LiquidityAds.PurchaseBasicInfo] = { + rs.getLong("liquidity_amount_sat") match { + case 0 => None + case amount => Some(LiquidityAds.PurchaseBasicInfo( + isBuyer = rs.getBoolean("is_buying_liquidity"), + amount = Satoshi(amount), + fees = LiquidityAds.Fees(miningFee = Satoshi(rs.getLong("liquidity_mining_fee_sat")), serviceFee = Satoshi(rs.getLong("liquidity_service_fee_sat"))), + )) + } + } + override def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] = withMetrics("audit/list-published-by-channel-id", DbBackends.Postgres) { inTransaction { pg => using(pg.prepareStatement("SELECT * FROM audit.transactions_published WHERE channel_id = ?")) { statement => @@ -416,6 +432,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { localMiningFee = rs.getLong("local_mining_fee_sat").sat, 
remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")) ) }.toSeq @@ -436,6 +453,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { localMiningFee = rs.getLong("local_mining_fee_sat").sat, remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp")) ) }.toSeq @@ -454,6 +472,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { txId = TxId(rs.getByteVector32FromHex("tx_id")), onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) }.toSeq } @@ -473,6 +492,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { txId = TxId(rs.getByteVector32FromHex("tx_id")), onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) }.toSeq, paginated_opt) } @@ -491,6 +511,7 @@ class PgAuditDb(implicit ds: DataSource) extends AuditDb with Logging { txId = TxId(rs.getByteVector32FromHex("tx_id")), onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli.fromSqlTimestamp(rs.getTimestamp("timestamp"))) }.toSeq, paginated_opt) } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala index 
f27dd40d19..6543279ecd 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/db/sqlite/SqliteAuditDb.scala @@ -26,10 +26,11 @@ import fr.acinq.eclair.db.Monitoring.Metrics.withMetrics import fr.acinq.eclair.db.Monitoring.Tags.DbBackends import fr.acinq.eclair.db._ import fr.acinq.eclair.payment._ +import fr.acinq.eclair.wire.protocol.LiquidityAds import fr.acinq.eclair.{MilliSatoshi, Paginated, TimestampMilli} import grizzled.slf4j.Logging -import java.sql.{Connection, Statement} +import java.sql.{Connection, ResultSet, Statement} import java.util.UUID object SqliteAuditDb { @@ -137,7 +138,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { // We add mining fee details, input and output counts to the transaction tables, and use TEXT instead of BLOBs. statement.executeUpdate("ALTER TABLE transactions_published RENAME TO transactions_published_before_v14") statement.executeUpdate("ALTER TABLE transactions_confirmed RENAME TO transactions_confirmed_before_v14") - statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, feerate_sat_per_kw INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, feerate_sat_per_kw INTEGER NOT NULL, is_buying_liquidity BOOLEAN NOT NULL, liquidity_amount_sat INTEGER NOT NULL, liquidity_mining_fee_sat INTEGER NOT NULL, liquidity_service_fee_sat INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") 
statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("DROP INDEX transactions_published_channel_id_idx") statement.executeUpdate("DROP INDEX transactions_published_timestamp_idx") @@ -185,7 +186,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { statement.executeUpdate("CREATE TABLE channel_events (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, funding_txid TEXT NOT NULL, channel_type TEXT NOT NULL, capacity_sat INTEGER NOT NULL, is_opener BOOLEAN NOT NULL, is_private BOOLEAN NOT NULL, event TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE channel_updates (channel_id TEXT NOT NULL, node_id TEXT NOT NULL, fee_base_msat INTEGER NOT NULL, fee_proportional_millionths INTEGER NOT NULL, cltv_expiry_delta INTEGER NOT NULL, htlc_minimum_msat INTEGER NOT NULL, htlc_maximum_msat INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE path_finding_metrics (amount_msat INTEGER NOT NULL, fees_msat INTEGER NOT NULL, status TEXT NOT NULL, duration_ms INTEGER NOT NULL, timestamp INTEGER NOT NULL, is_mpp INTEGER NOT NULL, experiment_name TEXT NOT NULL, recipient_node_id BLOB NOT NULL)") - statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, feerate_sat_per_kw INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") + statement.executeUpdate("CREATE TABLE transactions_published (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, local_mining_fee_sat INTEGER NOT NULL, remote_mining_fee_sat INTEGER NOT NULL, 
feerate_sat_per_kw INTEGER NOT NULL, is_buying_liquidity BOOLEAN NOT NULL, liquidity_amount_sat INTEGER NOT NULL, liquidity_mining_fee_sat INTEGER NOT NULL, liquidity_service_fee_sat INTEGER NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, tx_type TEXT NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE TABLE transactions_confirmed (tx_id TEXT NOT NULL PRIMARY KEY, channel_id TEXT NOT NULL, node_id TEXT NOT NULL, input_count INTEGER NOT NULL, output_count INTEGER NOT NULL, timestamp INTEGER NOT NULL)") statement.executeUpdate("CREATE INDEX sent_settled_at_idx ON sent(settled_at)") @@ -336,17 +337,21 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } override def add(e: TransactionPublished): Unit = withMetrics("audit/add-transaction-published", DbBackends.Sqlite) { - using(sqlite.prepareStatement("INSERT OR IGNORE INTO transactions_published VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => + using(sqlite.prepareStatement("INSERT OR IGNORE INTO transactions_published VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")) { statement => statement.setString(1, e.tx.txid.value.toHex) statement.setString(2, e.channelId.toHex) statement.setString(3, e.remoteNodeId.toHex) statement.setLong(4, e.localMiningFee.toLong) statement.setLong(5, e.remoteMiningFee.toLong) statement.setLong(6, e.feerate.toLong) - statement.setLong(7, e.tx.txIn.size) - statement.setLong(8, e.tx.txOut.size) - statement.setString(9, e.desc) - statement.setLong(10, e.timestamp.toLong) + statement.setBoolean(7, e.liquidityPurchase_opt.exists(_.isBuyer)) + statement.setLong(8, e.liquidityPurchase_opt.map(_.amount.toLong).getOrElse(0)) + statement.setLong(9, e.liquidityPurchase_opt.map(_.fees.miningFee.toLong).getOrElse(0)) + statement.setLong(10, e.liquidityPurchase_opt.map(_.fees.serviceFee.toLong).getOrElse(0)) + statement.setLong(11, e.tx.txIn.size) + statement.setLong(12, e.tx.txOut.size) + statement.setString(13, e.desc) + 
statement.setLong(14, e.timestamp.toLong) statement.executeUpdate() } } @@ -391,6 +396,17 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { } } + private def readLiquidityPurchase(rs: ResultSet): Option[LiquidityAds.PurchaseBasicInfo] = { + rs.getLong("liquidity_amount_sat") match { + case 0 => None + case amount => Some(LiquidityAds.PurchaseBasicInfo( + isBuyer = rs.getBoolean("is_buying_liquidity"), + amount = Satoshi(amount), + fees = LiquidityAds.Fees(miningFee = Satoshi(rs.getLong("liquidity_mining_fee_sat")), serviceFee = Satoshi(rs.getLong("liquidity_service_fee_sat"))), + )) + } + } + override def listPublished(channelId: ByteVector32): Seq[PublishedTransaction] = withMetrics("audit/list-published-by-channel-id", DbBackends.Sqlite) { using(sqlite.prepareStatement("SELECT * FROM transactions_published WHERE channel_id = ?")) { statement => statement.setString(1, channelId.toHex) @@ -401,6 +417,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { localMiningFee = rs.getLong("local_mining_fee_sat").sat, remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli(rs.getLong("timestamp")) ) }.toSeq @@ -419,6 +436,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { localMiningFee = rs.getLong("local_mining_fee_sat").sat, remoteMiningFee = rs.getLong("remote_mining_fee_sat").sat, feerate = FeeratePerKw(rs.getLong("feerate_sat_per_kw").sat), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli(rs.getLong("timestamp")) ) }.toSeq @@ -435,6 +453,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { txId = TxId(rs.getByteVector32FromHex("tx_id")), onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), + liquidityPurchase_opt = readLiquidityPurchase(rs), 
timestamp = TimestampMilli(rs.getLong("timestamp"))) }.toSeq } @@ -452,6 +471,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { txId = TxId(rs.getByteVector32FromHex("tx_id")), onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli(rs.getLong("timestamp"))) }.toSeq, paginated_opt) } @@ -468,6 +488,7 @@ class SqliteAuditDb(val sqlite: Connection) extends AuditDb with Logging { txId = TxId(rs.getByteVector32FromHex("tx_id")), onChainFeePaid = Satoshi(rs.getLong("local_mining_fee_sat")), txType = rs.getString("tx_type"), + liquidityPurchase_opt = readLiquidityPurchase(rs), timestamp = TimestampMilli(rs.getLong("timestamp"))) }.toSeq, paginated_opt) } diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala index 85465e798b..5eaf7c4f79 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/db/AuditDbSpec.scala @@ -31,6 +31,7 @@ import fr.acinq.eclair.db.sqlite.SqliteAuditDb import fr.acinq.eclair.payment.Bolt11Invoice.ExtraHop import fr.acinq.eclair.payment._ import fr.acinq.eclair.router.Announcements +import fr.acinq.eclair.wire.protocol.LiquidityAds import org.scalatest.funsuite.AnyFunSuite import scodec.bits.HexStringSyntax @@ -85,7 +86,8 @@ class AuditDbSpec extends AnyFunSuite { val channelId1 = randomBytes32() val channelId2 = randomBytes32() val remoteNodeId = randomKey().publicKey - val p1a = TransactionPublished(channelId1, remoteNodeId, Transaction(2, Nil, Seq(TxOut(50_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 50 sat, 0 sat, "funding", None, now - 10.seconds) + val liquidityPurchase = LiquidityAds.PurchaseBasicInfo(isBuyer = true, 50_000 sat, LiquidityAds.Fees(10 sat, 5 sat)) + val p1a = TransactionPublished(channelId1, remoteNodeId, Transaction(2, 
Nil, Seq(TxOut(50_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 50 sat, 0 sat, "funding", Some(liquidityPurchase), now - 10.seconds) val p1b = TransactionPublished(channelId1, remoteNodeId, Transaction(2, Nil, Seq(TxOut(100_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 75 sat, 25 sat, "splice", None, now - 5.seconds) val p2 = TransactionPublished(channelId2, remoteNodeId, Transaction(2, Nil, Seq(TxOut(200_000 sat, Script.pay2wpkh(remoteNodeId))), 0), 0 sat, 0 sat, "local-close", None, now - 1.seconds) val c1 = TransactionConfirmed(channelId1, remoteNodeId, p1a.tx, now) @@ -105,9 +107,9 @@ class AuditDbSpec extends AnyFunSuite { assert(db.listPublished(remoteNodeId, from = now - 6.seconds, to = now) == Seq(PublishedTransaction(p1b), PublishedTransaction(p2))) assert(db.listConfirmed(randomBytes32()).isEmpty) assert(db.listConfirmed(randomKey().publicKey, from = 0 unixms, to = now + 1.seconds, None).isEmpty) - assert(db.listConfirmed(channelId1) == Seq(ConfirmedTransaction(remoteNodeId, channelId1, p1a.tx.txid, 50 sat, "funding", now))) + assert(db.listConfirmed(channelId1) == Seq(ConfirmedTransaction(remoteNodeId, channelId1, p1a.tx.txid, 50 sat, "funding", p1a.liquidityPurchase_opt, now))) assert(db.listConfirmed(channelId2).isEmpty) // this isn't a transaction we published ourselves, so we're not paying any fees for it - assert(db.listConfirmed(remoteNodeId, from = 0 unixms, to = now + 1.seconds, None) == Seq(ConfirmedTransaction(remoteNodeId, channelId1, p1a.tx.txid, 50 sat, "funding", now))) + assert(db.listConfirmed(remoteNodeId, from = 0 unixms, to = now + 1.seconds, None) == Seq(ConfirmedTransaction(remoteNodeId, channelId1, p1a.tx.txid, 50 sat, "funding", p1a.liquidityPurchase_opt, now))) } } @@ -206,11 +208,11 @@ class AuditDbSpec extends AnyFunSuite { db.add(TrampolinePaymentRelayed(randomBytes32(), Seq(PaymentEvent.IncomingPayment(randomBytes32(), n1, 110_000 msat, 1012 unixms)), Seq(PaymentEvent.OutgoingPayment(randomBytes32(), n2, 25_000 msat, 1013 
unixms), PaymentEvent.OutgoingPayment(randomBytes32(), n2, 15_000 msat, 1014 unixms), PaymentEvent.OutgoingPayment(randomBytes32(), n3, 60_000 msat, 1015 unixms)), randomKey().publicKey, 37000 msat)) // The following confirmed txs will be taken into account: - // - n2 paid 100 sat of on-chain fees + // - n2 paid 100 sat of on-chain fees, 15 sat of liquidity fees, and earned 10 sat of liquidity fees // - n3 paid 5 sat of on-chain fees - db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 30 sat, 110 sat, "funding", None)) + db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0), 30 sat, 110 sat, "funding", Some(LiquidityAds.PurchaseBasicInfo(isBuyer = false, 50_000 sat, LiquidityAds.Fees(7 sat, 3 sat))))) db.add(TransactionConfirmed(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(5000 sat, hex"12345")), 0))) - db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0), 70 sat, 80 sat, "mutual", None)) + db.add(TransactionPublished(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0), 70 sat, 80 sat, "mutual", Some(LiquidityAds.PurchaseBasicInfo(isBuyer = true, 40_000 sat, LiquidityAds.Fees(10 sat, 5 sat))))) db.add(TransactionConfirmed(randomBytes32(), n2, Transaction(2, Nil, Seq(TxOut(4000 sat, hex"00112233")), 0))) db.add(TransactionPublished(randomBytes32(), n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0), 5 sat, 50 sat, "funding", None)) db.add(TransactionConfirmed(randomBytes32(), n3, Transaction(2, Nil, Seq(TxOut(8000 sat, hex"deadbeef")), 0))) @@ -222,15 +224,17 @@ class AuditDbSpec extends AnyFunSuite { // We list nodes with the highest fee earners first. 
val (from, to) = (0 unixms, TimestampMilli.now() + 1.milli) assert(db.relayStats(from, to) == Seq( - RelayStats(n2, incomingPaymentCount = 1, totalAmountIn = 20_000 msat, outgoingPaymentCount = 4, totalAmountOut = 110_000 msat, relayFeeEarned = 59_000 msat, onChainFeePaid = 100 sat, from, to), - RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainFeePaid = 5 sat, from, to), - RelayStats(n4, incomingPaymentCount = 1, totalAmountIn = 40_000 msat, outgoingPaymentCount = 1, totalAmountOut = 15_000 msat, relayFeeEarned = 5_000 msat, onChainFeePaid = 0 sat, from, to), - RelayStats(n1, incomingPaymentCount = 3, totalAmountIn = 151_500 msat, outgoingPaymentCount = 2, totalAmountOut = 90_000 msat, relayFeeEarned = 10 msat, onChainFeePaid = 0 sat, from, to), + RelayStats(n2, incomingPaymentCount = 1, totalAmountIn = 20_000 msat, outgoingPaymentCount = 4, totalAmountOut = 110_000 msat, relayFeeEarned = 59_000 msat, onChainTransactionsCount = 2, onChainFeePaid = 100 sat, liquidityFeeEarned = 10 sat, liquidityFeePaid = 15 sat, from, to), + RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainTransactionsCount = 1, onChainFeePaid = 5 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to), + RelayStats(n4, incomingPaymentCount = 1, totalAmountIn = 40_000 msat, outgoingPaymentCount = 1, totalAmountOut = 15_000 msat, relayFeeEarned = 5_000 msat, onChainTransactionsCount = 0, onChainFeePaid = 0 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to), + RelayStats(n1, incomingPaymentCount = 3, totalAmountIn = 151_500 msat, outgoingPaymentCount = 2, totalAmountOut = 90_000 msat, relayFeeEarned = 10 msat, onChainTransactionsCount = 0, onChainFeePaid = 0 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to), )) 
assert(db.relayStats(from, to, Some(Paginated(count = 2, skip = 1))) == Seq( - RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainFeePaid = 5 sat, from, to), - RelayStats(n4, incomingPaymentCount = 1, totalAmountIn = 40_000 msat, outgoingPaymentCount = 1, totalAmountOut = 15_000 msat, relayFeeEarned = 5_000 msat, onChainFeePaid = 0 sat, from, to), + RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainTransactionsCount = 1, onChainFeePaid = 5 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to), + RelayStats(n4, incomingPaymentCount = 1, totalAmountIn = 40_000 msat, outgoingPaymentCount = 1, totalAmountOut = 15_000 msat, relayFeeEarned = 5_000 msat, onChainTransactionsCount = 0, onChainFeePaid = 0 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to), )) + assert(db.relayStats(n3, from, to) == RelayStats(n3, incomingPaymentCount = 2, totalAmountIn = 150_010 msat, outgoingPaymentCount = 2, totalAmountOut = 76_000 msat, relayFeeEarned = 6_500 msat, onChainTransactionsCount = 1, onChainFeePaid = 5 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to)) + assert(db.relayStats(n1, from, to) == RelayStats(n1, incomingPaymentCount = 3, totalAmountIn = 151_500 msat, outgoingPaymentCount = 2, totalAmountOut = 90_000 msat, relayFeeEarned = 10 msat, onChainTransactionsCount = 0, onChainFeePaid = 0 sat, liquidityFeeEarned = 0 sat, liquidityFeePaid = 0 sat, from, to)) } }