Changes from all commits (27 commits)
8142b73
fix(drive): reuse existing platform node id during operator update (#…
pauldelucia Oct 29, 2025
a71b221
chore(release): update changelog and bump version to 2.1.3 (#2835)
QuantumExplorer Oct 29, 2025
077ec82
chore(dashmate)!: backport port conflicts with mainnet and testnet o…
lklimek Oct 29, 2025
4b268dc
test: try to reproduce non deterministic resync causing apphash misma…
lklimek Nov 17, 2025
9f83eec
chore: prepare_proposal_apphash updated
lklimek Nov 17, 2025
7f552f2
chore: prepare proposal apphash final
lklimek Nov 18, 2025
3a6a287
drive: bump rocksdb to 0.24
lklimek Nov 18, 2025
3db8943
chore: replay_abci_requests final binary
lklimek Nov 18, 2025
a3af485
Merge tag 'v2.1.3' into backport/2.1.3
lklimek Nov 18, 2025
5813a0b
Merge branch 'backport/2.1.3' into test/resync-apphash-mismatch
lklimek Nov 18, 2025
e11fb8d
chore: improve help
lklimek Nov 18, 2025
0f056cd
chore: replayer, most recent version
lklimek Nov 26, 2025
45dc854
feat(drive-abci): improved state verification in drive-abci verify
lklimek Nov 26, 2025
8b311e4
feat(replay): verify initial database on start
lklimek Nov 26, 2025
323193f
refactor: move replay to `drive-abci replay`
lklimek Nov 27, 2025
abaa8d5
chore: adjust .env.testnet to match testnet
lklimek Nov 27, 2025
282febe
Merge remote-tracking branch 'origin/v2.2-dev' into test/resync-appha…
lklimek Dec 5, 2025
9352a62
refactor: move finalize block apphash verification to separate pr
lklimek Dec 5, 2025
4750697
only 1 log file supported
lklimek Dec 5, 2025
d7960ec
chore: clippy
lklimek Dec 5, 2025
b831183
Merge branch 'v3.0-dev' into test/resync-apphash-mismatch
lklimek Dec 18, 2025
8337e12
Update packages/rs-drive-abci/src/replay/cli.rs
lklimek Dec 18, 2025
b3e1702
chore: fix build after merge
lklimek Dec 18, 2025
abe6204
ci: enable `replay` feature on docker drive-abci debug image
lklimek Dec 18, 2025
ed49cd1
chore: cargo machete
lklimek Dec 18, 2025
d9b41b7
disable replay by default
lklimek Dec 18, 2025
3b144c4
Merge branch 'v3.0-dev' into test/resync-apphash-mismatch
lklimek Dec 19, 2025
4 changes: 4 additions & 0 deletions .github/actions/docker/action.yaml
@@ -50,6 +50,9 @@ inputs:
cache_to_name:
description: "Save cache to name manifest (should be used only on default branch)"
default: "false"
additional_features:
description: Extra Cargo features to enable (comma-separated)
default: ""
outputs:
digest:
value: ${{ steps.docker_build.outputs.digest }}
@@ -192,6 +195,7 @@ runs:
AWS=${{ env.HOME }}/.aws/credentials
build-args: |
CARGO_BUILD_PROFILE=${{ inputs.cargo_profile }}
ADDITIONAL_FEATURES=${{ inputs.additional_features }}
${{ steps.sccache.outputs.env_vars }}
cache-from: ${{ steps.layer_cache_settings.outputs.cache_from }}
cache-to: ${{ steps.layer_cache_settings.outputs.cache_to }}
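For orientation, a minimal sketch of how the new input reaches the image build: `additional_features` is forwarded as the `ADDITIONAL_FEATURES` build argument. The action drives buildx itself, so the manual invocation below (including the target and tag names) is an illustrative assumption, not the exact command it runs.

```bash
# Hypothetical manual equivalent of the build the action performs.
docker buildx build \
  --build-arg CARGO_BUILD_PROFILE=dev \
  --build-arg ADDITIONAL_FEATURES=console,grovedbg,replay \
  --target drive-abci \
  -t dashpay/drive:local-debug \
  .
```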
5 changes: 5 additions & 0 deletions .github/workflows/release-docker-image.yml
@@ -25,6 +25,10 @@ on:
type: string
description: Cargo profile. i.e. release, dev
default: release
additional_features:
type: string
description: Extra Cargo features to enable for Drive builds (comma-separated)
default: ""
env:
DIGEST_NAME: digests-${{ inputs.image_org }}-${{ inputs.image_name }}-${{ inputs.tag }}-${{ inputs.cargo_profile }}-${{ github.sha }}
DIGEST_DIR_PATH: /tmp/digests
@@ -66,6 +70,7 @@ jobs:
cache_secret_access_key: ${{ secrets.CACHE_SECRET_KEY }}
# On release, we generate a new "base" image, so we need to save cache to name manifest, like '.../drive'
cache_to_name: ${{ github.event_name == 'release' && 'true' || 'false' }}
additional_features: ${{ inputs.additional_features }}

- name: Export digest
run: |
1 change: 1 addition & 0 deletions .github/workflows/release.yml
@@ -178,6 +178,7 @@ jobs:
target: drive-abci
cargo_profile: dev
tag: ${{ inputs.tag || github.event.release.tag_name }}-debug
additional_features: console,grovedbg,replay

release-rs-dapi-image:
name: Release RS-DAPI image
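The debug image now receives its extra features only through this input; `replay` is part of that list, matching the "disable replay by default" commit. A rough local equivalent of the same feature set, assuming the package in packages/rs-drive-abci is named `drive-abci`:

```bash
# Build drive-abci locally with the feature set used for the -debug image.
cargo build -p drive-abci --features console,grovedbg,replay
```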
6 changes: 3 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default.

14 changes: 11 additions & 3 deletions Dockerfile
@@ -417,6 +417,7 @@ FROM deps AS build-drive-abci
# This is only for testing purpose and should be used only for
# local development environment
ARG SDK_TEST_DATA
ARG ADDITIONAL_FEATURES=""

SHELL ["/bin/bash", "-o", "pipefail","-e", "-x", "-c"]

@@ -431,10 +432,13 @@ RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOM
--mount=type=secret,id=AWS \
set -ex; \
source /root/env && \
export FEATURES_FLAG=""; \
ADDITIONAL_FEATURES_TRIMMED="$(echo "${ADDITIONAL_FEATURES}" | tr -d '[:space:]')"; \
if [[ "${CARGO_BUILD_PROFILE}" == "release" ]] ; then \
mv .cargo/config-release.toml .cargo/config.toml; \
else \
export FEATURES_FLAG="--features=console,grovedbg"; \
fi && \
if [[ -n "${ADDITIONAL_FEATURES_TRIMMED}" ]]; then \
export FEATURES_FLAG="--features=${ADDITIONAL_FEATURES_TRIMMED}"; \
fi && \
if [ "${SDK_TEST_DATA}" == "true" ]; then \
mv .cargo/config-test-sdk-data.toml .cargo/config.toml; \
@@ -504,13 +508,17 @@ RUN --mount=type=cache,sharing=shared,id=cargo_registry_index,target=${CARGO_HOM
--mount=type=secret,id=AWS \
set -ex; \
source /root/env && \
export FEATURES_FLAG=""; \
ADDITIONAL_FEATURES_TRIMMED="$(echo "${ADDITIONAL_FEATURES}" | tr -d '[:space:]')"; \
if [[ "${CARGO_BUILD_PROFILE}" == "release" ]] ; then \
mv .cargo/config-release.toml .cargo/config.toml; \
export OUT_DIRECTORY=release; \
else \
export FEATURES_FLAG="--features=console,grovedbg"; \
export OUT_DIRECTORY=debug; \
fi && \
if [[ -n "${ADDITIONAL_FEATURES_TRIMMED}" ]]; then \
export FEATURES_FLAG="--features=${ADDITIONAL_FEATURES_TRIMMED}"; \
fi && \
if [ "${SDK_TEST_DATA}" == "true" ]; then \
mv .cargo/config-test-sdk-data.toml .cargo/config.toml; \
fi && \
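Condensed, the flag resolution in both build stages appears to be: FEATURES_FLAG starts empty and is only populated from a non-empty, whitespace-stripped ADDITIONAL_FEATURES, with the release workflow above supplying console,grovedbg,replay for the debug image. A minimal sketch; the final cargo line is an assumption, since the diff does not show how the flag is consumed.

```bash
# Sketch of the feature-flag resolution used by the drive-abci build stages.
FEATURES_FLAG=""
ADDITIONAL_FEATURES_TRIMMED="$(echo "${ADDITIONAL_FEATURES}" | tr -d '[:space:]')"
if [[ -n "${ADDITIONAL_FEATURES_TRIMMED}" ]]; then
  FEATURES_FLAG="--features=${ADDITIONAL_FEATURES_TRIMMED}"
fi
# Assumed downstream use (not shown in this diff):
# cargo build ${FEATURES_FLAG} ...
```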
16 changes: 8 additions & 8 deletions packages/rs-drive-abci/.env.testnet
@@ -33,21 +33,21 @@ CORE_CHECK_TX_JSON_RPC_PASSWORD=password
INITIAL_CORE_CHAINLOCKED_HEIGHT=1243

# https://github.com/dashevo/dashcore-lib/blob/286c33a9d29d33f05d874c47a9b33764a0be0cf1/lib/constants/index.js#L42-L57
VALIDATOR_SET_QUORUM_TYPE=llmq_25_67
VALIDATOR_SET_QUORUM_SIZE=25
VALIDATOR_SET_QUORUM_TYPE=6
VALIDATOR_SET_QUORUM_SIZE=100
VALIDATOR_SET_QUORUM_WINDOW=24
VALIDATOR_SET_QUORUM_ACTIVE_SIGNERS=24
VALIDATOR_SET_QUORUM_ROTATION=false
VALIDATOR_SET_ROTATION_BLOCK_COUNT=64

CHAIN_LOCK_QUORUM_TYPE=llmq_50_60
CHAIN_LOCK_QUORUM_SIZE=50
CHAIN_LOCK_QUORUM_TYPE=1
CHAIN_LOCK_QUORUM_SIZE=400
CHAIN_LOCK_QUORUM_WINDOW=24
CHAIN_LOCK_QUORUM_ACTIVE_SIGNERS=24
CHAIN_LOCK_QUORUM_ROTATION=false

INSTANT_LOCK_QUORUM_TYPE=llmq_60_75
INSTANT_LOCK_QUORUM_SIZE=50
INSTANT_LOCK_QUORUM_TYPE=5
INSTANT_LOCK_QUORUM_SIZE=60
INSTANT_LOCK_QUORUM_WINDOW=288
INSTANT_LOCK_QUORUM_ACTIVE_SIGNERS=32
INSTANT_LOCK_QUORUM_ROTATION=true
@@ -77,9 +77,9 @@ MASTERNODE_REWARD_SHARES_SECOND_PUBLIC_KEY=02bf55f97f189895da29824781053140ee66b
WITHDRAWALS_MASTER_PUBLIC_KEY=027057cdf58628635ef7b75e6b6c90dd996a16929cd68130e16b9328d429e5e03a
WITHDRAWALS_SECOND_PUBLIC_KEY=022084d827fea4823a69aa7c8d3e02fe780eaa0ef1e5e9841af395ba7e40465ab6

EPOCH_TIME_LENGTH_S=788400
EPOCH_TIME_LENGTH_S=3600

CHAIN_ID=devnet
CHAIN_ID=dash-testnet-51
BLOCK_SPACING_MS=5000

TOKIO_CONSOLE_ENABLED=false
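One way to sanity-check the updated preset locally, assuming drive-abci reads its configuration from environment variables (the `config` subcommand that dumps the effective configuration appears in main.rs below):

```bash
# Export the testnet preset into the environment and dump what drive-abci sees.
set -a
source packages/rs-drive-abci/.env.testnet
set +a
drive-abci config
```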
12 changes: 8 additions & 4 deletions packages/rs-drive-abci/Cargo.toml
@@ -54,6 +54,11 @@ tracing-subscriber = { version = "0.3.16", default-features = false, features =
tenderdash-abci = { git = "https://github.com/dashpay/rs-tenderdash-abci", tag = "v1.5.0-dev.2", features = [
"grpc",
] }
time = { version = "0.3", optional = true, features = [
"macros",
"formatting",
"serde-human-readable",
] }

lazy_static = "1.4.0"
itertools = { version = "0.13" }
@@ -79,8 +84,6 @@ console-subscriber = { version = "0.4", optional = true }
bls-signatures = { git = "https://github.com/dashpay/bls-signatures", rev = "0842b17583888e8f46c252a4ee84cdfd58e0546f", optional = true }

[dev-dependencies]
bs58 = { version = "0.5.0" }
base64 = "0.22.1"
platform-version = { path = "../rs-platform-version", features = [
"mock-versions",
] }
@@ -104,7 +107,7 @@ bls-signatures = { git = "https://github.com/dashpay/bls-signatures", rev = "084
mockall = { version = "0.13" }

# For tests of grovedb verify
rocksdb = { version = "0.23.0" }
rocksdb = { version = "0.24.0" }
integer-encoding = { version = "4.0.0" }

[features]
Expand All @@ -113,7 +116,8 @@ mocks = ["mockall", "drive/fixtures-and-mocks", "bls-signatures"]
console = ["console-subscriber", "tokio/tracing"]
testing-config = []
grovedbg = ["drive/grovedbg"]

# `abci-server replay` command
replay = ["dep:time", "tenderdash-abci/serde"]
[[bin]]
name = "drive-abci"
path = "src/main.rs"
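The new `replay` feature gates the replay command and pulls in `time` plus `tenderdash-abci/serde`; a default build does not compile it. A quick check of the gated code, again assuming the package name `drive-abci`:

```bash
# Type-check drive-abci with the replay command enabled.
cargo check -p drive-abci --features replay
```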
16 changes: 12 additions & 4 deletions packages/rs-drive-abci/src/abci/handler/process_proposal.rs
@@ -33,12 +33,11 @@ where
if let Some(block_execution_context) = block_execution_context_guard.as_mut() {
// We are already in a block, or in init chain.
// This only makes sense if we were the proposer unless we are at a future round
if block_execution_context.block_state_info().round() != (request.round as u32) {
let block_state_info = block_execution_context.block_state_info();
if block_state_info.round() != (request.round as u32) {
// We were not the proposer, and we should process something new
drop_block_execution_context = true;
} else if let Some(current_block_hash) =
block_execution_context.block_state_info().block_hash()
{
} else if let Some(current_block_hash) = block_state_info.block_hash() {
// There is also the possibility that this block already came in, but tenderdash crashed
// Now tenderdash is sending it again
if let Some(proposal_info) = block_execution_context.proposer_results() {
@@ -69,6 +68,15 @@
} else {
// We are getting a different block hash for a block of the same round
// This is a terrible issue
tracing::error!(
method = "process_proposal",
block_state_info = ?block_state_info,
"received a process proposal request twice with different hash for height {}/round {}: existing hash {:?}, new hash {:?}",
request.height,
request.round,
current_block_hash,
request.hash,
);
Err(Error::Abci(AbciError::BadRequest(
"received a process proposal request twice with different hash".to_string(),
)))?;
@@ -399,6 +399,7 @@
tracing::trace!(
method = "run_block_proposal_v0",
app_hash = hex::encode(root_hash),
block_hash = hex::encode(block_proposal.block_hash.unwrap_or_default()),
platform_state_fingerprint = hex::encode(
block_execution_context
.block_platform_state()
@@ -1,3 +1,5 @@
//! Fetches execution state from grovedb storage

use crate::error::execution::ExecutionError;
use crate::error::Error;
use crate::platform_types::platform::Platform;
2 changes: 1 addition & 1 deletion packages/rs-drive-abci/src/execution/storage/mod.rs
@@ -1,2 +1,2 @@
mod fetch_platform_state;
pub mod fetch_platform_state;
mod store_platform_state;
5 changes: 5 additions & 0 deletions packages/rs-drive-abci/src/lib.rs
@@ -50,5 +50,10 @@ pub mod query;
/// Various utils
pub mod utils;

/// Replay captured ABCI requests against drive-abci
#[cfg(feature = "replay")]
pub mod replay;
/// Drive server
pub mod server;
/// Verification helpers
pub mod verify;
73 changes: 14 additions & 59 deletions packages/rs-drive-abci/src/main.rs
@@ -1,13 +1,15 @@
//! Main server process for RS-Drive-ABCI
//!
//! RS-Drive-ABCI server starts a single-threaded server and listens to connections from Tenderdash.
#[cfg(feature = "replay")]
use drive_abci::replay::{self, ReplayArgs};
use drive_abci::verify::verify_grovedb;

use clap::{Parser, Subcommand};
use dapi_grpc::platform::v0::get_status_request;
use dapi_grpc::platform::v0::get_status_request::GetStatusRequestV0;
use dapi_grpc::platform::v0::platform_client::PlatformClient;
use dapi_grpc::tonic::transport::Uri;
use dpp::version::PlatformVersion;
use drive_abci::config::{FromEnv, PlatformConfig};
use drive_abci::core::wait_for_core_to_sync::v0::wait_for_core_to_sync_v0;
use drive_abci::logging::{LogBuilder, LogConfig, LogDestination, Loggers};
Expand All @@ -16,7 +18,6 @@ use drive_abci::platform_types::platform::Platform;
use drive_abci::rpc::core::DefaultCoreRPC;
use drive_abci::{logging, server};
use itertools::Itertools;
use std::fs::remove_file;
#[cfg(all(tokio_unstable, feature = "console"))]
use std::net::SocketAddr;
use std::path::PathBuf;
@@ -63,6 +64,11 @@ enum Commands {
/// Print current software version
#[command()]
Version,

/// Replay ABCI requests captured from drive-abci logs.
#[cfg(feature = "replay")]
#[command()]
Replay(ReplayArgs),
}

/// Server that accepts connections from Tenderdash, and
@@ -151,8 +157,13 @@ impl Cli {
}
Commands::Config => dump_config(&config)?,
Commands::Status => runtime.block_on(check_status(&config))?,
Commands::Verify => verify_grovedb(&config.db_path, true)?,
Commands::Verify => drive_abci::verify::run(&config, true)?,
Commands::Version => print_version(),
#[cfg(feature = "replay")]
Commands::Replay(args) => {
replay::run(config, args, cancel.clone()).map_err(|e| e.to_string())?;
return Ok(());
}
};

Ok(())
@@ -331,62 +342,6 @@ async fn check_status(config: &PlatformConfig) -> Result<(), String> {
.map_err(|e| format!("can't request status: {e}"))
}

/// Verify GroveDB integrity.
///
/// This function will execute GroveDB integrity checks if one of the following conditions is met:
/// - `force` is `true`
/// - file `.fsck` in `config.db_path` exists
///
/// After successful verification, .fsck file is removed.
fn verify_grovedb(db_path: &PathBuf, force: bool) -> Result<(), String> {
let fsck = PathBuf::from(db_path).join(".fsck");

if !force {
if !fsck.exists() {
return Ok(());
}
tracing::info!(
"found {} file, starting grovedb verification",
fsck.display()
);
}

let grovedb = drive::grovedb::GroveDb::open(db_path).expect("open grovedb");
//todo: get platform version instead of taking latest
let result = grovedb
.visualize_verify_grovedb(
None,
true,
true,
&PlatformVersion::latest().drive.grove_version,
)
.map_err(|e| e.to_string());

match result {
Ok(data) => {
for result in data {
tracing::warn!(?result, "grovedb verification")
}
tracing::info!("grovedb verification finished");

if fsck.exists() {
if let Err(e) = remove_file(&fsck) {
tracing::warn!(
error = ?e,
path =fsck.display().to_string(),
"grovedb verification: cannot remove .fsck file: please remove it manually to avoid running verification again",
);
}
}
Ok(())
}
Err(e) => {
tracing::error!("grovedb verification failed: {}", e);
Err(e)
}
}
}

/// Print current software version.
fn print_version() {
println!("{}", env!("CARGO_PKG_VERSION"));
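Taken together, the CLI changes look like this from a user's point of view; replay's own flags live in `ReplayArgs`, which is not part of this diff, so only `--help` is assumed here.

```bash
# Verify GroveDB integrity via the new drive_abci::verify::run entry point.
drive-abci verify

# Replay captured ABCI requests; only available when the binary was built
# with the `replay` feature (e.g. the -debug docker image).
drive-abci replay --help
```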