36 commits
f403b82
feat: forester: pda & mint compression
sergeytimoshin Jan 7, 2026
9b6ae17
refactor rent exemption calculations
sergeytimoshin Jan 21, 2026
342640c
format
sergeytimoshin Jan 21, 2026
66deaec
feat: add support for compressed mint retrieval in the indexer
sergeytimoshin Jan 21, 2026
12b236c
get_compressed_mints_by_authority authority type filtering
sergeytimoshin Jan 21, 2026
01911fb
fix: update PHOTON_COMMIT version in versions.sh
sergeytimoshin Jan 21, 2026
841a8c2
docs: update CLI parameter descriptions for compressible PDA program
sergeytimoshin Jan 21, 2026
1dea574
feat: add hex dependency and update existing hex usage in Cargo.toml …
sergeytimoshin Jan 21, 2026
e748e41
fix: update authority_type field in GetCompressedMintsByAuthorityOpti…
sergeytimoshin Jan 21, 2026
93d61a7
fix: update mint_authority and mint fields in build_expected_mint fun…
sergeytimoshin Jan 21, 2026
9121a06
feat: refactor bootstrap logic to use run_bootstrap helper; enhance m…
sergeytimoshin Jan 21, 2026
0adcca5
fix: update build_expected_mint function to accept version parameter …
sergeytimoshin Jan 21, 2026
265dbaa
fix: adjust calculate_compressible_slot to correctly compute availabl…
sergeytimoshin Jan 21, 2026
10bae7c
bump photon version
sergeytimoshin Jan 21, 2026
005ddfb
wip
sergeytimoshin Jan 23, 2026
4659bb4
cleanup
sergeytimoshin Jan 23, 2026
8e23a82
cleanup
sergeytimoshin Jan 23, 2026
a60962e
cleanup
sergeytimoshin Jan 23, 2026
91f5d91
cleanup
sergeytimoshin Jan 23, 2026
32c6cba
new apis
sergeytimoshin Jan 27, 2026
fb11ddd
feat: add support for unified account interface with hot/cold context…
sergeytimoshin Jan 28, 2026
5f12024
cleanup
sergeytimoshin Jan 28, 2026
d2fbfa9
format
sergeytimoshin Jan 28, 2026
11215b3
refactor account interface
sergeytimoshin Jan 29, 2026
00f85ab
bump photon version
sergeytimoshin Jan 29, 2026
27a78d7
feat: implement batch lookup for multiple compressed accounts in RPC
sergeytimoshin Jan 29, 2026
f43212b
enhance account interface
sergeytimoshin Jan 29, 2026
f46c619
fix: update account types in get_accounts_to_update test
sergeytimoshin Jan 29, 2026
e52e51e
cleanup
sergeytimoshin Jan 29, 2026
5dcf777
bump photon
sergeytimoshin Jan 29, 2026
93ea0bb
cleanup
sergeytimoshin Jan 29, 2026
1e4d918
fix: update error assertion in test_create_ata_failing for invalid mi…
sergeytimoshin Jan 29, 2026
8216d7e
fix: update error assertion in test_create_ata_failing for invalid mi…
sergeytimoshin Jan 30, 2026
44202c2
Potential fix for code scanning alert no. 143: Workflow does not cont…
sergeytimoshin Jan 30, 2026
5ca6ab9
fix: update default version fallback to V2 in LIGHT_PROTOCOL_VERSION …
sergeytimoshin Jan 30, 2026
28b7db0
cleanup
sergeytimoshin Jan 31, 2026
72 changes: 18 additions & 54 deletions .github/workflows/programs.yml
@@ -7,7 +7,7 @@ on:
- "program-tests/**"
- "program-libs/**"
- "prover/client/**"
- ".github/workflows/light-system-programs-tests.yml"
- ".github/workflows/programs.yml"
pull_request:
branches:
- "*"
@@ -16,22 +16,24 @@ on:
- "program-tests/**"
- "program-libs/**"
- "prover/client/**"
- ".github/workflows/light-system-programs-tests.yml"
- ".github/workflows/programs.yml"
types:
- opened
- synchronize
- reopened
- ready_for_review

name: programs
permissions:
contents: read

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
system-programs:
name: programs
name: ${{ matrix.test-group }}
if: github.event.pull_request.draft == false
runs-on: warp-ubuntu-latest-x64-4x
timeout-minutes: 90
@@ -52,27 +54,16 @@ jobs:

strategy:
matrix:
include:
- program: account-compression-and-registry
sub-tests: '["cargo-test-sbf -p account-compression-test", "cargo-test-sbf -p registry-test"]'
- program: light-system-program-address
sub-tests: '["cargo-test-sbf -p system-test -- test_with_address", "cargo-test-sbf -p e2e-test", "cargo-test-sbf -p compressed-token-test --test light_token"]'
- program: light-system-program-compression
sub-tests: '["cargo-test-sbf -p system-test -- test_with_compression", "cargo-test-sbf -p system-test --test test_re_init_cpi_account"]'
- program: compressed-token-and-e2e
sub-tests: '["cargo test -p light-compressed-token", "cargo-test-sbf -p compressed-token-test --test v1", "cargo-test-sbf -p compressed-token-test --test mint"]'
- program: compressed-token-batched-tree
sub-tests: '["cargo-test-sbf -p compressed-token-test -- test_transfer_with_photon_and_batched_tree"]'
- program: system-cpi-test
sub-tests:
'["cargo-test-sbf -p system-cpi-test", "cargo test -p light-system-program-pinocchio",
"cargo-test-sbf -p system-cpi-v2-test -- --skip functional_ --skip event::parse", "cargo-test-sbf -p system-cpi-v2-test -- event::parse",
"cargo-test-sbf -p compressed-token-test --test transfer2"
]'
- program: system-cpi-test-v2-functional-read-only
sub-tests: '["cargo-test-sbf -p system-cpi-v2-test -- functional_read_only"]'
- program: system-cpi-test-v2-functional-account-infos
sub-tests: '["cargo-test-sbf -p system-cpi-v2-test -- functional_account_infos"]'
test-group:
- account-compression-and-registry
- system-address
- system-compression
- compressed-token-and-e2e
- compressed-token-batched-tree
- system-cpi
- system-cpi-v2-functional-read-only
- system-cpi-v2-functional-account-infos

steps:
- name: Checkout sources
uses: actions/checkout@v6
@@ -87,34 +78,7 @@ jobs:
run: |
just cli build

- name: ${{ matrix.program }}
- name: Run tests
working-directory: program-tests
run: |

IFS=',' read -r -a sub_tests <<< "${{ join(fromJSON(matrix['sub-tests']), ', ') }}"
for subtest in "${sub_tests[@]}"
do
echo "$subtest"

# Retry logic for flaky batched-tree test
if [[ "$subtest" == *"test_transfer_with_photon_and_batched_tree"* ]]; then
echo "Running flaky test with retry logic (max 3 attempts)..."
attempt=1
max_attempts=3
until RUSTFLAGS="-D warnings" eval "$subtest"; do
attempt=$((attempt + 1))
if [ $attempt -gt $max_attempts ]; then
echo "Test failed after $max_attempts attempts"
exit 1
fi
echo "Attempt $attempt/$max_attempts failed, retrying..."
sleep 5
done
echo "Test passed on attempt $attempt"
else
RUSTFLAGS="-D warnings" eval "$subtest"
if [ "$subtest" == "cargo-test-sbf -p e2e-test" ]; then
just programs build-compressed-token-small
RUSTFLAGS="-D warnings" eval "$subtest -- --test test_10_all"
fi
fi
done
just ci-${{ matrix.test-group }}
4 changes: 4 additions & 0 deletions .mise.toml
@@ -0,0 +1,4 @@
# Disable mise's Go management for this project.
# We use our own Go installation via devenv.sh.
[settings]
disable_tools = ["go"]
1 change: 1 addition & 0 deletions Cargo.toml
@@ -140,6 +140,7 @@ pinocchio-pubkey = { version = "0.3.0" }
pinocchio-system = { version = "0.3.0" }
bs58 = "^0.5.1"
sha2 = "0.10"
hex = "0.4"
litesvm = "0.7"
# Anchor
anchor-lang = { version = "0.31.1" }
2 changes: 1 addition & 1 deletion forester/Cargo.toml
@@ -44,7 +44,7 @@ futures = { workspace = true }
thiserror = { workspace = true }
borsh = { workspace = true }
bs58 = { workspace = true }
hex = "0.4"
hex = { workspace = true }
env_logger = { workspace = true }
async-trait = { workspace = true }
tracing = { workspace = true }
4 changes: 4 additions & 0 deletions forester/justfile
@@ -35,3 +35,7 @@ test-compressible-mint: build-compressible-test-deps
test-compressible-ctoken: build-compressible-test-deps
RUST_LOG=forester=debug,light_client=debug \
cargo test --package forester --test test_compressible_ctoken -- --nocapture

# Test for indexer interface scenarios (creates test data for photon)
test-indexer-interface: build-test-deps
cargo test --package forester --test test_indexer_interface -- --nocapture
Comment on lines +38 to +41
🧹 Nitpick | 🔵 Trivial

Test recipe looks functional; consider adding RUST_LOG for consistency.

The recipe structure is correct, and the dependency on build-test-deps makes sense if the indexer interface tests use the create-address-test-program.

Unlike the other compressible test recipes (lines 28-37), which set RUST_LOG=forester=debug,light_client=debug, this one doesn't. If debugging indexer interface issues becomes necessary, operators will need to add it manually.

♻️ Optional: Add RUST_LOG for debugging consistency
 # Test for indexer interface scenarios (creates test data for photon)
 test-indexer-interface: build-test-deps
+    RUST_LOG=forester=debug,light_client=debug \
     cargo test --package forester --test test_indexer_interface -- --nocapture
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
# Test for indexer interface scenarios (creates test data for photon)
test-indexer-interface: build-test-deps
cargo test --package forester --test test_indexer_interface -- --nocapture
# Test for indexer interface scenarios (creates test data for photon)
test-indexer-interface: build-test-deps
RUST_LOG=forester=debug,light_client=debug \
cargo test --package forester --test test_indexer_interface -- --nocapture
🤖 Prompt for AI Agents
In `@forester/justfile` around lines 38-41, the test-indexer-interface recipe is missing the RUST_LOG environment setting used elsewhere; update the recipe so it exports or prefixes the cargo command with RUST_LOG=forester=debug,light_client=debug (the same format as the other compressible test recipes) before invoking cargo test, keeping the dependency on build-test-deps and the existing cargo test invocation.

136 changes: 133 additions & 3 deletions forester/src/compressible/bootstrap_helpers.rs
@@ -5,12 +5,18 @@
//! - Account field extraction from JSON responses
//! - Standard and V2 API patterns

use std::time::Duration;
use std::{
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
time::Duration,
};

use serde_json::json;
use solana_sdk::pubkey::Pubkey;
use tokio::time::timeout;
use tracing::debug;
use tokio::{sync::oneshot, time::timeout};
use tracing::{debug, info};

use super::config::{DEFAULT_PAGE_SIZE, DEFAULT_PAGINATION_DELAY_MS};
use crate::Result;
@@ -344,3 +350,127 @@ where

Ok((page_count, total_fetched, total_inserted))
}

/// Result of a bootstrap operation
#[derive(Debug, Clone)]
pub struct BootstrapResult {
/// Number of pages fetched (1 for standard API)
pub pages: usize,
/// Total number of accounts fetched from RPC
pub fetched: usize,
/// Number of accounts successfully inserted/processed
pub inserted: usize,
}

/// High-level bootstrap runner that handles common scaffolding.
///
/// This helper encapsulates:
/// - Shutdown flag setup and listener spawning
/// - HTTP client creation
/// - Automatic selection between standard and V2 APIs based on localhost detection
/// - Consistent logging with the provided label
///
/// # Arguments
/// * `rpc_url` - The RPC endpoint URL
/// * `program_id` - The program ID to fetch accounts from
/// * `filters` - Optional memcmp/dataSize filters for the query
/// * `shutdown_rx` - Optional shutdown receiver for graceful cancellation
/// * `process_fn` - Closure called for each fetched account; returns true if successfully processed
/// * `label` - Label for log messages (e.g., "Mint", "CToken", "PDA")
///
/// # Returns
/// A `BootstrapResult` containing page count, fetched count, and inserted count.
pub async fn run_bootstrap<F>(
rpc_url: &str,
program_id: &Pubkey,
filters: Option<Vec<serde_json::Value>>,
shutdown_rx: Option<oneshot::Receiver<()>>,
process_fn: F,
label: &str,
) -> Result<BootstrapResult>
where
F: FnMut(RawAccountData) -> bool,
{
info!("Starting bootstrap of {} accounts", label);

// Set up shutdown flag and listener task
let shutdown_flag = Arc::new(AtomicBool::new(false));

// Spawn shutdown listener and keep handle for cleanup
let shutdown_listener_handle = shutdown_rx.map(|rx| {
let shutdown_flag_clone = shutdown_flag.clone();
tokio::spawn(async move {
let _ = rx.await;
shutdown_flag_clone.store(true, Ordering::SeqCst);
})
});

let client = reqwest::Client::new();

info!(
"Bootstrapping {} accounts from program {}",
label, program_id
);

let result = if is_localhost(rpc_url) {
debug!("Detected localhost, using standard getProgramAccounts");
let api_result = bootstrap_standard_api(
&client,
rpc_url,
program_id,
filters,
Some(&shutdown_flag),
process_fn,
)
.await;

// Abort shutdown listener before returning (success or error)
if let Some(handle) = shutdown_listener_handle {
handle.abort();
}

let (fetched, inserted) = api_result?;

info!(
"{} bootstrap complete: {} fetched, {} inserted",
label, fetched, inserted
);

BootstrapResult {
pages: 1,
fetched,
inserted,
}
} else {
debug!("Using getProgramAccountsV2 with pagination");
let api_result = bootstrap_v2_api(
&client,
rpc_url,
program_id,
filters,
Some(&shutdown_flag),
process_fn,
)
.await;

// Abort shutdown listener before returning (success or error)
if let Some(handle) = shutdown_listener_handle {
handle.abort();
}

let (pages, fetched, inserted) = api_result?;

info!(
"{} bootstrap complete: {} pages, {} fetched, {} inserted",
label, pages, fetched, inserted
);

BootstrapResult {
pages,
fetched,
inserted,
}
};

Ok(result)
}
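For orientation, a minimal sketch of how a caller might wire up `run_bootstrap`, assuming a mint bootstrap: the function name, program id, and the body of the processing closure are placeholders for illustration, not code from this PR.

```rust
use solana_sdk::pubkey::Pubkey;
use tokio::sync::oneshot;

// Hypothetical caller; the real forester call sites may differ.
async fn bootstrap_mints(rpc_url: &str, mint_program: Pubkey) -> crate::Result<()> {
    // Keep the sender alive for the duration of the call: dropping it early
    // resolves the receiver and flips the shutdown flag.
    let (_shutdown_tx, shutdown_rx) = oneshot::channel::<()>();

    let result = run_bootstrap(
        rpc_url,
        &mint_program,
        None,              // no memcmp/dataSize filters in this sketch
        Some(shutdown_rx), // enables graceful cancellation
        |_account: RawAccountData| {
            // Decode `_account` and insert it into the local tracker here;
            // return true only if the account was actually processed.
            true
        },
        "Mint",
    )
    .await?;

    tracing::info!(
        "mint bootstrap: {} pages, {} fetched, {} inserted",
        result.pages,
        result.fetched,
        result.inserted
    );
    Ok(())
}
```

Because `run_bootstrap` picks the API itself (standard `getProgramAccounts` on localhost, paginated `getProgramAccountsV2` otherwise), callers only supply the processing closure and, optionally, filters and a shutdown receiver.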
4 changes: 2 additions & 2 deletions forester/src/compressible/config.rs
@@ -34,7 +34,7 @@ pub const DEFAULT_PAGINATION_DELAY_MS: u64 = 100;

/// Configuration for a compressible PDA program.
///
/// Can be specified via CLI (using `program_id:discriminator_base58` format)
/// Can be specified via CLI `--compressible-pda-program` (using `program_id:discriminator_base58` format)
/// or via config file using the serialized struct format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PdaProgramConfig {
@@ -146,7 +146,7 @@ pub struct CompressibleConfig {
#[serde(default = "default_max_concurrent_batches")]
pub max_concurrent_batches: usize,
/// Compressible PDA programs to track and compress.
/// Can be specified in config file or via CLI `--pda-program` flags.
/// Can be specified in config file or via CLI `--compressible-pda-program` flags.
/// CLI values are merged with config file values.
#[serde(default)]
pub pda_programs: Vec<PdaProgramConfig>,
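Since the `program_id:discriminator_base58` format is only described in prose, here is a hedged sketch of how such a CLI value could be split and decoded; the function name, return type, and error handling are assumptions for illustration, not the forester's actual parser.

```rust
use std::str::FromStr;

use solana_sdk::pubkey::Pubkey;

/// Hypothetical parser for a `--compressible-pda-program` value of the form
/// `program_id:discriminator_base58`.
fn parse_pda_program_arg(arg: &str) -> Result<(Pubkey, Vec<u8>), String> {
    let (program_id, discriminator) = arg
        .split_once(':')
        .ok_or_else(|| format!("expected `program_id:discriminator_base58`, got `{arg}`"))?;
    let program_id = Pubkey::from_str(program_id).map_err(|e| e.to_string())?;
    let discriminator = bs58::decode(discriminator)
        .into_vec()
        .map_err(|e| e.to_string())?;
    Ok((program_id, discriminator))
}
```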
7 changes: 7 additions & 0 deletions forester/src/compressible/ctoken/state.rs
@@ -76,13 +76,20 @@ impl CTokenAccountTracker {

/// Update tracker with an already-deserialized Token.
/// Use this to avoid double deserialization when the Token is already available.
/// Skips mint accounts (only tracks actual token accounts).
pub fn update_from_token(
&self,
pubkey: Pubkey,
ctoken: Token,
lamports: u64,
account_size: usize,
) -> Result<()> {
// Skip mint accounts - only track actual token accounts
if !ctoken.is_token_account() {
debug!("Skipping non-token account {}", pubkey);
return Ok(());
}

let compressible_slot = match calculate_compressible_slot(&ctoken, lamports, account_size) {
Ok(slot) => slot,
Err(e) => {
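A sketch of a call site under the new behavior: because the tracker now silently ignores mints, a caller can pass every deserialized `Token` through without pre-filtering. `deserialize_token` is a hypothetical decode helper and the imports are assumed to match this module's; only `update_from_token` itself comes from the PR.

```rust
use solana_sdk::pubkey::Pubkey;

// `Token`, `CTokenAccountTracker`, and `Result` refer to this module's types;
// `deserialize_token` is a stand-in for the caller's existing decode step.
fn track_account(
    tracker: &CTokenAccountTracker,
    pubkey: Pubkey,
    data: &[u8],
    lamports: u64,
) -> Result<()> {
    let ctoken: Token = deserialize_token(data)?;
    // Mints return Ok(()) without being tracked; token accounts are recorded
    // with their computed compressible slot.
    tracker.update_from_token(pubkey, ctoken, lamports, data.len())
}
```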