From 355c427e13c9d591d61c45360adf16bb7f6cf6a7 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 6 Jan 2026 16:21:11 -0600 Subject: [PATCH 01/19] chore: first draft --- package-lock.json | 105 ++++++++++++++++++++++++++++++++++++ package.json | 1 + src/env.ts | 7 ++- src/snp/schemas.ts | 101 ++++++++++++++++++++++++++++++++++ src/snp/snp-event-stream.ts | 76 ++++++++++++++++++++++++++ 5 files changed, 289 insertions(+), 1 deletion(-) create mode 100644 src/snp/schemas.ts create mode 100644 src/snp/snp-event-stream.ts diff --git a/package-lock.json b/package-lock.json index 560c9e2..b93a995 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,6 +15,7 @@ "@google-cloud/storage": "^7.12.1", "@hirosystems/api-toolkit": "^1.12.0", "@hirosystems/chainhook-client": "^2.4.0", + "@hirosystems/salt-n-pepper-client": "^1.2.0", "@hirosystems/stacks-encoding-native-js": "^1.2.0", "@sinclair/typebox": "^0.28.17", "@stacks/blockchain-api-client": "^8.13.6", @@ -2697,6 +2698,16 @@ "undici": "^5.21.2" } }, + "node_modules/@hirosystems/salt-n-pepper-client": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@hirosystems/salt-n-pepper-client/-/salt-n-pepper-client-1.2.0.tgz", + "integrity": "sha512-sVOsCb2+N29/bF0fNBQArOcv5UHboL726aLGOzpizVaPztdVO74+ZZAPlAPsDaG/hrHV0M2CCrYKMDRfYYChCg==", + "license": "GPL-3.0-only", + "dependencies": { + "@hirosystems/api-toolkit": "^1.7.2", + "redis": "^4.7.0" + } + }, "node_modules/@hirosystems/stacks-encoding-native-js": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@hirosystems/stacks-encoding-native-js/-/stacks-encoding-native-js-1.2.0.tgz", @@ -4128,6 +4139,65 @@ "integrity": "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==", "dev": true }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.6.1.tgz", + "integrity": "sha512-/KCsg3xSlR+nCK8/8ZYSknYxvXHwubJrU82F3Lm1Fp6789VQ0/3RJKfsmRXjqfaTA++23CvC3hqmqe/2GEt6Kw==", + "license": "MIT", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": 
"1.1.0", + "resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, "node_modules/@redocly/ajv": { "version": "8.11.2", "resolved": "https://registry.npmjs.org/@redocly/ajv/-/ajv-8.11.2.tgz", @@ -6069,6 +6139,15 @@ "node": ">=12" } }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -8064,6 +8143,15 @@ "node": ">=14" } }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", @@ -12202,6 +12290,23 @@ "node": ">=8" } }, + "node_modules/redis": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.7.1.tgz", + "integrity": "sha512-S1bJDnqLftzHXHP8JsT5II/CtHWQrASX5K96REjWjlmWKrviSOLWmM7QnRLstAWsu1VBBV1ffV6DzCvxNP0UJQ==", + "license": "MIT", + "workspaces": [ + "./packages/*" + ], + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.1", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + } + }, "node_modules/redoc-cli": { "version": "0.13.20", "resolved": "https://registry.npmjs.org/redoc-cli/-/redoc-cli-0.13.20.tgz", diff --git a/package.json b/package.json index 521e5bd..ff8f08f 100644 --- a/package.json +++ b/package.json @@ -67,6 +67,7 @@ "@google-cloud/storage": "^7.12.1", "@hirosystems/api-toolkit": "^1.12.0", "@hirosystems/chainhook-client": "^2.4.0", + "@hirosystems/salt-n-pepper-client": "^1.2.0", "@hirosystems/stacks-encoding-native-js": "^1.2.0", "@sinclair/typebox": "^0.28.17", "@stacks/blockchain-api-client": "^8.13.6", diff --git a/src/env.ts b/src/env.ts index 653d3f8..e1a4cf2 100644 --- a/src/env.ts +++ b/src/env.ts @@ -32,6 +32,9 @@ const schema = Type.Object({ /** Port in which to serve the profiler */ PROFILER_PORT: Type.Number({ default: 9119 }), + SNP_REDIS_URL: Type.String(), + SNP_REDIS_STREAM_KEY_PREFIX: Type.String(), + /** Hostname of the chainhook node we'll use to register predicates */ CHAINHOOK_NODE_RPC_HOST: Type.String({ default: '127.0.0.1' }), /** Control port of the chainhook node */ @@ -67,7 +70,9 @@ const schema = Type.Object({ STACKS_NODE_RPC_HOST: Type.String(), STACKS_NODE_RPC_PORT: Type.Number({ minimum: 0, maximum: 65535 }), - /// Base url for the Stacks API. Used only through AdminRPC requests for maintenance operations. + /** + * Base url for the Stacks API. Used only through AdminRPC requests for maintenance operations. + */ STACKS_API_BASE_URL: Type.String({ default: 'https://api.mainnet.hiro.so' }), /** Whether or not the job queue should start processing jobs immediately after bootup. 
   */
diff --git a/src/snp/schemas.ts b/src/snp/schemas.ts
new file mode 100644
index 0000000..984b418
--- /dev/null
+++ b/src/snp/schemas.ts
@@ -0,0 +1,101 @@
+import { Static, Type } from '@sinclair/typebox';
+
+const SnpBaseEventSchema = Type.Object({
+  txid: Type.String(),
+  event_index: Type.Number(),
+});
+
+export const SnpSmartContractPrintEventSchema = Type.Composite([
+  SnpBaseEventSchema,
+  Type.Object({
+    type: Type.Literal('contract_event'),
+    contract_event: Type.Object({
+      contract_identifier: Type.String(),
+      topic: Type.String(),
+      raw_value: Type.String(),
+    }),
+  }),
+]);
+export type SnpSmartContractPrintEvent = Static<typeof SnpSmartContractPrintEventSchema>;
+
+export const SnpNftMintEventSchema = Type.Composite([
+  SnpBaseEventSchema,
+  Type.Object({
+    type: Type.Literal('nft_mint_event'),
+    nft_mint_event: Type.Object({
+      asset_identifier: Type.String(),
+      recipient: Type.String(),
+      raw_value: Type.String(),
+    }),
+  }),
+]);
+export type SnpNftMintEvent = Static<typeof SnpNftMintEventSchema>;
+
+export const SnpNftBurnEventSchema = Type.Composite([
+  SnpBaseEventSchema,
+  Type.Object({
+    type: Type.Literal('nft_burn_event'),
+    nft_burn_event: Type.Object({
+      asset_identifier: Type.String(),
+      sender: Type.String(),
+      raw_value: Type.String(),
+    }),
+  }),
+]);
+export type SnpNftBurnEvent = Static<typeof SnpNftBurnEventSchema>;
+
+export const SnpFtMintEventSchema = Type.Composite([
+  SnpBaseEventSchema,
+  Type.Object({
+    type: Type.Literal('ft_mint_event'),
+    ft_mint_event: Type.Object({
+      asset_identifier: Type.String(),
+      recipient: Type.String(),
+      amount: Type.String(),
+    }),
+  }),
+]);
+export type SnpFtMintEvent = Static<typeof SnpFtMintEventSchema>;
+
+export const SnpFtBurnEventSchema = Type.Composite([
+  SnpBaseEventSchema,
+  Type.Object({
+    type: Type.Literal('ft_burn_event'),
+    ft_burn_event: Type.Object({
+      asset_identifier: Type.String(),
+      sender: Type.String(),
+      amount: Type.String(),
+    }),
+  }),
+]);
+export type SnpFtBurnEvent = Static<typeof SnpFtBurnEventSchema>;
+
+export const SnpEventSchema = Type.Union([
+  SnpSmartContractPrintEventSchema,
+  SnpNftMintEventSchema,
+  SnpNftBurnEventSchema,
+  SnpFtMintEventSchema,
+  SnpFtBurnEventSchema,
+]);
+export type SnpEvent = Static<typeof SnpEventSchema>;
+
+export const SnpTransactionSchema = Type.Object({
+  status: Type.Union([
+    Type.Literal('success'),
+    Type.Literal('abort_by_response'),
+    Type.Literal('abort_by_post_condition'),
+  ]),
+  txid: Type.String(),
+  tx_index: Type.Number(),
+  contract_interface: Type.Union([Type.Null(), Type.String()]),
+});
+export type SnpTransaction = Static<typeof SnpTransactionSchema>;
+
+export const SnpBlockSchema = Type.Object({
+  block_height: Type.Number(),
+  index_block_hash: Type.String(),
+  parent_index_block_hash: Type.String(),
+  events: Type.Array(SnpEventSchema),
+  transactions: Type.Array(SnpTransactionSchema),
+});
+export type SnpBlock = Static<typeof SnpBlockSchema>;
diff --git a/src/snp/snp-event-stream.ts b/src/snp/snp-event-stream.ts
new file mode 100644
index 0000000..3c5d79b
--- /dev/null
+++ b/src/snp/snp-event-stream.ts
@@ -0,0 +1,76 @@
+import { parseBoolean, SERVER_VERSION } from '@hirosystems/api-toolkit';
+import { logger as defaultLogger } from '@hirosystems/api-toolkit';
+import { StacksEventStream, StacksEventStreamType } from '@hirosystems/salt-n-pepper-client';
+import { EventEmitter } from 'node:events';
+import { SnpBlock, SnpBlockSchema } from './schemas';
+import { TypeCompiler } from '@sinclair/typebox/compiler';
+
+const SnpBlockCType = TypeCompiler.Compile(SnpBlockSchema);
+
+export class SnpEventStreamHandler {
+  // db: PgWriteStore;
+  logger = defaultLogger.child({ name: 'SnpEventStreamHandler' });
+  snpClientStream: StacksEventStream;
+  redisUrl: string;
+
redisStreamPrefix: string | undefined; + + readonly events = new EventEmitter<{ + processedMessage: [{ msgId: string }]; + }>(); + + constructor(opts: { redisUrl: string; redisStreamPrefix: string; lastMessageId: string }) { + this.redisUrl = opts.redisUrl; + this.redisStreamPrefix = opts.redisStreamPrefix; + + this.logger.info(`SNP streaming enabled, lastMsgId: ${opts.lastMessageId}`); + const appName = `token-metadata-api ${SERVER_VERSION.tag} (${SERVER_VERSION.branch}:${SERVER_VERSION.commit})`; + + this.snpClientStream = new StacksEventStream({ + redisUrl: this.redisUrl, + redisStreamPrefix: this.redisStreamPrefix, + eventStreamType: StacksEventStreamType.confirmedChainEvents, + lastMessageId: opts.lastMessageId, + appName, + }); + } + + async start() { + this.logger.info(`Connecting to SNP event stream at ${this.redisUrl} ...`); + await this.snpClientStream.connect({ waitForReady: true }); + this.snpClientStream.start(async (messageId, timestamp, path, body) => { + return this.handleMsg(messageId, timestamp, path, body); + }); + } + + async handleMsg(messageId: string, timestamp: string, path: string, body: any) { + this.logger.debug(`Received SNP stream event ${path}, msgId: ${messageId}`); + if (path !== '/new_block') { + this.logger.warn(`Unsupported SNP stream event ${path}, skipping...`); + return; + } + if (!SnpBlockCType.Check(body)) { + throw new Error(`Failed to parse SNP block body: ${body}`); + } + const block = body; + + // const response = await this.eventServer.fastifyInstance.inject({ + // method: 'POST', + // url: path, + // payload: body, + // }); + + // if (response.statusCode < 200 || response.statusCode > 299) { + // const errorMessage = `Failed to process SNP message ${messageId} at path ${path}, status: ${response.statusCode}, body: ${response.body}`; + // this.logger.error(errorMessage); + // throw new Error(errorMessage); + // } + + // await this.db.updateLastIngestedSnpRedisMsgId(this.db.sql, messageId); + await Promise.resolve(); + this.events.emit('processedMessage', { msgId: messageId }); + } + + async stop(): Promise { + await this.snpClientStream.stop(); + } +} From 3a1bbadbb71c0553969ee5dfcadd3e07624968a7 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 6 Jan 2026 16:25:49 -0600 Subject: [PATCH 02/19] chore: delete chainhook server --- package-lock.json | 12 ----- package.json | 1 - src/chainhook/server.ts | 100 ---------------------------------------- src/env.ts | 29 +----------- 4 files changed, 2 insertions(+), 140 deletions(-) delete mode 100644 src/chainhook/server.ts diff --git a/package-lock.json b/package-lock.json index b93a995..13f26a8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,7 +14,6 @@ "@fastify/type-provider-typebox": "^3.2.0", "@google-cloud/storage": "^7.12.1", "@hirosystems/api-toolkit": "^1.12.0", - "@hirosystems/chainhook-client": "^2.4.0", "@hirosystems/salt-n-pepper-client": "^1.2.0", "@hirosystems/stacks-encoding-native-js": "^1.2.0", "@sinclair/typebox": "^0.28.17", @@ -2687,17 +2686,6 @@ "fastify": ">=4" } }, - "node_modules/@hirosystems/chainhook-client": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/@hirosystems/chainhook-client/-/chainhook-client-2.4.0.tgz", - "integrity": "sha512-S+lekGeMqtEEPiEcvSSoqfO33V2/kMY8eAboULysddQ0KQx/z9RHn+iV2bU7J3lq8nkFAgBUCWaGNzuBVGY0yA==", - "dependencies": { - "@fastify/type-provider-typebox": "^3.2.0", - "fastify": "^4.15.0", - "pino": "^8.11.0", - "undici": "^5.21.2" - } - }, "node_modules/@hirosystems/salt-n-pepper-client": { "version": "1.2.0", 
"resolved": "https://registry.npmjs.org/@hirosystems/salt-n-pepper-client/-/salt-n-pepper-client-1.2.0.tgz", diff --git a/package.json b/package.json index ff8f08f..1c3d962 100644 --- a/package.json +++ b/package.json @@ -66,7 +66,6 @@ "@fastify/type-provider-typebox": "^3.2.0", "@google-cloud/storage": "^7.12.1", "@hirosystems/api-toolkit": "^1.12.0", - "@hirosystems/chainhook-client": "^2.4.0", "@hirosystems/salt-n-pepper-client": "^1.2.0", "@hirosystems/stacks-encoding-native-js": "^1.2.0", "@sinclair/typebox": "^0.28.17", diff --git a/src/chainhook/server.ts b/src/chainhook/server.ts deleted file mode 100644 index 8ccef0e..0000000 --- a/src/chainhook/server.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { - ChainhookEventObserver, - ChainhookNodeOptions, - EventObserverOptions, - EventObserverPredicate, - Payload, - Predicate, - StacksPayload, -} from '@hirosystems/chainhook-client'; -import { PgStore } from '../pg/pg-store'; -import { ENV } from '../env'; -import { logger } from '@hirosystems/api-toolkit'; - -export async function startChainhookServer(args: { db: PgStore }): Promise { - const blockHeight = await args.db.getChainTipBlockHeight(); - logger.info(`ChainhookServer is at block ${blockHeight}`); - - const predicates: EventObserverPredicate[] = []; - if (ENV.CHAINHOOK_AUTO_PREDICATE_REGISTRATION) { - const header = { - name: 'metadata-api-blocks', - version: 1, - chain: 'stacks', - }; - switch (ENV.NETWORK) { - case 'mainnet': - predicates.push({ - ...header, - networks: { - mainnet: { - start_block: blockHeight, - include_contract_abi: true, - if_this: { - scope: 'block_height', - higher_than: 1, - }, - }, - }, - }); - break; - case 'testnet': - predicates.push({ - ...header, - networks: { - testnet: { - start_block: blockHeight, - include_contract_abi: true, - if_this: { - scope: 'block_height', - higher_than: 1, - }, - }, - }, - }); - break; - } - } - - const observer: EventObserverOptions = { - hostname: ENV.API_HOST, - port: ENV.EVENT_PORT, - auth_token: ENV.CHAINHOOK_NODE_AUTH_TOKEN, - external_base_url: `http://${ENV.EXTERNAL_HOSTNAME}`, - wait_for_chainhook_node: ENV.CHAINHOOK_AUTO_PREDICATE_REGISTRATION, - validate_chainhook_payloads: false, - body_limit: ENV.EVENT_SERVER_BODY_LIMIT, - predicate_disk_file_path: ENV.CHAINHOOK_PREDICATE_PATH, - predicate_health_check_interval_ms: 300_000, - node_type: 'chainhook', - predicate_re_register_callback: async predicate => { - const blockHeight = await args.db.getChainTipBlockHeight(); - switch (ENV.NETWORK) { - case 'mainnet': - if (predicate.networks.mainnet) predicate.networks.mainnet.start_block = blockHeight; - break; - case 'testnet': - if (predicate.networks.testnet) predicate.networks.testnet.start_block = blockHeight; - break; - } - return predicate as Predicate; - }, - }; - const chainhook: ChainhookNodeOptions = { - base_url: `http://${ENV.CHAINHOOK_NODE_RPC_HOST}:${ENV.CHAINHOOK_NODE_RPC_PORT}`, - }; - const server = new ChainhookEventObserver(observer, chainhook); - await server.start(predicates, async (payload: Payload) => { - logger.info( - `ChainhookServer received ${ - payload.chainhook.is_streaming_blocks ? 
'streamed' : 'replay' - } payload from predicate ${payload.chainhook.uuid}` - ); - await args.db.chainhook.processPayload(payload as StacksPayload); - }); - return server; -} - -export async function closeChainhookServer(server: ChainhookEventObserver) { - await server.close(); -} diff --git a/src/env.ts b/src/env.ts index e1a4cf2..9f31377 100644 --- a/src/env.ts +++ b/src/env.ts @@ -21,41 +21,16 @@ const schema = Type.Object({ API_PORT: Type.Number({ default: 3000, minimum: 0, maximum: 65535 }), /** Port in which to serve the Admin RPC interface */ ADMIN_RPC_PORT: Type.Number({ default: 3001, minimum: 0, maximum: 65535 }), - /** Port in which to receive chainhook events */ - EVENT_PORT: Type.Number({ default: 3099, minimum: 0, maximum: 65535 }), - /** Event server body limit (bytes) */ - EVENT_SERVER_BODY_LIMIT: Type.Integer({ default: 20971520 }), - /** Hostname that will be reported to the chainhook node so it can call us back with events */ - EXTERNAL_HOSTNAME: Type.String({ default: '127.0.0.1' }), + /** Port in which to serve prometheus metrics */ PROMETHEUS_PORT: Type.Number({ default: 9153 }), + /** Port in which to serve the profiler */ PROFILER_PORT: Type.Number({ default: 9119 }), SNP_REDIS_URL: Type.String(), SNP_REDIS_STREAM_KEY_PREFIX: Type.String(), - /** Hostname of the chainhook node we'll use to register predicates */ - CHAINHOOK_NODE_RPC_HOST: Type.String({ default: '127.0.0.1' }), - /** Control port of the chainhook node */ - CHAINHOOK_NODE_RPC_PORT: Type.Number({ default: 20456, minimum: 0, maximum: 65535 }), - /** - * Authorization token that the chainhook node must send with every event to make sure it's - * coming from the valid instance - */ - CHAINHOOK_NODE_AUTH_TOKEN: Type.String(), - /** - * Register chainhook predicates automatically when the API is first launched. Set this to `false` - * if you're configuring your predicates manually. - */ - CHAINHOOK_AUTO_PREDICATE_REGISTRATION: Type.Boolean({ default: true }), - /** - * File path to a directory where the `predicate.json` file will be persisted by the API when - * registering its chainhook predicate so it can validate and resume later. Only used if auto - * predicate registration is enabled. - */ - CHAINHOOK_PREDICATE_PATH: Type.String({ default: '.' 
}), - PGHOST: Type.String(), PGPORT: Type.Number({ default: 5432, minimum: 0, maximum: 65535 }), PGUSER: Type.String(), From 96a97414a5d8d15494fd7ca8d15e787d184bc32f Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 6 Jan 2026 22:56:39 -0600 Subject: [PATCH 03/19] processor draft --- src/pg/chainhook/block-cache.ts | 116 ------------ src/pg/chainhook/chainhook-pg-store.ts | 138 +++++---------- src/snp/schemas.ts | 1 + src/snp/snp-block-processor.ts | 195 +++++++++++++++++++++ src/snp/snp-event-stream.ts | 49 +++--- src/token-processor/util/sip-validation.ts | 10 +- 6 files changed, 272 insertions(+), 237 deletions(-) delete mode 100644 src/pg/chainhook/block-cache.ts create mode 100644 src/snp/snp-block-processor.ts diff --git a/src/pg/chainhook/block-cache.ts b/src/pg/chainhook/block-cache.ts deleted file mode 100644 index f43ab96..0000000 --- a/src/pg/chainhook/block-cache.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { - BlockIdentifier, - StacksTransaction, - StacksTransactionContractDeploymentKind, -} from '@hirosystems/chainhook-client'; -import { - NftMintEvent, - SftMintEvent, - SmartContractDeployment, - TokenMetadataUpdateNotification, - getContractLogMetadataUpdateNotification, - getContractLogSftMintEvent, - getSmartContractSip, -} from '../../token-processor/util/sip-validation'; -import { ClarityAbi } from '@stacks/transactions'; -import { ClarityTypeID, decodeClarityValue } from '@hirosystems/stacks-encoding-native-js'; -import BigNumber from 'bignumber.js'; - -export type CachedEvent = { - event: T; - tx_id: string; - tx_index: number; - event_index?: number; -}; - -export type CachedFtSupplyDeltaMap = Map; - -function contractPrincipalFromAssetIdentifier(asset_identifier: string): string { - return asset_identifier.split('::')[0]; -} - -/** - * Reads transactions and events from a block received via Chainhook and identifies events we should - * write to the DB. 
- */ -export class BlockCache { - block: BlockIdentifier; - - contracts: CachedEvent[] = []; - notifications: CachedEvent[] = []; - sftMints: CachedEvent[] = []; - nftMints: CachedEvent[] = []; - ftSupplyDelta: CachedFtSupplyDeltaMap = new Map(); - - constructor(block: BlockIdentifier) { - this.block = block; - } - - transaction(tx: StacksTransaction) { - if (!tx.metadata.success) return; - if (tx.metadata.kind.type === 'ContractDeployment' && tx.metadata.contract_abi) { - const abi = tx.metadata.contract_abi as ClarityAbi; - const sip = getSmartContractSip(abi); - if (sip) { - const kind = tx.metadata.kind as StacksTransactionContractDeploymentKind; - this.contracts.push({ - event: { - principal: kind.data.contract_identifier, - sip, - fungible_token_name: abi.fungible_tokens[0]?.name, - non_fungible_token_name: abi.non_fungible_tokens[0]?.name, - }, - tx_id: tx.transaction_identifier.hash, - tx_index: tx.metadata.position.index, - }); - } - } - for (const event of tx.metadata.receipt.events) { - switch (event.type) { - case 'SmartContractEvent': - const notification = getContractLogMetadataUpdateNotification(tx.metadata.sender, event); - if (notification) { - this.notifications.push({ - event: notification, - tx_id: tx.transaction_identifier.hash, - tx_index: tx.metadata.position.index, - event_index: event.position.index, - }); - continue; - } - const mint = getContractLogSftMintEvent(event); - if (mint) { - this.sftMints.push({ - event: mint, - tx_id: tx.transaction_identifier.hash, - tx_index: tx.metadata.position.index, - event_index: event.position.index, - }); - continue; - } - break; - case 'FTMintEvent': - case 'FTBurnEvent': - const principal = contractPrincipalFromAssetIdentifier(event.data.asset_identifier); - const previous = this.ftSupplyDelta.get(principal) ?? 
BigNumber(0); - let amount = BigNumber(event.data.amount); - if (event.type === 'FTBurnEvent') amount = amount.negated(); - this.ftSupplyDelta.set(principal, previous.plus(amount)); - break; - case 'NFTMintEvent': - const value = decodeClarityValue(event.data.raw_value); - if (value.type_id == ClarityTypeID.UInt) - this.nftMints.push({ - event: { - contractId: event.data.asset_identifier.split('::')[0], - tokenId: BigInt(value.value), - }, - tx_id: tx.transaction_identifier.hash, - tx_index: tx.metadata.position.index, - event_index: event.position.index, - }); - break; - } - } - } -} diff --git a/src/pg/chainhook/chainhook-pg-store.ts b/src/pg/chainhook/chainhook-pg-store.ts index b36de1b..bfd6c42 100644 --- a/src/pg/chainhook/chainhook-pg-store.ts +++ b/src/pg/chainhook/chainhook-pg-store.ts @@ -5,7 +5,6 @@ import { logger, stopwatch, } from '@hirosystems/api-toolkit'; -import { StacksEvent, StacksPayload } from '@hirosystems/chainhook-client'; import { ENV } from '../../env'; import { NftMintEvent, @@ -13,45 +12,23 @@ import { TokenMetadataUpdateNotification, } from '../../token-processor/util/sip-validation'; import { DbSmartContractInsert, DbTokenType, DbSmartContract, DbSipNumber } from '../types'; -import { BlockCache, CachedEvent } from './block-cache'; import { dbSipNumberToDbTokenType } from '../../token-processor/util/helpers'; import BigNumber from 'bignumber.js'; +import { SnpProcessedBlock, SnpProcessedEvent } from '../../snp/snp-block-processor'; export class ChainhookPgStore extends BasePgStoreModule { - async processPayload(payload: StacksPayload): Promise { + async writeBlock(block: SnpProcessedBlock): Promise { await this.sqlWriteTransaction(async sql => { - for (const block of payload.rollback) { - logger.info(`ChainhookPgStore rollback block ${block.block_identifier.index}`); - const time = stopwatch(); - await this.updateStacksBlock(sql, block, 'rollback'); - logger.info( - `ChainhookPgStore rollback block ${ - block.block_identifier.index - } finished in ${time.getElapsedSeconds()}s` - ); - } - if (payload.rollback.length) { - const earliestRolledBack = Math.min(...payload.rollback.map(r => r.block_identifier.index)); - await this.updateChainTipBlockHeight(earliestRolledBack - 1); - } - for (const block of payload.apply) { - if (block.block_identifier.index <= (await this.getLastIngestedBlockHeight())) { - logger.info( - `ChainhookPgStore skipping previously ingested block ${block.block_identifier.index}` - ); - continue; - } - logger.info(`ChainhookPgStore apply block ${block.block_identifier.index}`); - const time = stopwatch(); - await this.updateStacksBlock(sql, block, 'apply'); - await this.enqueueDynamicTokensDueForRefresh(); - await this.updateChainTipBlockHeight(block.block_identifier.index); - logger.info( - `ChainhookPgStore apply block ${ - block.block_identifier.index - } finished in ${time.getElapsedSeconds()}s` - ); - } + logger.info(`ChainhookPgStore apply block ${block.block_height} #${block.index_block_hash}`); + const time = stopwatch(); + await this.applyTransactions(sql, block); + await this.enqueueDynamicTokensDueForRefresh(); + await this.updateChainTipBlockHeight(block.block_height); + logger.info( + `ChainhookPgStore apply block ${block.block_height} #${ + block.index_block_hash + } finished in ${time.getElapsedSeconds()}s` + ); }); } @@ -100,14 +77,14 @@ export class ChainhookPgStore extends BasePgStoreModule { async applyContractDeployment( sql: PgSqlClient, - contract: CachedEvent, - cache: BlockCache + contract: SnpProcessedEvent, + block: 
SnpProcessedBlock ) { await this.enqueueContract(sql, { principal: contract.event.principal, sip: contract.event.sip, - block_height: cache.block.index, - index_block_hash: cache.block.hash, + block_height: block.block_height, + index_block_hash: block.index_block_hash, tx_id: contract.tx_id, tx_index: contract.tx_index, fungible_token_name: contract.event.fungible_token_name ?? null, @@ -163,58 +140,39 @@ export class ChainhookPgStore extends BasePgStoreModule { return result[0].block_height; } - private async updateStacksBlock( - sql: PgSqlClient, - block: StacksEvent, - direction: 'apply' | 'rollback' - ) { - const cache = new BlockCache(block.block_identifier); - for (const tx of block.transactions) { - cache.transaction(tx); - } - switch (direction) { - case 'apply': - await this.applyTransactions(sql, cache); - break; - case 'rollback': - await this.rollBackTransactions(sql, cache); - break; - } - } - - private async applyTransactions(sql: PgSqlClient, cache: BlockCache) { - for (const contract of cache.contracts) - await this.applyContractDeployment(sql, contract, cache); - for (const notification of cache.notifications) - await this.applyNotification(sql, notification, cache); - await this.applyTokenMints(sql, cache.nftMints, DbTokenType.nft, cache); - await this.applyTokenMints(sql, cache.sftMints, DbTokenType.sft, cache); - for (const [contract, delta] of cache.ftSupplyDelta) - await this.applyFtSupplyChange(sql, contract, delta, cache); + private async applyTransactions(sql: PgSqlClient, block: SnpProcessedBlock) { + for (const contract of block.contracts) + await this.applyContractDeployment(sql, contract, block); + for (const notification of block.notifications) + await this.applyNotification(sql, notification, block); + await this.applyTokenMints(sql, block.nftMints, DbTokenType.nft, block); + await this.applyTokenMints(sql, block.sftMints, DbTokenType.sft, block); + for (const [contract, delta] of block.ftSupplyDelta) + await this.applyFtSupplyChange(sql, contract, delta, block); } - private async rollBackTransactions(sql: PgSqlClient, cache: BlockCache) { - for (const contract of cache.contracts) - await this.rollBackContractDeployment(sql, contract, cache); - for (const notification of cache.notifications) - await this.rollBackNotification(sql, notification, cache); - await this.rollBackTokenMints(sql, cache.nftMints, DbTokenType.nft, cache); - await this.rollBackTokenMints(sql, cache.sftMints, DbTokenType.sft, cache); - for (const [contract, delta] of cache.ftSupplyDelta) - await this.applyFtSupplyChange(sql, contract, delta.negated(), cache); - } + // private async rollBackTransactions(sql: PgSqlClient, cache: BlockCache) { + // for (const contract of cache.contracts) + // await this.rollBackContractDeployment(sql, contract, cache); + // for (const notification of cache.notifications) + // await this.rollBackNotification(sql, notification, cache); + // await this.rollBackTokenMints(sql, cache.nftMints, DbTokenType.nft, cache); + // await this.rollBackTokenMints(sql, cache.sftMints, DbTokenType.sft, cache); + // for (const [contract, delta] of cache.ftSupplyDelta) + // await this.applyFtSupplyChange(sql, contract, delta.negated(), cache); + // } private async applyNotification( sql: PgSqlClient, - event: CachedEvent, - cache: BlockCache + event: SnpProcessedEvent, + block: SnpProcessedBlock ) { const contractResult = await sql<{ id: number }[]>` SELECT id FROM smart_contracts WHERE principal = ${event.event.contract_id} LIMIT 1 `; if (contractResult.count == 0) { 
logger.warn( - `ChainhookPgStore found SIP-019 notification for non-existing token contract ${event.event.contract_id} at block ${cache.block.index}` + `ChainhookPgStore found SIP-019 notification for non-existing token contract ${event.event.contract_id} at block ${block.block_height} #${block.index_block_hash}` ); return; } @@ -241,8 +199,8 @@ export class ChainhookPgStore extends BasePgStoreModule { INSERT INTO update_notifications (token_id, update_mode, ttl, block_height, index_block_hash, tx_id, tx_index, event_index) ( - SELECT id, ${notification.update_mode}, ${notification.ttl ?? null}, ${cache.block.index}, - ${cache.block.hash}, ${event.tx_id}, ${event.tx_index}, + SELECT id, ${notification.update_mode}, ${notification.ttl ?? null}, + ${block.block_height}, ${block.index_block_hash}, ${event.tx_id}, ${event.tx_index}, ${event.event_index} FROM previous_modes WHERE update_mode <> 'frozen' @@ -256,7 +214,7 @@ export class ChainhookPgStore extends BasePgStoreModule { logger.info( `ChainhookPgStore apply SIP-019 notification ${notification.contract_id} (${ notification.token_ids ?? 'all' - }) at block ${cache.block.index}` + }) at block ${block.block_height} #${block.index_block_hash}` ); } @@ -264,7 +222,7 @@ export class ChainhookPgStore extends BasePgStoreModule { sql: PgSqlClient, contract: string, delta: BigNumber, - cache: BlockCache + block: SnpProcessedBlock ): Promise { await sql` UPDATE tokens @@ -273,7 +231,7 @@ export class ChainhookPgStore extends BasePgStoreModule { AND token_number = 1 `; logger.info( - `ChainhookPgStore apply FT supply change for ${contract} (${delta}) at block ${cache.block.index}` + `ChainhookPgStore apply FT supply change for ${contract} (${delta}) at block ${block.block_height} #${block.index_block_hash}` ); } @@ -339,9 +297,9 @@ export class ChainhookPgStore extends BasePgStoreModule { private async applyTokenMints( sql: PgSqlClient, - mints: CachedEvent[], + mints: SnpProcessedEvent[], tokenType: DbTokenType, - cache: BlockCache + block: SnpProcessedBlock ): Promise { if (mints.length == 0) return; for await (const batch of batchIterate(mints, 500)) { @@ -354,14 +312,14 @@ export class ChainhookPgStore extends BasePgStoreModule { logger.info( `ChainhookPgStore apply ${tokenType.toUpperCase()} mint ${m.event.contractId} (${ m.event.tokenId - }) at block ${cache.block.index}` + }) at block ${block.block_height} #${block.index_block_hash}` ); tokenValues.set(tokenKey, [ m.event.contractId, tokenType, m.event.tokenId.toString(), - cache.block.index, - cache.block.hash, + block.block_height, + block.index_block_hash, m.tx_id, m.tx_index, ]); diff --git a/src/snp/schemas.ts b/src/snp/schemas.ts index 984b418..f72804a 100644 --- a/src/snp/schemas.ts +++ b/src/snp/schemas.ts @@ -80,6 +80,7 @@ export const SnpEventSchema = Type.Union([ export type SnpEvent = Static; export const SnpTransactionSchema = Type.Object({ + raw_tx: Type.String(), status: Type.Union([ Type.Literal('success'), Type.Literal('abort_by_response'), diff --git a/src/snp/snp-block-processor.ts b/src/snp/snp-block-processor.ts new file mode 100644 index 0000000..336b0c5 --- /dev/null +++ b/src/snp/snp-block-processor.ts @@ -0,0 +1,195 @@ +import BigNumber from 'bignumber.js'; +import { + SnpBlock, + SnpFtBurnEvent, + SnpFtMintEvent, + SnpNftMintEvent, + SnpSmartContractPrintEvent, + SnpTransaction, +} from './schemas'; +import { + getContractLogMetadataUpdateNotification, + getContractLogSftMintEvent, + getSmartContractSip, + NftMintEvent, + SftMintEvent, + SmartContractDeployment, + 
TokenMetadataUpdateNotification,
+} from '../token-processor/util/sip-validation';
+import { ClarityAbi } from '@stacks/transactions';
+import {
+  ClarityTypeID,
+  decodeClarityValue,
+  DecodedTxResult,
+  decodeTransaction,
+  TxPayloadTypeID,
+} from '@hirosystems/stacks-encoding-native-js';
+import { ChainhookPgStore } from '../pg/chainhook/chainhook-pg-store';
+
+export type SnpProcessedEvent<T> = {
+  event: T;
+  tx_id: string;
+  tx_index: number;
+  event_index?: number;
+};
+
+export type SnpIndexedTransaction = {
+  tx: SnpTransaction;
+  decoded: DecodedTxResult;
+};
+
+export type SnpProcessedBlock = {
+  block_height: number;
+  index_block_hash: string;
+  transactions: Map<string, SnpIndexedTransaction>;
+  contracts: SnpProcessedEvent<SmartContractDeployment>[];
+  notifications: SnpProcessedEvent<TokenMetadataUpdateNotification>[];
+  sftMints: SnpProcessedEvent<SftMintEvent>[];
+  nftMints: SnpProcessedEvent<NftMintEvent>[];
+  ftSupplyDelta: Map<string, BigNumber>;
+};
+
+export class SnpBlockProcessor {
+  private readonly db: ChainhookPgStore;
+
+  private block: SnpProcessedBlock = {
+    block_height: 0,
+    index_block_hash: '',
+    transactions: new Map<string, SnpIndexedTransaction>(),
+    contracts: [],
+    notifications: [],
+    sftMints: [],
+    nftMints: [],
+    ftSupplyDelta: new Map(),
+  };
+
+  constructor(args: { db: ChainhookPgStore }) {
+    this.db = args.db;
+  }
+
+  async process(block: SnpBlock): Promise<void> {
+    this.block.block_height = block.block_height;
+    this.block.index_block_hash = block.index_block_hash;
+
+    for (const transaction of block.transactions) {
+      if (transaction.status !== 'success') continue;
+
+      const indexedTransaction: SnpIndexedTransaction = {
+        tx: transaction,
+        decoded: decodeTransaction(transaction.raw_tx.substring(2)),
+      };
+      this.block.transactions.set(transaction.txid, indexedTransaction);
+
+      // Check for smart contract deployments.
+      this.processSmartContract(indexedTransaction);
+    }
+
+    // Check for token metadata updates and token supply deltas.
+    for (const event of block.events) {
+      const transaction = this.block.transactions.get(event.txid);
+      if (!transaction) continue;
+      switch (event.type) {
+        case 'contract_event':
+          this.processContractEvent(transaction, event);
+          break;
+        case 'ft_mint_event':
+          this.processFtMintEvent(event);
+          break;
+        case 'ft_burn_event':
+          this.processFtBurnEvent(event);
+          break;
+        case 'nft_mint_event':
+          this.processNftMintEvent(transaction, event);
+          break;
+        case 'nft_burn_event':
+          // Burned NFTs still have their metadata in the database, so we don't need to do anything
+          // here.
+          break;
+      }
+    }
+
+    await this.db.writeBlock(this.block);
+  }
+
+  private processSmartContract(transaction: SnpIndexedTransaction) {
+    if (transaction.tx.contract_interface == null) return;
+
+    // Parse the included ABI to check if it's a token contract.
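+    // getSmartContractSip() checks the deployed contract's ABI against the supported token SIP
+    // traits; contracts that do not match any token SIP are skipped.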
+ const abi = JSON.parse(transaction.tx.contract_interface) as ClarityAbi; + const sip = getSmartContractSip(abi); + if (!sip) return; + + const sender = transaction.decoded.auth.origin_condition.signer.address; + const payload = transaction.decoded.payload; + if ( + payload.type_id === TxPayloadTypeID.SmartContract || + payload.type_id === TxPayloadTypeID.VersionedSmartContract + ) { + this.block.contracts.push({ + event: { + principal: `${sender}.${payload.contract_name}`, + sip, + fungible_token_name: abi.fungible_tokens[0]?.name, + non_fungible_token_name: abi.non_fungible_tokens[0]?.name, + }, + tx_id: transaction.tx.txid, + tx_index: transaction.tx.tx_index, + }); + } + } + + private processContractEvent( + transaction: SnpIndexedTransaction, + event: SnpSmartContractPrintEvent + ) { + const sender = transaction.decoded.auth.origin_condition.signer.address; + const notification = getContractLogMetadataUpdateNotification(sender, event); + if (notification) { + this.block.notifications.push({ + event: notification, + tx_id: event.txid, + tx_index: transaction.tx.tx_index, + event_index: event.event_index, + }); + return; + } + const mint = getContractLogSftMintEvent(event); + if (mint) { + this.block.sftMints.push({ + event: mint, + tx_id: event.txid, + tx_index: transaction.tx.tx_index, + event_index: event.event_index, + }); + return; + } + } + + private processFtMintEvent(event: SnpFtMintEvent) { + const principal = event.ft_mint_event.asset_identifier.split('::')[0]; + const previous = this.block.ftSupplyDelta.get(principal) ?? BigNumber(0); + const amount = BigNumber(event.ft_mint_event.amount); + this.block.ftSupplyDelta.set(principal, previous.plus(amount)); + } + + private processFtBurnEvent(event: SnpFtBurnEvent) { + const principal = event.ft_burn_event.asset_identifier.split('::')[0]; + const previous = this.block.ftSupplyDelta.get(principal) ?? 
BigNumber(0); + const amount = BigNumber(event.ft_burn_event.amount); + this.block.ftSupplyDelta.set(principal, previous.minus(amount)); + } + + private processNftMintEvent(transaction: SnpIndexedTransaction, event: SnpNftMintEvent) { + const value = decodeClarityValue(event.nft_mint_event.raw_value); + if (value.type_id === ClarityTypeID.UInt) + this.block.nftMints.push({ + event: { + contractId: event.nft_mint_event.asset_identifier.split('::')[0], + tokenId: BigInt(value.value), + }, + tx_id: event.txid, + tx_index: transaction.tx.tx_index, + event_index: event.event_index, + }); + } +} diff --git a/src/snp/snp-event-stream.ts b/src/snp/snp-event-stream.ts index 3c5d79b..c7f2abe 100644 --- a/src/snp/snp-event-stream.ts +++ b/src/snp/snp-event-stream.ts @@ -1,24 +1,31 @@ -import { parseBoolean, SERVER_VERSION } from '@hirosystems/api-toolkit'; +import { SERVER_VERSION } from '@hirosystems/api-toolkit'; import { logger as defaultLogger } from '@hirosystems/api-toolkit'; import { StacksEventStream, StacksEventStreamType } from '@hirosystems/salt-n-pepper-client'; import { EventEmitter } from 'node:events'; -import { SnpBlock, SnpBlockSchema } from './schemas'; +import { SnpBlockSchema } from './schemas'; import { TypeCompiler } from '@sinclair/typebox/compiler'; +import { SnpBlockProcessor } from './snp-block-processor'; const SnpBlockCType = TypeCompiler.Compile(SnpBlockSchema); export class SnpEventStreamHandler { - // db: PgWriteStore; - logger = defaultLogger.child({ name: 'SnpEventStreamHandler' }); - snpClientStream: StacksEventStream; - redisUrl: string; - redisStreamPrefix: string | undefined; + private readonly blockProcessor: SnpBlockProcessor; + private readonly logger = defaultLogger.child({ name: 'SnpEventStreamHandler' }); + private readonly snpClientStream: StacksEventStream; + private readonly redisUrl: string; + private readonly redisStreamPrefix: string | undefined; readonly events = new EventEmitter<{ processedMessage: [{ msgId: string }]; }>(); - constructor(opts: { redisUrl: string; redisStreamPrefix: string; lastMessageId: string }) { + constructor(opts: { + redisUrl: string; + redisStreamPrefix: string; + lastMessageId: string; + blockProcessor: SnpBlockProcessor; + }) { + this.blockProcessor = opts.blockProcessor; this.redisUrl = opts.redisUrl; this.redisStreamPrefix = opts.redisStreamPrefix; @@ -42,7 +49,7 @@ export class SnpEventStreamHandler { }); } - async handleMsg(messageId: string, timestamp: string, path: string, body: any) { + async handleMsg(messageId: string, _timestamp: string, path: string, body: any) { this.logger.debug(`Received SNP stream event ${path}, msgId: ${messageId}`); if (path !== '/new_block') { this.logger.warn(`Unsupported SNP stream event ${path}, skipping...`); @@ -51,23 +58,13 @@ export class SnpEventStreamHandler { if (!SnpBlockCType.Check(body)) { throw new Error(`Failed to parse SNP block body: ${body}`); } - const block = body; - - // const response = await this.eventServer.fastifyInstance.inject({ - // method: 'POST', - // url: path, - // payload: body, - // }); - - // if (response.statusCode < 200 || response.statusCode > 299) { - // const errorMessage = `Failed to process SNP message ${messageId} at path ${path}, status: ${response.statusCode}, body: ${response.body}`; - // this.logger.error(errorMessage); - // throw new Error(errorMessage); - // } - - // await this.db.updateLastIngestedSnpRedisMsgId(this.db.sql, messageId); - await Promise.resolve(); - this.events.emit('processedMessage', { msgId: messageId }); + try { + await 
this.blockProcessor.process(body); + this.events.emit('processedMessage', { msgId: messageId }); + } catch (error) { + this.logger.error(error, `Failed to process block`); + throw new Error(`Failed to process block: ${error}`); + } } async stop(): Promise { diff --git a/src/token-processor/util/sip-validation.ts b/src/token-processor/util/sip-validation.ts index 80ce614..2ad102f 100644 --- a/src/token-processor/util/sip-validation.ts +++ b/src/token-processor/util/sip-validation.ts @@ -8,7 +8,7 @@ import { decodeClarityValue, } from '@hirosystems/stacks-encoding-native-js'; import { DbSipNumber } from '../../pg/types'; -import { StacksTransactionSmartContractEvent } from '@hirosystems/chainhook-client'; +import { SnpSmartContractPrintEvent } from '../../snp/schemas'; const FtTraitFunctions: ClarityAbiFunction[] = [ { @@ -307,9 +307,9 @@ export type TokenMetadataUpdateNotification = { */ export function getContractLogMetadataUpdateNotification( sender: string, - event: StacksTransactionSmartContractEvent + event: SnpSmartContractPrintEvent ): TokenMetadataUpdateNotification | undefined { - const log = event.data; + const log = event.contract_event; try { // Validate that we have the correct SIP-019 payload structure. const value = decodeClarityValue(log.raw_value); @@ -381,9 +381,9 @@ export type SftMintEvent = NftMintEvent & { }; export function getContractLogSftMintEvent( - event: StacksTransactionSmartContractEvent + event: SnpSmartContractPrintEvent ): SftMintEvent | undefined { - const log = event.data; + const log = event.contract_event; try { // Validate that we have the correct SIP-013 `sft_mint` payload structure. const value = decodeClarityValue(log.raw_value); From 2a41ba6070516838de73ecba4252315a87269ab5 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 6 Jan 2026 23:09:43 -0600 Subject: [PATCH 04/19] rename pg --- src/pg/pg-store.ts | 6 +++--- .../chainhook-pg-store.ts => snp-pg-store.ts} | 12 ++++++------ src/snp/snp-block-processor.ts | 6 +++--- 3 files changed, 12 insertions(+), 12 deletions(-) rename src/pg/{chainhook/chainhook-pg-store.ts => snp-pg-store.ts} (97%) diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 427e96f..41340d3 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -42,7 +42,7 @@ import { runMigrations, } from '@hirosystems/api-toolkit'; import * as path from 'path'; -import { ChainhookPgStore } from './chainhook/chainhook-pg-store'; +import { SnpPgStore } from './snp-pg-store'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); @@ -50,7 +50,7 @@ export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); * Connects and queries the Token Metadata Service's local postgres DB. 
*/ export class PgStore extends BasePgStore { - readonly chainhook: ChainhookPgStore; + readonly snp: SnpPgStore; static async connect(opts?: { skipMigrations: boolean }): Promise { const pgConfig = { @@ -77,7 +77,7 @@ export class PgStore extends BasePgStore { constructor(sql: PgSqlClient) { super(sql); - this.chainhook = new ChainhookPgStore(this); + this.snp = new SnpPgStore(this); } async getSmartContract( diff --git a/src/pg/chainhook/chainhook-pg-store.ts b/src/pg/snp-pg-store.ts similarity index 97% rename from src/pg/chainhook/chainhook-pg-store.ts rename to src/pg/snp-pg-store.ts index bfd6c42..0199dc4 100644 --- a/src/pg/chainhook/chainhook-pg-store.ts +++ b/src/pg/snp-pg-store.ts @@ -5,18 +5,18 @@ import { logger, stopwatch, } from '@hirosystems/api-toolkit'; -import { ENV } from '../../env'; +import { ENV } from '../env'; import { NftMintEvent, SmartContractDeployment, TokenMetadataUpdateNotification, -} from '../../token-processor/util/sip-validation'; -import { DbSmartContractInsert, DbTokenType, DbSmartContract, DbSipNumber } from '../types'; -import { dbSipNumberToDbTokenType } from '../../token-processor/util/helpers'; +} from '../token-processor/util/sip-validation'; +import { DbSmartContractInsert, DbTokenType, DbSmartContract, DbSipNumber } from './types'; +import { dbSipNumberToDbTokenType } from '../token-processor/util/helpers'; import BigNumber from 'bignumber.js'; -import { SnpProcessedBlock, SnpProcessedEvent } from '../../snp/snp-block-processor'; +import { SnpProcessedBlock, SnpProcessedEvent } from '../snp/snp-block-processor'; -export class ChainhookPgStore extends BasePgStoreModule { +export class SnpPgStore extends BasePgStoreModule { async writeBlock(block: SnpProcessedBlock): Promise { await this.sqlWriteTransaction(async sql => { logger.info(`ChainhookPgStore apply block ${block.block_height} #${block.index_block_hash}`); diff --git a/src/snp/snp-block-processor.ts b/src/snp/snp-block-processor.ts index 336b0c5..119b376 100644 --- a/src/snp/snp-block-processor.ts +++ b/src/snp/snp-block-processor.ts @@ -24,7 +24,7 @@ import { decodeTransaction, TxPayloadTypeID, } from '@hirosystems/stacks-encoding-native-js'; -import { ChainhookPgStore } from '../pg/chainhook/chainhook-pg-store'; +import { SnpPgStore } from '../pg/snp-pg-store'; export type SnpProcessedEvent = { event: T; @@ -50,7 +50,7 @@ export type SnpProcessedBlock = { }; export class SnpBlockProcessor { - private readonly db: ChainhookPgStore; + private readonly db: SnpPgStore; private block: SnpProcessedBlock = { block_height: 0, @@ -63,7 +63,7 @@ export class SnpBlockProcessor { ftSupplyDelta: new Map(), }; - constructor(args: { db: ChainhookPgStore }) { + constructor(args: { db: SnpPgStore }) { this.db = args.db; } From 18f6dd9e49d5f3add0a95894ec771efb9cd2b43e Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 7 Jan 2026 12:59:53 -0600 Subject: [PATCH 05/19] rename --- src/pg/pg-store.ts | 6 +- ...np-pg-store.ts => stacks-core-pg-store.ts} | 77 +++------ src/{snp => stacks-core}/schemas.ts | 58 +++---- src/{snp => stacks-core}/snp-event-stream.ts | 10 +- .../stacks-core-block-processor.ts} | 162 +++++++++++++----- src/token-processor/util/sip-validation.ts | 6 +- 6 files changed, 187 insertions(+), 132 deletions(-) rename src/pg/{snp-pg-store.ts => stacks-core-pg-store.ts} (82%) rename src/{snp => stacks-core}/schemas.ts (50%) rename src/{snp => stacks-core}/snp-event-stream.ts (89%) rename src/{snp/snp-block-processor.ts => stacks-core/stacks-core-block-processor.ts} (51%) diff --git 
a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 41340d3..69eb1e3 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -42,7 +42,7 @@ import { runMigrations, } from '@hirosystems/api-toolkit'; import * as path from 'path'; -import { SnpPgStore } from './snp-pg-store'; +import { StacksCorePgStore } from './stacks-core-pg-store'; export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); @@ -50,7 +50,7 @@ export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); * Connects and queries the Token Metadata Service's local postgres DB. */ export class PgStore extends BasePgStore { - readonly snp: SnpPgStore; + readonly snp: StacksCorePgStore; static async connect(opts?: { skipMigrations: boolean }): Promise { const pgConfig = { @@ -77,7 +77,7 @@ export class PgStore extends BasePgStore { constructor(sql: PgSqlClient) { super(sql); - this.snp = new SnpPgStore(this); + this.snp = new StacksCorePgStore(this); } async getSmartContract( diff --git a/src/pg/snp-pg-store.ts b/src/pg/stacks-core-pg-store.ts similarity index 82% rename from src/pg/snp-pg-store.ts rename to src/pg/stacks-core-pg-store.ts index 0199dc4..aeb9b46 100644 --- a/src/pg/snp-pg-store.ts +++ b/src/pg/stacks-core-pg-store.ts @@ -14,21 +14,21 @@ import { import { DbSmartContractInsert, DbTokenType, DbSmartContract, DbSipNumber } from './types'; import { dbSipNumberToDbTokenType } from '../token-processor/util/helpers'; import BigNumber from 'bignumber.js'; -import { SnpProcessedBlock, SnpProcessedEvent } from '../snp/snp-block-processor'; +import { + ProcessedStacksCoreBlock, + ProcessedStacksCoreEvent, +} from '../stacks-core/stacks-core-block-processor'; -export class SnpPgStore extends BasePgStoreModule { - async writeBlock(block: SnpProcessedBlock): Promise { +export class StacksCorePgStore extends BasePgStoreModule { + /** + * Writes a processed Stacks Core block to the database. + * @param block - The processed Stacks Core block to write. + */ + async writeBlock(block: ProcessedStacksCoreBlock): Promise { await this.sqlWriteTransaction(async sql => { - logger.info(`ChainhookPgStore apply block ${block.block_height} #${block.index_block_hash}`); - const time = stopwatch(); await this.applyTransactions(sql, block); await this.enqueueDynamicTokensDueForRefresh(); - await this.updateChainTipBlockHeight(block.block_height); - logger.info( - `ChainhookPgStore apply block ${block.block_height} #${ - block.index_block_hash - } finished in ${time.getElapsedSeconds()}s` - ); + await this.updateChainTipBlockHeight(block.blockHeight); }); } @@ -77,14 +77,14 @@ export class SnpPgStore extends BasePgStoreModule { async applyContractDeployment( sql: PgSqlClient, - contract: SnpProcessedEvent, - block: SnpProcessedBlock + contract: ProcessedStacksCoreEvent, + block: ProcessedStacksCoreBlock ) { await this.enqueueContract(sql, { principal: contract.event.principal, sip: contract.event.sip, - block_height: block.block_height, - index_block_hash: block.index_block_hash, + block_height: block.blockHeight, + index_block_hash: block.indexBlockHash, tx_id: contract.tx_id, tx_index: contract.tx_index, fungible_token_name: contract.event.fungible_token_name ?? 
null, @@ -126,9 +126,6 @@ export class SnpPgStore extends BasePgStoreModule { ON CONFLICT (smart_contract_id) WHERE token_id IS NULL DO UPDATE SET updated_at = NOW(), status = 'pending' `; - logger.info( - `ChainhookPgStore apply contract deploy ${contract.principal} (${contract.sip}) at block ${contract.block_height}` - ); } async updateChainTipBlockHeight(blockHeight: number): Promise { @@ -140,7 +137,7 @@ export class SnpPgStore extends BasePgStoreModule { return result[0].block_height; } - private async applyTransactions(sql: PgSqlClient, block: SnpProcessedBlock) { + private async applyTransactions(sql: PgSqlClient, block: ProcessedStacksCoreBlock) { for (const contract of block.contracts) await this.applyContractDeployment(sql, contract, block); for (const notification of block.notifications) @@ -164,16 +161,13 @@ export class SnpPgStore extends BasePgStoreModule { private async applyNotification( sql: PgSqlClient, - event: SnpProcessedEvent, - block: SnpProcessedBlock + event: ProcessedStacksCoreEvent, + block: ProcessedStacksCoreBlock ) { const contractResult = await sql<{ id: number }[]>` SELECT id FROM smart_contracts WHERE principal = ${event.event.contract_id} LIMIT 1 `; if (contractResult.count == 0) { - logger.warn( - `ChainhookPgStore found SIP-019 notification for non-existing token contract ${event.event.contract_id} at block ${block.block_height} #${block.index_block_hash}` - ); return; } const notification = event.event; @@ -200,7 +194,7 @@ export class SnpPgStore extends BasePgStoreModule { (token_id, update_mode, ttl, block_height, index_block_hash, tx_id, tx_index, event_index) ( SELECT id, ${notification.update_mode}, ${notification.ttl ?? null}, - ${block.block_height}, ${block.index_block_hash}, ${event.tx_id}, ${event.tx_index}, + ${block.blockHeight}, ${block.indexBlockHash}, ${event.tx_id}, ${event.tx_index}, ${event.event_index} FROM previous_modes WHERE update_mode <> 'frozen' @@ -211,18 +205,13 @@ export class SnpPgStore extends BasePgStoreModule { SET status = 'pending', updated_at = NOW() WHERE token_id IN (SELECT token_id FROM new_mode_inserts) `; - logger.info( - `ChainhookPgStore apply SIP-019 notification ${notification.contract_id} (${ - notification.token_ids ?? 'all' - }) at block ${block.block_height} #${block.index_block_hash}` - ); } private async applyFtSupplyChange( sql: PgSqlClient, contract: string, delta: BigNumber, - block: SnpProcessedBlock + block: ProcessedStacksCoreBlock ): Promise { await sql` UPDATE tokens @@ -230,9 +219,6 @@ export class SnpPgStore extends BasePgStoreModule { WHERE smart_contract_id = (SELECT id FROM smart_contracts WHERE principal = ${contract}) AND token_number = 1 `; - logger.info( - `ChainhookPgStore apply FT supply change for ${contract} (${delta}) at block ${block.block_height} #${block.index_block_hash}` - ); } private async rollBackContractDeployment( @@ -243,9 +229,6 @@ export class SnpPgStore extends BasePgStoreModule { await sql` DELETE FROM smart_contracts WHERE principal = ${contract.event.principal} `; - logger.info( - `ChainhookPgStore rollback contract ${contract.event.principal} at block ${cache.block.index}` - ); } private async rollBackNotification( @@ -259,11 +242,6 @@ export class SnpPgStore extends BasePgStoreModule { AND tx_index = ${notification.tx_index} AND event_index = ${notification.event_index} `; - logger.info( - `ChainhookPgStore rollback SIP-019 notification ${notification.event.contract_id} (${ - notification.event.token_ids ?? 
'all'
-      }) at block ${cache.block.index}`
-    );
   }
 
   private async enqueueDynamicTokensDueForRefresh(): Promise<void> {
@@ -297,9 +275,9 @@ export class SnpPgStore extends BasePgStoreModule {
 
   private async applyTokenMints(
     sql: PgSqlClient,
-    mints: SnpProcessedEvent<NftMintEvent>[],
+    mints: ProcessedStacksCoreEvent<NftMintEvent>[],
     tokenType: DbTokenType,
-    block: SnpProcessedBlock
+    block: ProcessedStacksCoreBlock
   ): Promise<void> {
     if (mints.length == 0) return;
     for await (const batch of batchIterate(mints, 500)) {
@@ -309,17 +287,12 @@ export class SnpPgStore extends BasePgStoreModule {
         // This makes sure we only keep the first occurrence.
         const tokenKey = `${m.event.contractId}-${m.event.tokenId}`;
         if (tokenValues.has(tokenKey)) continue;
-        logger.info(
-          `ChainhookPgStore apply ${tokenType.toUpperCase()} mint ${m.event.contractId} (${
-            m.event.tokenId
-          }) at block ${block.block_height} #${block.index_block_hash}`
-        );
         tokenValues.set(tokenKey, [
           m.event.contractId,
           tokenType,
           m.event.tokenId.toString(),
-          block.block_height,
-          block.index_block_hash,
+          block.blockHeight,
+          block.indexBlockHash,
           m.tx_id,
           m.tx_index,
         ]);
@@ -363,7 +336,7 @@ export class SnpPgStore extends BasePgStoreModule {
     for await (const batch of batchIterate(mints, 500)) {
       const values = batch.map(m => {
         logger.info(
-          `ChainhookPgStore rollback ${tokenType.toUpperCase()} mint ${m.event.contractId} (${
+          `StacksCorePgStore rollback ${tokenType.toUpperCase()} mint ${m.event.contractId} (${
            m.event.tokenId
          }) at block ${cache.block.index}`
        );
diff --git a/src/snp/schemas.ts b/src/stacks-core/schemas.ts
similarity index 50%
rename from src/snp/schemas.ts
rename to src/stacks-core/schemas.ts
index f72804a..27604f9 100644
--- a/src/snp/schemas.ts
+++ b/src/stacks-core/schemas.ts
@@ -1,12 +1,12 @@
 import { Static, Type } from '@sinclair/typebox';
 
-const SnpBaseEventSchema = Type.Object({
+const StacksCoreBaseEventSchema = Type.Object({
   txid: Type.String(),
   event_index: Type.Number(),
 });
 
-export const SnpSmartContractPrintEventSchema = Type.Composite([
-  SnpBaseEventSchema,
+export const StacksCoreContractEventSchema = Type.Composite([
+  StacksCoreBaseEventSchema,
   Type.Object({
     type: Type.Literal('contract_event'),
     contract_event: Type.Object({
@@ -16,10 +16,10 @@ export const SnpSmartContractPrintEventSchema = Type.Composite([
     }),
   }),
 ]);
-export type SnpSmartContractPrintEvent = Static<typeof SnpSmartContractPrintEventSchema>;
+export type StacksCoreContractEvent = Static<typeof StacksCoreContractEventSchema>;
 
-export const SnpNftMintEventSchema = Type.Composite([
-  SnpBaseEventSchema,
+export const StacksCoreNftMintEventSchema = Type.Composite([
+  StacksCoreBaseEventSchema,
   Type.Object({
     type: Type.Literal('nft_mint_event'),
     nft_mint_event: Type.Object({
@@ -29,10 +29,10 @@ export const SnpNftMintEventSchema = Type.Composite([
     }),
   }),
 ]);
-export type SnpNftMintEvent = Static<typeof SnpNftMintEventSchema>;
+export type StacksCoreNftMintEvent = Static<typeof StacksCoreNftMintEventSchema>;
 
-export const SnpNftBurnEventSchema = Type.Composite([
-  SnpBaseEventSchema,
+export const StacksCoreNftBurnEventSchema = Type.Composite([
+  StacksCoreBaseEventSchema,
   Type.Object({
     type: Type.Literal('nft_burn_event'),
     nft_burn_event: Type.Object({
@@ -42,10 +42,10 @@ export const SnpNftBurnEventSchema = Type.Composite([
     }),
   }),
 ]);
-export type SnpNftBurnEvent = Static<typeof SnpNftBurnEventSchema>;
+export type StacksCoreNftBurnEvent = Static<typeof StacksCoreNftBurnEventSchema>;
 
-export const SnpFtMintEventSchema = Type.Composite([
-  SnpBaseEventSchema,
+export const StacksCoreFtMintEventSchema = Type.Composite([
+  StacksCoreBaseEventSchema,
   Type.Object({
     type: Type.Literal('ft_mint_event'),
     ft_mint_event: Type.Object({
@@ -55,10 +55,10 @@ export const SnpFtMintEventSchema = Type.Composite([
     }),
   }),
 ]);
-export type SnpFtMintEvent = Static<typeof SnpFtMintEventSchema>;
+export type StacksCoreFtMintEvent = Static<typeof StacksCoreFtMintEventSchema>;
 
-export const SnpFtBurnEventSchema = Type.Composite([
-  SnpBaseEventSchema,
+export const StacksCoreFtBurnEventSchema = Type.Composite([
+  StacksCoreBaseEventSchema,
   Type.Object({
     type: Type.Literal('ft_burn_event'),
     ft_burn_event: Type.Object({
@@ -68,18 +68,18 @@ export const SnpFtBurnEventSchema = Type.Composite([
     }),
   }),
 ]);
-export type SnpFtBurnEvent = Static<typeof SnpFtBurnEventSchema>;
+export type StacksCoreFtBurnEvent = Static<typeof StacksCoreFtBurnEventSchema>;
 
-export const SnpEventSchema = Type.Union([
-  SnpSmartContractPrintEventSchema,
-  SnpNftMintEventSchema,
-  SnpNftBurnEventSchema,
-  SnpFtMintEventSchema,
-  SnpFtBurnEventSchema,
+export const StacksCoreEventSchema = Type.Union([
+  StacksCoreContractEventSchema,
+  StacksCoreNftMintEventSchema,
+  StacksCoreNftBurnEventSchema,
+  StacksCoreFtMintEventSchema,
+  StacksCoreFtBurnEventSchema,
 ]);
-export type SnpEvent = Static<typeof SnpEventSchema>;
+export type StacksCoreEvent = Static<typeof StacksCoreEventSchema>;
 
-export const SnpTransactionSchema = Type.Object({
+export const StacksCoreTransactionSchema = Type.Object({
   raw_tx: Type.String(),
   status: Type.Union([
     Type.Literal('success'),
@@ -90,13 +90,13 @@ export const SnpTransactionSchema = Type.Object({
   tx_index: Type.Number(),
   contract_interface: Type.Union([Type.Null(), Type.String()]),
 });
-export type SnpTransaction = Static<typeof SnpTransactionSchema>;
+export type StacksCoreTransaction = Static<typeof StacksCoreTransactionSchema>;
 
-export const SnpBlockSchema = Type.Object({
+export const StacksCoreBlockSchema = Type.Object({
   block_height: Type.Number(),
   index_block_hash: Type.String(),
   parent_index_block_hash: Type.String(),
-  events: Type.Array(SnpEventSchema),
-  transactions: Type.Array(SnpTransactionSchema),
+  events: Type.Array(StacksCoreEventSchema),
+  transactions: Type.Array(StacksCoreTransactionSchema),
 });
-export type SnpBlock = Static<typeof SnpBlockSchema>;
+export type StacksCoreBlock = Static<typeof StacksCoreBlockSchema>;
diff --git a/src/snp/snp-event-stream.ts b/src/stacks-core/snp-event-stream.ts
similarity index 89%
rename from src/snp/snp-event-stream.ts
rename to src/stacks-core/snp-event-stream.ts
index c7f2abe..6dfbeab 100644
--- a/src/snp/snp-event-stream.ts
+++ b/src/stacks-core/snp-event-stream.ts
@@ -2,14 +2,14 @@ import { SERVER_VERSION } from '@hirosystems/api-toolkit';
 import { logger as defaultLogger } from '@hirosystems/api-toolkit';
 import { StacksEventStream, StacksEventStreamType } from '@hirosystems/salt-n-pepper-client';
 import { EventEmitter } from 'node:events';
-import { SnpBlockSchema } from './schemas';
+import { StacksCoreBlockSchema } from './schemas';
 import { TypeCompiler } from '@sinclair/typebox/compiler';
-import { SnpBlockProcessor } from './snp-block-processor';
+import { StacksCoreBlockProcessor } from './stacks-core-block-processor';
 
-const SnpBlockCType = TypeCompiler.Compile(SnpBlockSchema);
+const SnpBlockCType = TypeCompiler.Compile(StacksCoreBlockSchema);
 
 export class SnpEventStreamHandler {
-  private readonly blockProcessor: SnpBlockProcessor;
+  private readonly blockProcessor: StacksCoreBlockProcessor;
   private readonly logger = defaultLogger.child({ name: 'SnpEventStreamHandler' });
   private readonly snpClientStream: StacksEventStream;
   private readonly redisUrl: string;
@@ -23,7 +23,7 @@ export class SnpEventStreamHandler {
     redisUrl: string;
     redisStreamPrefix: string;
     lastMessageId: string;
-    blockProcessor: SnpBlockProcessor;
+    blockProcessor: StacksCoreBlockProcessor;
   }) {
     this.blockProcessor = opts.blockProcessor;
     this.redisUrl = opts.redisUrl;
diff --git a/src/snp/snp-block-processor.ts 
b/src/stacks-core/stacks-core-block-processor.ts similarity index 51% rename from src/snp/snp-block-processor.ts rename to src/stacks-core/stacks-core-block-processor.ts index 119b376..e459a44 100644 --- a/src/snp/snp-block-processor.ts +++ b/src/stacks-core/stacks-core-block-processor.ts @@ -1,11 +1,11 @@ import BigNumber from 'bignumber.js'; import { - SnpBlock, - SnpFtBurnEvent, - SnpFtMintEvent, - SnpNftMintEvent, - SnpSmartContractPrintEvent, - SnpTransaction, + StacksCoreBlock, + StacksCoreFtBurnEvent, + StacksCoreFtMintEvent, + StacksCoreNftMintEvent, + StacksCoreContractEvent, + StacksCoreTransaction, } from './schemas'; import { getContractLogMetadataUpdateNotification, @@ -24,38 +24,43 @@ import { decodeTransaction, TxPayloadTypeID, } from '@hirosystems/stacks-encoding-native-js'; -import { SnpPgStore } from '../pg/snp-pg-store'; +import { StacksCorePgStore } from '../pg/stacks-core-pg-store'; +import { logger, stopwatch } from '@hirosystems/api-toolkit'; -export type SnpProcessedEvent = { +export type ProcessedStacksCoreEvent = { event: T; tx_id: string; tx_index: number; event_index?: number; }; -export type SnpIndexedTransaction = { - tx: SnpTransaction; +export type ProcessedStacksCoreTransaction = { + tx: StacksCoreTransaction; decoded: DecodedTxResult; }; -export type SnpProcessedBlock = { - block_height: number; - index_block_hash: string; - transactions: Map; - contracts: SnpProcessedEvent[]; - notifications: SnpProcessedEvent[]; - sftMints: SnpProcessedEvent[]; - nftMints: SnpProcessedEvent[]; +export type ProcessedStacksCoreBlock = { + blockHeight: number; + indexBlockHash: string; + transactions: Map; + contracts: ProcessedStacksCoreEvent[]; + notifications: ProcessedStacksCoreEvent[]; + sftMints: ProcessedStacksCoreEvent[]; + nftMints: ProcessedStacksCoreEvent[]; ftSupplyDelta: Map; }; -export class SnpBlockProcessor { - private readonly db: SnpPgStore; +/** + * Processes a Stacks Core block and writes contract deployments, token metadata updates, token + * supply deltas, and token mints to the database. + */ +export class StacksCoreBlockProcessor { + private readonly db: StacksCorePgStore; - private block: SnpProcessedBlock = { - block_height: 0, - index_block_hash: '', - transactions: new Map(), + private block: ProcessedStacksCoreBlock = { + blockHeight: 0, + indexBlockHash: '', + transactions: new Map(), contracts: [], notifications: [], sftMints: [], @@ -63,18 +68,23 @@ export class SnpBlockProcessor { ftSupplyDelta: new Map(), }; - constructor(args: { db: SnpPgStore }) { + constructor(args: { db: StacksCorePgStore }) { this.db = args.db; } - async process(block: SnpBlock): Promise { - this.block.block_height = block.block_height; - this.block.index_block_hash = block.index_block_hash; + async process(block: StacksCoreBlock): Promise { + const time = stopwatch(); + this.clear(); + logger.info( + `${this.constructor.name} processing block ${block.block_height} #${block.index_block_hash}` + ); + this.block.blockHeight = block.block_height; + this.block.indexBlockHash = block.index_block_hash; for (const transaction of block.transactions) { if (transaction.status !== 'success') continue; - const indexedTransaction: SnpIndexedTransaction = { + const indexedTransaction: ProcessedStacksCoreTransaction = { tx: transaction, decoded: decodeTransaction(transaction.raw_tx.substring(2)), }; @@ -83,7 +93,6 @@ export class SnpBlockProcessor { // Check for smart contract deployments. 
this.processSmartContract(indexedTransaction); } - // Check for token metadata updates and token supply deltas. for (const event of block.events) { const transaction = this.block.transactions.get(event.txid); @@ -109,9 +118,28 @@ export class SnpBlockProcessor { } await this.db.writeBlock(this.block); + this.clear(); + logger.info( + `${this.constructor.name} processed block ${block.block_height} #${ + block.index_block_hash + } in ${time.getElapsedSeconds()}s` + ); + } + + private clear() { + this.block = { + blockHeight: 0, + indexBlockHash: '', + transactions: new Map(), + contracts: [], + notifications: [], + sftMints: [], + nftMints: [], + ftSupplyDelta: new Map(), + }; } - private processSmartContract(transaction: SnpIndexedTransaction) { + private processSmartContract(transaction: ProcessedStacksCoreTransaction) { if (transaction.tx.contract_interface == null) return; // Parse the included ABI to check if it's a token contract. @@ -125,9 +153,10 @@ export class SnpBlockProcessor { payload.type_id === TxPayloadTypeID.SmartContract || payload.type_id === TxPayloadTypeID.VersionedSmartContract ) { + const principal = `${sender}.${payload.contract_name}`; this.block.contracts.push({ event: { - principal: `${sender}.${payload.contract_name}`, + principal, sip, fungible_token_name: abi.fungible_tokens[0]?.name, non_fungible_token_name: abi.non_fungible_tokens[0]?.name, @@ -135,12 +164,20 @@ export class SnpBlockProcessor { tx_id: transaction.tx.txid, tx_index: transaction.tx.tx_index, }); + logger.info( + { + contract: principal, + sip, + txid: transaction.tx.txid, + }, + `${this.constructor.name} found contract ${principal} (${sip})` + ); } } private processContractEvent( - transaction: SnpIndexedTransaction, - event: SnpSmartContractPrintEvent + transaction: ProcessedStacksCoreTransaction, + event: StacksCoreContractEvent ) { const sender = transaction.decoded.auth.origin_condition.signer.address; const notification = getContractLogMetadataUpdateNotification(sender, event); @@ -151,6 +188,13 @@ export class SnpBlockProcessor { tx_index: transaction.tx.tx_index, event_index: event.event_index, }); + logger.info( + { + contract: notification.contract_id, + txid: event.txid, + }, + `${this.constructor.name} found metadata update notification for ${notification.contract_id}` + ); return; } const mint = getContractLogSftMintEvent(event); @@ -161,35 +205,73 @@ export class SnpBlockProcessor { tx_index: transaction.tx.tx_index, event_index: event.event_index, }); + logger.info( + { + contract: mint.contractId, + txid: event.txid, + amount: mint.amount, + }, + `${this.constructor.name} found SFT mint for ${mint.contractId} #${mint.tokenId}` + ); return; } } - private processFtMintEvent(event: SnpFtMintEvent) { + private processFtMintEvent(event: StacksCoreFtMintEvent) { const principal = event.ft_mint_event.asset_identifier.split('::')[0]; const previous = this.block.ftSupplyDelta.get(principal) ?? BigNumber(0); const amount = BigNumber(event.ft_mint_event.amount); this.block.ftSupplyDelta.set(principal, previous.plus(amount)); + logger.info( + { + contract: principal, + amount: amount.toString(), + txid: event.txid, + }, + `${this.constructor.name} found FT mint for ${principal}` + ); } - private processFtBurnEvent(event: SnpFtBurnEvent) { + private processFtBurnEvent(event: StacksCoreFtBurnEvent) { const principal = event.ft_burn_event.asset_identifier.split('::')[0]; const previous = this.block.ftSupplyDelta.get(principal) ?? 
BigNumber(0); const amount = BigNumber(event.ft_burn_event.amount); this.block.ftSupplyDelta.set(principal, previous.minus(amount)); + logger.info( + { + contract: principal, + amount: amount.toString(), + txid: event.txid, + }, + `${this.constructor.name} found FT burn for ${principal}` + ); } - private processNftMintEvent(transaction: SnpIndexedTransaction, event: SnpNftMintEvent) { + private processNftMintEvent( + transaction: ProcessedStacksCoreTransaction, + event: StacksCoreNftMintEvent + ) { const value = decodeClarityValue(event.nft_mint_event.raw_value); - if (value.type_id === ClarityTypeID.UInt) + if (value.type_id === ClarityTypeID.UInt) { + const principal = event.nft_mint_event.asset_identifier.split('::')[0]; + const tokenId = BigInt(value.value); this.block.nftMints.push({ event: { - contractId: event.nft_mint_event.asset_identifier.split('::')[0], - tokenId: BigInt(value.value), + contractId: principal, + tokenId, }, tx_id: event.txid, tx_index: transaction.tx.tx_index, event_index: event.event_index, }); + logger.info( + { + contract: principal, + tokenId: tokenId.toString(), + txid: event.txid, + }, + `${this.constructor.name} found NFT mint for ${principal} #${tokenId}` + ); + } } } diff --git a/src/token-processor/util/sip-validation.ts b/src/token-processor/util/sip-validation.ts index 2ad102f..9480622 100644 --- a/src/token-processor/util/sip-validation.ts +++ b/src/token-processor/util/sip-validation.ts @@ -8,7 +8,7 @@ import { decodeClarityValue, } from '@hirosystems/stacks-encoding-native-js'; import { DbSipNumber } from '../../pg/types'; -import { SnpSmartContractPrintEvent } from '../../snp/schemas'; +import { StacksCoreContractEvent } from '../../stacks-core/schemas'; const FtTraitFunctions: ClarityAbiFunction[] = [ { @@ -307,7 +307,7 @@ export type TokenMetadataUpdateNotification = { */ export function getContractLogMetadataUpdateNotification( sender: string, - event: SnpSmartContractPrintEvent + event: StacksCoreContractEvent ): TokenMetadataUpdateNotification | undefined { const log = event.contract_event; try { @@ -381,7 +381,7 @@ export type SftMintEvent = NftMintEvent & { }; export function getContractLogSftMintEvent( - event: SnpSmartContractPrintEvent + event: StacksCoreContractEvent ): SftMintEvent | undefined { const log = event.contract_event; try { From aa1e8152542897f018e9f22d7f1edb0981f0701c Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 9 Jan 2026 11:47:07 -0600 Subject: [PATCH 06/19] reorg handling --- migrations/1661125881755_blocks.ts | 17 ++ migrations/1670264425574_smart-contracts.ts | 3 + migrations/1670265062169_tokens.ts | 4 +- migrations/1671125881755_chain-tip.ts | 27 --- migrations/1767813638023_ft-supply-delta.ts | 28 +++ src/pg/stacks-core-pg-store.ts | 174 ++++++++---------- src/pg/types.ts | 5 + .../stacks-core-block-processor.ts | 86 +++++---- 8 files changed, 185 insertions(+), 159 deletions(-) create mode 100644 migrations/1661125881755_blocks.ts delete mode 100644 migrations/1671125881755_chain-tip.ts create mode 100644 migrations/1767813638023_ft-supply-delta.ts diff --git a/migrations/1661125881755_blocks.ts b/migrations/1661125881755_blocks.ts new file mode 100644 index 0000000..5ef3d55 --- /dev/null +++ b/migrations/1661125881755_blocks.ts @@ -0,0 +1,17 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + 
pgm.createTable('blocks', { + index_block_hash: { + type: 'text', + primaryKey: true, + }, + block_height: { + type: 'int', + notNull: true, + }, + }); +} diff --git a/migrations/1670264425574_smart-contracts.ts b/migrations/1670264425574_smart-contracts.ts index c1ae92f..346fed7 100644 --- a/migrations/1670264425574_smart-contracts.ts +++ b/migrations/1670264425574_smart-contracts.ts @@ -35,6 +35,8 @@ export function up(pgm: MigrationBuilder): void { index_block_hash: { type: 'text', notNull: true, + references: 'blocks', + onDelete: 'CASCADE', }, tx_id: { type: 'text', @@ -54,4 +56,5 @@ export function up(pgm: MigrationBuilder): void { }, }); pgm.createIndex('smart_contracts', [{ name: 'block_height', sort: 'DESC' }]); + pgm.createIndex('smart_contracts', ['index_block_hash']); } diff --git a/migrations/1670265062169_tokens.ts b/migrations/1670265062169_tokens.ts index 02c70db..1795553 100644 --- a/migrations/1670265062169_tokens.ts +++ b/migrations/1670265062169_tokens.ts @@ -46,6 +46,8 @@ export function up(pgm: MigrationBuilder): void { index_block_hash: { type: 'text', notNull: true, + references: 'blocks', + onDelete: 'CASCADE', }, tx_id: { type: 'text', @@ -76,5 +78,5 @@ export function up(pgm: MigrationBuilder): void { pgm.createIndex('tokens', ['symbol']); pgm.createIndex('tokens', ['type', 'LOWER(name)'], { where: "type = 'ft'" }); pgm.createIndex('tokens', ['type', 'LOWER(symbol)'], { where: "type = 'ft'" }); - pgm.createIndex('tokens', ['type']); + pgm.createIndex('tokens', ['index_block_hash']); } diff --git a/migrations/1671125881755_chain-tip.ts b/migrations/1671125881755_chain-tip.ts deleted file mode 100644 index 2f52fb4..0000000 --- a/migrations/1671125881755_chain-tip.ts +++ /dev/null @@ -1,27 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; - -export const shorthands: ColumnDefinitions | undefined = undefined; - -export function up(pgm: MigrationBuilder): void { - pgm.createTable('chain_tip', { - id: { - type: 'bool', - primaryKey: true, - default: true, - }, - block_height: { - type: 'int', - notNull: true, - default: 1, - }, - }); - // Ensure only a single row can exist - pgm.addConstraint('chain_tip', 'chain_tip_one_row', 'CHECK(id)'); - // Create the single row - pgm.sql('INSERT INTO chain_tip VALUES(DEFAULT)'); -} - -export function down(pgm: MigrationBuilder): void { - pgm.dropTable('chain_tip'); -} diff --git a/migrations/1767813638023_ft-supply-delta.ts b/migrations/1767813638023_ft-supply-delta.ts new file mode 100644 index 0000000..ed31c39 --- /dev/null +++ b/migrations/1767813638023_ft-supply-delta.ts @@ -0,0 +1,28 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import { MigrationBuilder, ColumnDefinitions } from 'node-pg-migrate'; + +export const shorthands: ColumnDefinitions | undefined = undefined; + +export function up(pgm: MigrationBuilder): void { + pgm.createTable('ft_supply_deltas', { + token_id: { + type: 'int', + references: 'tokens', + onDelete: 'CASCADE', + }, + block_height: { + type: 'int', + notNull: true, + }, + index_block_hash: { + type: 'text', + notNull: true, + references: 'blocks', + onDelete: 'CASCADE', + }, + delta: { + type: 'numeric', + notNull: true, + }, + }); +} diff --git a/src/pg/stacks-core-pg-store.ts b/src/pg/stacks-core-pg-store.ts index aeb9b46..d2a7b75 100644 --- a/src/pg/stacks-core-pg-store.ts +++ b/src/pg/stacks-core-pg-store.ts @@ -1,17 +1,17 @@ -import { - BasePgStoreModule, - PgSqlClient, - batchIterate, - 
logger,
-  stopwatch,
-} from '@hirosystems/api-toolkit';
+import { BasePgStoreModule, PgSqlClient, batchIterate, logger } from '@hirosystems/api-toolkit';
 import { ENV } from '../env';
 import {
   NftMintEvent,
   SmartContractDeployment,
   TokenMetadataUpdateNotification,
 } from '../token-processor/util/sip-validation';
-import { DbSmartContractInsert, DbTokenType, DbSmartContract, DbSipNumber } from './types';
+import {
+  DbSmartContractInsert,
+  DbTokenType,
+  DbSmartContract,
+  DbSipNumber,
+  DbChainTip,
+} from './types';
 import { dbSipNumberToDbTokenType } from '../token-processor/util/helpers';
 import BigNumber from 'bignumber.js';
 import {
@@ -26,12 +26,65 @@ export class StacksCorePgStore extends BasePgStoreModule {
    */
   async writeBlock(block: ProcessedStacksCoreBlock): Promise<void> {
     await this.sqlWriteTransaction(async sql => {
-      await this.applyTransactions(sql, block);
+      await this.insertBlock(sql, block);
+      for (const contract of block.contracts)
+        await this.applyContractDeployment(sql, contract, block);
+      for (const notification of block.notifications)
+        await this.applyNotification(sql, notification, block);
+      await this.applyTokenMints(sql, block.nftMints, DbTokenType.nft, block);
+      await this.applyTokenMints(sql, block.sftMints, DbTokenType.sft, block);
+      for (const [contract, delta] of block.ftSupplyDelta)
+        await this.applyFtSupplyChange(sql, contract, delta, block);
       await this.enqueueDynamicTokensDueForRefresh();
-      await this.updateChainTipBlockHeight(block.blockHeight);
     });
   }
 
+  async insertBlock(sql: PgSqlClient, block: ProcessedStacksCoreBlock): Promise<void> {
+    const values = {
+      block_height: block.blockHeight,
+      index_block_hash: block.indexBlockHash,
+    };
+    await sql`INSERT INTO blocks ${sql(values)}`;
+  }
+
+  async getChainTip(sql: PgSqlClient): Promise<DbChainTip | null> {
+    const result = await sql<DbChainTip[]>`
+      SELECT index_block_hash, block_height
+      FROM blocks
+      ORDER BY block_height DESC
+      LIMIT 1
+    `;
+    return result.count > 0 ? result[0] : null;
+  }
+
+  /**
+   * Reverts the database to a new chain tip after a re-org.
+   * @param sql - The SQL client to use.
+   * @param newChainTip - The new chain tip to revert to.
+   */
+  async revertToChainTip(sql: PgSqlClient, newChainTip: DbChainTip): Promise<void> {
+    // Before deleting blocks, we need to undo all FT supply deltas for the blocks we're about to
+    // delete.
+    await sql`
+      WITH ft_supply_deltas AS (
+        SELECT token_id, SUM(delta) AS delta
+        FROM ft_supply_deltas
+        WHERE block_height > ${newChainTip.block_height}
+        GROUP BY token_id
+      )
+      UPDATE tokens
+      SET total_supply = total_supply - (SELECT delta FROM ft_supply_deltas WHERE token_id = tokens.id),
+        updated_at = NOW()
+      WHERE id IN (SELECT token_id FROM ft_supply_deltas)
+    `;
+    // Finally, delete all blocks with a height greater than the chain tip's block height. This will
+    // cascade delete all tokens, smart contracts, FT supply deltas, update notifications and jobs
+    // associated with those blocks.
+    await sql`
+      DELETE FROM blocks WHERE block_height > ${newChainTip.block_height}
+    `;
+  }
+
   /**
    * Inserts new tokens and new token queue entries until `token_count` items are created, usually
    * used when processing an NFT contract that has just been deployed.
@@ -128,37 +181,6 @@ export class StacksCorePgStore extends BasePgStoreModule { `; } - async updateChainTipBlockHeight(blockHeight: number): Promise { - await this.sql`UPDATE chain_tip SET block_height = ${blockHeight}`; - } - - private async getLastIngestedBlockHeight(): Promise { - const result = await this.sql<{ block_height: number }[]>`SELECT block_height FROM chain_tip`; - return result[0].block_height; - } - - private async applyTransactions(sql: PgSqlClient, block: ProcessedStacksCoreBlock) { - for (const contract of block.contracts) - await this.applyContractDeployment(sql, contract, block); - for (const notification of block.notifications) - await this.applyNotification(sql, notification, block); - await this.applyTokenMints(sql, block.nftMints, DbTokenType.nft, block); - await this.applyTokenMints(sql, block.sftMints, DbTokenType.sft, block); - for (const [contract, delta] of block.ftSupplyDelta) - await this.applyFtSupplyChange(sql, contract, delta, block); - } - - // private async rollBackTransactions(sql: PgSqlClient, cache: BlockCache) { - // for (const contract of cache.contracts) - // await this.rollBackContractDeployment(sql, contract, cache); - // for (const notification of cache.notifications) - // await this.rollBackNotification(sql, notification, cache); - // await this.rollBackTokenMints(sql, cache.nftMints, DbTokenType.nft, cache); - // await this.rollBackTokenMints(sql, cache.sftMints, DbTokenType.sft, cache); - // for (const [contract, delta] of cache.ftSupplyDelta) - // await this.applyFtSupplyChange(sql, contract, delta.negated(), cache); - // } - private async applyNotification( sql: PgSqlClient, event: ProcessedStacksCoreEvent, @@ -214,33 +236,24 @@ export class StacksCorePgStore extends BasePgStoreModule { block: ProcessedStacksCoreBlock ): Promise { await sql` + WITH smart_contract_id AS ( + SELECT id FROM smart_contracts + WHERE principal = ${contract} + ), + token_id AS ( + SELECT id FROM tokens + WHERE smart_contract_id = (SELECT id FROM smart_contract_id) + AND token_number = 1 + ), + delta_insert AS ( + INSERT INTO ft_supply_deltas (token_id, block_height, index_block_hash, delta) + VALUES ( + (SELECT id FROM token_id), ${block.blockHeight}, ${block.indexBlockHash}, ${delta} + ) + ) UPDATE tokens SET total_supply = total_supply + ${delta}, updated_at = NOW() - WHERE smart_contract_id = (SELECT id FROM smart_contracts WHERE principal = ${contract}) - AND token_number = 1 - `; - } - - private async rollBackContractDeployment( - sql: PgSqlClient, - contract: CachedEvent, - cache: BlockCache - ): Promise { - await sql` - DELETE FROM smart_contracts WHERE principal = ${contract.event.principal} - `; - } - - private async rollBackNotification( - sql: PgSqlClient, - notification: CachedEvent, - cache: BlockCache - ): Promise { - await sql` - DELETE FROM update_notifications - WHERE block_height = ${cache.block.index} - AND tx_index = ${notification.tx_index} - AND event_index = ${notification.event_index} + WHERE id = (SELECT id FROM token_id) `; } @@ -325,33 +338,4 @@ export class StacksCorePgStore extends BasePgStoreModule { `; } } - - private async rollBackTokenMints( - sql: PgSqlClient, - mints: CachedEvent[], - tokenType: DbTokenType, - cache: BlockCache - ): Promise { - if (mints.length == 0) return; - for await (const batch of batchIterate(mints, 500)) { - const values = batch.map(m => { - logger.info( - `StacksCorePgStore rollback ${tokenType.toUpperCase()} mint ${m.event.contractId} (${ - m.event.tokenId - }) at block ${cache.block.index}` - ); - 
return [m.event.contractId, m.event.tokenId.toString()]; - }); - await sql` - WITH delete_values (principal, token_number) AS (VALUES ${sql(values)}) - DELETE FROM tokens WHERE id IN ( - SELECT t.id - FROM delete_values AS d - INNER JOIN smart_contracts AS s ON s.principal = d.principal::text - INNER JOIN tokens AS t - ON t.smart_contract_id = s.id AND t.token_number = d.token_number::bigint - ) - `; - } - } } diff --git a/src/pg/types.ts b/src/pg/types.ts index 06eb70f..c57da04 100644 --- a/src/pg/types.ts +++ b/src/pg/types.ts @@ -1,6 +1,11 @@ import { PgJsonb, PgNumeric, PgSqlQuery } from '@hirosystems/api-toolkit'; import { FtOrderBy, Order } from '../api/schemas'; +export type DbChainTip = { + index_block_hash: string; + block_height: number; +}; + export enum DbSipNumber { /** Non-Fungible Tokens */ sip009 = 'sip-009', diff --git a/src/stacks-core/stacks-core-block-processor.ts b/src/stacks-core/stacks-core-block-processor.ts index e459a44..0f6e3c7 100644 --- a/src/stacks-core/stacks-core-block-processor.ts +++ b/src/stacks-core/stacks-core-block-processor.ts @@ -78,49 +78,63 @@ export class StacksCoreBlockProcessor { logger.info( `${this.constructor.name} processing block ${block.block_height} #${block.index_block_hash}` ); - this.block.blockHeight = block.block_height; - this.block.indexBlockHash = block.index_block_hash; - for (const transaction of block.transactions) { - if (transaction.status !== 'success') continue; + await this.db.sqlWriteTransaction(async sql => { + // Check if this block represents a re-org. Revert to its parent's chain tip if it does. + const chainTip = await this.db.getChainTip(sql); + if (chainTip && chainTip.index_block_hash !== block.parent_index_block_hash) { + logger.info( + `${this.constructor.name} detected re-org, reverting to chain tip at parent block ${ + block.block_height - 1 + } ${block.parent_index_block_hash}` + ); + await this.db.revertToChainTip(sql, chainTip); + } - const indexedTransaction: ProcessedStacksCoreTransaction = { - tx: transaction, - decoded: decodeTransaction(transaction.raw_tx.substring(2)), - }; - this.block.transactions.set(transaction.txid, indexedTransaction); + // Process the block. + this.block.blockHeight = block.block_height; + this.block.indexBlockHash = block.index_block_hash; + for (const transaction of block.transactions) { + if (transaction.status !== 'success') continue; - // Check for smart contract deployments. - this.processSmartContract(indexedTransaction); - } - // Check for token metadata updates and token supply deltas. - for (const event of block.events) { - const transaction = this.block.transactions.get(event.txid); - if (!transaction) continue; - switch (event.type) { - case 'contract_event': - this.processContractEvent(transaction, event); - break; - case 'ft_mint_event': - this.processFtMintEvent(event); - break; - case 'ft_burn_event': - this.processFtBurnEvent(event); - break; - case 'nft_mint_event': - this.processNftMintEvent(transaction, event); - break; - case 'nft_burn_event': - // Burned NFTs still have their metadata in the database, so we don't need to do anything - // here. - break; + const indexedTransaction: ProcessedStacksCoreTransaction = { + tx: transaction, + decoded: decodeTransaction(transaction.raw_tx.substring(2)), + }; + this.block.transactions.set(transaction.txid, indexedTransaction); + + // Check for smart contract deployments. + this.processSmartContract(indexedTransaction); + } + // Check for token metadata updates and token supply deltas. 
+ for (const event of block.events) { + const transaction = this.block.transactions.get(event.txid); + if (!transaction) continue; + switch (event.type) { + case 'contract_event': + this.processContractEvent(transaction, event); + break; + case 'ft_mint_event': + this.processFtMintEvent(event); + break; + case 'ft_burn_event': + this.processFtBurnEvent(event); + break; + case 'nft_mint_event': + this.processNftMintEvent(transaction, event); + break; + case 'nft_burn_event': + // Burned NFTs still have their metadata in the database, so we don't need to do anything + // here. + break; + } } - } - await this.db.writeBlock(this.block); + await this.db.writeBlock(this.block); + }); this.clear(); logger.info( - `${this.constructor.name} processed block ${block.block_height} #${ + `${this.constructor.name} processed block ${block.block_height} ${ block.index_block_hash } in ${time.getElapsedSeconds()}s` ); From c5ae993341e85731a58b561a8f92cd9614f24f16 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 9 Jan 2026 17:17:05 -0600 Subject: [PATCH 07/19] build errors --- src/admin-rpc/init.ts | 2 +- src/index.ts | 12 ++++++++---- src/pg/pg-store.ts | 4 ++-- src/stacks-core/snp-event-stream.ts | 19 +++++++++++++++++++ .../queue/job/process-smart-contract-job.ts | 3 +-- 5 files changed, 31 insertions(+), 9 deletions(-) diff --git a/src/admin-rpc/init.ts b/src/admin-rpc/init.ts index 1bb4258..620a4af 100644 --- a/src/admin-rpc/init.ts +++ b/src/admin-rpc/init.ts @@ -180,7 +180,7 @@ export const AdminApi: FastifyPluginCallback, Server, TypeB // Enqueue contract for processing. await fastify.db.sqlWriteTransaction(async sql => { - await fastify.db.chainhook.enqueueContract(sql, { + await fastify.db.core.enqueueContract(sql, { block_height: contract.block_height, index_block_hash: block.index_block_hash, principal: contract.contract_id, diff --git a/src/index.ts b/src/index.ts index eaf8ae7..8ba9ad3 100644 --- a/src/index.ts +++ b/src/index.ts @@ -6,7 +6,7 @@ import { ENV } from './env'; import { buildAdminRpcServer } from './admin-rpc/init'; import { isProdEnv } from './api/util/helpers'; import { buildProfilerServer, logger, registerShutdownConfig } from '@hirosystems/api-toolkit'; -import { closeChainhookServer, startChainhookServer } from './chainhook/server'; +import { buildSnpEventStreamHandler } from './stacks-core/snp-event-stream'; /** * Initializes background services. Only for `default` and `writeonly` run modes. @@ -25,12 +25,16 @@ async function initBackgroundServices(db: PgStore) { }); if (ENV.JOB_QUEUE_AUTO_START) jobQueue.start(); - const server = await startChainhookServer({ db }); + const snpEventStreamHandler = buildSnpEventStreamHandler({ + redisUrl: ENV.SNP_REDIS_URL, + redisStreamPrefix: ENV.SNP_REDIS_STREAM_KEY_PREFIX, + db: db.core, + }); registerShutdownConfig({ - name: 'Chainhook Server', + name: 'SNP Event Stream Handler', forceKillable: false, handler: async () => { - await closeChainhookServer(server); + await snpEventStreamHandler.stop(); }, }); diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index 69eb1e3..d695c1e 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -50,7 +50,7 @@ export const MIGRATIONS_DIR = path.join(__dirname, '../../migrations'); * Connects and queries the Token Metadata Service's local postgres DB. 
*/ export class PgStore extends BasePgStore { - readonly snp: StacksCorePgStore; + readonly core: StacksCorePgStore; static async connect(opts?: { skipMigrations: boolean }): Promise { const pgConfig = { @@ -77,7 +77,7 @@ export class PgStore extends BasePgStore { constructor(sql: PgSqlClient) { super(sql); - this.snp = new StacksCorePgStore(this); + this.core = new StacksCorePgStore(this); } async getSmartContract( diff --git a/src/stacks-core/snp-event-stream.ts b/src/stacks-core/snp-event-stream.ts index 6dfbeab..c0843cf 100644 --- a/src/stacks-core/snp-event-stream.ts +++ b/src/stacks-core/snp-event-stream.ts @@ -5,9 +5,14 @@ import { EventEmitter } from 'node:events'; import { StacksCoreBlockSchema } from './schemas'; import { TypeCompiler } from '@sinclair/typebox/compiler'; import { StacksCoreBlockProcessor } from './stacks-core-block-processor'; +import { StacksCorePgStore } from '../pg/stacks-core-pg-store'; const SnpBlockCType = TypeCompiler.Compile(StacksCoreBlockSchema); +/** + * Handles the SNP event stream and processes Stacks Core blocks. + * This is used to index the Stacks Core blockchain and write blocks to the database. + */ export class SnpEventStreamHandler { private readonly blockProcessor: StacksCoreBlockProcessor; private readonly logger = defaultLogger.child({ name: 'SnpEventStreamHandler' }); @@ -71,3 +76,17 @@ export class SnpEventStreamHandler { await this.snpClientStream.stop(); } } + +export function buildSnpEventStreamHandler(opts: { + redisUrl: string; + redisStreamPrefix: string; + db: StacksCorePgStore; +}) { + const blockProcessor = new StacksCoreBlockProcessor({ db: opts.db }); + return new SnpEventStreamHandler({ + redisUrl: opts.redisUrl, + redisStreamPrefix: opts.redisStreamPrefix, + lastMessageId: '0', + blockProcessor, + }); +} diff --git a/src/token-processor/queue/job/process-smart-contract-job.ts b/src/token-processor/queue/job/process-smart-contract-job.ts index 1aeab0f..cc28cb0 100644 --- a/src/token-processor/queue/job/process-smart-contract-job.ts +++ b/src/token-processor/queue/job/process-smart-contract-job.ts @@ -2,7 +2,6 @@ import { ENV } from '../../../env'; import { DbSipNumber, DbSmartContract } from '../../../pg/types'; import { Job } from './job'; import { StacksNodeRpcClient } from '../../stacks-node/stacks-node-rpc-client'; -import { dbSipNumberToDbTokenType } from '../../util/helpers'; import { logger } from '@hirosystems/api-toolkit'; /** @@ -74,7 +73,7 @@ export class ProcessSmartContractJob extends Job { `ProcessSmartContractJob enqueueing ${tokenCount} tokens for ${this.description()}` ); await this.db.updateSmartContractTokenCount({ id: contract.id, count: tokenCount }); - await this.db.chainhook.insertAndEnqueueSequentialTokens(sql, { + await this.db.core.insertAndEnqueueSequentialTokens(sql, { smart_contract: contract, token_count: tokenCount, }); From 3da369522ae73e09944f088867f7249c2e65b80c Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 9 Jan 2026 17:29:16 -0600 Subject: [PATCH 08/19] status --- src/api/routes/status.ts | 35 +++++++++----------------------- src/api/schemas.ts | 43 ++++++++-------------------------------- src/pg/pg-store.ts | 5 ----- tests/api/cache.test.ts | 11 +++++++++- tests/api/status.test.ts | 38 +++++++++++++---------------------- 5 files changed, 41 insertions(+), 91 deletions(-) diff --git a/src/api/routes/status.ts b/src/api/routes/status.ts index 7898681..22928fc 100644 --- a/src/api/routes/status.ts +++ b/src/api/routes/status.ts @@ -17,7 +17,7 @@ export const StatusRoutes: 
FastifyPluginCallback< schema: { operationId: 'getApiStatus', summary: 'API Status', - description: 'Displays the status of the API and its current workload', + description: 'Displays the status of the API', tags: ['Status'], response: { 200: ApiStatusResponse, @@ -26,35 +26,18 @@ export const StatusRoutes: FastifyPluginCallback< }, async (request, reply) => { const result = await fastify.db.sqlTransaction(async sql => { - const block_height = await fastify.db.getChainTipBlockHeight(); - - const smartContracts: Record = {}; - const contractCounts = await fastify.db.getSmartContractCounts(); - for (const row of contractCounts) { - smartContracts[row.sip] = row.count; - } - - const tokens: Record = {}; - const tokenCounts = await fastify.db.getTokenCounts(); - for (const row of tokenCounts) { - tokens[row.type] = row.count; + let chain_tip = null; + const chainTipResult = await fastify.db.core.getChainTip(sql); + if (chainTipResult) { + chain_tip = { + block_height: chainTipResult.block_height, + index_block_hash: chainTipResult.index_block_hash, + }; } - - const queue: Record = {}; - const jobCounts = await fastify.db.getJobStatusCounts(); - for (const row of jobCounts) { - queue[row.status] = row.count; - } - return { server_version: `token-metadata-api ${SERVER_VERSION.tag} (${SERVER_VERSION.branch}:${SERVER_VERSION.commit})`, status: 'ready', - chain_tip: { - block_height, - }, - tokens: tokenCounts.length ? tokens : undefined, - token_contracts: contractCounts.length ? smartContracts : undefined, - job_queue: jobCounts.length ? queue : undefined, + chain_tip: chain_tip, }; }); await reply.send(result); diff --git a/src/api/schemas.ts b/src/api/schemas.ts index d96b4e1..63da73f 100644 --- a/src/api/schemas.ts +++ b/src/api/schemas.ts @@ -1,5 +1,5 @@ import { SwaggerOptions } from '@fastify/swagger'; -import { SERVER_VERSION } from '@hirosystems/api-toolkit'; +import { Nullable, SERVER_VERSION } from '@hirosystems/api-toolkit'; import { Static, TSchema, Type } from '@sinclair/typebox'; export const OpenApiSchemaOptions: SwaggerOptions = { @@ -333,40 +333,13 @@ export const ApiStatusResponse = Type.Object( { server_version: Type.String({ examples: ['token-metadata-api v0.0.1 (master:a1b2c3)'] }), status: Type.String({ examples: ['ready'] }), - chain_tip: Type.Object({ - block_height: Type.Integer({ examples: [163541] }), - }), - tokens: Type.Optional( - Type.Object( - { - ft: Type.Optional(Type.Integer({ examples: [512] })), - nft: Type.Optional(Type.Integer({ examples: [493452] })), - sft: Type.Optional(Type.Integer({ examples: [44] })), - }, - { title: 'Api Token Count' } - ) - ), - token_contracts: Type.Optional( - Type.Object( - { - 'sip-009': Type.Optional(Type.Integer({ examples: [3101] })), - 'sip-010': Type.Optional(Type.Integer({ examples: [512] })), - 'sip-013': Type.Optional(Type.Integer({ examples: [11] })), - }, - { title: 'Api Token Contract Count' } - ) - ), - job_queue: Type.Optional( - Type.Object( - { - pending: Type.Optional(Type.Integer({ examples: [430562] })), - queued: Type.Optional(Type.Integer({ examples: [512] })), - done: Type.Optional(Type.Integer({ examples: [12532] })), - failed: Type.Optional(Type.Integer({ examples: [11] })), - invalid: Type.Optional(Type.Integer({ examples: [20] })), - }, - { title: 'Api Job Count' } - ) + chain_tip: Nullable( + Type.Object({ + block_height: Type.Integer({ examples: [163541] }), + index_block_hash: Type.String({ + examples: ['0x1234567890abcdef1234567890abcdef1234567890abcdef'], + }), + }) ), }, { title: 'Api Status 
Response' } diff --git a/src/pg/pg-store.ts b/src/pg/pg-store.ts index d695c1e..111a057 100644 --- a/src/pg/pg-store.ts +++ b/src/pg/pg-store.ts @@ -287,11 +287,6 @@ export class PgStore extends BasePgStore { } } - async getChainTipBlockHeight(): Promise { - const result = await this.sql<{ block_height: number }[]>`SELECT block_height FROM chain_tip`; - return result[0].block_height; - } - /** * Returns a token ETag based on its last updated date. * @param contractPrincipal - smart contract principal diff --git a/tests/api/cache.test.ts b/tests/api/cache.test.ts index 5d14745..0b526a9 100644 --- a/tests/api/cache.test.ts +++ b/tests/api/cache.test.ts @@ -44,7 +44,16 @@ describe('ETag cache', () => { }); expect(cached.statusCode).toBe(304); - await db.chainhook.updateChainTipBlockHeight(100); + await db.core.insertBlock(db.sql, { + blockHeight: 100, + indexBlockHash: '0x123', + transactions: new Map(), + contracts: [], + notifications: [], + sftMints: [], + nftMints: [], + ftSupplyDelta: new Map(), + }); const cached2 = await fastify.inject({ method: 'GET', url: '/metadata/v1/', diff --git a/tests/api/status.test.ts b/tests/api/status.test.ts index 7d5d031..d92cc92 100644 --- a/tests/api/status.test.ts +++ b/tests/api/status.test.ts @@ -30,48 +30,38 @@ describe('Status routes', () => { expect(json).toStrictEqual({ server_version: 'token-metadata-api v0.0.1 (test:123456)', status: 'ready', - chain_tip: { - block_height: 1, - }, + chain_tip: null, }); const noVersionResponse = await fastify.inject({ method: 'GET', url: '/metadata/' }); expect(response.statusCode).toEqual(noVersionResponse.statusCode); expect(json).toStrictEqual(noVersionResponse.json()); }); - test('returns status counts', async () => { + test('returns status when a block has been processed', async () => { await insertAndEnqueueTestContractWithTokens( db, 'SP2SYHR84SDJJDK8M09HFS4KBFXPPCX9H7RZ9YVTS.hello-world', DbSipNumber.sip009, 4n ); - await db.chainhook.updateChainTipBlockHeight(100); - await db.sql`UPDATE jobs SET status = 'failed' WHERE id = 2`; - await db.sql`UPDATE jobs SET status = 'invalid' WHERE id = 3`; - await db.sql`UPDATE jobs SET status = 'queued' WHERE id = 4`; - await db.sql`UPDATE jobs SET status = 'done' WHERE id = 5`; - + await db.core.insertBlock(db.sql, { + blockHeight: 1, + indexBlockHash: '0x123', + transactions: new Map(), + contracts: [], + notifications: [], + sftMints: [], + nftMints: [], + ftSupplyDelta: new Map(), + }); const response = await fastify.inject({ method: 'GET', url: '/metadata/v1/' }); const json = response.json(); expect(json).toStrictEqual({ server_version: 'token-metadata-api v0.0.1 (test:123456)', status: 'ready', chain_tip: { - block_height: 100, - }, - job_queue: { - pending: 1, - failed: 1, - invalid: 1, - queued: 1, - done: 1, - }, - token_contracts: { - 'sip-009': 1, - }, - tokens: { - nft: 4, + block_height: 1, + index_block_hash: '0x123', }, }); }); From cea0741a64c8a160ac3e7109e487c28af2de7a56 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 9 Jan 2026 17:41:49 -0600 Subject: [PATCH 09/19] chain tip and network --- src/api/util/cache.ts | 5 +- src/index.ts | 3 +- src/token-processor/queue/job-queue.ts | 9 ++- src/token-processor/queue/job/job.ts | 5 +- .../queue/job/process-smart-contract-job.ts | 1 + .../queue/job/process-token-job.ts | 2 +- .../stacks-node/stacks-node-rpc-client.ts | 8 +- .../token-processor-metrics.ts | 4 +- tests/token-queue/sip-validation.test.ts | 73 ++++++++++--------- 9 files changed, 64 insertions(+), 46 deletions(-) diff --git 
a/src/api/util/cache.ts b/src/api/util/cache.ts index e86a1e4..3d21b0d 100644 --- a/src/api/util/cache.ts +++ b/src/api/util/cache.ts @@ -12,9 +12,8 @@ async function handleCache(type: ETagType, request: FastifyRequest, reply: Fasti let etag: string | undefined; switch (type) { case ETagType.chainTip: - // TODO: We should use the `index_block_hash` here instead of the `block_hash`, but we'll need - // a DB change for this. - etag = (await request.server.db.getChainTipBlockHeight()).toString(); + const chainTip = await request.server.db.core.getChainTip(request.server.db.sql); + etag = chainTip?.index_block_hash; break; case ETagType.token: etag = await getTokenEtag(request); diff --git a/src/index.ts b/src/index.ts index 8ba9ad3..b5a8e65 100644 --- a/src/index.ts +++ b/src/index.ts @@ -7,6 +7,7 @@ import { buildAdminRpcServer } from './admin-rpc/init'; import { isProdEnv } from './api/util/helpers'; import { buildProfilerServer, logger, registerShutdownConfig } from '@hirosystems/api-toolkit'; import { buildSnpEventStreamHandler } from './stacks-core/snp-event-stream'; +import { StacksNetworkName } from '@stacks/network'; /** * Initializes background services. Only for `default` and `writeonly` run modes. @@ -15,7 +16,7 @@ import { buildSnpEventStreamHandler } from './stacks-core/snp-event-stream'; async function initBackgroundServices(db: PgStore) { logger.info('Initializing background services...'); - const jobQueue = new JobQueue({ db }); + const jobQueue = new JobQueue({ db, network: ENV.NETWORK as StacksNetworkName }); registerShutdownConfig({ name: 'Job Queue', forceKillable: false, diff --git a/src/token-processor/queue/job-queue.ts b/src/token-processor/queue/job-queue.ts index 25870ce..ea88992 100644 --- a/src/token-processor/queue/job-queue.ts +++ b/src/token-processor/queue/job-queue.ts @@ -5,6 +5,7 @@ import { ENV } from '../../env'; import { ProcessSmartContractJob } from './job/process-smart-contract-job'; import { ProcessTokenJob } from './job/process-token-job'; import { logger, timeout } from '@hirosystems/api-toolkit'; +import { StacksNetworkName } from '@stacks/network'; /** * A priority queue that organizes all necessary work for contract ingestion and token metadata @@ -33,12 +34,14 @@ import { logger, timeout } from '@hirosystems/api-toolkit'; export class JobQueue { private readonly queue: PQueue; private readonly db: PgStore; + private readonly network: StacksNetworkName; /** IDs of jobs currently being processed by the queue. 
*/ private jobIds: Set; private _isRunning = false; - constructor(args: { db: PgStore }) { + constructor(args: { db: PgStore; network: StacksNetworkName }) { this.db = args.db; + this.network = args.network; this.queue = new PQueue({ concurrency: ENV.JOB_QUEUE_CONCURRENCY_LIMIT, autoStart: false, @@ -91,9 +94,9 @@ export class JobQueue { try { if (this._isRunning) { if (job.token_id) { - await new ProcessTokenJob({ db: this.db, job }).work(); + await new ProcessTokenJob({ db: this.db, job, network: this.network }).work(); } else if (job.smart_contract_id) { - await new ProcessSmartContractJob({ db: this.db, job }).work(); + await new ProcessSmartContractJob({ db: this.db, job, network: this.network }).work(); } } else { logger.info(`JobQueue cancelling job ${job.id}, queue is now closed`); diff --git a/src/token-processor/queue/job/job.ts b/src/token-processor/queue/job/job.ts index ad27240..9ce793b 100644 --- a/src/token-processor/queue/job/job.ts +++ b/src/token-processor/queue/job/job.ts @@ -5,6 +5,7 @@ import { DbJob, DbJobInvalidReason, DbJobStatus } from '../../../pg/types'; import { getUserErrorInvalidReason, TooManyRequestsHttpError, UserError } from '../../util/errors'; import { RetryableJobError } from '../errors'; import { getJobQueueProcessingMode, JobQueueProcessingMode } from '../helpers'; +import { StacksNetworkName } from '@stacks/network'; /** * An abstract class for a job that will be processed by `JobQueue`. It only contains logic for @@ -13,10 +14,12 @@ import { getJobQueueProcessingMode, JobQueueProcessingMode } from '../helpers'; export abstract class Job { protected readonly db: PgStore; protected readonly job: DbJob; + protected readonly network: StacksNetworkName; - constructor(args: { db: PgStore; job: DbJob }) { + constructor(args: { db: PgStore; job: DbJob; network: StacksNetworkName }) { this.db = args.db; this.job = args.job; + this.network = args.network; } /** diff --git a/src/token-processor/queue/job/process-smart-contract-job.ts b/src/token-processor/queue/job/process-smart-contract-job.ts index cc28cb0..ce1dc8e 100644 --- a/src/token-processor/queue/job/process-smart-contract-job.ts +++ b/src/token-processor/queue/job/process-smart-contract-job.ts @@ -50,6 +50,7 @@ export class ProcessSmartContractJob extends Job { private async getNftContractLastTokenId(contract: DbSmartContract): Promise { const client = StacksNodeRpcClient.create({ contractPrincipal: contract.principal, + network: this.network, }); return await client.readUIntFromContract('get-last-token-id'); } diff --git a/src/token-processor/queue/job/process-token-job.ts b/src/token-processor/queue/job/process-token-job.ts index 40b90a2..76d3f61 100644 --- a/src/token-processor/queue/job/process-token-job.ts +++ b/src/token-processor/queue/job/process-token-job.ts @@ -1,6 +1,5 @@ import { cvToHex, uintCV } from '@stacks/transactions'; import { ClarityValueUInt, decodeClarityValueToRepr } from '@hirosystems/stacks-encoding-native-js'; -import { ENV } from '../../../env'; import { DbMetadataLocaleInsertBundle, DbProcessedTokenUpdateBundle, @@ -52,6 +51,7 @@ export class ProcessTokenJob extends Job { const client = StacksNodeRpcClient.create({ contractPrincipal: contract.principal, + network: this.network, }); logger.info(`ProcessTokenJob processing ${this.description()}`); switch (token.type) { diff --git a/src/token-processor/stacks-node/stacks-node-rpc-client.ts b/src/token-processor/stacks-node/stacks-node-rpc-client.ts index 8e4ce66..bf90ef5 100644 --- 
a/src/token-processor/stacks-node/stacks-node-rpc-client.ts +++ b/src/token-processor/stacks-node/stacks-node-rpc-client.ts @@ -14,6 +14,7 @@ import { StacksNodeHttpError, } from '../util/errors'; import { ClarityAbi, getAddressFromPrivateKey, makeRandomPrivKey } from '@stacks/transactions'; +import { StacksNetworkName } from '@stacks/network'; interface ReadOnlyContractCallSuccessResponse { okay: true; @@ -39,9 +40,12 @@ export class StacksNodeRpcClient { private readonly senderAddress: string; private readonly basePath: string; - static create(args: { contractPrincipal: string }): StacksNodeRpcClient { + static create(args: { + contractPrincipal: string; + network: StacksNetworkName; + }): StacksNodeRpcClient { const randomPrivKey = makeRandomPrivKey(); - const senderAddress = getAddressFromPrivateKey(randomPrivKey, 'mainnet'); + const senderAddress = getAddressFromPrivateKey(randomPrivKey, args.network); const client = new StacksNodeRpcClient({ contractPrincipal: args.contractPrincipal, senderAddress: senderAddress, diff --git a/src/token-processor/token-processor-metrics.ts b/src/token-processor/token-processor-metrics.ts index d85d0dc..71cfae7 100644 --- a/src/token-processor/token-processor-metrics.ts +++ b/src/token-processor/token-processor-metrics.ts @@ -19,8 +19,8 @@ export class TokenProcessorMetrics { name: `token_metadata_block_height`, help: 'The most recent Bitcoin block height ingested by the API', async collect() { - const height = await db.getChainTipBlockHeight(); - this.set(height); + const height = await db.core.getChainTip(db.sql); + this.set(height?.block_height ?? 0); }, }); this.token_metadata_job_count = new prom.Gauge({ diff --git a/tests/token-queue/sip-validation.test.ts b/tests/token-queue/sip-validation.test.ts index 09bc6dd..a05a52e 100644 --- a/tests/token-queue/sip-validation.test.ts +++ b/tests/token-queue/sip-validation.test.ts @@ -9,7 +9,7 @@ import { uintCV, } from '@stacks/transactions'; import { getContractLogMetadataUpdateNotification } from '../../src/token-processor/util/sip-validation'; -import { StacksTransactionSmartContractEvent } from '@hirosystems/chainhook-client'; +import { StacksCoreContractEvent } from '../../src/stacks-core/schemas'; describe('SIP Validation', () => { test('SIP-019 FT notification', () => { @@ -24,13 +24,14 @@ describe('SIP Validation', () => { 'contract-id': principalCV(contractId), }), }); - const event1: StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event1: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: contractId, - raw_value: cvToHex(tuple1), topic: 'print', + raw_value: cvToHex(tuple1), }, }; const notification1 = getContractLogMetadataUpdateNotification(address, event1); @@ -54,13 +55,14 @@ describe('SIP Validation', () => { }); // Invalid notification senders - const event2: StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event2: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS.hic-1', - raw_value: cvToHex(tuple1), topic: 'print', + raw_value: cvToHex(tuple1), }, }; const notification2 = getContractLogMetadataUpdateNotification( @@ -70,13 +72,14 @@ describe('SIP Validation', () => { expect(notification2).toBeUndefined(); // Sent by the contract owner - const event3: 
StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event3: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS.hic-1', - raw_value: cvToHex(tuple1), topic: 'print', + raw_value: cvToHex(tuple1), }, }; const notification3 = getContractLogMetadataUpdateNotification(address, event3); @@ -86,13 +89,14 @@ describe('SIP Validation', () => { expect(notification3?.token_ids).toBeUndefined(); // Emitted by the correct contract - const event4: StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event4: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: contractId, - raw_value: cvToHex(tuple1), topic: 'print', + raw_value: cvToHex(tuple1), }, }; const notification4 = getContractLogMetadataUpdateNotification( @@ -110,10 +114,11 @@ describe('SIP Validation', () => { const contractId = `${address}.fine-art-exhibition-v1`; // Taken from tx 0xfc81a8c30025d7135d4313ea746831de1c7794478d4e0d23ef76970ee071cf20 - const event1: StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event1: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: contractId, topic: 'print', raw_value: @@ -135,10 +140,11 @@ describe('SIP Validation', () => { 'token-ids': listCV([intCV(1), intCV(2)]), }), }); - const event2: StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event2: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: contractId, topic: 'print', raw_value: cvToHex(tuple2), @@ -166,10 +172,11 @@ describe('SIP Validation', () => { ttl: uintCV(9999), }), }); - const event: StacksTransactionSmartContractEvent = { - type: 'SmartContractEvent', - position: { index: 0 }, - data: { + const event: StacksCoreContractEvent = { + txid: '0x123', + event_index: 0, + type: 'contract_event', + contract_event: { contract_identifier: contractId, topic: 'print', raw_value: cvToHex(tuple), From 218ef95d0c53240587a3251b4f4b3f5cc7c6a3e3 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Fri, 9 Jan 2026 17:44:26 -0600 Subject: [PATCH 10/19] tests --- tests/token-queue/job-queue.test.ts | 7 +- tests/token-queue/job.test.ts | 14 ++-- .../process-smart-contract-job.test.ts | 67 ++----------------- tests/token-queue/process-token-job.test.ts | 40 +++++------ 4 files changed, 36 insertions(+), 92 deletions(-) diff --git a/tests/token-queue/job-queue.test.ts b/tests/token-queue/job-queue.test.ts index 2bade33..3e97202 100644 --- a/tests/token-queue/job-queue.test.ts +++ b/tests/token-queue/job-queue.test.ts @@ -4,9 +4,10 @@ import { DbJob, DbJobStatus, DbSipNumber } from '../../src/pg/types'; import { JobQueue } from '../../src/token-processor/queue/job-queue'; import { insertAndEnqueueTestContract } from '../helpers'; import { cycleMigrations, timeout } from '@hirosystems/api-toolkit'; +import { StacksNetworkName } from '@stacks/network'; class TestJobQueue extends JobQueue { - constructor(args: { db: PgStore }) { + constructor(args: { db: PgStore; network: StacksNetworkName }) { super(args); this['_isRunning'] = true; // Simulate a running queue. 
} @@ -26,7 +27,7 @@ describe('JobQueue', () => { ENV.PGDATABASE = 'postgres'; db = await PgStore.connect({ skipMigrations: true }); await cycleMigrations(MIGRATIONS_DIR); - queue = new TestJobQueue({ db }); + queue = new TestJobQueue({ db, network: 'mainnet' }); }); afterEach(async () => { @@ -81,7 +82,7 @@ describe('JobQueue', () => { test('pg connection errors are not re-thrown', async () => { await insertAndEnqueueTestContract(db, 'ABCD.test-ft', DbSipNumber.sip010); - const queue = new JobQueue({ db }); + const queue = new JobQueue({ db, network: 'mainnet' }); // Close DB and start the queue. If the error is not handled correctly, the test will fail. await db.close(); queue.start(); diff --git a/tests/token-queue/job.test.ts b/tests/token-queue/job.test.ts index 72b929d..ce642d5 100644 --- a/tests/token-queue/job.test.ts +++ b/tests/token-queue/job.test.ts @@ -50,7 +50,7 @@ describe('Job', () => { }); test('valid job marked as done', async () => { - const job = new TestDbJob({ db, job: dbJob }); + const job = new TestDbJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); const jobs1 = await db.getPendingJobBatch({ limit: 1 }); @@ -61,7 +61,7 @@ describe('Job', () => { }); test('retryable error increases retry_count', async () => { - const job = new TestRetryableJob({ db, job: dbJob }); + const job = new TestRetryableJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); const jobs1 = await db.getJob({ id: 1 }); @@ -75,7 +75,7 @@ describe('Job', () => { }); test('user error marks job invalid', async () => { - const job = new TestUserErrorJob({ db, job: dbJob }); + const job = new TestUserErrorJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); const jobs1 = await db.getPendingJobBatch({ limit: 1 }); @@ -88,7 +88,7 @@ describe('Job', () => { test('retry_count limit reached marks entry as failed', async () => { ENV.JOB_QUEUE_STRICT_MODE = false; ENV.JOB_QUEUE_MAX_RETRIES = 0; - const job = new TestRetryableJob({ db, job: dbJob }); + const job = new TestRetryableJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); const status = await db.sql<{ status: string }[]>`SELECT status FROM jobs`; @@ -99,7 +99,7 @@ describe('Job', () => { ENV.JOB_QUEUE_STRICT_MODE = true; ENV.JOB_QUEUE_MAX_RETRIES = 0; ENV.JOB_QUEUE_RETRY_AFTER_MS = 0; - const job = new TestRetryableJob({ db, job: dbJob }); + const job = new TestRetryableJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); const jobs1 = await db.getPendingJobBatch({ limit: 1 }); @@ -109,7 +109,7 @@ describe('Job', () => { test('pending job batches consider retry_after', async () => { ENV.JOB_QUEUE_RETRY_AFTER_MS = 200; - const job = new TestRetryableJob({ db, job: dbJob }); + const job = new TestRetryableJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); const jobs1 = await db.getPendingJobBatch({ limit: 1 }); @@ -122,7 +122,7 @@ describe('Job', () => { test('db errors are not re-thrown', async () => { await db.close(); - const job = new TestDbJob({ db, job: dbJob }); + const job = new TestDbJob({ db, job: dbJob, network: 'mainnet' }); await expect(job.work()).resolves.not.toThrow(); }); }); diff --git a/tests/token-queue/process-smart-contract-job.test.ts b/tests/token-queue/process-smart-contract-job.test.ts index aa69647..4cd7768 100644 --- a/tests/token-queue/process-smart-contract-job.test.ts +++ 
b/tests/token-queue/process-smart-contract-job.test.ts @@ -1,13 +1,7 @@ -import { bufferCV, cvToHex, tupleCV, uintCV } from '@stacks/transactions'; +import { cvToHex, uintCV } from '@stacks/transactions'; import { MockAgent, setGlobalDispatcher } from 'undici'; import { MIGRATIONS_DIR, PgStore } from '../../src/pg/pg-store'; -import { - DbSipNumber, - DbSmartContractInsert, - DbToken, - DbTokenType, - TOKENS_COLUMNS, -} from '../../src/pg/types'; +import { DbSipNumber, DbToken, DbTokenType, TOKENS_COLUMNS } from '../../src/pg/types'; import { ProcessSmartContractJob } from '../../src/token-processor/queue/job/process-smart-contract-job'; import { ENV } from '../../src/env'; import { cycleMigrations } from '@hirosystems/api-toolkit'; @@ -31,6 +25,7 @@ describe('ProcessSmartContractJob', () => { const processor = new ProcessSmartContractJob({ db, job, + network: 'mainnet', }); await processor.work(); @@ -59,6 +54,7 @@ describe('ProcessSmartContractJob', () => { const processor = new ProcessSmartContractJob({ db, job, + network: 'mainnet', }); await processor.work(); @@ -87,64 +83,11 @@ describe('ProcessSmartContractJob', () => { const processor = new ProcessSmartContractJob({ db, job, + network: 'mainnet', }); await processor.work(); const tokens = await db.sql`SELECT ${db.sql(TOKENS_COLUMNS)} FROM tokens`; expect(tokens.count).toBe(0); }); - - // test('enqueues minted tokens for SFT contract', async () => { - // const address = 'SP3K8BC0PPEVCV7NZ6QSRWPQ2JE9E5B6N3PA0KBR9'; - // const contractId = `${address}.key-alex-autoalex-v1`; - - // const values: DbSmartContractInsert = { - // principal: contractId, - // sip: DbSipNumber.sip013, - // abi: '"some"', - // tx_id: '0x123456', - // block_height: 1, - // }; - // const job = await db.chainhook.insertAndEnqueueSmartContract({ values }); - - // // Create mint events. 
- // const event1: BlockchainDbContractLog = { - // contract_identifier: contractId, - // sender_address: address, - // value: cvToHex( - // tupleCV({ - // type: bufferCV(Buffer.from('sft_mint')), - // recipient: bufferCV(Buffer.from(address)), - // 'token-id': uintCV(3), - // amount: uintCV(1000), - // }) - // ), - // }; - // const event2: BlockchainDbContractLog = { - // contract_identifier: contractId, - // sender_address: address, - // value: cvToHex( - // tupleCV({ - // type: bufferCV(Buffer.from('sft_mint')), - // recipient: bufferCV(Buffer.from(address)), - // 'token-id': uintCV(7), - // amount: uintCV(2000), - // }) - // ), - // }; - - // const apiDb = new MockPgBlockchainApiStore(); - // apiDb.contractLogsByContract = [event1, event2]; - // const processor = new ProcessSmartContractJob({ db, job, apiDb }); - // await processor.work(); - - // const tokens = await db.sql`SELECT ${db.sql(TOKENS_COLUMNS)} FROM tokens`; - // expect(tokens.count).toBe(2); - // expect(tokens[0].type).toBe(DbTokenType.sft); - // expect(tokens[0].smart_contract_id).toBe(1); - // expect(tokens[0].token_number).toBe('3'); - // expect(tokens[1].type).toBe(DbTokenType.sft); - // expect(tokens[1].smart_contract_id).toBe(1); - // expect(tokens[1].token_number).toBe('7'); - // }); }); diff --git a/tests/token-queue/process-token-job.test.ts b/tests/token-queue/process-token-job.test.ts index 76e9141..1d02ec6 100644 --- a/tests/token-queue/process-token-job.test.ts +++ b/tests/token-queue/process-token-job.test.ts @@ -7,8 +7,6 @@ import { DbMetadataAttribute, DbMetadataProperty, DbSipNumber, - DbSmartContractInsert, - DbTokenType, } from '../../src/pg/types'; import { ENV } from '../../src/env'; import { ProcessTokenJob } from '../../src/token-processor/queue/job/process-token-job'; @@ -97,7 +95,7 @@ describe('ProcessTokenJob', () => { }); setGlobalDispatcher(agent); - const processor = new ProcessTokenJob({ db, job: tokenJob }); + const processor = new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }); await processor.work(); const token = await db.getToken({ id: 1 }); @@ -169,7 +167,7 @@ describe('ProcessTokenJob', () => { .persist(); setGlobalDispatcher(agent); - const processor = new ProcessTokenJob({ db, job: tokenJob }); + const processor = new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }); await processor.work(); const token = await db.getToken({ id: 1 }); @@ -239,7 +237,7 @@ describe('ProcessTokenJob', () => { }); setGlobalDispatcher(agent); - const processor = new ProcessTokenJob({ db, job: tokenJob }); + const processor = new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }); await processor.work(); const token = await db.getToken({ id: 1 }); @@ -329,7 +327,7 @@ describe('ProcessTokenJob', () => { }); setGlobalDispatcher(agent); - const processor = new ProcessTokenJob({ db, job: tokenJob }); + const processor = new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }); await processor.work(); const token = await db.getTokenMetadataBundle({ @@ -408,7 +406,7 @@ describe('ProcessTokenJob', () => { .reply(200, metadata); setGlobalDispatcher(agent); - await new ProcessTokenJob({ db, job: tokenJob }).work(); + await new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).work(); const bundle = await db.getTokenMetadataBundle({ contractPrincipal: 'ABCD.test-nft', @@ -524,7 +522,7 @@ describe('ProcessTokenJob', () => { .reply(200, metadataSpanish); setGlobalDispatcher(agent); - await new ProcessTokenJob({ db, job: tokenJob }).work(); + await new ProcessTokenJob({ db, 
job: tokenJob, network: 'mainnet' }).work(); const bundle = await db.getTokenMetadataBundle({ contractPrincipal: 'ABCD.test-nft', @@ -620,7 +618,7 @@ describe('ProcessTokenJob', () => { setGlobalDispatcher(agent); // Process once - await new ProcessTokenJob({ db, job: tokenJob }).work(); + await new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).work(); const bundle1 = await db.getTokenMetadataBundle({ contractPrincipal: 'ABCD.test-nft', @@ -662,7 +660,7 @@ describe('ProcessTokenJob', () => { }) .reply(200, metadata2); await db.updateJobStatus({ id: tokenJob.id, status: DbJobStatus.pending }); - await new ProcessTokenJob({ db, job: tokenJob }).work(); + await new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).work(); const bundle2 = await db.getTokenMetadataBundle({ contractPrincipal: 'ABCD.test-nft', @@ -717,7 +715,7 @@ describe('ProcessTokenJob', () => { .reply(200, metadata); setGlobalDispatcher(agent); - await new ProcessTokenJob({ db, job: tokenJob }).work(); + await new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).work(); await expect( db.getTokenMetadataBundle({ @@ -777,7 +775,7 @@ describe('ProcessTokenJob', () => { }); setGlobalDispatcher(agent); - const processor = new ProcessTokenJob({ db, job: tokenJob }); + const processor = new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }); await processor.work(); const token = await db.getToken({ id: 1 }); @@ -859,7 +857,7 @@ describe('ProcessTokenJob', () => { }) .reply(429, { error: 'nope' }, { headers: { 'retry-after': '999' } }); try { - await new ProcessTokenJob({ db, job: tokenJob }).work(); + await new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).work(); } catch (error) { expect(error).toBeInstanceOf(RetryableJobError); const err = error as RetryableJobError; @@ -876,9 +874,9 @@ describe('ProcessTokenJob', () => { retry_after: 99999, }, }); - await expect(new ProcessTokenJob({ db, job: tokenJob }).handler()).rejects.toThrow( - /skipping fetch to rate-limited hostname/ - ); + await expect( + new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).handler() + ).rejects.toThrow(/skipping fetch to rate-limited hostname/); const host = await db.getRateLimitedHost({ hostname: 'm.io' }); expect(host).not.toBeUndefined(); }); @@ -913,7 +911,9 @@ describe('ProcessTokenJob', () => { `; // Token is processed now. - await expect(new ProcessTokenJob({ db, job: tokenJob }).handler()).resolves.not.toThrow(); + await expect( + new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).handler() + ).resolves.not.toThrow(); // Rate limited host is gone. 
const host = await db.getRateLimitedHost({ hostname: 'm.io' }); @@ -945,8 +945,8 @@ describe('ProcessTokenJob', () => { .reply(200, mockResponse); setGlobalDispatcher(agent); - await expect(new ProcessTokenJob({ db, job: tokenJob }).handler()).rejects.toThrow( - RetryableJobError - ); + await expect( + new ProcessTokenJob({ db, job: tokenJob, network: 'mainnet' }).handler() + ).rejects.toThrow(RetryableJobError); }); }); From aa54b02c3e6bc1d5ebb1c27cd5f89414ef0aec81 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Sun, 11 Jan 2026 21:12:49 -0600 Subject: [PATCH 11/19] progress --- migrations/1661125881755_blocks.ts | 4 + src/pg/stacks-core-pg-store.ts | 99 +++---- .../stacks-core-block-processor.ts | 245 +++++++----------- src/token-processor/util/sip-validation.ts | 75 ++++-- tests/helpers.ts | 7 - 5 files changed, 212 insertions(+), 218 deletions(-) diff --git a/migrations/1661125881755_blocks.ts b/migrations/1661125881755_blocks.ts index 5ef3d55..5cfd9fb 100644 --- a/migrations/1661125881755_blocks.ts +++ b/migrations/1661125881755_blocks.ts @@ -9,6 +9,10 @@ export function up(pgm: MigrationBuilder): void { type: 'text', primaryKey: true, }, + parent_index_block_hash: { + type: 'text', + notNull: true, + }, block_height: { type: 'int', notNull: true, diff --git a/src/pg/stacks-core-pg-store.ts b/src/pg/stacks-core-pg-store.ts index d2a7b75..b738726 100644 --- a/src/pg/stacks-core-pg-store.ts +++ b/src/pg/stacks-core-pg-store.ts @@ -2,6 +2,7 @@ import { BasePgStoreModule, PgSqlClient, batchIterate, logger } from '@hirosyste import { ENV } from '../env'; import { NftMintEvent, + SftMintEvent, SmartContractDeployment, TokenMetadataUpdateNotification, } from '../token-processor/util/sip-validation'; @@ -14,35 +15,40 @@ import { } from './types'; import { dbSipNumberToDbTokenType } from '../token-processor/util/helpers'; import BigNumber from 'bignumber.js'; -import { - ProcessedStacksCoreBlock, - ProcessedStacksCoreEvent, -} from '../stacks-core/stacks-core-block-processor'; +import { DecodedStacksBlock } from '../stacks-core/stacks-core-block-processor'; export class StacksCorePgStore extends BasePgStoreModule { /** * Writes a processed Stacks Core block to the database. * @param block - The processed Stacks Core block to write. 
*/ - async writeBlock(block: ProcessedStacksCoreBlock): Promise { + async writeProcessedBlock(args: { + block: DecodedStacksBlock; + contracts: SmartContractDeployment[]; + notifications: TokenMetadataUpdateNotification[]; + nftMints: NftMintEvent[]; + sftMints: SftMintEvent[]; + ftSupplyDelta: Map; + }): Promise { await this.sqlWriteTransaction(async sql => { - await this.insertBlock(sql, block); - for (const contract of block.contracts) - await this.applyContractDeployment(sql, contract, block); - for (const notification of block.notifications) - await this.applyNotification(sql, notification, block); - await this.applyTokenMints(sql, block.nftMints, DbTokenType.nft, block); - await this.applyTokenMints(sql, block.sftMints, DbTokenType.sft, block); - for (const [contract, delta] of block.ftSupplyDelta) - await this.applyFtSupplyChange(sql, contract, delta, block); + await this.insertBlock(sql, args.block); + for (const contract of args.contracts) + await this.applyContractDeployment(sql, contract, args.block); + for (const notification of args.notifications) + await this.applyNotification(sql, notification, args.block); + await this.applyTokenMints(sql, args.nftMints, DbTokenType.nft, args.block); + await this.applyTokenMints(sql, args.sftMints, DbTokenType.sft, args.block); + for (const [contract, delta] of args.ftSupplyDelta) + await this.applyFtSupplyChange(sql, contract, delta, args.block); await this.enqueueDynamicTokensDueForRefresh(); }); } - async insertBlock(sql: PgSqlClient, block: ProcessedStacksCoreBlock): Promise { + async insertBlock(sql: PgSqlClient, block: DecodedStacksBlock): Promise { const values = { - block_height: block.blockHeight, - index_block_hash: block.indexBlockHash, + block_height: block.block_height, + index_block_hash: block.index_block_hash, + parent_index_block_hash: block.parent_index_block_hash, }; await sql`INSERT INTO blocks ${sql(values)}`; } @@ -130,18 +136,18 @@ export class StacksCorePgStore extends BasePgStoreModule { async applyContractDeployment( sql: PgSqlClient, - contract: ProcessedStacksCoreEvent, - block: ProcessedStacksCoreBlock + contract: SmartContractDeployment, + block: DecodedStacksBlock ) { await this.enqueueContract(sql, { - principal: contract.event.principal, - sip: contract.event.sip, - block_height: block.blockHeight, - index_block_hash: block.indexBlockHash, + principal: contract.principal, + sip: contract.sip, + block_height: block.block_height, + index_block_hash: block.index_block_hash, tx_id: contract.tx_id, tx_index: contract.tx_index, - fungible_token_name: contract.event.fungible_token_name ?? null, - non_fungible_token_name: contract.event.non_fungible_token_name ?? null, + fungible_token_name: contract.fungible_token_name ?? null, + non_fungible_token_name: contract.non_fungible_token_name ?? 
null, }); } @@ -183,27 +189,22 @@ export class StacksCorePgStore extends BasePgStoreModule { private async applyNotification( sql: PgSqlClient, - event: ProcessedStacksCoreEvent, - block: ProcessedStacksCoreBlock + event: TokenMetadataUpdateNotification, + block: DecodedStacksBlock ) { const contractResult = await sql<{ id: number }[]>` - SELECT id FROM smart_contracts WHERE principal = ${event.event.contract_id} LIMIT 1 + SELECT id FROM smart_contracts WHERE principal = ${event.contract_id} LIMIT 1 `; if (contractResult.count == 0) { return; } - const notification = event.event; await sql` WITH affected_token_ids AS ( SELECT t.id FROM tokens AS t INNER JOIN smart_contracts AS s ON s.id = t.smart_contract_id - WHERE s.principal = ${notification.contract_id} - ${ - notification.token_ids?.length - ? sql`AND t.token_number IN ${sql(notification.token_ids)}` - : sql`` - } + WHERE s.principal = ${event.contract_id} + ${event.token_ids?.length ? sql`AND t.token_number IN ${sql(event.token_ids)}` : sql``} ), previous_modes AS ( SELECT DISTINCT ON (a.id) a.id, COALESCE(m.update_mode, 'standard') AS update_mode @@ -215,8 +216,8 @@ export class StacksCorePgStore extends BasePgStoreModule { INSERT INTO update_notifications (token_id, update_mode, ttl, block_height, index_block_hash, tx_id, tx_index, event_index) ( - SELECT id, ${notification.update_mode}, ${notification.ttl ?? null}, - ${block.blockHeight}, ${block.indexBlockHash}, ${event.tx_id}, ${event.tx_index}, + SELECT id, ${event.update_mode}, ${event.ttl ?? null}, + ${block.block_height}, ${block.index_block_hash}, ${event.tx_id}, ${event.tx_index}, ${event.event_index} FROM previous_modes WHERE update_mode <> 'frozen' @@ -233,7 +234,7 @@ export class StacksCorePgStore extends BasePgStoreModule { sql: PgSqlClient, contract: string, delta: BigNumber, - block: ProcessedStacksCoreBlock + block: DecodedStacksBlock ): Promise { await sql` WITH smart_contract_id AS ( @@ -248,7 +249,7 @@ export class StacksCorePgStore extends BasePgStoreModule { delta_insert AS ( INSERT INTO ft_supply_deltas (token_id, block_height, index_block_hash, delta) VALUES ( - (SELECT id FROM token_id), ${block.blockHeight}, ${block.indexBlockHash}, ${delta} + (SELECT id FROM token_id), ${block.block_height}, ${block.index_block_hash}, ${delta} ) ) UPDATE tokens @@ -288,26 +289,26 @@ export class StacksCorePgStore extends BasePgStoreModule { private async applyTokenMints( sql: PgSqlClient, - mints: ProcessedStacksCoreEvent[], + mints: NftMintEvent[], tokenType: DbTokenType, - block: ProcessedStacksCoreBlock + block: DecodedStacksBlock ): Promise { if (mints.length == 0) return; for await (const batch of batchIterate(mints, 500)) { const tokenValues = new Map(); - for (const m of batch) { + for (const mint of batch) { // SFT tokens may mint one single token more than once given that it's an FT within an NFT. // This makes sure we only keep the first occurrence. 
- const tokenKey = `${m.event.contractId}-${m.event.tokenId}`; + const tokenKey = `${mint.contractId}-${mint.tokenId}`; if (tokenValues.has(tokenKey)) continue; tokenValues.set(tokenKey, [ - m.event.contractId, + mint.contractId, tokenType, - m.event.tokenId.toString(), - block.blockHeight, - block.indexBlockHash, - m.tx_id, - m.tx_index, + mint.tokenId.toString(), + block.block_height, + block.index_block_hash, + mint.tx_id, + mint.tx_index, ]); } await sql` diff --git a/src/stacks-core/stacks-core-block-processor.ts b/src/stacks-core/stacks-core-block-processor.ts index 0f6e3c7..f5fd20e 100644 --- a/src/stacks-core/stacks-core-block-processor.ts +++ b/src/stacks-core/stacks-core-block-processor.ts @@ -6,48 +6,37 @@ import { StacksCoreNftMintEvent, StacksCoreContractEvent, StacksCoreTransaction, + StacksCoreEvent, } from './schemas'; import { getContractLogMetadataUpdateNotification, getContractLogSftMintEvent, - getSmartContractSip, + getSmartContractDeployment, NftMintEvent, SftMintEvent, SmartContractDeployment, TokenMetadataUpdateNotification, } from '../token-processor/util/sip-validation'; -import { ClarityAbi } from '@stacks/transactions'; import { ClarityTypeID, decodeClarityValue, DecodedTxResult, decodeTransaction, - TxPayloadTypeID, } from '@hirosystems/stacks-encoding-native-js'; import { StacksCorePgStore } from '../pg/stacks-core-pg-store'; import { logger, stopwatch } from '@hirosystems/api-toolkit'; -export type ProcessedStacksCoreEvent = { - event: T; - tx_id: string; - tx_index: number; - event_index?: number; -}; - -export type ProcessedStacksCoreTransaction = { +export type DecodedStacksTransaction = { tx: StacksCoreTransaction; decoded: DecodedTxResult; + events: StacksCoreEvent[]; }; -export type ProcessedStacksCoreBlock = { - blockHeight: number; - indexBlockHash: string; - transactions: Map; - contracts: ProcessedStacksCoreEvent[]; - notifications: ProcessedStacksCoreEvent[]; - sftMints: ProcessedStacksCoreEvent[]; - nftMints: ProcessedStacksCoreEvent[]; - ftSupplyDelta: Map; +export type DecodedStacksBlock = { + block_height: number; + index_block_hash: string; + parent_index_block_hash: string; + transactions: DecodedStacksTransaction[]; }; /** @@ -56,152 +45,123 @@ export type ProcessedStacksCoreBlock = { */ export class StacksCoreBlockProcessor { private readonly db: StacksCorePgStore; + private readonly block: DecodedStacksBlock; + + private contracts: SmartContractDeployment[] = []; + private notifications: TokenMetadataUpdateNotification[] = []; + private sftMints: SftMintEvent[] = []; + private nftMints: NftMintEvent[] = []; + private ftSupplyDelta: Map = new Map(); + + static init(args: { block: StacksCoreBlock; db: StacksCorePgStore }): StacksCoreBlockProcessor { + // Group events by transaction ID. + const events: Map = new Map(); + for (const event of args.block.events) { + events.set(event.txid, [...(events.get(event.txid) || []), event]); + } + // Decode transactions and sort their events by event index. + const transactions = args.block.transactions.map(tx => ({ + tx: tx, + decoded: decodeTransaction(tx.raw_tx.substring(2)), + events: (events.get(tx.txid) || []).sort((a, b) => a.event_index - b.event_index), + })); + // Sort transactions by transaction index. 
+ const decodedBlock: DecodedStacksBlock = { + block_height: args.block.block_height, + index_block_hash: args.block.index_block_hash, + parent_index_block_hash: args.block.parent_index_block_hash, + transactions: transactions.sort((a, b) => a.tx.tx_index - b.tx.tx_index), + }; + return new StacksCoreBlockProcessor({ db: args.db, decodedBlock }); + } - private block: ProcessedStacksCoreBlock = { - blockHeight: 0, - indexBlockHash: '', - transactions: new Map(), - contracts: [], - notifications: [], - sftMints: [], - nftMints: [], - ftSupplyDelta: new Map(), - }; - - constructor(args: { db: StacksCorePgStore }) { + constructor(args: { db: StacksCorePgStore; decodedBlock: DecodedStacksBlock }) { this.db = args.db; + this.block = args.decodedBlock; } - async process(block: StacksCoreBlock): Promise { + async process(): Promise { const time = stopwatch(); - this.clear(); logger.info( - `${this.constructor.name} processing block ${block.block_height} #${block.index_block_hash}` + `${this.constructor.name} processing block ${this.block.block_height} #${this.block.index_block_hash}` ); await this.db.sqlWriteTransaction(async sql => { // Check if this block represents a re-org. Revert to its parent's chain tip if it does. const chainTip = await this.db.getChainTip(sql); - if (chainTip && chainTip.index_block_hash !== block.parent_index_block_hash) { + if (chainTip && chainTip.index_block_hash !== this.block.parent_index_block_hash) { logger.info( `${this.constructor.name} detected re-org, reverting to chain tip at parent block ${ - block.block_height - 1 - } ${block.parent_index_block_hash}` + this.block.block_height - 1 + } ${this.block.parent_index_block_hash}` ); await this.db.revertToChainTip(sql, chainTip); } - // Process the block. - this.block.blockHeight = block.block_height; - this.block.indexBlockHash = block.index_block_hash; - for (const transaction of block.transactions) { - if (transaction.status !== 'success') continue; - - const indexedTransaction: ProcessedStacksCoreTransaction = { - tx: transaction, - decoded: decodeTransaction(transaction.raw_tx.substring(2)), - }; - this.block.transactions.set(transaction.txid, indexedTransaction); - - // Check for smart contract deployments. - this.processSmartContract(indexedTransaction); - } - // Check for token metadata updates and token supply deltas. - for (const event of block.events) { - const transaction = this.block.transactions.get(event.txid); - if (!transaction) continue; - switch (event.type) { - case 'contract_event': - this.processContractEvent(transaction, event); - break; - case 'ft_mint_event': - this.processFtMintEvent(event); - break; - case 'ft_burn_event': - this.processFtBurnEvent(event); - break; - case 'nft_mint_event': - this.processNftMintEvent(transaction, event); - break; - case 'nft_burn_event': - // Burned NFTs still have their metadata in the database, so we don't need to do anything - // here. - break; + // Process each transaction in the block. 
+ for (const transaction of this.block.transactions) { + if (transaction.tx.status !== 'success') continue; + this.processTransaction(transaction); + for (const event of transaction.events) { + switch (event.type) { + case 'contract_event': + this.processContractEvent(transaction, event); + break; + case 'ft_mint_event': + this.processFtMintEvent(event); + break; + case 'ft_burn_event': + this.processFtBurnEvent(event); + break; + case 'nft_mint_event': + this.processNftMintEvent(transaction, event); + break; + case 'nft_burn_event': + // Burned NFTs still have their metadata in the database, so we don't need to do + // anything here. + break; + } } } - await this.db.writeBlock(this.block); + await this.db.writeProcessedBlock({ + block: this.block, + contracts: this.contracts, + notifications: this.notifications, + nftMints: this.nftMints, + sftMints: this.sftMints, + ftSupplyDelta: this.ftSupplyDelta, + }); }); - this.clear(); logger.info( - `${this.constructor.name} processed block ${block.block_height} ${ - block.index_block_hash + `${this.constructor.name} processed block ${this.block.block_height} ${ + this.block.index_block_hash } in ${time.getElapsedSeconds()}s` ); } - private clear() { - this.block = { - blockHeight: 0, - indexBlockHash: '', - transactions: new Map(), - contracts: [], - notifications: [], - sftMints: [], - nftMints: [], - ftSupplyDelta: new Map(), - }; - } - - private processSmartContract(transaction: ProcessedStacksCoreTransaction) { - if (transaction.tx.contract_interface == null) return; - - // Parse the included ABI to check if it's a token contract. - const abi = JSON.parse(transaction.tx.contract_interface) as ClarityAbi; - const sip = getSmartContractSip(abi); - if (!sip) return; - - const sender = transaction.decoded.auth.origin_condition.signer.address; - const payload = transaction.decoded.payload; - if ( - payload.type_id === TxPayloadTypeID.SmartContract || - payload.type_id === TxPayloadTypeID.VersionedSmartContract - ) { - const principal = `${sender}.${payload.contract_name}`; - this.block.contracts.push({ - event: { - principal, - sip, - fungible_token_name: abi.fungible_tokens[0]?.name, - non_fungible_token_name: abi.non_fungible_tokens[0]?.name, - }, - tx_id: transaction.tx.txid, - tx_index: transaction.tx.tx_index, - }); + private processTransaction(transaction: DecodedStacksTransaction) { + const deployment = getSmartContractDeployment(transaction); + if (deployment) { + this.contracts.push(deployment); logger.info( { - contract: principal, - sip, + contract: deployment.principal, + sip: deployment.sip, txid: transaction.tx.txid, }, - `${this.constructor.name} found contract ${principal} (${sip})` + `${this.constructor.name} found contract ${deployment.principal} (${deployment.sip})` ); } } private processContractEvent( - transaction: ProcessedStacksCoreTransaction, + transaction: DecodedStacksTransaction, event: StacksCoreContractEvent ) { - const sender = transaction.decoded.auth.origin_condition.signer.address; - const notification = getContractLogMetadataUpdateNotification(sender, event); + const notification = getContractLogMetadataUpdateNotification(transaction, event); if (notification) { - this.block.notifications.push({ - event: notification, - tx_id: event.txid, - tx_index: transaction.tx.tx_index, - event_index: event.event_index, - }); + this.notifications.push(notification); logger.info( { contract: notification.contract_id, @@ -211,14 +171,9 @@ export class StacksCoreBlockProcessor { ); return; } - const mint = 
getContractLogSftMintEvent(event); + const mint = getContractLogSftMintEvent(transaction, event); if (mint) { - this.block.sftMints.push({ - event: mint, - tx_id: event.txid, - tx_index: transaction.tx.tx_index, - event_index: event.event_index, - }); + this.sftMints.push(mint); logger.info( { contract: mint.contractId, @@ -233,9 +188,9 @@ export class StacksCoreBlockProcessor { private processFtMintEvent(event: StacksCoreFtMintEvent) { const principal = event.ft_mint_event.asset_identifier.split('::')[0]; - const previous = this.block.ftSupplyDelta.get(principal) ?? BigNumber(0); + const previous = this.ftSupplyDelta.get(principal) ?? BigNumber(0); const amount = BigNumber(event.ft_mint_event.amount); - this.block.ftSupplyDelta.set(principal, previous.plus(amount)); + this.ftSupplyDelta.set(principal, previous.plus(amount)); logger.info( { contract: principal, @@ -248,9 +203,9 @@ export class StacksCoreBlockProcessor { private processFtBurnEvent(event: StacksCoreFtBurnEvent) { const principal = event.ft_burn_event.asset_identifier.split('::')[0]; - const previous = this.block.ftSupplyDelta.get(principal) ?? BigNumber(0); + const previous = this.ftSupplyDelta.get(principal) ?? BigNumber(0); const amount = BigNumber(event.ft_burn_event.amount); - this.block.ftSupplyDelta.set(principal, previous.minus(amount)); + this.ftSupplyDelta.set(principal, previous.minus(amount)); logger.info( { contract: principal, @@ -262,21 +217,19 @@ export class StacksCoreBlockProcessor { } private processNftMintEvent( - transaction: ProcessedStacksCoreTransaction, + transaction: DecodedStacksTransaction, event: StacksCoreNftMintEvent ) { const value = decodeClarityValue(event.nft_mint_event.raw_value); if (value.type_id === ClarityTypeID.UInt) { const principal = event.nft_mint_event.asset_identifier.split('::')[0]; const tokenId = BigInt(value.value); - this.block.nftMints.push({ - event: { - contractId: principal, - tokenId, - }, - tx_id: event.txid, + this.nftMints.push({ + tx_id: transaction.tx.txid, tx_index: transaction.tx.tx_index, event_index: event.event_index, + contractId: principal, + tokenId, }); logger.info( { diff --git a/src/token-processor/util/sip-validation.ts b/src/token-processor/util/sip-validation.ts index 9480622..c6a6771 100644 --- a/src/token-processor/util/sip-validation.ts +++ b/src/token-processor/util/sip-validation.ts @@ -6,9 +6,11 @@ import { ClarityValueList, ClarityValueUInt, decodeClarityValue, + TxPayloadTypeID, } from '@hirosystems/stacks-encoding-native-js'; import { DbSipNumber } from '../../pg/types'; import { StacksCoreContractEvent } from '../../stacks-core/schemas'; +import { DecodedStacksTransaction } from '../../stacks-core/stacks-core-block-processor'; const FtTraitFunctions: ClarityAbiFunction[] = [ { @@ -284,16 +286,22 @@ export function tokenClassFromSipNumber(sip: DbSipNumber): TokenClass { } } -type MetadataUpdateMode = 'standard' | 'frozen' | 'dynamic'; +export type SipEventContext = { + tx_id: string; + tx_index: number; + event_index?: number; +}; -export type SmartContractDeployment = { +export type SmartContractDeployment = SipEventContext & { principal: string; sip: DbSipNumber; fungible_token_name?: string; non_fungible_token_name?: string; }; -export type TokenMetadataUpdateNotification = { +type MetadataUpdateMode = 'standard' | 'frozen' | 'dynamic'; + +export type TokenMetadataUpdateNotification = SipEventContext & { token_class: TokenClass; contract_id: string; update_mode: MetadataUpdateMode; @@ -301,15 +309,26 @@ export type 
TokenMetadataUpdateNotification = { ttl?: bigint; }; +export type NftMintEvent = SipEventContext & { + contractId: string; + tokenId: bigint; +}; + +export type SftMintEvent = NftMintEvent & { + amount: bigint; + recipient: string; +}; + /** * Takes in a contract log entry and returns a metadata update notification object if valid. * @param log - Contract log entry */ export function getContractLogMetadataUpdateNotification( - sender: string, + transaction: DecodedStacksTransaction, event: StacksCoreContractEvent ): TokenMetadataUpdateNotification | undefined { const log = event.contract_event; + const sender = transaction.decoded.auth.origin_condition.signer.address; try { // Validate that we have the correct SIP-019 payload structure. const value = decodeClarityValue(log.raw_value); @@ -359,6 +378,9 @@ export function getContractLogMetadataUpdateNotification( } return { + tx_id: transaction.tx.txid, + tx_index: transaction.tx.tx_index, + event_index: event.event_index, token_class: tokenClass as TokenClass, contract_id: contractId, token_ids: tokenIds, @@ -370,17 +392,8 @@ export function getContractLogMetadataUpdateNotification( } } -export type NftMintEvent = { - contractId: string; - tokenId: bigint; -}; - -export type SftMintEvent = NftMintEvent & { - amount: bigint; - recipient: string; -}; - export function getContractLogSftMintEvent( + transaction: DecodedStacksTransaction, event: StacksCoreContractEvent ): SftMintEvent | undefined { const log = event.contract_event; @@ -395,14 +408,44 @@ export function getContractLogSftMintEvent( const tokenId = (value.data['token-id'] as ClarityValueUInt).value; const amount = (value.data['amount'] as ClarityValueUInt).value; - const event: SftMintEvent = { + return { + tx_id: transaction.tx.txid, + tx_index: transaction.tx.tx_index, + event_index: event.event_index, contractId: log.contract_identifier, tokenId: BigInt(tokenId), amount: BigInt(amount), recipient: recipient, }; - return event; } catch (error) { return; } } + +export function getSmartContractDeployment( + transaction: DecodedStacksTransaction +): SmartContractDeployment | undefined { + if (transaction.tx.contract_interface == null) return; + + // Parse the included ABI to check if it's a token contract. 
+ const abi = JSON.parse(transaction.tx.contract_interface) as ClarityAbi; + const sip = getSmartContractSip(abi); + if (!sip) return; + + const sender = transaction.decoded.auth.origin_condition.signer.address; + const payload = transaction.decoded.payload; + if ( + payload.type_id === TxPayloadTypeID.SmartContract || + payload.type_id === TxPayloadTypeID.VersionedSmartContract + ) { + const principal = `${sender}.${payload.contract_name}`; + return { + tx_id: transaction.tx.txid, + tx_index: transaction.tx.tx_index, + principal, + sip, + fungible_token_name: abi.fungible_tokens[0]?.name, + non_fungible_token_name: abi.non_fungible_tokens[0]?.name, + }; + } +} diff --git a/tests/helpers.ts b/tests/helpers.ts index c5a8282..7aaf40c 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -4,13 +4,6 @@ import { buildApiServer } from '../src/api/init'; import { FastifyBaseLogger, FastifyInstance } from 'fastify'; import { IncomingMessage, Server, ServerResponse } from 'http'; import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; -import { - StacksEvent, - StacksPayload, - StacksTransaction, - StacksTransactionEvent, -} from '@hirosystems/chainhook-client'; -import { BlockCache, CachedEvent } from '../src/pg/chainhook/block-cache'; import { SmartContractDeployment } from '../src/token-processor/util/sip-validation'; import { DbJob, DbSipNumber, DbSmartContract, DbUpdateNotification } from '../src/pg/types'; import { waiter } from '@hirosystems/api-toolkit'; From 56642a484510daa143c80840a8f3427e39df6b31 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 09:11:21 -0600 Subject: [PATCH 12/19] test build errors --- package.json | 2 +- src/stacks-core/snp-event-stream.ts | 5 +- .../stacks-core-block-processor.ts | 135 +++-- tests/chainhook/chainhook-observer.test.ts | 183 ------ tests/chainhook/nft-events.test.ts | 148 ----- tests/chainhook/notifications.test.ts | 559 ------------------ tests/chainhook/sft-events.test.ts | 117 ---- tests/chainhook/smart-contracts.test.ts | 174 ------ tests/helpers.ts | 317 +++++----- .../ft-events.test.ts | 90 ++- tests/stacks-core/nft-events.test.ts | 102 ++++ tests/stacks-core/notifications.test.ts | 525 ++++++++++++++++ tests/stacks-core/sft-events.test.ts | 74 +++ tests/stacks-core/smart-contracts.test.ts | 93 +++ tests/stacks-core/snp-event-stream.test.ts | 187 ++++++ tests/token-queue/sip-validation.test.ts | 87 ++- 16 files changed, 1340 insertions(+), 1458 deletions(-) delete mode 100644 tests/chainhook/chainhook-observer.test.ts delete mode 100644 tests/chainhook/nft-events.test.ts delete mode 100644 tests/chainhook/notifications.test.ts delete mode 100644 tests/chainhook/sft-events.test.ts delete mode 100644 tests/chainhook/smart-contracts.test.ts rename tests/{chainhook => stacks-core}/ft-events.test.ts (65%) create mode 100644 tests/stacks-core/nft-events.test.ts create mode 100644 tests/stacks-core/notifications.test.ts create mode 100644 tests/stacks-core/sft-events.test.ts create mode 100644 tests/stacks-core/smart-contracts.test.ts create mode 100644 tests/stacks-core/snp-event-stream.test.ts diff --git a/package.json b/package.json index 1c3d962..a947bc9 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ "test": "jest --runInBand", "test:admin": "npm run test -- ./tests/admin/", "test:api": "npm run test -- ./tests/api/", - "test:chainhook": "npm run test -- ./tests/chainhook/", + "test:stacks-core": "npm run test -- ./tests/stacks-core/", "test:token-queue": "npm run test -- ./tests/token-queue/", 
"testenv:run": "docker compose -f docker/docker-compose.dev.postgres.yml up", "testenv:stop": "docker compose -f docker/docker-compose.dev.postgres.yml down -v -t 0", diff --git a/src/stacks-core/snp-event-stream.ts b/src/stacks-core/snp-event-stream.ts index c0843cf..4e557a7 100644 --- a/src/stacks-core/snp-event-stream.ts +++ b/src/stacks-core/snp-event-stream.ts @@ -4,7 +4,7 @@ import { StacksEventStream, StacksEventStreamType } from '@hirosystems/salt-n-pe import { EventEmitter } from 'node:events'; import { StacksCoreBlockSchema } from './schemas'; import { TypeCompiler } from '@sinclair/typebox/compiler'; -import { StacksCoreBlockProcessor } from './stacks-core-block-processor'; +import { decodeStacksCoreBlock, StacksCoreBlockProcessor } from './stacks-core-block-processor'; import { StacksCorePgStore } from '../pg/stacks-core-pg-store'; const SnpBlockCType = TypeCompiler.Compile(StacksCoreBlockSchema); @@ -64,7 +64,8 @@ export class SnpEventStreamHandler { throw new Error(`Failed to parse SNP block body: ${body}`); } try { - await this.blockProcessor.process(body); + const decodedBlock = decodeStacksCoreBlock(body); + await this.blockProcessor.processBlock(decodedBlock); this.events.emit('processedMessage', { msgId: messageId }); } catch (error) { this.logger.error(error, `Failed to process block`); diff --git a/src/stacks-core/stacks-core-block-processor.ts b/src/stacks-core/stacks-core-block-processor.ts index f5fd20e..e1c2fb4 100644 --- a/src/stacks-core/stacks-core-block-processor.ts +++ b/src/stacks-core/stacks-core-block-processor.ts @@ -39,82 +39,85 @@ export type DecodedStacksBlock = { transactions: DecodedStacksTransaction[]; }; +/** + * Decodes a Stacks Core block message into a standardized block object. + * @param block - The Stacks Core block message to decode. + * @returns The decoded Stacks Core block. + */ +export function decodeStacksCoreBlock(block: StacksCoreBlock): DecodedStacksBlock { + // Group events by transaction ID. + const events: Map = new Map(); + for (const event of block.events) { + events.set(event.txid, [...(events.get(event.txid) || []), event]); + } + // Decode transactions and sort their events by event index. + const transactions = block.transactions.map(tx => ({ + tx: tx, + decoded: decodeTransaction(tx.raw_tx.substring(2)), + events: (events.get(tx.txid) || []).sort((a, b) => a.event_index - b.event_index), + })); + // Sort transactions by transaction index. + const decodedBlock: DecodedStacksBlock = { + block_height: block.block_height, + index_block_hash: block.index_block_hash, + parent_index_block_hash: block.parent_index_block_hash, + transactions: transactions.sort((a, b) => a.tx.tx_index - b.tx.tx_index), + }; + return decodedBlock; +} + /** * Processes a Stacks Core block and writes contract deployments, token metadata updates, token * supply deltas, and token mints to the database. */ export class StacksCoreBlockProcessor { private readonly db: StacksCorePgStore; - private readonly block: DecodedStacksBlock; - - private contracts: SmartContractDeployment[] = []; - private notifications: TokenMetadataUpdateNotification[] = []; - private sftMints: SftMintEvent[] = []; - private nftMints: NftMintEvent[] = []; - private ftSupplyDelta: Map = new Map(); - static init(args: { block: StacksCoreBlock; db: StacksCorePgStore }): StacksCoreBlockProcessor { - // Group events by transaction ID. 
- const events: Map = new Map(); - for (const event of args.block.events) { - events.set(event.txid, [...(events.get(event.txid) || []), event]); - } - // Decode transactions and sort their events by event index. - const transactions = args.block.transactions.map(tx => ({ - tx: tx, - decoded: decodeTransaction(tx.raw_tx.substring(2)), - events: (events.get(tx.txid) || []).sort((a, b) => a.event_index - b.event_index), - })); - // Sort transactions by transaction index. - const decodedBlock: DecodedStacksBlock = { - block_height: args.block.block_height, - index_block_hash: args.block.index_block_hash, - parent_index_block_hash: args.block.parent_index_block_hash, - transactions: transactions.sort((a, b) => a.tx.tx_index - b.tx.tx_index), - }; - return new StacksCoreBlockProcessor({ db: args.db, decodedBlock }); - } - - constructor(args: { db: StacksCorePgStore; decodedBlock: DecodedStacksBlock }) { + constructor(args: { db: StacksCorePgStore }) { this.db = args.db; - this.block = args.decodedBlock; } - async process(): Promise { + async processBlock(block: DecodedStacksBlock): Promise { const time = stopwatch(); logger.info( - `${this.constructor.name} processing block ${this.block.block_height} #${this.block.index_block_hash}` + `${this.constructor.name} processing block ${block.block_height} #${block.index_block_hash}` ); await this.db.sqlWriteTransaction(async sql => { // Check if this block represents a re-org. Revert to its parent's chain tip if it does. const chainTip = await this.db.getChainTip(sql); - if (chainTip && chainTip.index_block_hash !== this.block.parent_index_block_hash) { + if (chainTip && chainTip.index_block_hash !== block.parent_index_block_hash) { logger.info( `${this.constructor.name} detected re-org, reverting to chain tip at parent block ${ - this.block.block_height - 1 - } ${this.block.parent_index_block_hash}` + block.block_height - 1 + } ${block.parent_index_block_hash}` ); await this.db.revertToChainTip(sql, chainTip); } + const contracts: SmartContractDeployment[] = []; + const notifications: TokenMetadataUpdateNotification[] = []; + const sftMints: SftMintEvent[] = []; + const nftMints: NftMintEvent[] = []; + const ftSupplyDelta: Map = new Map(); + // Process each transaction in the block. 
- for (const transaction of this.block.transactions) { + for (const transaction of block.transactions) { if (transaction.tx.status !== 'success') continue; - this.processTransaction(transaction); + this.processTransaction(transaction, contracts); for (const event of transaction.events) { switch (event.type) { case 'contract_event': - this.processContractEvent(transaction, event); + this.processContractEvent(transaction, event, notifications, sftMints); break; case 'ft_mint_event': - this.processFtMintEvent(event); + this.processFtMintEvent(event, ftSupplyDelta); break; case 'ft_burn_event': - this.processFtBurnEvent(event); + this.processFtBurnEvent(event, ftSupplyDelta); break; case 'nft_mint_event': - this.processNftMintEvent(transaction, event); + this.processNftMintEvent(transaction, event, nftMints); break; case 'nft_burn_event': // Burned NFTs still have their metadata in the database, so we don't need to do @@ -125,25 +128,28 @@ export class StacksCoreBlockProcessor { } await this.db.writeProcessedBlock({ - block: this.block, - contracts: this.contracts, - notifications: this.notifications, - nftMints: this.nftMints, - sftMints: this.sftMints, - ftSupplyDelta: this.ftSupplyDelta, + block, + contracts, + notifications, + nftMints, + sftMints, + ftSupplyDelta, }); }); logger.info( - `${this.constructor.name} processed block ${this.block.block_height} ${ - this.block.index_block_hash + `${this.constructor.name} processed block ${block.block_height} ${ + block.index_block_hash } in ${time.getElapsedSeconds()}s` ); } - private processTransaction(transaction: DecodedStacksTransaction) { + private processTransaction( + transaction: DecodedStacksTransaction, + contracts: SmartContractDeployment[] + ) { const deployment = getSmartContractDeployment(transaction); if (deployment) { - this.contracts.push(deployment); + contracts.push(deployment); logger.info( { contract: deployment.principal, @@ -157,11 +163,13 @@ export class StacksCoreBlockProcessor { private processContractEvent( transaction: DecodedStacksTransaction, - event: StacksCoreContractEvent + event: StacksCoreContractEvent, + notifications: TokenMetadataUpdateNotification[], + sftMints: SftMintEvent[] ) { const notification = getContractLogMetadataUpdateNotification(transaction, event); if (notification) { - this.notifications.push(notification); + notifications.push(notification); logger.info( { contract: notification.contract_id, @@ -173,7 +181,7 @@ export class StacksCoreBlockProcessor { } const mint = getContractLogSftMintEvent(transaction, event); if (mint) { - this.sftMints.push(mint); + sftMints.push(mint); logger.info( { contract: mint.contractId, @@ -186,11 +194,11 @@ export class StacksCoreBlockProcessor { } } - private processFtMintEvent(event: StacksCoreFtMintEvent) { + private processFtMintEvent(event: StacksCoreFtMintEvent, ftSupplyDelta: Map) { const principal = event.ft_mint_event.asset_identifier.split('::')[0]; - const previous = this.ftSupplyDelta.get(principal) ?? BigNumber(0); + const previous = ftSupplyDelta.get(principal) ?? 
BigNumber(0); const amount = BigNumber(event.ft_mint_event.amount); - this.ftSupplyDelta.set(principal, previous.plus(amount)); + ftSupplyDelta.set(principal, previous.plus(amount)); logger.info( { contract: principal, @@ -201,11 +209,11 @@ export class StacksCoreBlockProcessor { ); } - private processFtBurnEvent(event: StacksCoreFtBurnEvent) { + private processFtBurnEvent(event: StacksCoreFtBurnEvent, ftSupplyDelta: Map) { const principal = event.ft_burn_event.asset_identifier.split('::')[0]; - const previous = this.ftSupplyDelta.get(principal) ?? BigNumber(0); + const previous = ftSupplyDelta.get(principal) ?? BigNumber(0); const amount = BigNumber(event.ft_burn_event.amount); - this.ftSupplyDelta.set(principal, previous.minus(amount)); + ftSupplyDelta.set(principal, previous.minus(amount)); logger.info( { contract: principal, @@ -218,13 +226,14 @@ export class StacksCoreBlockProcessor { private processNftMintEvent( transaction: DecodedStacksTransaction, - event: StacksCoreNftMintEvent + event: StacksCoreNftMintEvent, + nftMints: NftMintEvent[] ) { const value = decodeClarityValue(event.nft_mint_event.raw_value); if (value.type_id === ClarityTypeID.UInt) { const principal = event.nft_mint_event.asset_identifier.split('::')[0]; const tokenId = BigInt(value.value); - this.nftMints.push({ + nftMints.push({ tx_id: transaction.tx.txid, tx_index: transaction.tx.tx_index, event_index: event.event_index, diff --git a/tests/chainhook/chainhook-observer.test.ts b/tests/chainhook/chainhook-observer.test.ts deleted file mode 100644 index 024a875..0000000 --- a/tests/chainhook/chainhook-observer.test.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { cvToHex, tupleCV, bufferCV, uintCV, stringUtf8CV } from '@stacks/transactions'; -import { DbSipNumber } from '../../src/pg/types'; -import { cycleMigrations } from '@hirosystems/api-toolkit'; -import { ENV } from '../../src/env'; -import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; -import { - insertAndEnqueueTestContractWithTokens, - markAllJobsAsDone, - TestChainhookPayloadBuilder, -} from '../helpers'; - -describe('Chainhook observer', () => { - let db: PgStore; - - beforeEach(async () => { - ENV.PGDATABASE = 'postgres'; - db = await PgStore.connect({ skipMigrations: true }); - await cycleMigrations(MIGRATIONS_DIR); - }); - - afterEach(async () => { - await db.close(); - }); - - describe('chain tip', () => { - test('updates chain tip on chainhook event', async () => { - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', { - maps: [], - functions: [], - variables: [], - fungible_tokens: [], - non_fungible_tokens: [], - }) - .build() - ); - await expect(db.getChainTipBlockHeight()).resolves.toBe(100); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 101 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - topic: 'print', - raw_value: cvToHex(stringUtf8CV('test')), - }, - }) - .build() - ); - await expect(db.getChainTipBlockHeight()).resolves.toBe(101); - }); - - test('enqueues dynamic tokens for refresh with standard interval', async () => { - const address = 
'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 86400; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - // Mark as dynamic - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 90 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'update-mode': bufferCV(Buffer.from('dynamic')), - }), - }) - ), - }, - }) - .build() - ); - // Set updated_at for testing. - await db.sql` - UPDATE tokens - SET updated_at = NOW() - INTERVAL '2 days' - WHERE id = 1 - `; - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 95 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - topic: 'print', - raw_value: cvToHex(stringUtf8CV('test')), - }, - }) - .build() - ); - - const job = await db.getJob({ id: 2 }); - expect(job?.status).toBe('pending'); - }); - - test('enqueues dynamic tokens for refresh with ttl', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 99999; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - // Mark as dynamic - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 90 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'update-mode': bufferCV(Buffer.from('dynamic')), - ttl: uintCV(3600), - }), - }) - ), - }, - }) - .build() - ); - // Set updated_at for testing - await db.sql` - UPDATE tokens - SET updated_at = NOW() - INTERVAL '2 hours' - WHERE id = 1 - `; - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 95 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - topic: 'print', - raw_value: cvToHex(stringUtf8CV('test')), - }, - }) - .build() - ); - - const job = await db.getJob({ id: 2 }); - expect(job?.status).toBe('pending'); - }); - }); -}); diff --git a/tests/chainhook/nft-events.test.ts b/tests/chainhook/nft-events.test.ts deleted file mode 100644 index 0770f37..0000000 --- a/tests/chainhook/nft-events.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { cvToHex, uintCV } from '@stacks/transactions'; -import { DbSipNumber } from '../../src/pg/types'; 
-import { cycleMigrations } from '@hirosystems/api-toolkit'; -import { ENV } from '../../src/env'; -import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; -import { - insertAndEnqueueTestContractWithTokens, - getJobCount, - getTokenCount, - markAllJobsAsDone, - TestChainhookPayloadBuilder, - SIP_009_ABI, -} from '../helpers'; - -describe('NFT events', () => { - let db: PgStore; - - beforeEach(async () => { - ENV.PGDATABASE = 'postgres'; - db = await PgStore.connect({ skipMigrations: true }); - await cycleMigrations(MIGRATIONS_DIR); - }); - - afterEach(async () => { - await db.close(); - }); - - test('NFT mint enqueues metadata fetch', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - await markAllJobsAsDone(db); - - // Get 4th token via mint - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'NFTMintEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::friedger-nft`, - recipient: address, - raw_value: cvToHex(uintCV(4)), - }, - }) - .build() - ); - - const jobs = await db.getPendingJobBatch({ limit: 1 }); - expect(jobs).toHaveLength(1); - expect(jobs[0].token_id).toBe(4); - await expect(db.getToken({ id: 4 })).resolves.not.toBeUndefined(); - }); - - test('NFT contract can start with zero tokens', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 90 }) - .transaction({ hash: '0x01', sender: address }) - .contractDeploy(contractId, SIP_009_ABI) - .build() - ); - await db.updateSmartContractTokenCount({ id: 1, count: 0n }); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'NFTMintEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::crashpunks-v2`, - recipient: address, - raw_value: cvToHex(uintCV(1)), - }, - }) - .build() - ); - - const jobs = await db.getPendingJobBatch({ limit: 1 }); - expect(jobs).toHaveLength(1); - expect(jobs[0].token_id).toBe(1); - await expect(db.getToken({ id: 1 })).resolves.not.toBeUndefined(); - }); - - test('NFT mint is ignored if contract does not exist', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'NFTMintEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::crashpunks-v2`, - recipient: address, - raw_value: cvToHex(uintCV(1)), - }, - }) - .build() - ); - - const jobs = await db.getPendingJobBatch({ limit: 1 }); - expect(jobs).toHaveLength(0); - await expect(db.getToken({ id: 1 })).resolves.toBeUndefined(); - }); - - test('NFT mint roll back removes token', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - await 
markAllJobsAsDone(db); - - // Roll back token 3 - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'NFTMintEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::friedger-nft`, - recipient: address, - raw_value: cvToHex(uintCV(3)), - }, - }) - .build() - ); - - await expect(getTokenCount(db)).resolves.toBe('2'); - await expect(getJobCount(db)).resolves.toBe('3'); // Only the contract + other token jobs - }); -}); diff --git a/tests/chainhook/notifications.test.ts b/tests/chainhook/notifications.test.ts deleted file mode 100644 index 5f8c415..0000000 --- a/tests/chainhook/notifications.test.ts +++ /dev/null @@ -1,559 +0,0 @@ -import { cvToHex, tupleCV, bufferCV, listCV, uintCV, stringUtf8CV } from '@stacks/transactions'; -import { DbSipNumber } from '../../src/pg/types'; -import { cycleMigrations } from '@hirosystems/api-toolkit'; -import { ENV } from '../../src/env'; -import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; -import { - getLatestContractTokenNotifications, - getLatestTokenNotification, - insertAndEnqueueTestContractWithTokens, - markAllJobsAsDone, - TestChainhookPayloadBuilder, -} from '../helpers'; - -describe('token metadata notifications', () => { - let db: PgStore; - - beforeEach(async () => { - ENV.PGDATABASE = 'postgres'; - db = await PgStore.connect({ skipMigrations: true }); - await cycleMigrations(MIGRATIONS_DIR); - }); - - afterEach(async () => { - await db.close(); - }); - - test('enqueues notification for all tokens in contract', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - }), - }) - ), - }, - }) - .build() - ); - - await expect(db.getPendingJobBatch({ limit: 10 })).resolves.toHaveLength(3); - const notifs = await getLatestContractTokenNotifications(db, contractId); - expect(notifs).toHaveLength(3); - expect(notifs[0].token_id).toBe(1); - expect(notifs[0].update_mode).toBe('standard'); - expect(notifs[0].block_height).toBe(100); - }); - - test('enqueues notification for specific tokens in contract', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 
'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1), uintCV(2)]), - }), - }) - ), - }, - }) - .build() - ); - - const jobs = await db.getPendingJobBatch({ limit: 10 }); - expect(jobs.length).toBe(2); // Only two tokens - expect(jobs[0].token_id).toBe(1); - await expect(getLatestTokenNotification(db, 1)).resolves.not.toBeUndefined(); - expect(jobs[1].token_id).toBe(2); - await expect(getLatestTokenNotification(db, 2)).resolves.not.toBeUndefined(); - await expect(getLatestTokenNotification(db, 3)).resolves.toBeUndefined(); - }); - - test('updates token refresh mode', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - 'update-mode': stringUtf8CV('frozen'), // Mark as frozen. - }), - }) - ), - }, - }) - .build() - ); - - const notif = await getLatestTokenNotification(db, 1); - expect(notif?.update_mode).toBe('frozen'); - }); - - test('ignores notification for frozen tokens', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - await markAllJobsAsDone(db); - - // Mark as frozen - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 90 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - 'update-mode': bufferCV(Buffer.from('frozen')), - }), - }) - ), - }, - }) - .build() - ); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - }), - }) - ), - }, - }) - .build() - ); - - const jobs2 = await db.getPendingJobBatch({ limit: 10 }); - expect(jobs2.length).toBe(0); // No tokens queued. 
- const notif = await getLatestTokenNotification(db, 1); - expect(notif).not.toBeUndefined(); - expect(notif?.block_height).toBe(90); - expect(notif?.update_mode).toBe('frozen'); // Keeps the old frozen notif - }); - - test('second token notification replaces previous', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 90 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - 'update-mode': bufferCV(Buffer.from('dynamic')), - ttl: uintCV(3600), - }), - }) - ), - }, - }) - .build() - ); - await markAllJobsAsDone(db); - const notif1 = await getLatestTokenNotification(db, 1); - expect(notif1).not.toBeUndefined(); - expect(notif1?.block_height).toBe(90); - expect(notif1?.update_mode).toBe('dynamic'); - expect(notif1?.ttl).toBe('3600'); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - }), - }) - ), - }, - }) - .build() - ); - - const notif2 = await getLatestTokenNotification(db, 1); - expect(notif2).not.toBeUndefined(); - expect(notif2?.block_height).toBe(100); - expect(notif2?.update_mode).toBe('standard'); - expect(notif2?.ttl).toBeNull(); - }); - - test('contract notification replaces token notification', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 90 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - }), - }) - ), - }, - }) - .build() - ); - await markAllJobsAsDone(db); - const notif1 = await getLatestTokenNotification(db, 1); - expect(notif1).not.toBeUndefined(); - expect(notif1?.block_height).toBe(90); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - 
contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - }), - }) - ), - }, - }) - .build() - ); - - const notif2 = await getLatestTokenNotification(db, 1); - expect(notif2).not.toBeUndefined(); - expect(notif2?.block_height).toBe(100); - }); - - test('rolls back notification', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 101 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - }), - }) - ), - }, - }) - .build() - ); - await markAllJobsAsDone(db); - await expect(getLatestTokenNotification(db, 1)).resolves.not.toBeUndefined(); - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 101 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - }), - }) - ), - }, - }) - .build() - ); - await expect(getLatestTokenNotification(db, 1)).resolves.toBeUndefined(); - }); - - test('second notification rollback restores pointer to the first notification', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - - // Write 2 notifications, test rollback changes ref to old notification. 
- await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - }), - }) - ), - }, - }) - .build() - ); - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 101 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - 'update-mode': bufferCV(Buffer.from('frozen')), - }), - }) - ), - }, - }) - .build() - ); - await markAllJobsAsDone(db); - const notif2 = await getLatestTokenNotification(db, 1); - expect(notif2).not.toBeUndefined(); - expect(notif2?.block_height).toBe(101); - expect(notif2?.update_mode).toBe('frozen'); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 101 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([uintCV(1)]), - 'update-mode': bufferCV(Buffer.from('frozen')), - }), - }) - ), - }, - }) - .build() - ); - const notif1 = await getLatestTokenNotification(db, 1); - expect(notif1).not.toBeUndefined(); - expect(notif1?.block_height).toBe(100); - expect(notif1?.update_mode).toBe('standard'); - }); - - test('ignores other contract log events', async () => { - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - topic: 'print', - raw_value: cvToHex(stringUtf8CV('test')), - }, - }) - .build() - ); - await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); - }); - - test('ignores notification from incorrect sender', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - // Incorrect sender - .transaction({ hash: '0x01', sender: 'SP29BPZ6BD5D8509Y9VP70J0V7VKKDDFCRPHA0T6A' }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: 'SP29BPZ6BD5D8509Y9VP70J0V7VKKDDFCRPHA0T6A.another-contract', - 
topic: 'print', - raw_value: cvToHex( - tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - }), - }) - ), - }, - }) - .build() - ); - - await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); - }); -}); diff --git a/tests/chainhook/sft-events.test.ts b/tests/chainhook/sft-events.test.ts deleted file mode 100644 index a2d589a..0000000 --- a/tests/chainhook/sft-events.test.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { cvToHex, tupleCV, bufferCV, uintCV } from '@stacks/transactions'; -import { DbSipNumber, DbTokenType } from '../../src/pg/types'; -import { cycleMigrations } from '@hirosystems/api-toolkit'; -import { ENV } from '../../src/env'; -import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; -import { - insertAndEnqueueTestContract, - insertAndEnqueueTestContractWithTokens, - getJobCount, - getTokenCount, - TestChainhookPayloadBuilder, - markAllJobsAsDone, -} from '../helpers'; - -describe('SFT events', () => { - let db: PgStore; - - beforeEach(async () => { - ENV.PGDATABASE = 'postgres'; - db = await PgStore.connect({ skipMigrations: true }); - await cycleMigrations(MIGRATIONS_DIR); - }); - - afterEach(async () => { - await db.close(); - }); - - test('SFT mint enqueues minted token for valid contract', async () => { - const address = 'SP3K8BC0PPEVCV7NZ6QSRWPQ2JE9E5B6N3PA0KBR9'; - const contractId = `${address}.key-alex-autoalex-v1`; - await insertAndEnqueueTestContract(db, contractId, DbSipNumber.sip013); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - type: bufferCV(Buffer.from('sft_mint')), - recipient: bufferCV(Buffer.from(address)), - 'token-id': uintCV(3), - amount: uintCV(1000), - }) - ), - }, - }) - // Try a duplicate of the same token but different amount - .event({ - type: 'SmartContractEvent', - position: { index: 1 }, - data: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - type: bufferCV(Buffer.from('sft_mint')), - recipient: bufferCV(Buffer.from(address)), - 'token-id': uintCV(3), - amount: uintCV(200), - }) - ), - }, - }) - .build() - ); - - const token = await db.getToken({ id: 1 }); - expect(token?.type).toBe(DbTokenType.sft); - expect(token?.token_number).toBe('3'); - const jobs = await db.getPendingJobBatch({ limit: 1 }); - expect(jobs).toHaveLength(1); - expect(jobs[0].token_id).toBe(1); - }); - - test('rolls back SFT mint', async () => { - const address = 'SP3K8BC0PPEVCV7NZ6QSRWPQ2JE9E5B6N3PA0KBR9'; - const contractId = 'key-alex-autoalex-v1'; - const principal = `${address}.${contractId}`; - await insertAndEnqueueTestContractWithTokens(db, principal, DbSipNumber.sip013, 1n); - await markAllJobsAsDone(db); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'SmartContractEvent', - position: { index: 0 }, - data: { - contract_identifier: principal, - topic: 'print', - raw_value: cvToHex( - tupleCV({ - type: bufferCV(Buffer.from('sft_mint')), - recipient: bufferCV(Buffer.from(address)), - 'token-id': uintCV(1), - amount: 
uintCV(1000), - }) - ), - }, - }) - .build() - ); - - await expect(getTokenCount(db)).resolves.toBe('0'); - await expect(getJobCount(db)).resolves.toBe('1'); // Only the smart contract job - }); -}); diff --git a/tests/chainhook/smart-contracts.test.ts b/tests/chainhook/smart-contracts.test.ts deleted file mode 100644 index a65da72..0000000 --- a/tests/chainhook/smart-contracts.test.ts +++ /dev/null @@ -1,174 +0,0 @@ -import { DbSipNumber, DbSmartContract } from '../../src/pg/types'; -import { cycleMigrations } from '@hirosystems/api-toolkit'; -import { ENV } from '../../src/env'; -import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; -import { - insertAndEnqueueTestContract, - insertAndEnqueueTestContractWithTokens, - getJobCount, - getTokenCount, - SIP_009_ABI, - TestChainhookPayloadBuilder, -} from '../helpers'; -import { ProcessSmartContractJob } from '../../src/token-processor/queue/job/process-smart-contract-job'; -import { ProcessTokenJob } from '../../src/token-processor/queue/job/process-token-job'; - -describe('contract deployments', () => { - let db: PgStore; - - beforeEach(async () => { - ENV.PGDATABASE = 'postgres'; - db = await PgStore.connect({ skipMigrations: true }); - await cycleMigrations(MIGRATIONS_DIR); - }); - - afterEach(async () => { - await db.close(); - }); - - test('enqueues valid token contract', async () => { - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', SIP_009_ABI) - .build() - ); - const dbContract = await db.getSmartContract({ id: 1 }); - expect(dbContract?.sip).toBe(DbSipNumber.sip009); - expect(dbContract?.principal).toBe( - 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft' - ); - await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(1); - }); - - test('ignores token contract from a failed transaction', async () => { - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ - hash: '0x01', - sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', - success: false, // Failed - }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', SIP_009_ABI) - .build() - ); - await expect(db.getSmartContract({ id: 1 })).resolves.toBeUndefined(); - await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); - }); - - test('ignores non-token contract', async () => { - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', { - maps: [], - functions: [], - variables: [], - fungible_tokens: [], - non_fungible_tokens: [], - }) - .build() - ); - await expect(db.getSmartContract({ id: 1 })).resolves.toBeUndefined(); - await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); - }); - - test('rolls back contract', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; - await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 100 }) - .transaction({ hash: '0x01', 
sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', SIP_009_ABI) - .build() - ); - - // Everything is deleted. - await expect(db.getSmartContract({ principal: contractId })).resolves.toBeUndefined(); - await expect(getTokenCount(db)).resolves.toBe('0'); - await expect(getJobCount(db)).resolves.toBe('0'); - }); - - test('contract roll back handles in-flight job correctly', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const principal = `${address}.friedger-pool-nft`; - const job = await insertAndEnqueueTestContract(db, principal, DbSipNumber.sip009); - const contract = (await db.getSmartContract({ principal })) as DbSmartContract; - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', SIP_009_ABI) - .build() - ); - - const handler = new ProcessSmartContractJob({ db, job }); - await expect(handler.work()).resolves.not.toThrow(); - await expect(handler['enqueueTokens'](contract, 1n)).resolves.not.toThrow(); - }); - - test('contract roll back handles in-flight token jobs correctly', async () => { - const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const principal = `${address}.friedger-pool-nft`; - const jobs = await insertAndEnqueueTestContractWithTokens( - db, - principal, - DbSipNumber.sip009, - 1n - ); - - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .rollback() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', SIP_009_ABI) - .build() - ); - - const handler = new ProcessTokenJob({ db, job: jobs[0] }); - await expect(handler.work()).resolves.not.toThrow(); - await expect( - db.updateProcessedTokenWithMetadata({ - id: 1, - values: { - token: { - name: 'test', - symbol: 'TEST', - decimals: 4, - total_supply: '200', - uri: 'http://test.com', - }, - metadataLocales: [ - { - metadata: { - sip: 16, - token_id: 1, - name: 'test', - l10n_locale: 'en', - l10n_uri: 'http://test.com', - l10n_default: true, - description: 'test', - image: 'http://test.com', - cached_image: 'http://test.com', - cached_thumbnail_image: 'http://test.com', - }, - }, - ], - }, - }) - ).resolves.not.toThrow(); - }); -}); diff --git a/tests/helpers.ts b/tests/helpers.ts index 7aaf40c..8c188ff 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -7,6 +7,18 @@ import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; import { SmartContractDeployment } from '../src/token-processor/util/sip-validation'; import { DbJob, DbSipNumber, DbSmartContract, DbUpdateNotification } from '../src/pg/types'; import { waiter } from '@hirosystems/api-toolkit'; +import { + DecodedStacksBlock, + DecodedStacksTransaction, +} from '../src/stacks-core/stacks-core-block-processor'; +import { + AnchorModeID, + DecodedTxResult, + PostConditionModeID, + TransactionVersion, + TxPayloadTypeID, +} from '@hirosystems/stacks-encoding-native-js'; +import { ClarityAbi } from '@stacks/transactions'; export type TestFastifyServer = FastifyInstance< Server, @@ -61,7 +73,7 @@ export async function closeTestServer(server: http.Server) { await serverDone; } -export const SIP_009_ABI = { +export const SIP_009_ABI: ClarityAbi = { maps: 
[ { key: { @@ -447,7 +459,7 @@ export const SIP_009_ABI = { non_fungible_tokens: [{ name: 'crashpunks-v2', type: 'uint128' }], }; -export const SIP_010_ABI = { +export const SIP_010_ABI: ClarityAbi = { maps: [], functions: [ { @@ -761,7 +773,7 @@ export const SIP_010_ABI = { non_fungible_tokens: [], }; -export const SIP_013_ABI = { +export const SIP_013_ABI: ClarityAbi = { maps: [ { key: 'principal', name: 'approved-contracts', value: 'bool' }, { @@ -1271,135 +1283,6 @@ export const SIP_013_ABI = { non_fungible_tokens: [], }; -export class TestChainhookPayloadBuilder { - private payload: StacksPayload = { - apply: [], - rollback: [], - chainhook: { - uuid: 'test', - predicate: { - scope: 'block_height', - higher_than: 0, - }, - is_streaming_blocks: true, - }, - events: [], - }; - private action: 'apply' | 'rollback' = 'apply'; - private get lastBlock(): StacksEvent { - return this.payload[this.action][this.payload[this.action].length - 1] as StacksEvent; - } - private get lastBlockTx(): StacksTransaction { - return this.lastBlock.transactions[this.lastBlock.transactions.length - 1]; - } - - streamingBlocks(streaming: boolean): this { - this.payload.chainhook.is_streaming_blocks = streaming; - return this; - } - - apply(): this { - this.action = 'apply'; - return this; - } - - rollback(): this { - this.action = 'rollback'; - return this; - } - - block(args: { height: number; hash?: string; timestamp?: number }): this { - this.payload[this.action].push({ - block_identifier: { - hash: args.hash ?? '0x9430a78c5e166000980136a22764af72ff0f734b2108e33cfe5f9e3d4430adda', - index: args.height, - }, - metadata: { - bitcoin_anchor_block_identifier: { - hash: '0x0000000000000000000bb26339f877f36e92d5a11d75fc2e34aed3f7623937fe', - index: 705573, - }, - confirm_microblock_identifier: null, - pox_cycle_index: 18, - pox_cycle_length: 2100, - pox_cycle_position: 1722, - stacks_block_hash: '0xbccf63ec2438cf497786ce617ec7e64e2b27ee023a28a0927ee36b81870115d2', - tenure_height: null, - block_time: null, - signer_bitvec: null, - signer_signature: null, - signer_public_keys: null, - cycle_number: null, - reward_set: null, - }, - parent_block_identifier: { - hash: '0xca71af03f9a3012491af2f59f3244ecb241551803d641f8c8306ffa1187938b4', - index: args.height - 1, - }, - timestamp: 1634572508, - transactions: [], - }); - return this; - } - - transaction(args: { hash: string; sender?: string; success?: boolean }): this { - this.lastBlock.transactions.push({ - metadata: { - contract_abi: null, - description: 'description', - execution_cost: { - read_count: 5, - read_length: 5526, - runtime: 6430000, - write_count: 2, - write_length: 1, - }, - fee: 2574302, - kind: { type: 'Coinbase' }, - nonce: 8665, - position: { index: 1 }, - proof: null, - raw_tx: '0x00', - receipt: { - contract_calls_stack: [], - events: [], - mutated_assets_radius: [], - mutated_contracts_radius: ['SP466FNC0P7JWTNM2R9T199QRZN1MYEDTAR0KP27.miamicoin-token'], - }, - result: '(ok true)', - sender: args.sender ?? 'SP3HXJJMJQ06GNAZ8XWDN1QM48JEDC6PP6W3YZPZJ', - success: args.success ?? 
true, - }, - operations: [], - transaction_identifier: { - hash: args.hash, - }, - }); - return this; - } - - event(args: StacksTransactionEvent): this { - this.lastBlockTx.metadata.receipt.events.push(args); - return this; - } - - contractDeploy(contract_identifier: string, abi: any): this { - this.lastBlockTx.metadata.kind = { - data: { - code: 'code', - contract_identifier, - }, - type: 'ContractDeployment', - }; - this.lastBlockTx.metadata.contract_abi = abi; - return this; - } - - build(): StacksPayload { - return this.payload; - } -} - export async function insertAndEnqueueTestContract( db: PgStore, principal: string, @@ -1407,17 +1290,18 @@ export async function insertAndEnqueueTestContract( tx_id?: string ): Promise { return await db.sqlWriteTransaction(async sql => { - const cache = new BlockCache({ hash: '0x000001', index: 1 }); - const deploy: CachedEvent = { - event: { - principal, - sip, - fungible_token_name: sip == DbSipNumber.sip010 ? 'ft-token' : undefined, - }, + const block = { + block_height: 1, + index_block_hash: '0x000001', + } as DecodedStacksBlock; + const deploy: SmartContractDeployment = { + principal, + sip, + fungible_token_name: sip == DbSipNumber.sip010 ? 'ft-token' : undefined, tx_id: tx_id ?? '0x123456', tx_index: 0, }; - await db.chainhook.applyContractDeployment(sql, deploy, cache); + await db.core.applyContractDeployment(sql, deploy, block); const smart_contract = (await db.getSmartContract({ principal })) as DbSmartContract; const jobs = await sql` @@ -1437,7 +1321,7 @@ export async function insertAndEnqueueTestContractWithTokens( return await db.sqlWriteTransaction(async sql => { await insertAndEnqueueTestContract(db, principal, sip, tx_id); const smart_contract = (await db.getSmartContract({ principal })) as DbSmartContract; - await db.chainhook.insertAndEnqueueSequentialTokens(sql, { + await db.core.insertAndEnqueueSequentialTokens(sql, { smart_contract, token_count, }); @@ -1496,3 +1380,154 @@ export async function getLatestContractTokenNotifications( ORDER BY token_id, block_height DESC, tx_index DESC, event_index DESC `; } + +export type TestTransactionArgs = { + tx_id?: string; + tx_index?: number; + sender?: string; + status?: 'success' | 'abort_by_response' | 'abort_by_post_condition'; + contract_interface?: string; +}; + +export class TestTransactionBuilder { + private readonly transaction: DecodedStacksTransaction; + + constructor(args: TestTransactionArgs) { + this.transaction = { + tx: { + txid: args.tx_id ?? '0x01', + tx_index: args.tx_index ?? 0, + raw_tx: '', + status: args.status ?? 'success', + contract_interface: args.contract_interface ?? null, + }, + decoded: { + auth: { + origin_condition: { + signer: { + address: args.sender ?? 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }, + }, + } as DecodedTxResult['auth'], + tx_id: args.tx_id ?? 
'0x01', + version: TransactionVersion.Mainnet, + chain_id: 1, + anchor_mode: AnchorModeID.Any, + post_condition_mode: PostConditionModeID.Deny, + post_conditions: [], + post_conditions_buffer: '', + payload: { + type_id: TxPayloadTypeID.Coinbase, + payload_buffer: '', + }, + }, + events: [], + }; + } + + setSmartContractPayload(contract_name: string, abi: ClarityAbi): TestTransactionBuilder { + this.transaction.decoded.payload = { + type_id: TxPayloadTypeID.SmartContract, + contract_name, + code_body: 'some-code-body', + }; + this.transaction.tx.contract_interface = JSON.stringify(abi); + return this; + } + + addFtMintEvent( + asset_identifier: string, + recipient: string, + amount: string + ): TestTransactionBuilder { + this.transaction.events.push({ + type: 'ft_mint_event', + ft_mint_event: { + asset_identifier, + recipient, + amount, + }, + event_index: this.transaction.events.length, + txid: this.transaction.tx.txid, + }); + return this; + } + + addFtBurnEvent(asset_identifier: string, sender: string, amount: string): TestTransactionBuilder { + this.transaction.events.push({ + type: 'ft_burn_event', + ft_burn_event: { + asset_identifier, + sender, + amount, + }, + event_index: this.transaction.events.length, + txid: this.transaction.tx.txid, + }); + return this; + } + + addNftMintEvent( + asset_identifier: string, + recipient: string, + raw_value: string + ): TestTransactionBuilder { + this.transaction.events.push({ + type: 'nft_mint_event', + nft_mint_event: { + asset_identifier, + recipient, + raw_value, + }, + event_index: this.transaction.events.length, + txid: this.transaction.tx.txid, + }); + return this; + } + + addContractEvent(contract_identifier: string, raw_value: string): TestTransactionBuilder { + this.transaction.events.push({ + type: 'contract_event', + contract_event: { + contract_identifier, + topic: 'print', + raw_value, + }, + event_index: this.transaction.events.length, + txid: this.transaction.tx.txid, + }); + return this; + } + + build(): DecodedStacksTransaction { + return this.transaction; + } +} + +export type TestBlockArgs = { + block_height?: number; + index_block_hash?: string; + parent_index_block_hash?: string; +}; + +export class TestBlockBuilder { + private readonly block: DecodedStacksBlock; + + constructor(args: TestBlockArgs) { + this.block = { + block_height: args.block_height ?? 1, + index_block_hash: args.index_block_hash ?? '0x000001', + parent_index_block_hash: args.parent_index_block_hash ?? 
'0x000000', + transactions: [], + }; + } + + addTransaction(transaction: DecodedStacksTransaction): TestBlockBuilder { + this.block.transactions.push(transaction); + return this; + } + + build(): DecodedStacksBlock { + return this.block; + } +} diff --git a/tests/chainhook/ft-events.test.ts b/tests/stacks-core/ft-events.test.ts similarity index 65% rename from tests/chainhook/ft-events.test.ts rename to tests/stacks-core/ft-events.test.ts index 8542119..e0c9447 100644 --- a/tests/chainhook/ft-events.test.ts +++ b/tests/stacks-core/ft-events.test.ts @@ -6,16 +6,20 @@ import { insertAndEnqueueTestContractWithTokens, getTokenCount, markAllJobsAsDone, - TestChainhookPayloadBuilder, + TestTransactionBuilder, + TestBlockBuilder, } from '../helpers'; +import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; describe('FT events', () => { let db: PgStore; + let processor: StacksCoreBlockProcessor; beforeEach(async () => { ENV.PGDATABASE = 'postgres'; db = await PgStore.connect({ skipMigrations: true }); await cycleMigrations(MIGRATIONS_DIR); + processor = new StacksCoreBlockProcessor({ db: db.core }); }); afterEach(async () => { @@ -40,20 +44,13 @@ describe('FT events', () => { let token = await db.getToken({ id: 1 }); expect(token?.total_supply).toBe('10000'); - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'FTMintEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::usdc`, - recipient: address, - amount: '2000', - }, - }) + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addFtMintEvent(`${contractId}::usdc`, address, '2000') + .build() + ) .build() ); @@ -67,20 +64,13 @@ describe('FT events', () => { await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip010, 1n); await markAllJobsAsDone(db); - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'FTMintEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::usdc`, - recipient: address, - amount: '2000', - }, - }) + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addFtMintEvent(`${contractId}::usdc`, address, '2000') + .build() + ) .build() ); @@ -107,20 +97,13 @@ describe('FT events', () => { let token = await db.getToken({ id: 1 }); expect(token?.total_supply).toBe('10000'); - await db.chainhook.processPayload( - new TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'FTBurnEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::usdc`, - sender: address, - amount: '2000', - }, - }) + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addFtBurnEvent(`${contractId}::usdc`, address, '2000') + .build() + ) .build() ); @@ -134,20 +117,13 @@ describe('FT events', () => { await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip010, 1n); await markAllJobsAsDone(db); - await db.chainhook.processPayload( - new 
TestChainhookPayloadBuilder() - .apply() - .block({ height: 100 }) - .transaction({ hash: '0x01', sender: address }) - .event({ - type: 'FTBurnEvent', - position: { index: 0 }, - data: { - asset_identifier: `${contractId}::usdc`, - sender: address, - amount: '2000', - }, - }) + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addFtBurnEvent(`${contractId}::usdc`, address, '2000') + .build() + ) .build() ); diff --git a/tests/stacks-core/nft-events.test.ts b/tests/stacks-core/nft-events.test.ts new file mode 100644 index 0000000..83bf134 --- /dev/null +++ b/tests/stacks-core/nft-events.test.ts @@ -0,0 +1,102 @@ +import { cvToHex, uintCV } from '@stacks/transactions'; +import { DbSipNumber } from '../../src/pg/types'; +import { cycleMigrations } from '@hirosystems/api-toolkit'; +import { ENV } from '../../src/env'; +import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; +import { + insertAndEnqueueTestContractWithTokens, + markAllJobsAsDone, + TestTransactionBuilder, + TestBlockBuilder, + SIP_009_ABI, +} from '../helpers'; +import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; + +describe('NFT events', () => { + let db: PgStore; + let processor: StacksCoreBlockProcessor; + + beforeEach(async () => { + ENV.PGDATABASE = 'postgres'; + db = await PgStore.connect({ skipMigrations: true }); + await cycleMigrations(MIGRATIONS_DIR); + processor = new StacksCoreBlockProcessor({ db: db.core }); + }); + + afterEach(async () => { + await db.close(); + }); + + test('NFT mint enqueues metadata fetch', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); + await markAllJobsAsDone(db); + + // Get 4th token via mint + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addNftMintEvent(`${contractId}::friedger-nft`, address, cvToHex(uintCV(4))) + .build() + ) + .build() + ); + + const jobs = await db.getPendingJobBatch({ limit: 1 }); + expect(jobs).toHaveLength(1); + expect(jobs[0].token_id).toBe(4); + await expect(db.getToken({ id: 4 })).resolves.not.toBeUndefined(); + }); + + test('NFT contract can start with zero tokens', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await processor.processBlock( + new TestBlockBuilder({ block_height: 90 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .setSmartContractPayload(contractId, SIP_009_ABI) + .build() + ) + .build() + ); + await db.updateSmartContractTokenCount({ id: 1, count: 0n }); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addNftMintEvent(`${contractId}::crashpunks-v2`, address, cvToHex(uintCV(1))) + .build() + ) + .build() + ); + + const jobs = await db.getPendingJobBatch({ limit: 1 }); + expect(jobs).toHaveLength(1); + expect(jobs[0].token_id).toBe(1); + await expect(db.getToken({ id: 1 })).resolves.not.toBeUndefined(); + }); + + test('NFT mint is ignored if contract does not exist', async () => { + const address = 
'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addNftMintEvent(`${contractId}::crashpunks-v2`, address, cvToHex(uintCV(1))) + .build() + ) + .build() + ); + + const jobs = await db.getPendingJobBatch({ limit: 1 }); + expect(jobs).toHaveLength(0); + await expect(db.getToken({ id: 1 })).resolves.toBeUndefined(); + }); +}); diff --git a/tests/stacks-core/notifications.test.ts b/tests/stacks-core/notifications.test.ts new file mode 100644 index 0000000..9ba1392 --- /dev/null +++ b/tests/stacks-core/notifications.test.ts @@ -0,0 +1,525 @@ +import { cvToHex, tupleCV, bufferCV, listCV, uintCV, stringUtf8CV } from '@stacks/transactions'; +import { DbSipNumber } from '../../src/pg/types'; +import { cycleMigrations } from '@hirosystems/api-toolkit'; +import { ENV } from '../../src/env'; +import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; +import { + getLatestContractTokenNotifications, + getLatestTokenNotification, + insertAndEnqueueTestContractWithTokens, + markAllJobsAsDone, + TestTransactionBuilder, + TestBlockBuilder, +} from '../helpers'; +import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; + +describe('token metadata notifications', () => { + let db: PgStore; + let processor: StacksCoreBlockProcessor; + + beforeEach(async () => { + ENV.PGDATABASE = 'postgres'; + db = await PgStore.connect({ skipMigrations: true }); + await cycleMigrations(MIGRATIONS_DIR); + processor = new StacksCoreBlockProcessor({ db: db.core }); + }); + + afterEach(async () => { + await db.close(); + }); + + test('enqueues notification for all tokens in contract', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + }), + }) + ) + ) + .build() + ) + .build() + ); + + await expect(db.getPendingJobBatch({ limit: 10 })).resolves.toHaveLength(3); + const notifs = await getLatestContractTokenNotifications(db, contractId); + expect(notifs).toHaveLength(3); + expect(notifs[0].token_id).toBe(1); + expect(notifs[0].update_mode).toBe('standard'); + expect(notifs[0].block_height).toBe(100); + }); + + test('enqueues notification for specific tokens in contract', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': 
bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1), uintCV(2)]), + }), + }) + ) + ) + .build() + ) + .build() + ); + + const jobs = await db.getPendingJobBatch({ limit: 10 }); + expect(jobs.length).toBe(2); // Only two tokens + expect(jobs[0].token_id).toBe(1); + await expect(getLatestTokenNotification(db, 1)).resolves.not.toBeUndefined(); + expect(jobs[1].token_id).toBe(2); + await expect(getLatestTokenNotification(db, 2)).resolves.not.toBeUndefined(); + await expect(getLatestTokenNotification(db, 3)).resolves.toBeUndefined(); + }); + + test('updates token refresh mode', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1)]), + 'update-mode': stringUtf8CV('frozen'), // Mark as frozen. + }), + }) + ) + ) + .build() + ) + .build() + ); + + const notif = await getLatestTokenNotification(db, 1); + expect(notif?.update_mode).toBe('frozen'); + }); + + test('ignores notification for frozen tokens', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + await markAllJobsAsDone(db); + + // Mark as frozen + await processor.processBlock( + new TestBlockBuilder({ block_height: 90 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1)]), + 'update-mode': bufferCV(Buffer.from('frozen')), + }), + }) + ) + ) + .build() + ) + .build() + ); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1)]), + }), + }) + ) + ) + .build() + ) + .build() + ); + + const jobs2 = await db.getPendingJobBatch({ limit: 10 }); + expect(jobs2.length).toBe(0); // No tokens queued. 
+ const notif = await getLatestTokenNotification(db, 1); + expect(notif).not.toBeUndefined(); + expect(notif?.block_height).toBe(90); + expect(notif?.update_mode).toBe('frozen'); // Keeps the old frozen notif + }); + + test('second token notification replaces previous', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 90 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1)]), + 'update-mode': bufferCV(Buffer.from('dynamic')), + ttl: uintCV(3600), + }), + }) + ) + ) + .build() + ) + .build() + ); + await markAllJobsAsDone(db); + const notif1 = await getLatestTokenNotification(db, 1); + expect(notif1).not.toBeUndefined(); + expect(notif1?.block_height).toBe(90); + expect(notif1?.update_mode).toBe('dynamic'); + expect(notif1?.ttl).toBe('3600'); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1)]), + }), + }) + ) + ) + .build() + ) + .build() + ); + + const notif2 = await getLatestTokenNotification(db, 1); + expect(notif2).not.toBeUndefined(); + expect(notif2?.block_height).toBe(100); + expect(notif2?.update_mode).toBe('standard'); + expect(notif2?.ttl).toBeNull(); + }); + + test('contract notification replaces token notification', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 90 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([uintCV(1)]), + }), + }) + ) + ) + .build() + ) + .build() + ); + await markAllJobsAsDone(db); + const notif1 = await getLatestTokenNotification(db, 1); + expect(notif1).not.toBeUndefined(); + expect(notif1?.block_height).toBe(90); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + }), + }) + ) + ) + .build() + ) + .build() + ); + + const notif2 = await getLatestTokenNotification(db, 1); + 
expect(notif2).not.toBeUndefined(); + expect(notif2?.block_height).toBe(100); + }); + + // test('rolls back notification', async () => { + // const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + // const contractId = `${address}.friedger-pool-nft`; + // await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); + + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 101 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // await markAllJobsAsDone(db); + // await expect(getLatestTokenNotification(db, 1)).resolves.not.toBeUndefined(); + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .rollback() + // .block({ height: 101 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // await expect(getLatestTokenNotification(db, 1)).resolves.toBeUndefined(); + // }); + + // test('second notification rollback restores pointer to the first notification', async () => { + // const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + // const contractId = `${address}.friedger-pool-nft`; + // await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 3n); + + // // Write 2 notifications, test rollback changes ref to old notification. 
+ // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 100 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // 'token-ids': listCV([uintCV(1)]), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 101 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // 'token-ids': listCV([uintCV(1)]), + // 'update-mode': bufferCV(Buffer.from('frozen')), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // await markAllJobsAsDone(db); + // const notif2 = await getLatestTokenNotification(db, 1); + // expect(notif2).not.toBeUndefined(); + // expect(notif2?.block_height).toBe(101); + // expect(notif2?.update_mode).toBe('frozen'); + + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .rollback() + // .block({ height: 101 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // 'token-ids': listCV([uintCV(1)]), + // 'update-mode': bufferCV(Buffer.from('frozen')), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // const notif1 = await getLatestTokenNotification(db, 1); + // expect(notif1).not.toBeUndefined(); + // expect(notif1?.block_height).toBe(100); + // expect(notif1?.update_mode).toBe('standard'); + // }); + + test('ignores other contract log events', async () => { + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }) + .addContractEvent( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + cvToHex(stringUtf8CV('test')) + ) + .build() + ) + .build() + ); + await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); + }); + + test('ignores notification from incorrect sender', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + // Incorrect sender + sender: 
'SP29BPZ6BD5D8509Y9VP70J0V7VKKDDFCRPHA0T6A', + }) + .addContractEvent( + 'SP29BPZ6BD5D8509Y9VP70J0V7VKKDDFCRPHA0T6A.another-contract', + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + }), + }) + ) + ) + .build() + ) + .build() + ); + + await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); + }); +}); diff --git a/tests/stacks-core/sft-events.test.ts b/tests/stacks-core/sft-events.test.ts new file mode 100644 index 0000000..209b7ad --- /dev/null +++ b/tests/stacks-core/sft-events.test.ts @@ -0,0 +1,74 @@ +import { cvToHex, tupleCV, bufferCV, uintCV } from '@stacks/transactions'; +import { DbSipNumber, DbTokenType } from '../../src/pg/types'; +import { cycleMigrations } from '@hirosystems/api-toolkit'; +import { ENV } from '../../src/env'; +import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; +import { + insertAndEnqueueTestContract, + TestTransactionBuilder, + TestBlockBuilder, + markAllJobsAsDone, +} from '../helpers'; +import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; + +describe('SFT events', () => { + let db: PgStore; + let processor: StacksCoreBlockProcessor; + + beforeEach(async () => { + ENV.PGDATABASE = 'postgres'; + db = await PgStore.connect({ skipMigrations: true }); + await cycleMigrations(MIGRATIONS_DIR); + processor = new StacksCoreBlockProcessor({ db: db.core }); + }); + + afterEach(async () => { + await db.close(); + }); + + test('SFT mint enqueues minted token for valid contract', async () => { + const address = 'SP3K8BC0PPEVCV7NZ6QSRWPQ2JE9E5B6N3PA0KBR9'; + const contractId = `${address}.key-alex-autoalex-v1`; + await insertAndEnqueueTestContract(db, contractId, DbSipNumber.sip013); + await markAllJobsAsDone(db); + + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ tx_id: '0x01', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + type: bufferCV(Buffer.from('sft_mint')), + recipient: bufferCV(Buffer.from(address)), + 'token-id': uintCV(3), + amount: uintCV(1000), + }) + ) + ) + // Try a duplicate of the same token but different amount + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + type: bufferCV(Buffer.from('sft_mint')), + recipient: bufferCV(Buffer.from(address)), + 'token-id': uintCV(3), + amount: uintCV(200), + }) + ) + ) + .build() + ) + .build() + ); + + const token = await db.getToken({ id: 1 }); + expect(token?.type).toBe(DbTokenType.sft); + expect(token?.token_number).toBe('3'); + const jobs = await db.getPendingJobBatch({ limit: 1 }); + expect(jobs).toHaveLength(1); + expect(jobs[0].token_id).toBe(1); + }); +}); diff --git a/tests/stacks-core/smart-contracts.test.ts b/tests/stacks-core/smart-contracts.test.ts new file mode 100644 index 0000000..fe2cb7e --- /dev/null +++ b/tests/stacks-core/smart-contracts.test.ts @@ -0,0 +1,93 @@ +import { DbSipNumber, DbSmartContract } from '../../src/pg/types'; +import { cycleMigrations } from '@hirosystems/api-toolkit'; +import { ENV } from '../../src/env'; +import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; +import { SIP_009_ABI, TestTransactionBuilder, TestBlockBuilder } from '../helpers'; +import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; + +describe('contract deployments', () => { + let db: PgStore; + let processor: 
StacksCoreBlockProcessor; + + beforeEach(async () => { + ENV.PGDATABASE = 'postgres'; + db = await PgStore.connect({ skipMigrations: true }); + await cycleMigrations(MIGRATIONS_DIR); + processor = new StacksCoreBlockProcessor({ db: db.core }); + }); + + afterEach(async () => { + await db.close(); + }); + + test('enqueues valid token contract', async () => { + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }) + .setSmartContractPayload( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + SIP_009_ABI + ) + .build() + ) + .build() + ); + const dbContract = await db.getSmartContract({ id: 1 }); + expect(dbContract?.sip).toBe(DbSipNumber.sip009); + expect(dbContract?.principal).toBe( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft' + ); + await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(1); + }); + + test('ignores token contract from a failed transaction', async () => { + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + status: 'abort_by_post_condition', // Failed + }) + .setSmartContractPayload( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + SIP_009_ABI + ) + .build() + ) + .build() + ); + await expect(db.getSmartContract({ id: 1 })).resolves.toBeUndefined(); + await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); + }); + + test('ignores non-token contract', async () => { + await processor.processBlock( + new TestBlockBuilder({ block_height: 100 }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }) + .setSmartContractPayload( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + { + maps: [], + functions: [], + variables: [], + fungible_tokens: [], + non_fungible_tokens: [], + } + ) + .build() + ) + .build() + ); + await expect(db.getSmartContract({ id: 1 })).resolves.toBeUndefined(); + await expect(db.getPendingJobBatch({ limit: 1 })).resolves.toHaveLength(0); + }); +}); diff --git a/tests/stacks-core/snp-event-stream.test.ts b/tests/stacks-core/snp-event-stream.test.ts new file mode 100644 index 0000000..3cccf7a --- /dev/null +++ b/tests/stacks-core/snp-event-stream.test.ts @@ -0,0 +1,187 @@ +import { cvToHex, tupleCV, bufferCV, uintCV, stringUtf8CV } from '@stacks/transactions'; +import { DbSipNumber } from '../../src/pg/types'; +import { cycleMigrations } from '@hirosystems/api-toolkit'; +import { ENV } from '../../src/env'; +import { PgStore, MIGRATIONS_DIR } from '../../src/pg/pg-store'; +import { + insertAndEnqueueTestContractWithTokens, + markAllJobsAsDone, + TestTransactionBuilder, + TestBlockBuilder, +} from '../helpers'; +import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; + +describe('SNP event stream', () => { + let db: PgStore; + let processor: StacksCoreBlockProcessor; + + beforeEach(async () => { + ENV.PGDATABASE = 'postgres'; + db = await PgStore.connect({ skipMigrations: true }); + await cycleMigrations(MIGRATIONS_DIR); + processor = new StacksCoreBlockProcessor({ db: db.core }); + }); + + afterEach(async () => { + await db.close(); + }); + + // describe('chain tip', () => { + // test('updates chain tip on chainhook event', async () => { + // await 
db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 100 }) + // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) + // .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', { + // maps: [], + // functions: [], + // variables: [], + // fungible_tokens: [], + // non_fungible_tokens: [], + // }) + // .build() + // ); + // await expect(db.getChainTipBlockHeight()).resolves.toBe(100); + + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 101 }) + // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + // topic: 'print', + // raw_value: cvToHex(stringUtf8CV('test')), + // }, + // }) + // .build() + // ); + // await expect(db.getChainTipBlockHeight()).resolves.toBe(101); + // }); + + // test('enqueues dynamic tokens for refresh with standard interval', async () => { + // const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + // const contractId = `${address}.friedger-pool-nft`; + // ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 86400; + // await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + // // Mark as dynamic + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 90 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // 'update-mode': bufferCV(Buffer.from('dynamic')), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // // Set updated_at for testing. 
+ // await db.sql` + // UPDATE tokens + // SET updated_at = NOW() - INTERVAL '2 days' + // WHERE id = 1 + // `; + // await markAllJobsAsDone(db); + + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 95 }) + // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + // topic: 'print', + // raw_value: cvToHex(stringUtf8CV('test')), + // }, + // }) + // .build() + // ); + + // const job = await db.getJob({ id: 2 }); + // expect(job?.status).toBe('pending'); + // }); + + // test('enqueues dynamic tokens for refresh with ttl', async () => { + // const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + // const contractId = `${address}.friedger-pool-nft`; + // ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 99999; + // await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + // // Mark as dynamic + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 90 }) + // .transaction({ hash: '0x01', sender: address }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: contractId, + // topic: 'print', + // raw_value: cvToHex( + // tupleCV({ + // notification: bufferCV(Buffer.from('token-metadata-update')), + // payload: tupleCV({ + // 'token-class': bufferCV(Buffer.from('nft')), + // 'contract-id': bufferCV(Buffer.from(contractId)), + // 'update-mode': bufferCV(Buffer.from('dynamic')), + // ttl: uintCV(3600), + // }), + // }) + // ), + // }, + // }) + // .build() + // ); + // // Set updated_at for testing + // await db.sql` + // UPDATE tokens + // SET updated_at = NOW() - INTERVAL '2 hours' + // WHERE id = 1 + // `; + // await markAllJobsAsDone(db); + + // await db.chainhook.processPayload( + // new TestChainhookPayloadBuilder() + // .apply() + // .block({ height: 95 }) + // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) + // .event({ + // type: 'SmartContractEvent', + // position: { index: 0 }, + // data: { + // contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + // topic: 'print', + // raw_value: cvToHex(stringUtf8CV('test')), + // }, + // }) + // .build() + // ); + + // const job = await db.getJob({ id: 2 }); + // expect(job?.status).toBe('pending'); + // }); + // }); +}); diff --git a/tests/token-queue/sip-validation.test.ts b/tests/token-queue/sip-validation.test.ts index a05a52e..263c4dc 100644 --- a/tests/token-queue/sip-validation.test.ts +++ b/tests/token-queue/sip-validation.test.ts @@ -10,6 +10,7 @@ import { } from '@stacks/transactions'; import { getContractLogMetadataUpdateNotification } from '../../src/token-processor/util/sip-validation'; import { StacksCoreContractEvent } from '../../src/stacks-core/schemas'; +import { DecodedStacksTransaction } from '../../src/stacks-core/stacks-core-block-processor'; describe('SIP Validation', () => { test('SIP-019 FT notification', () => { @@ -24,6 +25,17 @@ describe('SIP Validation', () => { 'contract-id': principalCV(contractId), }), }); + const tx = { + decoded: { + auth: { + origin_condition: { + signer: { + address: address, + }, + }, + }, + }, + } as DecodedStacksTransaction; const event1: StacksCoreContractEvent = { txid: '0x123', event_index: 0, @@ -34,7 
+46,7 @@ describe('SIP Validation', () => { raw_value: cvToHex(tuple1), }, }; - const notification1 = getContractLogMetadataUpdateNotification(address, event1); + const notification1 = getContractLogMetadataUpdateNotification(tx, event1); expect(notification1).not.toBeUndefined(); expect(notification1?.contract_id).toBe(contractId); expect(notification1?.token_class).toBe('ft'); @@ -55,6 +67,17 @@ describe('SIP Validation', () => { }); // Invalid notification senders + const tx2 = { + decoded: { + auth: { + origin_condition: { + signer: { + address: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', + }, + }, + }, + }, + } as DecodedStacksTransaction; const event2: StacksCoreContractEvent = { txid: '0x123', event_index: 0, @@ -65,13 +88,21 @@ describe('SIP Validation', () => { raw_value: cvToHex(tuple1), }, }; - const notification2 = getContractLogMetadataUpdateNotification( - 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', - event2 - ); + const notification2 = getContractLogMetadataUpdateNotification(tx2, event2); expect(notification2).toBeUndefined(); // Sent by the contract owner + const tx3 = { + decoded: { + auth: { + origin_condition: { + signer: { + address, + }, + }, + }, + }, + } as DecodedStacksTransaction; const event3: StacksCoreContractEvent = { txid: '0x123', event_index: 0, @@ -82,13 +113,24 @@ describe('SIP Validation', () => { raw_value: cvToHex(tuple1), }, }; - const notification3 = getContractLogMetadataUpdateNotification(address, event3); + const notification3 = getContractLogMetadataUpdateNotification(tx3, event3); expect(notification3).not.toBeUndefined(); expect(notification3?.contract_id).toBe(contractId); expect(notification3?.token_class).toBe('ft'); expect(notification3?.token_ids).toBeUndefined(); // Emitted by the correct contract + const tx4 = { + decoded: { + auth: { + origin_condition: { + signer: { + address: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', + }, + }, + }, + }, + } as DecodedStacksTransaction; const event4: StacksCoreContractEvent = { txid: '0x123', event_index: 0, @@ -99,10 +141,7 @@ describe('SIP Validation', () => { raw_value: cvToHex(tuple1), }, }; - const notification4 = getContractLogMetadataUpdateNotification( - 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', - event4 - ); + const notification4 = getContractLogMetadataUpdateNotification(tx4, event4); expect(notification4).not.toBeUndefined(); expect(notification4?.contract_id).toBe(contractId); expect(notification4?.token_class).toBe('ft'); @@ -114,6 +153,17 @@ describe('SIP Validation', () => { const contractId = `${address}.fine-art-exhibition-v1`; // Taken from tx 0xfc81a8c30025d7135d4313ea746831de1c7794478d4e0d23ef76970ee071cf20 + const tx1 = { + decoded: { + auth: { + origin_condition: { + signer: { + address, + }, + }, + }, + }, + } as DecodedStacksTransaction; const event1: StacksCoreContractEvent = { txid: '0x123', event_index: 0, @@ -125,7 +175,7 @@ describe('SIP Validation', () => { '0x0c000000020c6e6f74696669636174696f6e0d00000015746f6b656e2d6d657461646174612d757064617465077061796c6f61640c000000020b636f6e74726163742d69640616faa051721e9a12470ad03f6316a918fb4819c6ba1666696e652d6172742d65786869626974696f6e2d76310b746f6b656e2d636c6173730d000000036e6674', }, }; - const notification1 = getContractLogMetadataUpdateNotification(address, event1); + const notification1 = getContractLogMetadataUpdateNotification(tx1, event1); expect(notification1).not.toBeUndefined(); expect(notification1?.contract_id).toBe(contractId); expect(notification1?.token_class).toBe('nft'); @@ -150,7 +200,7 @@ describe('SIP 
Validation', () => { raw_value: cvToHex(tuple2), }, }; - const notification2 = getContractLogMetadataUpdateNotification(address, event2); + const notification2 = getContractLogMetadataUpdateNotification(tx1, event2); expect(notification2).not.toBeUndefined(); expect(notification2?.contract_id).toBe(contractId); expect(notification2?.token_class).toBe('nft'); @@ -182,7 +232,18 @@ describe('SIP Validation', () => { raw_value: cvToHex(tuple), }, }; - const notification = getContractLogMetadataUpdateNotification(address, event); + const tx = { + decoded: { + auth: { + origin_condition: { + signer: { + address, + }, + }, + }, + }, + } as DecodedStacksTransaction; + const notification = getContractLogMetadataUpdateNotification(tx, event); expect(notification).not.toBeUndefined(); expect(notification?.contract_id).toBe(contractId); expect(notification?.token_class).toBe('nft'); From 99d06e0f1c14e9d23188d242299c69f8cc303903 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 11:02:13 -0600 Subject: [PATCH 13/19] fix queue tests --- .github/workflows/ci.yml | 2 +- .vscode/launch.json | 4 +- src/pg/stacks-core-pg-store.ts | 2 +- tests/admin/admin-rpc.test.ts | 2 +- tests/helpers.ts | 7 +- tests/setup.ts | 4 +- tests/token-queue/sip-validation.test.ts | 210 +++++++---------------- 7 files changed, 79 insertions(+), 152 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1719a53..3c7167e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,7 +53,7 @@ jobs: strategy: fail-fast: false matrix: - suite: [admin, api, chainhook, token-queue] + suite: [admin, api, stacks-core, token-queue] runs-on: ubuntu-latest env: API_HOST: 127.0.0.1 diff --git a/.vscode/launch.json b/.vscode/launch.json index 1c6ad64..3bd3747 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -110,13 +110,13 @@ { "type": "node", "request": "launch", - "name": "Jest: Chainhook", + "name": "Jest: Stacks Core", "program": "${workspaceFolder}/node_modules/jest/bin/jest", "args": [ "--testTimeout=3600000", "--runInBand", "--no-cache", - "${workspaceFolder}/tests/chainhook/", + "${workspaceFolder}/tests/stacks-core/", ], "outputCapture": "std", "console": "integratedTerminal", diff --git a/src/pg/stacks-core-pg-store.ts b/src/pg/stacks-core-pg-store.ts index b738726..2f80664 100644 --- a/src/pg/stacks-core-pg-store.ts +++ b/src/pg/stacks-core-pg-store.ts @@ -50,7 +50,7 @@ export class StacksCorePgStore extends BasePgStoreModule { index_block_hash: block.index_block_hash, parent_index_block_hash: block.parent_index_block_hash, }; - await sql`INSERT INTO blocks ${sql(values)}`; + await sql`INSERT INTO blocks ${sql(values)} ON CONFLICT (index_block_hash) DO NOTHING`; } async getChainTip(sql: PgSqlClient): Promise { diff --git a/tests/admin/admin-rpc.test.ts b/tests/admin/admin-rpc.test.ts index 3f36e1b..398d217 100644 --- a/tests/admin/admin-rpc.test.ts +++ b/tests/admin/admin-rpc.test.ts @@ -21,7 +21,7 @@ describe('Admin RPC', () => { beforeEach(async () => { ENV.PGDATABASE = 'postgres'; db = await PgStore.connect({ skipMigrations: true }); - jobQueue = new JobQueue({ db }); + jobQueue = new JobQueue({ db, network: 'mainnet' }); fastify = await buildAdminRpcServer({ db, jobQueue }); await cycleMigrations(MIGRATIONS_DIR); }); diff --git a/tests/helpers.ts b/tests/helpers.ts index 8c188ff..09be7be 100644 --- a/tests/helpers.ts +++ b/tests/helpers.ts @@ -1290,10 +1290,12 @@ export async function insertAndEnqueueTestContract( tx_id?: string ): Promise { return await 
db.sqlWriteTransaction(async sql => { - const block = { + const block: DecodedStacksBlock = { block_height: 1, index_block_hash: '0x000001', - } as DecodedStacksBlock; + parent_index_block_hash: '0x000000', + transactions: [], + }; const deploy: SmartContractDeployment = { principal, sip, @@ -1301,6 +1303,7 @@ export async function insertAndEnqueueTestContract( tx_id: tx_id ?? '0x123456', tx_index: 0, }; + await db.core.insertBlock(sql, block); await db.core.applyContractDeployment(sql, deploy, block); const smart_contract = (await db.getSmartContract({ principal })) as DbSmartContract; diff --git a/tests/setup.ts b/tests/setup.ts index 25def34..da280e2 100644 --- a/tests/setup.ts +++ b/tests/setup.ts @@ -1,5 +1,7 @@ // ts-unused-exports:disable-next-line export default (): void => { process.env.PGDATABASE = 'postgres'; - process.env.CHAINHOOK_NODE_AUTH_TOKEN = 'test'; + process.env.NETWORK = 'mainnet'; + process.env.SNP_REDIS_URL = 'redis://localhost:6379'; + process.env.SNP_REDIS_STREAM_KEY_PREFIX = 'test'; }; diff --git a/tests/token-queue/sip-validation.test.ts b/tests/token-queue/sip-validation.test.ts index 263c4dc..3070a28 100644 --- a/tests/token-queue/sip-validation.test.ts +++ b/tests/token-queue/sip-validation.test.ts @@ -10,7 +10,7 @@ import { } from '@stacks/transactions'; import { getContractLogMetadataUpdateNotification } from '../../src/token-processor/util/sip-validation'; import { StacksCoreContractEvent } from '../../src/stacks-core/schemas'; -import { DecodedStacksTransaction } from '../../src/stacks-core/stacks-core-block-processor'; +import { TestTransactionBuilder } from '../helpers'; describe('SIP Validation', () => { test('SIP-019 FT notification', () => { @@ -25,28 +25,13 @@ describe('SIP Validation', () => { 'contract-id': principalCV(contractId), }), }); - const tx = { - decoded: { - auth: { - origin_condition: { - signer: { - address: address, - }, - }, - }, - }, - } as DecodedStacksTransaction; - const event1: StacksCoreContractEvent = { - txid: '0x123', - event_index: 0, - type: 'contract_event', - contract_event: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex(tuple1), - }, - }; - const notification1 = getContractLogMetadataUpdateNotification(tx, event1); + const tx = new TestTransactionBuilder({ tx_id: '0x123', sender: address }) + .addContractEvent(contractId, cvToHex(tuple1)) + .build(); + const notification1 = getContractLogMetadataUpdateNotification( + tx, + tx.events[0] as StacksCoreContractEvent + ); expect(notification1).not.toBeUndefined(); expect(notification1?.contract_id).toBe(contractId); expect(notification1?.token_class).toBe('ft'); @@ -67,81 +52,42 @@ describe('SIP Validation', () => { }); // Invalid notification senders - const tx2 = { - decoded: { - auth: { - origin_condition: { - signer: { - address: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', - }, - }, - }, - }, - } as DecodedStacksTransaction; - const event2: StacksCoreContractEvent = { - txid: '0x123', - event_index: 0, - type: 'contract_event', - contract_event: { - contract_identifier: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS.hic-1', - topic: 'print', - raw_value: cvToHex(tuple1), - }, - }; - const notification2 = getContractLogMetadataUpdateNotification(tx2, event2); + const tx2 = new TestTransactionBuilder({ + tx_id: '0x123', + sender: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', + }) + .addContractEvent('SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS.hic-1', cvToHex(tuple1)) + .build(); + const notification2 = getContractLogMetadataUpdateNotification( + tx2, + 
tx2.events[0] as StacksCoreContractEvent + ); expect(notification2).toBeUndefined(); // Sent by the contract owner - const tx3 = { - decoded: { - auth: { - origin_condition: { - signer: { - address, - }, - }, - }, - }, - } as DecodedStacksTransaction; - const event3: StacksCoreContractEvent = { - txid: '0x123', - event_index: 0, - type: 'contract_event', - contract_event: { - contract_identifier: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS.hic-1', - topic: 'print', - raw_value: cvToHex(tuple1), - }, - }; - const notification3 = getContractLogMetadataUpdateNotification(tx3, event3); + const tx3 = new TestTransactionBuilder({ tx_id: '0x123', sender: address }) + .addContractEvent('SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS.hic-1', cvToHex(tuple1)) + .build(); + const notification3 = getContractLogMetadataUpdateNotification( + tx3, + tx3.events[0] as StacksCoreContractEvent + ); expect(notification3).not.toBeUndefined(); expect(notification3?.contract_id).toBe(contractId); expect(notification3?.token_class).toBe('ft'); expect(notification3?.token_ids).toBeUndefined(); // Emitted by the correct contract - const tx4 = { - decoded: { - auth: { - origin_condition: { - signer: { - address: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', - }, - }, - }, - }, - } as DecodedStacksTransaction; - const event4: StacksCoreContractEvent = { - txid: '0x123', - event_index: 0, - type: 'contract_event', - contract_event: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex(tuple1), - }, - }; - const notification4 = getContractLogMetadataUpdateNotification(tx4, event4); + const tx4 = new TestTransactionBuilder({ + tx_id: '0x123', + sender: 'SPCAQ4RCYJ30BYKJ9Z6BRGS3169PWZNN89NH4MCS', + }) + .addContractEvent(contractId, cvToHex(tuple1)) + .build(); + const notification4 = getContractLogMetadataUpdateNotification( + tx4, + tx4.events[0] as StacksCoreContractEvent + ); expect(notification4).not.toBeUndefined(); expect(notification4?.contract_id).toBe(contractId); expect(notification4?.token_class).toBe('ft'); @@ -153,29 +99,16 @@ describe('SIP Validation', () => { const contractId = `${address}.fine-art-exhibition-v1`; // Taken from tx 0xfc81a8c30025d7135d4313ea746831de1c7794478d4e0d23ef76970ee071cf20 - const tx1 = { - decoded: { - auth: { - origin_condition: { - signer: { - address, - }, - }, - }, - }, - } as DecodedStacksTransaction; - const event1: StacksCoreContractEvent = { - txid: '0x123', - event_index: 0, - type: 'contract_event', - contract_event: { - contract_identifier: contractId, - topic: 'print', - raw_value: - '0x0c000000020c6e6f74696669636174696f6e0d00000015746f6b656e2d6d657461646174612d757064617465077061796c6f61640c000000020b636f6e74726163742d69640616faa051721e9a12470ad03f6316a918fb4819c6ba1666696e652d6172742d65786869626974696f6e2d76310b746f6b656e2d636c6173730d000000036e6674', - }, - }; - const notification1 = getContractLogMetadataUpdateNotification(tx1, event1); + const tx1 = new TestTransactionBuilder({ tx_id: '0x123', sender: address }) + .addContractEvent( + contractId, + '0x0c000000020c6e6f74696669636174696f6e0d00000015746f6b656e2d6d657461646174612d757064617465077061796c6f61640c000000020b636f6e74726163742d69640616faa051721e9a12470ad03f6316a918fb4819c6ba1666696e652d6172742d65786869626974696f6e2d76310b746f6b656e2d636c6173730d000000036e6674' + ) + .build(); + const notification1 = getContractLogMetadataUpdateNotification( + tx1, + tx1.events[0] as StacksCoreContractEvent + ); expect(notification1).not.toBeUndefined(); expect(notification1?.contract_id).toBe(contractId); 
expect(notification1?.token_class).toBe('nft'); @@ -212,38 +145,27 @@ describe('SIP Validation', () => { const contractId = `${address}.fine-art-exhibition-v1`; // Add token IDs - const tuple = tupleCV({ - notification: bufferCV(Buffer.from('token-metadata-update')), - payload: tupleCV({ - 'token-class': bufferCV(Buffer.from('nft')), - 'contract-id': bufferCV(Buffer.from(contractId)), - 'token-ids': listCV([intCV(1), intCV(2)]), - 'update-mode': stringAsciiCV('dynamic'), - ttl: uintCV(9999), - }), - }); - const event: StacksCoreContractEvent = { - txid: '0x123', - event_index: 0, - type: 'contract_event', - contract_event: { - contract_identifier: contractId, - topic: 'print', - raw_value: cvToHex(tuple), - }, - }; - const tx = { - decoded: { - auth: { - origin_condition: { - signer: { - address, - }, - }, - }, - }, - } as DecodedStacksTransaction; - const notification = getContractLogMetadataUpdateNotification(tx, event); + const tx = new TestTransactionBuilder({ tx_id: '0x123', sender: address }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'token-ids': listCV([intCV(1), intCV(2)]), + 'update-mode': stringAsciiCV('dynamic'), + ttl: uintCV(9999), + }), + }) + ) + ) + .build(); + const notification = getContractLogMetadataUpdateNotification( + tx, + tx.events[0] as StacksCoreContractEvent + ); expect(notification).not.toBeUndefined(); expect(notification?.contract_id).toBe(contractId); expect(notification?.token_class).toBe('nft'); From 634658bb70fc4e4fe6f5febba4a65bbc3513ed99 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 11:13:25 -0600 Subject: [PATCH 14/19] admin tests pass --- tests/admin/admin-rpc.test.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/admin/admin-rpc.test.ts b/tests/admin/admin-rpc.test.ts index 398d217..2948096 100644 --- a/tests/admin/admin-rpc.test.ts +++ b/tests/admin/admin-rpc.test.ts @@ -401,6 +401,12 @@ describe('Admin RPC', () => { const principal = 'SP2SYHR84SDJJDK8M09HFS4KBFXPPCX9H7RZ9YVTS.hello-world'; await fastify.listen({ host: ENV.API_HOST, port: ENV.API_PORT }); + await db.core.insertBlock(db.sql, { + block_height: 5, + index_block_hash: '0x242424', + parent_index_block_hash: '0x000000', + transactions: [], + }); nock('https://api.mainnet.hiro.so') .get(`/extended/v1/contract/${principal}`) .reply( From b8efe472440933387f901acc188b2f0a26902f96 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 11:19:25 -0600 Subject: [PATCH 15/19] api tests pass --- tests/api/cache.test.ts | 21 ++++++++++++--------- tests/api/status.test.ts | 18 ++++-------------- 2 files changed, 16 insertions(+), 23 deletions(-) diff --git a/tests/api/cache.test.ts b/tests/api/cache.test.ts index 0b526a9..279ee13 100644 --- a/tests/api/cache.test.ts +++ b/tests/api/cache.test.ts @@ -25,13 +25,20 @@ describe('ETag cache', () => { }); test('chain tip cache control', async () => { + await db.core.insertBlock(db.sql, { + block_height: 99, + index_block_hash: '0x99', + parent_index_block_hash: '0x000000', + transactions: [], + }); const response = await fastify.inject({ method: 'GET', url: '/metadata/v1/' }); const json = response.json(); expect(json).toStrictEqual({ server_version: 'token-metadata-api v0.0.1 (test:123456)', status: 'ready', chain_tip: { - block_height: 1, + block_height: 99, + index_block_hash: '0x99', }, }); 
expect(response.headers.etag).not.toBeUndefined(); @@ -45,14 +52,10 @@ describe('ETag cache', () => { expect(cached.statusCode).toBe(304); await db.core.insertBlock(db.sql, { - blockHeight: 100, - indexBlockHash: '0x123', - transactions: new Map(), - contracts: [], - notifications: [], - sftMints: [], - nftMints: [], - ftSupplyDelta: new Map(), + block_height: 100, + index_block_hash: '0x100', + parent_index_block_hash: '0x99', + transactions: [], }); const cached2 = await fastify.inject({ method: 'GET', diff --git a/tests/api/status.test.ts b/tests/api/status.test.ts index d92cc92..320599c 100644 --- a/tests/api/status.test.ts +++ b/tests/api/status.test.ts @@ -38,21 +38,11 @@ describe('Status routes', () => { }); test('returns status when a block has been processed', async () => { - await insertAndEnqueueTestContractWithTokens( - db, - 'SP2SYHR84SDJJDK8M09HFS4KBFXPPCX9H7RZ9YVTS.hello-world', - DbSipNumber.sip009, - 4n - ); await db.core.insertBlock(db.sql, { - blockHeight: 1, - indexBlockHash: '0x123', - transactions: new Map(), - contracts: [], - notifications: [], - sftMints: [], - nftMints: [], - ftSupplyDelta: new Map(), + block_height: 1, + index_block_hash: '0x123', + parent_index_block_hash: '0x000000', + transactions: [], }); const response = await fastify.inject({ method: 'GET', url: '/metadata/v1/' }); const json = response.json(); From 4144556efe91d41565864b7ed48735d02d0f4ac1 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 11:22:34 -0600 Subject: [PATCH 16/19] core tests pass --- tests/stacks-core/nft-events.test.ts | 5 +++-- tests/stacks-core/smart-contracts.test.ts | 5 +---- tests/stacks-core/snp-event-stream.test.ts | 2 ++ 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/stacks-core/nft-events.test.ts b/tests/stacks-core/nft-events.test.ts index 83bf134..7aec169 100644 --- a/tests/stacks-core/nft-events.test.ts +++ b/tests/stacks-core/nft-events.test.ts @@ -52,12 +52,13 @@ describe('NFT events', () => { test('NFT contract can start with zero tokens', async () => { const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - const contractId = `${address}.friedger-pool-nft`; + const contractName = 'friedger-pool-nft'; + const contractId = `${address}.${contractName}`; await processor.processBlock( new TestBlockBuilder({ block_height: 90 }) .addTransaction( new TestTransactionBuilder({ tx_id: '0x01', sender: address }) - .setSmartContractPayload(contractId, SIP_009_ABI) + .setSmartContractPayload(contractName, SIP_009_ABI) .build() ) .build() diff --git a/tests/stacks-core/smart-contracts.test.ts b/tests/stacks-core/smart-contracts.test.ts index fe2cb7e..5dcae08 100644 --- a/tests/stacks-core/smart-contracts.test.ts +++ b/tests/stacks-core/smart-contracts.test.ts @@ -28,10 +28,7 @@ describe('contract deployments', () => { tx_id: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', }) - .setSmartContractPayload( - 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - SIP_009_ABI - ) + .setSmartContractPayload('friedger-pool-nft', SIP_009_ABI) .build() ) .build() diff --git a/tests/stacks-core/snp-event-stream.test.ts b/tests/stacks-core/snp-event-stream.test.ts index 3cccf7a..0eab01f 100644 --- a/tests/stacks-core/snp-event-stream.test.ts +++ b/tests/stacks-core/snp-event-stream.test.ts @@ -26,6 +26,8 @@ describe('SNP event stream', () => { await db.close(); }); + test('stub', () => {}); + // describe('chain tip', () => { // test('updates chain tip on chainhook event', async () => { // await 
db.chainhook.processPayload( From a5b7c79d56ec2f5d220ede2fd3d28604a6cc1dab Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 13:33:31 -0600 Subject: [PATCH 17/19] stacks core tests pass --- src/stacks-core/snp-event-stream.ts | 7 +- tests/stacks-core/snp-event-stream.test.ts | 320 +++++++++++---------- 2 files changed, 172 insertions(+), 155 deletions(-) diff --git a/src/stacks-core/snp-event-stream.ts b/src/stacks-core/snp-event-stream.ts index 4e557a7..19ff77c 100644 --- a/src/stacks-core/snp-event-stream.ts +++ b/src/stacks-core/snp-event-stream.ts @@ -55,8 +55,11 @@ export class SnpEventStreamHandler { } async handleMsg(messageId: string, _timestamp: string, path: string, body: any) { - this.logger.debug(`Received SNP stream event ${path}, msgId: ${messageId}`); - if (path !== '/new_block') { + this.logger.info(`Received SNP stream event ${path}, msgId: ${messageId}`); + if (path === '/new_burn_block') { + this.logger.info(`Ignoring new burn block event, msgId: ${messageId}`); + return; + } else if (path !== '/new_block') { this.logger.warn(`Unsupported SNP stream event ${path}, skipping...`); return; } diff --git a/tests/stacks-core/snp-event-stream.test.ts b/tests/stacks-core/snp-event-stream.test.ts index 0eab01f..001fdcb 100644 --- a/tests/stacks-core/snp-event-stream.test.ts +++ b/tests/stacks-core/snp-event-stream.test.ts @@ -26,164 +26,178 @@ describe('SNP event stream', () => { await db.close(); }); - test('stub', () => {}); + describe('chain tip', () => { + test('updates chain tip on chainhook event', async () => { + await processor.processBlock( + new TestBlockBuilder({ + block_height: 100, + index_block_hash: '0x000001', + parent_index_block_hash: '0x000000', + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }).build() + ) + .build() + ); + await expect(db.core.getChainTip(db.sql)).resolves.toStrictEqual({ + index_block_hash: '0x000001', + block_height: 100, + }); - // describe('chain tip', () => { - // test('updates chain tip on chainhook event', async () => { - // await db.chainhook.processPayload( - // new TestChainhookPayloadBuilder() - // .apply() - // .block({ height: 100 }) - // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - // .contractDeploy('SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', { - // maps: [], - // functions: [], - // variables: [], - // fungible_tokens: [], - // non_fungible_tokens: [], - // }) - // .build() - // ); - // await expect(db.getChainTipBlockHeight()).resolves.toBe(100); + await processor.processBlock( + new TestBlockBuilder({ + block_height: 101, + index_block_hash: '0x000002', + parent_index_block_hash: '0x000001', + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }).build() + ) + .build() + ); + await expect(db.core.getChainTip(db.sql)).resolves.toStrictEqual({ + index_block_hash: '0x000002', + block_height: 101, + }); + }); - // await db.chainhook.processPayload( - // new TestChainhookPayloadBuilder() - // .apply() - // .block({ height: 101 }) - // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - // .event({ - // type: 'SmartContractEvent', - // position: { index: 0 }, - // data: { - // contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - // topic: 'print', - // raw_value: cvToHex(stringUtf8CV('test')), - // }, - // }) - // .build() - // ); - 
// await expect(db.getChainTipBlockHeight()).resolves.toBe(101); - // }); + test('enqueues dynamic tokens for refresh with standard interval', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 86400; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + // Mark as dynamic + await processor.processBlock( + new TestBlockBuilder({ + block_height: 90, + index_block_hash: '0x000003', + parent_index_block_hash: '0x000002', + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: address, + }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'update-mode': bufferCV(Buffer.from('dynamic')), + }), + }) + ) + ) + .build() + ) + .build() + ); + // Set updated_at for testing. + await db.sql` + UPDATE tokens + SET updated_at = NOW() - INTERVAL '2 days' + WHERE id = 1 + `; + await markAllJobsAsDone(db); - // test('enqueues dynamic tokens for refresh with standard interval', async () => { - // const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - // const contractId = `${address}.friedger-pool-nft`; - // ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 86400; - // await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - // // Mark as dynamic - // await db.chainhook.processPayload( - // new TestChainhookPayloadBuilder() - // .apply() - // .block({ height: 90 }) - // .transaction({ hash: '0x01', sender: address }) - // .event({ - // type: 'SmartContractEvent', - // position: { index: 0 }, - // data: { - // contract_identifier: contractId, - // topic: 'print', - // raw_value: cvToHex( - // tupleCV({ - // notification: bufferCV(Buffer.from('token-metadata-update')), - // payload: tupleCV({ - // 'token-class': bufferCV(Buffer.from('nft')), - // 'contract-id': bufferCV(Buffer.from(contractId)), - // 'update-mode': bufferCV(Buffer.from('dynamic')), - // }), - // }) - // ), - // }, - // }) - // .build() - // ); - // // Set updated_at for testing. 
- // await db.sql` - // UPDATE tokens - // SET updated_at = NOW() - INTERVAL '2 days' - // WHERE id = 1 - // `; - // await markAllJobsAsDone(db); + await processor.processBlock( + new TestBlockBuilder({ + block_height: 95, + index_block_hash: '0x000004', + parent_index_block_hash: '0x000003', + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }) + .addContractEvent( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + cvToHex(stringUtf8CV('test')) + ) + .build() + ) + .build() + ); - // await db.chainhook.processPayload( - // new TestChainhookPayloadBuilder() - // .apply() - // .block({ height: 95 }) - // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - // .event({ - // type: 'SmartContractEvent', - // position: { index: 0 }, - // data: { - // contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - // topic: 'print', - // raw_value: cvToHex(stringUtf8CV('test')), - // }, - // }) - // .build() - // ); + const job = await db.getJob({ id: 2 }); + expect(job?.status).toBe('pending'); + }); - // const job = await db.getJob({ id: 2 }); - // expect(job?.status).toBe('pending'); - // }); + test('enqueues dynamic tokens for refresh with ttl', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const contractId = `${address}.friedger-pool-nft`; + ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 99999; + await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); + // Mark as dynamic + await processor.processBlock( + new TestBlockBuilder({ + block_height: 90, + index_block_hash: '0x000003', + parent_index_block_hash: '0x000002', + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: address, + }) + .addContractEvent( + contractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(contractId)), + 'update-mode': bufferCV(Buffer.from('dynamic')), + ttl: uintCV(3600), + }), + }) + ) + ) + .build() + ) + .build() + ); + // Set updated_at for testing + await db.sql` + UPDATE tokens + SET updated_at = NOW() - INTERVAL '2 hours' + WHERE id = 1 + `; + await markAllJobsAsDone(db); - // test('enqueues dynamic tokens for refresh with ttl', async () => { - // const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; - // const contractId = `${address}.friedger-pool-nft`; - // ENV.METADATA_DYNAMIC_TOKEN_REFRESH_INTERVAL = 99999; - // await insertAndEnqueueTestContractWithTokens(db, contractId, DbSipNumber.sip009, 1n); - // // Mark as dynamic - // await db.chainhook.processPayload( - // new TestChainhookPayloadBuilder() - // .apply() - // .block({ height: 90 }) - // .transaction({ hash: '0x01', sender: address }) - // .event({ - // type: 'SmartContractEvent', - // position: { index: 0 }, - // data: { - // contract_identifier: contractId, - // topic: 'print', - // raw_value: cvToHex( - // tupleCV({ - // notification: bufferCV(Buffer.from('token-metadata-update')), - // payload: tupleCV({ - // 'token-class': bufferCV(Buffer.from('nft')), - // 'contract-id': bufferCV(Buffer.from(contractId)), - // 'update-mode': bufferCV(Buffer.from('dynamic')), - // ttl: uintCV(3600), - // }), - // }) - // ), - // }, - // }) - // .build() - // ); - // // Set updated_at for testing - // await db.sql` - // UPDATE tokens - // SET updated_at = NOW() - INTERVAL '2 
hours' - // WHERE id = 1 - // `; - // await markAllJobsAsDone(db); + await processor.processBlock( + new TestBlockBuilder({ + block_height: 95, + index_block_hash: '0x000004', + parent_index_block_hash: '0x000003', + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x01', + sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60', + }) + .addContractEvent( + 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', + cvToHex(stringUtf8CV('test')) + ) + .build() + ) + .build() + ); - // await db.chainhook.processPayload( - // new TestChainhookPayloadBuilder() - // .apply() - // .block({ height: 95 }) - // .transaction({ hash: '0x01', sender: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60' }) - // .event({ - // type: 'SmartContractEvent', - // position: { index: 0 }, - // data: { - // contract_identifier: 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60.friedger-pool-nft', - // topic: 'print', - // raw_value: cvToHex(stringUtf8CV('test')), - // }, - // }) - // .build() - // ); - - // const job = await db.getJob({ id: 2 }); - // expect(job?.status).toBe('pending'); - // }); - // }); + const job = await db.getJob({ id: 2 }); + expect(job?.status).toBe('pending'); + }); + }); }); From dba37bc48c17f1bdb6bc921a627bad6d85a52d93 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 13:34:28 -0600 Subject: [PATCH 18/19] rename --- .../{snp-event-stream.test.ts => block-processor.test.ts} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename tests/stacks-core/{snp-event-stream.test.ts => block-processor.test.ts} (99%) diff --git a/tests/stacks-core/snp-event-stream.test.ts b/tests/stacks-core/block-processor.test.ts similarity index 99% rename from tests/stacks-core/snp-event-stream.test.ts rename to tests/stacks-core/block-processor.test.ts index 001fdcb..63b2f3d 100644 --- a/tests/stacks-core/snp-event-stream.test.ts +++ b/tests/stacks-core/block-processor.test.ts @@ -11,7 +11,7 @@ import { } from '../helpers'; import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; -describe('SNP event stream', () => { +describe('Block processor', () => { let db: PgStore; let processor: StacksCoreBlockProcessor; From 46677368bb85c781da470d42d6df06be89bc1156 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Tue, 13 Jan 2026 14:05:55 -0600 Subject: [PATCH 19/19] reorg test --- .../1721103820876_update-notifications.ts | 2 + ...a.ts => 1767813638023_ft-supply-deltas.ts} | 0 src/pg/stacks-core-pg-store.ts | 4 +- .../stacks-core-block-processor.ts | 5 +- tests/stacks-core/block-processor.test.ts | 260 ++++++++++++++++++ 5 files changed, 268 insertions(+), 3 deletions(-) rename migrations/{1767813638023_ft-supply-delta.ts => 1767813638023_ft-supply-deltas.ts} (100%) diff --git a/migrations/1721103820876_update-notifications.ts b/migrations/1721103820876_update-notifications.ts index c680c77..64deba6 100644 --- a/migrations/1721103820876_update-notifications.ts +++ b/migrations/1721103820876_update-notifications.ts @@ -27,6 +27,8 @@ export function up(pgm: MigrationBuilder): void { index_block_hash: { type: 'text', notNull: true, + references: 'blocks', + onDelete: 'CASCADE', }, tx_id: { type: 'text', diff --git a/migrations/1767813638023_ft-supply-delta.ts b/migrations/1767813638023_ft-supply-deltas.ts similarity index 100% rename from migrations/1767813638023_ft-supply-delta.ts rename to migrations/1767813638023_ft-supply-deltas.ts diff --git a/src/pg/stacks-core-pg-store.ts b/src/pg/stacks-core-pg-store.ts index 2f80664..831a758 100644 --- 
a/src/pg/stacks-core-pg-store.ts +++ b/src/pg/stacks-core-pg-store.ts @@ -83,8 +83,8 @@ export class StacksCorePgStore extends BasePgStoreModule { updated_at = NOW() WHERE id IN (SELECT token_id FROM ft_supply_deltas) `; - // Finally, delete all blocks with a height greater than the chain tip's block height. This will - // cascade delete all tokens, smart contracts, FT supply deltas, update notifications and jobs + // Delete all blocks with a height greater than the chain tip's block height. This will + // cascade delete all tokens, smart contracts, FT supply deltas, update notifications, and jobs // associated with those blocks. await sql` DELETE FROM blocks WHERE block_height > ${newChainTip.block_height} diff --git a/src/stacks-core/stacks-core-block-processor.ts b/src/stacks-core/stacks-core-block-processor.ts index e1c2fb4..32879dd 100644 --- a/src/stacks-core/stacks-core-block-processor.ts +++ b/src/stacks-core/stacks-core-block-processor.ts @@ -92,7 +92,10 @@ export class StacksCoreBlockProcessor { block.block_height - 1 } ${block.parent_index_block_hash}` ); - await this.db.revertToChainTip(sql, chainTip); + await this.db.revertToChainTip(sql, { + block_height: block.block_height - 1, + index_block_hash: block.parent_index_block_hash, + }); } const contracts: SmartContractDeployment[] = []; diff --git a/tests/stacks-core/block-processor.test.ts b/tests/stacks-core/block-processor.test.ts index 63b2f3d..7039c92 100644 --- a/tests/stacks-core/block-processor.test.ts +++ b/tests/stacks-core/block-processor.test.ts @@ -8,6 +8,8 @@ import { markAllJobsAsDone, TestTransactionBuilder, TestBlockBuilder, + SIP_009_ABI, + SIP_010_ABI, } from '../helpers'; import { StacksCoreBlockProcessor } from '../../src/stacks-core/stacks-core-block-processor'; @@ -200,4 +202,262 @@ describe('Block processor', () => { expect(job?.status).toBe('pending'); }); }); + + describe('reorg handling', () => { + test('reverts to last valid chain tip with token contracts, mints, burns and notifications', async () => { + const address = 'SP1K1A1PMGW2ZJCNF46NWZWHG8TS1D23EGH1KNK60'; + const ftContractId = `${address}.test-ft`; + const nftContractId = `${address}.test-nft`; + + // Helper to build blocks with specific height and hash + const buildBlock = (height: number) => { + const hex = height.toString(16).padStart(2, '0'); + return new TestBlockBuilder({ + block_height: height, + index_block_hash: `0x0000${hex}`, + parent_index_block_hash: `0x0000${(height - 1).toString(16).padStart(2, '0')}`, + }); + }; + + // Process 30 blocks with various token operations spread across them + for (let i = 1; i <= 30; i++) { + const hex = i.toString(16).padStart(2, '0'); + const builder = buildBlock(i); + const txBuilder = new TestTransactionBuilder({ + tx_id: `0x01${hex}`, + sender: address, + }); + + // Block 5: Deploy FT contract + if (i === 5) { + txBuilder.setSmartContractPayload('test-ft', SIP_010_ABI); + } + + // Block 10: Deploy NFT contract + if (i === 10) { + txBuilder.setSmartContractPayload('test-nft', SIP_009_ABI); + } + + // Block 12: First FT mint (1000 tokens) - will survive reorg + if (i === 12) { + txBuilder.addFtMintEvent(`${ftContractId}::newyorkcitycoin`, address, '1000'); + } + + // Block 15: Second FT mint (500 tokens) - will survive reorg + if (i === 15) { + txBuilder.addFtMintEvent(`${ftContractId}::newyorkcitycoin`, address, '500'); + } + + // Block 18: NFT mint (token #1) - will survive reorg + if (i === 18) { + txBuilder.addNftMintEvent(`${nftContractId}::crashpunks-v2`, address, cvToHex(uintCV(1))); + } 
+ + // Block 20: FT burn (200 tokens) - will survive reorg + if (i === 20) { + txBuilder.addFtBurnEvent(`${ftContractId}::newyorkcitycoin`, address, '200'); + } + + // Block 22: NFT mint (token #2) - will survive reorg + if (i === 22) { + txBuilder.addNftMintEvent(`${nftContractId}::crashpunks-v2`, address, cvToHex(uintCV(2))); + } + + // Block 24: Notification marking NFT as dynamic - will survive reorg + if (i === 24) { + txBuilder.addContractEvent( + nftContractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(nftContractId)), + 'update-mode': bufferCV(Buffer.from('dynamic')), + }), + }) + ) + ); + } + + // Block 26: FT mint (3000 tokens) - WILL BE REVERTED + if (i === 26) { + txBuilder.addFtMintEvent(`${ftContractId}::newyorkcitycoin`, address, '3000'); + } + + // Block 27: NFT mint (token #3) - WILL BE REVERTED + if (i === 27) { + txBuilder.addNftMintEvent(`${nftContractId}::crashpunks-v2`, address, cvToHex(uintCV(3))); + } + + // Block 28: FT burn (100 tokens) - WILL BE REVERTED + if (i === 28) { + txBuilder.addFtBurnEvent(`${ftContractId}::newyorkcitycoin`, address, '100'); + } + + // Block 29: Notification marking NFT as frozen - WILL BE REVERTED + if (i === 29) { + txBuilder.addContractEvent( + nftContractId, + cvToHex( + tupleCV({ + notification: bufferCV(Buffer.from('token-metadata-update')), + payload: tupleCV({ + 'token-class': bufferCV(Buffer.from('nft')), + 'contract-id': bufferCV(Buffer.from(nftContractId)), + 'update-mode': bufferCV(Buffer.from('frozen')), + }), + }) + ) + ); + } + + // Block 30: Another FT mint (500 tokens) - WILL BE REVERTED + if (i === 30) { + txBuilder.addFtMintEvent(`${ftContractId}::newyorkcitycoin`, address, '500'); + } + + builder.addTransaction(txBuilder.build()); + await processor.processBlock(builder.build()); + + // After block 5: Initialize FT token with 0 supply + // FT contracts need their token record created before supply deltas can be tracked + if (i === 5) { + const ftContract = await db.getSmartContract({ principal: ftContractId }); + expect(ftContract).not.toBeUndefined(); + if (ftContract) { + await db.core.insertAndEnqueueSequentialTokens(db.sql, { + smart_contract: ftContract, + token_count: 1n, + }); + // Initialize total_supply to 0 so supply deltas work correctly + await db.sql`UPDATE tokens SET total_supply = 0 WHERE smart_contract_id = ${ftContract.id}`; + } + } + } + + // Verify state before reorg + await expect(db.core.getChainTip(db.sql)).resolves.toStrictEqual({ + index_block_hash: '0x00001e', // Block 30 + block_height: 30, + }); + + // Verify FT contract exists + const ftContract = await db.getSmartContract({ principal: ftContractId }); + expect(ftContract).not.toBeUndefined(); + expect(ftContract?.sip).toBe(DbSipNumber.sip010); + + // Verify NFT contract exists + const nftContract = await db.getSmartContract({ principal: nftContractId }); + expect(nftContract).not.toBeUndefined(); + expect(nftContract?.sip).toBe(DbSipNumber.sip009); + + // Verify FT token supply: 1000 + 500 - 200 + 3000 - 100 + 500 = 4700 + const ftToken = await db.getToken({ id: 1 }); + expect(ftToken?.total_supply).toBe('4700'); + + // Verify NFT tokens exist (3 tokens: #1, #2, #3) + const nftToken1 = await db.getToken({ id: 2 }); + const nftToken2 = await db.getToken({ id: 3 }); + const nftToken3 = await db.getToken({ id: 4 }); + expect(nftToken1).not.toBeUndefined(); + 
expect(nftToken2).not.toBeUndefined(); + expect(nftToken3).not.toBeUndefined(); + + // Verify notifications exist + // Block 24: dynamic notification applied to 2 existing NFT tokens (tokens #1, #2) + // Block 29: frozen notification applied to 3 NFT tokens (tokens #1, #2, #3) + const notificationsBefore = await db.sql` + SELECT * FROM update_notifications ORDER BY block_height + `; + expect(notificationsBefore.length).toBe(5); // 2 (block 24) + 3 (block 29) + + // Verify jobs exist for all NFT tokens (including token #3 which will be reverted) + // Jobs: FT contract job, NFT contract job, FT token job, NFT token #1 job, NFT token #2 job, NFT token #3 job + const jobsBefore = await db.sql<{ id: number; token_id: number | null }[]>` + SELECT id, token_id FROM jobs ORDER BY id + `; + expect(jobsBefore.length).toBe(6); + // Verify job for NFT token #3 exists (token_id = 4) + const nftToken3JobBefore = jobsBefore.find(j => j.token_id === 4); + expect(nftToken3JobBefore).not.toBeUndefined(); + + // Now trigger a reorg: new block 26 with parent pointing to block 25 + // This will invalidate blocks 26-30 + await processor.processBlock( + new TestBlockBuilder({ + block_height: 26, + index_block_hash: '0x0000ff', // Different hash for new fork + parent_index_block_hash: '0x000019', // Parent is block 25 + }) + .addTransaction( + new TestTransactionBuilder({ + tx_id: '0x0200', + sender: address, + }).build() + ) + .build() + ); + + // Verify chain tip is now at the new block 26 + await expect(db.core.getChainTip(db.sql)).resolves.toStrictEqual({ + index_block_hash: '0x0000ff', + block_height: 26, + }); + + // Verify contracts still exist (deployed before reorg point) + const ftContractAfter = await db.getSmartContract({ principal: ftContractId }); + expect(ftContractAfter).not.toBeUndefined(); + + const nftContractAfter = await db.getSmartContract({ principal: nftContractId }); + expect(nftContractAfter).not.toBeUndefined(); + + // Verify FT token supply is reverted: 1000 + 500 - 200 = 1300 + // The mints at blocks 26, 30 (+3000, +500) and burn at block 28 (-100) are reverted + const ftTokenAfter = await db.getToken({ id: 1 }); + expect(ftTokenAfter?.total_supply).toBe('1300'); + + // Verify NFT token #3 (minted at block 27) is reverted + const nftToken1After = await db.getToken({ id: 2 }); + const nftToken2After = await db.getToken({ id: 3 }); + const nftToken3After = await db.getToken({ id: 4 }); + expect(nftToken1After).not.toBeUndefined(); // Still exists (block 18) + expect(nftToken2After).not.toBeUndefined(); // Still exists (block 22) + expect(nftToken3After).toBeUndefined(); // Reverted (block 27) + + // Verify notifications: frozen notification at block 29 should be reverted + // Only dynamic notification at block 24 should remain + const notificationsAfter = await db.sql` + SELECT * FROM update_notifications ORDER BY block_height + `; + expect(notificationsAfter.length).toBe(2); // Only dynamic notification (x 2 tokens) + for (const notification of notificationsAfter) { + expect(notification.update_mode).toBe('dynamic'); + expect(notification.block_height).toBe(24); + } + + // Verify blocks 26-30 are deleted + const blocksAfter = await db.sql<{ block_height: number }[]>` + SELECT block_height FROM blocks ORDER BY block_height + `; + expect(blocksAfter.length).toBe(26); // Blocks 1-25 + new block 26 + const maxBlockHeight = Math.max(...blocksAfter.map(b => b.block_height)); + expect(maxBlockHeight).toBe(26); + + // Verify job for NFT token #3 was deleted (cascade delete via token deletion) 
+ // Jobs remaining: FT contract job, NFT contract job, FT token job, NFT token #1 job, NFT token #2 job + const jobsAfter = await db.sql<{ id: number; token_id: number | null }[]>` + SELECT id, token_id FROM jobs ORDER BY id + `; + expect(jobsAfter.length).toBe(5); // One less job (NFT token #3 job deleted) + // Verify job for NFT token #3 no longer exists + const nftToken3JobAfter = jobsAfter.find(j => j.token_id === 4); + expect(nftToken3JobAfter).toBeUndefined(); + // Verify jobs for surviving tokens still exist + const nftToken1JobAfter = jobsAfter.find(j => j.token_id === 2); + const nftToken2JobAfter = jobsAfter.find(j => j.token_id === 3); + expect(nftToken1JobAfter).not.toBeUndefined(); + expect(nftToken2JobAfter).not.toBeUndefined(); + }); + }); });