diff --git a/packages/contracts/contracts/rewards/RewardsManager.sol b/packages/contracts/contracts/rewards/RewardsManager.sol index 3f45073c4..85143530a 100644 --- a/packages/contracts/contracts/rewards/RewardsManager.sol +++ b/packages/contracts/contracts/rewards/RewardsManager.sol @@ -20,6 +20,7 @@ import { IRewardsManager } from "@graphprotocol/interfaces/contracts/contracts/r import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; import { IRewardsEligibility } from "@graphprotocol/interfaces/contracts/issuance/eligibility/IRewardsEligibility.sol"; +import { RewardsReclaim } from "@graphprotocol/interfaces/contracts/contracts/rewards/RewardsReclaim.sol"; /** * @title Rewards Manager Contract @@ -109,43 +110,29 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I ); /** - * @notice Emitted when the eligibility reclaim address is set - * @param oldReclaimAddress Previous eligibility reclaim address - * @param newReclaimAddress New eligibility reclaim address + * @notice Emitted when a reclaim address is set + * @param reason The reclaim reason identifier + * @param oldAddress Previous address + * @param newAddress New address */ - event IndexerEligibilityReclaimAddressSet(address indexed oldReclaimAddress, address indexed newReclaimAddress); + event ReclaimAddressSet(bytes32 indexed reason, address indexed oldAddress, address indexed newAddress); /** - * @notice Emitted when the subgraph reclaim address is set - * @param oldReclaimAddress Previous subgraph reclaim address - * @param newReclaimAddress New subgraph reclaim address - */ - event SubgraphDeniedReclaimAddressSet(address indexed oldReclaimAddress, address indexed newReclaimAddress); - - /** - * @notice Emitted when denied rewards are reclaimed due to eligibility - * @param indexer Address of the indexer whose rewards were denied - * @param allocationID Address of the allocation + * @notice Emitted when rewards are reclaimed to a configured address + * @param reason The reclaim reason identifier * @param amount Amount of rewards reclaimed - */ - event RewardsReclaimedDueToIndexerEligibility( - address indexed indexer, - address indexed allocationID, - uint256 amount - ); - - /** - * @notice Emitted when denied rewards are reclaimed due to subgraph denylist - * @param indexer Address of the indexer whose rewards were denied + * @param indexer Address of the indexer * @param allocationID Address of the allocation - * @param subgraphDeploymentID Subgraph deployment ID that was denied - * @param amount Amount of rewards reclaimed + * @param subgraphDeploymentID Subgraph deployment ID for the allocation + * @param data Additional context data for the reclaim */ - event RewardsReclaimedDueToSubgraphDenylist( + event RewardsReclaimed( + bytes32 indexed reason, + uint256 amount, address indexed indexer, address indexed allocationID, - bytes32 indexed subgraphDeploymentID, - uint256 amount + bytes32 subgraphDeploymentID, + bytes data ); // -- Modifiers -- @@ -306,27 +293,17 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I /** * @inheritdoc IRewardsManager - * @dev Set to zero address to disable eligibility reclaim functionality + * @dev bytes32(0) is reserved as an invalid reason to prevent accidental misconfiguration + * and catch uninitialized reason identifiers. 
*/ - function setIndexerEligibilityReclaimAddress(address newReclaimAddress) external override onlyGovernor { - address oldReclaimAddress = indexerEligibilityReclaimAddress; + function setReclaimAddress(bytes32 reason, address newAddress) external override onlyGovernor { + require(reason != bytes32(0), "Cannot set reclaim address for (bytes32(0))"); - if (oldReclaimAddress != newReclaimAddress) { - indexerEligibilityReclaimAddress = newReclaimAddress; - emit IndexerEligibilityReclaimAddressSet(oldReclaimAddress, newReclaimAddress); - } - } - - /** - * @inheritdoc IRewardsManager - * @dev Set to zero address to disable subgraph reclaim functionality - */ - function setSubgraphDeniedReclaimAddress(address newReclaimAddress) external override onlyGovernor { - address oldReclaimAddress = subgraphDeniedReclaimAddress; + address oldAddress = reclaimAddresses[reason]; - if (oldReclaimAddress != newReclaimAddress) { - subgraphDeniedReclaimAddress = newReclaimAddress; - emit SubgraphDeniedReclaimAddressSet(oldReclaimAddress, newReclaimAddress); + if (oldAddress != newAddress) { + reclaimAddresses[reason] = newAddress; + emit ReclaimAddressSet(reason, oldAddress, newAddress); } } @@ -363,10 +340,10 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I * @dev Gets the effective issuance per block, taking into account the IssuanceAllocator if set */ function getRewardsIssuancePerBlock() public view override returns (uint256) { - if (address(issuanceAllocator) != address(0)) { - return issuanceAllocator.getTargetIssuancePerBlock(address(this)).selfIssuancePerBlock; - } - return issuancePerBlock; + return + address(issuanceAllocator) != address(0) + ? issuanceAllocator.getTargetIssuancePerBlock(address(this)).selfIssuanceRate + : issuancePerBlock; } /** @@ -561,57 +538,118 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I } /** - * @notice Checks for and handles denial and reclaim of rewards due to subgraph deny list - * @dev If denied, emits RewardsDenied event and mints to reclaim address if configured + * @notice Calculate rewards for an allocation + * @param rewardsIssuer Address of the rewards issuer calling the function + * @param allocationID Address of the allocation + * @return rewards Amount of rewards calculated + * @return indexer Address of the indexer + * @return subgraphDeploymentID Subgraph deployment ID + */ + function _calcAllocationRewards( + address rewardsIssuer, + address allocationID + ) private returns (uint256 rewards, address indexer, bytes32 subgraphDeploymentID) { + ( + bool isActive, + address _indexer, + bytes32 _subgraphDeploymentID, + uint256 tokens, + uint256 accRewardsPerAllocatedToken, + uint256 accRewardsPending + ) = IRewardsIssuer(rewardsIssuer).getAllocationData(allocationID); + + uint256 updatedAccRewardsPerAllocatedToken = onSubgraphAllocationUpdate(_subgraphDeploymentID); + + rewards = isActive + ? 
accRewardsPending.add( + _calcRewards(tokens, accRewardsPerAllocatedToken, updatedAccRewardsPerAllocatedToken) + ) + : 0; + + indexer = _indexer; + subgraphDeploymentID = _subgraphDeploymentID; + } + + /** + * @notice Common function to reclaim rewards to a configured address + * @param reason The reclaim reason identifier + * @param rewards Amount of rewards to reclaim * @param indexer Address of the indexer * @param allocationID Address of the allocation - * @param subgraphDeploymentID Subgraph deployment ID - * @param rewards Amount of rewards that would be distributed - * @return True if rewards are denied, false otherwise + * @param subgraphDeploymentID Subgraph deployment ID for the allocation + * @param data Additional context data for the reclaim + * @return reclaimed The amount of rewards that were reclaimed (0 if no reclaim address set) */ - function _rewardsDeniedDueToSubgraphDenyList( + function _reclaimRewards( + bytes32 reason, + uint256 rewards, address indexer, address allocationID, bytes32 subgraphDeploymentID, - uint256 rewards - ) private returns (bool) { - if (isDenied(subgraphDeploymentID)) { - emit RewardsDenied(indexer, allocationID); - - // If a reclaim address is set, mint the denied rewards there - if (0 < rewards && subgraphDeniedReclaimAddress != address(0)) { - graphToken().mint(subgraphDeniedReclaimAddress, rewards); - emit RewardsReclaimedDueToSubgraphDenylist(indexer, allocationID, subgraphDeploymentID, rewards); - } - return true; + bytes memory data + ) private returns (uint256 reclaimed) { + address target = reclaimAddresses[reason]; + if (0 < rewards && target != address(0)) { + graphToken().mint(target, rewards); + emit RewardsReclaimed(reason, rewards, indexer, allocationID, subgraphDeploymentID, data); + reclaimed = rewards; } - return false; } /** - * @notice Checks for and handles denial and reclaim of rewards due to indexer eligibility - * @dev If denied, emits RewardsDeniedDueToEligibility event and mints to reclaim address if configured + * @notice Check if rewards should be denied and attempt to reclaim them + * @param rewards Amount of rewards to check * @param indexer Address of the indexer * @param allocationID Address of the allocation - * @param rewards Amount of rewards that would be distributed - * @return True if rewards are denied, false otherwise - */ - function _rewardsDeniedDueToIndexerEligibility( + * @param subgraphDeploymentID Subgraph deployment ID for the allocation + * @return denied True if rewards should be denied (either reclaimed or dropped), false if they should be minted + * @dev First successful reclaim wins - checks performed in order with short-circuit on reclaim: + * 1. Subgraph deny list: emit RewardsDenied. If reclaim address set → reclaim and return (STOP, eligibility not checked) + * 2. Indexer eligibility: Checked if subgraph not denied OR denied without reclaim address. Emit RewardsDeniedDueToEligibility. If reclaim address set → reclaim and return + * Multiple denial events may be emitted only when multiple checks fail without reclaim addresses configured. + * Any failing check without a reclaim address still denies rewards (drops them without minting). 
+ */ + function _deniedRewards( + uint256 rewards, address indexer, address allocationID, - uint256 rewards - ) private returns (bool) { + bytes32 subgraphDeploymentID + ) private returns (bool denied) { + if (isDenied(subgraphDeploymentID)) { + emit RewardsDenied(indexer, allocationID); + if ( + 0 < + _reclaimRewards( + RewardsReclaim.SUBGRAPH_DENIED, + rewards, + indexer, + allocationID, + subgraphDeploymentID, + "" + ) + ) { + return true; // Successfully reclaimed, deny rewards + } + denied = true; // Denied but no reclaim address + } + if (address(rewardsEligibilityOracle) != address(0) && !rewardsEligibilityOracle.isEligible(indexer)) { emit RewardsDeniedDueToEligibility(indexer, allocationID, rewards); - - // If a reclaim address is set, mint the denied rewards there - if (0 < rewards && indexerEligibilityReclaimAddress != address(0)) { - graphToken().mint(indexerEligibilityReclaimAddress, rewards); - emit RewardsReclaimedDueToIndexerEligibility(indexer, allocationID, rewards); + if ( + 0 < + _reclaimRewards( + RewardsReclaim.INDEXER_INELIGIBLE, + rewards, + indexer, + allocationID, + subgraphDeploymentID, + "" + ) + ) { + return true; // Successfully reclaimed, deny rewards } - return true; + denied = true; // Denied but no reclaim address } - return false; } /** @@ -619,6 +657,11 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I * @dev This function can only be called by an authorized rewards issuer which are * the staking contract (for legacy allocations), and the subgraph service (for new allocations). * Mints 0 tokens if the allocation is not active. + * @dev First successful reclaim wins - short-circuits on reclaim: + * - If subgraph denied with reclaim address → reclaim to SUBGRAPH_DENIED address (eligibility NOT checked) + * - If subgraph not denied OR denied without address, then check eligibility → reclaim to INDEXER_INELIGIBLE if configured + * - Subsequent denial emitted only when earlier denial has no reclaim address + * - Any denial without reclaim address drops rewards (no minting) */ function takeRewards(address _allocationID) external override returns (uint256) { address rewardsIssuer = msg.sender; @@ -627,39 +670,37 @@ contract RewardsManager is RewardsManagerV6Storage, GraphUpgradeable, IERC165, I "Caller must be a rewards issuer" ); - ( - bool isActive, - address indexer, - bytes32 subgraphDeploymentID, - uint256 tokens, - uint256 accRewardsPerAllocatedToken, - uint256 accRewardsPending - ) = IRewardsIssuer(rewardsIssuer).getAllocationData(_allocationID); - - uint256 updatedAccRewardsPerAllocatedToken = onSubgraphAllocationUpdate(subgraphDeploymentID); + (uint256 rewards, address indexer, bytes32 subgraphDeploymentID) = _calcAllocationRewards( + rewardsIssuer, + _allocationID + ); - uint256 rewards = 0; - if (isActive) { - // Calculate rewards accrued by this allocation - rewards = accRewardsPending.add( - _calcRewards(tokens, accRewardsPerAllocatedToken, updatedAccRewardsPerAllocatedToken) - ); - } + if (rewards == 0) return 0; + if (_deniedRewards(rewards, indexer, _allocationID, subgraphDeploymentID)) return 0; - if (_rewardsDeniedDueToSubgraphDenyList(indexer, _allocationID, subgraphDeploymentID, rewards)) return 0; + graphToken().mint(rewardsIssuer, rewards); + emit HorizonRewardsAssigned(indexer, _allocationID, rewards); - if (_rewardsDeniedDueToIndexerEligibility(indexer, _allocationID, rewards)) return 0; + return rewards; + } - // Mint rewards to the rewards issuer - if (rewards > 0) { - // Mint directly to rewards 
issuer for the reward amount - // The rewards issuer contract will do bookkeeping of the reward and - // assign in proportion to each stakeholder incentive - graphToken().mint(rewardsIssuer, rewards); - } + /** + * @inheritdoc IRewardsManager + * @dev bytes32(0) as a reason is reserved as a no-op and will not be reclaimed. + */ + function reclaimRewards( + bytes32 reason, + address allocationID, + bytes calldata data + ) external override returns (uint256) { + address rewardsIssuer = msg.sender; + require(rewardsIssuer == address(subgraphService), "Not a rewards issuer"); - emit HorizonRewardsAssigned(indexer, _allocationID, rewards); + (uint256 rewards, address indexer, bytes32 subgraphDeploymentID) = _calcAllocationRewards( + rewardsIssuer, + allocationID + ); - return rewards; + return _reclaimRewards(reason, rewards, indexer, allocationID, subgraphDeploymentID, data); } } diff --git a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol index c1d9c37dd..5cc134bf7 100644 --- a/packages/contracts/contracts/rewards/RewardsManagerStorage.sol +++ b/packages/contracts/contracts/rewards/RewardsManagerStorage.sol @@ -90,8 +90,7 @@ contract RewardsManagerV6Storage is RewardsManagerV5Storage { IRewardsEligibility public rewardsEligibilityOracle; /// @notice Address of the issuance allocator IIssuanceAllocationDistribution public issuanceAllocator; - /// @notice Address to receive tokens denied due to indexer eligibility checks, set to zero to disable - address public indexerEligibilityReclaimAddress; - /// @notice Address to receive tokens denied due to subgraph denylist, set to zero to disable - address public subgraphDeniedReclaimAddress; + /// @notice Mapping of reclaim reason identifiers to reclaim addresses + /// @dev Uses bytes32 for extensibility. See RewardsReclaim library for canonical reasons. 
+ mapping(bytes32 => address) public reclaimAddresses; } diff --git a/packages/contracts/contracts/tests/MockIssuanceAllocator.sol b/packages/contracts/contracts/tests/MockIssuanceAllocator.sol index ba1f8f2bd..6113b8bc0 100644 --- a/packages/contracts/contracts/tests/MockIssuanceAllocator.sol +++ b/packages/contracts/contracts/tests/MockIssuanceAllocator.sol @@ -58,9 +58,9 @@ contract MockIssuanceAllocator is IERC165, IIssuanceAllocationDistribution { IIssuanceTarget(target).beforeIssuanceAllocationChange(); } _targetIssuance[target] = TargetIssuancePerBlock({ - allocatorIssuancePerBlock: allocatorIssuance, + allocatorIssuanceRate: allocatorIssuance, allocatorIssuanceBlockAppliedTo: block.number, - selfIssuancePerBlock: selfIssuance, + selfIssuanceRate: selfIssuance, selfIssuanceBlockAppliedTo: block.number }); } diff --git a/packages/contracts/contracts/tests/MockSubgraphService.sol b/packages/contracts/contracts/tests/MockSubgraphService.sol index 703edd010..75049b399 100644 --- a/packages/contracts/contracts/tests/MockSubgraphService.sol +++ b/packages/contracts/contracts/tests/MockSubgraphService.sol @@ -102,4 +102,28 @@ contract MockSubgraphService is IRewardsIssuer { function getSubgraphAllocatedTokens(bytes32 subgraphDeploymentId) external view override returns (uint256) { return subgraphAllocatedTokens[subgraphDeploymentId]; } + + /** + * @notice Helper function to call reclaimRewards on RewardsManager for testing + * @param rewardsManager Address of the RewardsManager contract + * @param reason Reason identifier for reclaiming rewards + * @param allocationId The allocation ID + * @param contextData Additional context data for the reclaim + * @return Amount of rewards reclaimed + */ + function callReclaimRewards( + address rewardsManager, + bytes32 reason, + address allocationId, + bytes calldata contextData + ) external returns (uint256) { + // Call reclaimRewards on the RewardsManager + // solhint-disable-next-line avoid-low-level-calls + (bool success, bytes memory data) = rewardsManager.call( + // solhint-disable-next-line gas-small-strings + abi.encodeWithSignature("reclaimRewards(bytes32,address,bytes)", reason, allocationId, contextData) + ); + require(success, "reclaimRewards call failed"); + return abi.decode(data, (uint256)); + } } diff --git a/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts index cb3f46107..07a0ea0e2 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards-distribution.test.ts @@ -21,6 +21,10 @@ import { NetworkFixture } from '../lib/fixtures' const MAX_PPM = 1000000 +// TODO: Behavior change - HorizonRewardsAssigned is no longer emitted when rewards == 0 +// Set to true if the old behavior is restored (emitting event for zero rewards) +const EMIT_EVENT_FOR_ZERO_REWARDS = false + const { HashZero, WeiPerEther } = constants const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] @@ -321,9 +325,13 @@ describe('Rewards - Distribution', () => { // Close allocation. 
At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('does not revert with an underflow if the minimum signal changes, and signal came after allocation', async function () { @@ -339,9 +347,13 @@ describe('Rewards - Distribution', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('does not revert if signal was already under minimum', async function () { @@ -356,9 +368,13 @@ describe('Rewards - Distribution', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('should distribute rewards on closed allocation and send to destination', async function () { @@ -499,7 +515,11 @@ describe('Rewards - Distribution', () => { // Close allocation. 
At this point rewards should be zero const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } // After state - should be unchanged since no rewards were minted const afterTokenSupply = await grt.totalSupply() diff --git a/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts index 108eb3391..57a742ec5 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards-eligibility-oracle.test.ts @@ -256,15 +256,20 @@ describe('Rewards - Eligibility Oracle', () => { // Jump to next epoch await helpers.mineEpoch(epochManager) - // Close allocation - denylist should be checked first + // Close allocation - both checks will be performed const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - // Verify: Denylist wins (checked first in RewardsManager.takeRewards line 522) - // Should emit RewardsDenied (not RewardsDeniedDueToEligibility) + const expectedIndexingRewards = toGRT('1400') + + // Verify: Both denial events are emitted (new "first successful reclaim" behavior) + // Since neither has a reclaim address configured, both checks run and both events emit await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedIndexingRewards) - // Verify: REO event is NOT emitted - await expect(tx).to.not.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + // Rewards are dropped (no reclaim happens since neither has address configured) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') }) it('should check REO when denylist allows but indexer ineligible', async function () { diff --git a/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts index d7db40458..721deb45c 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards-interface.test.ts @@ -57,7 +57,7 @@ describe('RewardsManager interfaces', () => { }) it('IRewardsManager should have stable interface ID', () => { - expect(IRewardsManager__factory.interfaceId).to.equal('0x731e44f0') + expect(IRewardsManager__factory.interfaceId).to.equal('0x45dd0aa0') }) }) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts b/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts index d7071840f..b5bd11413 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards-reclaim.test.ts @@ -6,19 +6,25 @@ import { RewardsManager } from '@graphprotocol/contracts' import { deriveChannelKey, GraphNetworkContracts, helpers, randomHexBytes, toGRT } from '@graphprotocol/sdk' import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' import { expect } from 'chai' -import { constants } from 'ethers' +import { constants, 
utils } from 'ethers' import hre from 'hardhat' import { NetworkFixture } from '../lib/fixtures' const { HashZero } = constants +// Reclaim reason identifiers (matching RewardsReclaim.sol) +const INDEXER_INELIGIBLE = utils.id('INDEXER_INELIGIBLE') +const SUBGRAPH_DENIED = utils.id('SUBGRAPH_DENIED') +const CLOSE_ALLOCATION = utils.id('CLOSE_ALLOCATION') + describe('Rewards - Reclaim Addresses', () => { const graph = hre.graph() let curator1: SignerWithAddress let governor: SignerWithAddress let indexer1: SignerWithAddress let reclaimWallet: SignerWithAddress + let otherWallet: SignerWithAddress let fixture: NetworkFixture @@ -68,6 +74,7 @@ describe('Rewards - Reclaim Addresses', () => { curator1 = testAccounts[0] indexer1 = testAccounts[1] reclaimWallet = testAccounts[2] + otherWallet = testAccounts[3] ;({ governor } = await graph.getNamedAccounts()) fixture = new NetworkFixture(graph.provider) @@ -97,78 +104,58 @@ describe('Rewards - Reclaim Addresses', () => { await fixture.tearDown() }) - describe('setIndexerEligibilityReclaimAddress', function () { + describe('setReclaimAddress', function () { it('should reject if not governor', async function () { - const tx = rewardsManager.connect(indexer1).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + const tx = rewardsManager.connect(indexer1).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) await expect(tx).revertedWith('Only Controller governor') }) - it('should set eligibility reclaim address if governor', async function () { - const tx = rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) - await expect(tx) - .emit(rewardsManager, 'IndexerEligibilityReclaimAddressSet') - .withArgs(constants.AddressZero, reclaimWallet.address) - - expect(await rewardsManager.indexerEligibilityReclaimAddress()).eq(reclaimWallet.address) + it('should reject setting reclaim address for bytes32(0)', async function () { + const tx = rewardsManager.connect(governor).setReclaimAddress(HashZero, reclaimWallet.address) + await expect(tx).revertedWith('Cannot set reclaim address for (bytes32(0))') }) - it('should allow setting to zero address', async function () { - await rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) - - const tx = rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(constants.AddressZero) + it('should set eligibility reclaim address if governor', async function () { + const tx = rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) await expect(tx) - .emit(rewardsManager, 'IndexerEligibilityReclaimAddressSet') - .withArgs(reclaimWallet.address, constants.AddressZero) + .emit(rewardsManager, 'ReclaimAddressSet') + .withArgs(INDEXER_INELIGIBLE, constants.AddressZero, reclaimWallet.address) - expect(await rewardsManager.indexerEligibilityReclaimAddress()).eq(constants.AddressZero) + expect(await rewardsManager.reclaimAddresses(INDEXER_INELIGIBLE)).eq(reclaimWallet.address) }) - it('should not emit event when setting same address', async function () { - await rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) - - const tx = rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) - await expect(tx).to.not.emit(rewardsManager, 'IndexerEligibilityReclaimAddressSet') - }) - }) - - describe('setSubgraphDeniedReclaimAddress', function () { - it('should reject if not governor', async function () { - const tx = 
rewardsManager.connect(indexer1).setSubgraphDeniedReclaimAddress(reclaimWallet.address) - await expect(tx).revertedWith('Only Controller governor') - }) - - it('should set subgraph reclaim address if governor', async function () { - const tx = rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + it('should set subgraph denied reclaim address if governor', async function () { + const tx = rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) await expect(tx) - .emit(rewardsManager, 'SubgraphDeniedReclaimAddressSet') - .withArgs(constants.AddressZero, reclaimWallet.address) + .emit(rewardsManager, 'ReclaimAddressSet') + .withArgs(SUBGRAPH_DENIED, constants.AddressZero, reclaimWallet.address) - expect(await rewardsManager.subgraphDeniedReclaimAddress()).eq(reclaimWallet.address) + expect(await rewardsManager.reclaimAddresses(SUBGRAPH_DENIED)).eq(reclaimWallet.address) }) it('should allow setting to zero address', async function () { - await rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) - const tx = rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(constants.AddressZero) + const tx = rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, constants.AddressZero) await expect(tx) - .emit(rewardsManager, 'SubgraphDeniedReclaimAddressSet') - .withArgs(reclaimWallet.address, constants.AddressZero) + .emit(rewardsManager, 'ReclaimAddressSet') + .withArgs(INDEXER_INELIGIBLE, reclaimWallet.address, constants.AddressZero) - expect(await rewardsManager.subgraphDeniedReclaimAddress()).eq(constants.AddressZero) + expect(await rewardsManager.reclaimAddresses(INDEXER_INELIGIBLE)).eq(constants.AddressZero) }) it('should not emit event when setting same address', async function () { - await rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) - const tx = rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) - await expect(tx).to.not.emit(rewardsManager, 'SubgraphDeniedReclaimAddressSet') + const tx = rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + await expect(tx).to.not.emit(rewardsManager, 'ReclaimAddressSet') }) }) describe('reclaim denied rewards - subgraph denylist', function () { it('should mint to reclaim address when subgraph denied and reclaim address set', async function () { // Setup reclaim address - await rewardsManager.connect(governor).setSubgraphDeniedReclaimAddress(reclaimWallet.address) + await rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) // Setup denylist await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) @@ -193,8 +180,8 @@ describe('Rewards - Reclaim Addresses', () => { const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) await expect(tx) - .emit(rewardsManager, 'RewardsReclaimedDueToSubgraphDenylist') - .withArgs(indexer1.address, allocationID1, subgraphDeploymentID1, expectedRewards) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(SUBGRAPH_DENIED, expectedRewards, indexer1.address, allocationID1, 
subgraphDeploymentID1, '0x') // Check reclaim wallet received the rewards const balanceAfter = await grt.balanceOf(reclaimWallet.address) @@ -220,14 +207,14 @@ describe('Rewards - Reclaim Addresses', () => { // Close allocation - should only emit denial event, not reclaim const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) - await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimedDueToSubgraphDenylist') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') }) }) describe('reclaim denied rewards - eligibility', function () { it('should mint to reclaim address when eligibility denied and reclaim address set', async function () { // Setup reclaim address - await rewardsManager.connect(governor).setIndexerEligibilityReclaimAddress(reclaimWallet.address) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) // Setup eligibility oracle that denies const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( @@ -258,8 +245,8 @@ describe('Rewards - Reclaim Addresses', () => { .emit(rewardsManager, 'RewardsDeniedDueToEligibility') .withArgs(indexer1.address, allocationID1, expectedRewards) await expect(tx) - .emit(rewardsManager, 'RewardsReclaimedDueToIndexerEligibility') - .withArgs(indexer1.address, allocationID1, expectedRewards) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(INDEXER_INELIGIBLE, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') // Check reclaim wallet received the rewards const balanceAfter = await grt.balanceOf(reclaimWallet.address) @@ -293,7 +280,309 @@ describe('Rewards - Reclaim Addresses', () => { await expect(tx) .emit(rewardsManager, 'RewardsDeniedDueToEligibility') .withArgs(indexer1.address, allocationID1, expectedRewards) - await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimedDueToIndexerEligibility') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + }) + + describe('reclaim precedence - first successful reclaim wins', function () { + it('should reclaim to SUBGRAPH_DENIED when both fail and both addresses configured', async function () { + // Setup BOTH reclaim addresses + await rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, otherWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Check balances before + const subgraphDeniedBalanceBefore = await grt.balanceOf(reclaimWallet.address) + const indexerIneligibleBalanceBefore = await 
grt.balanceOf(otherWallet.address) + + // Close allocation - should reclaim to SUBGRAPH_DENIED address (first check) + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(SUBGRAPH_DENIED, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') + + // Only SUBGRAPH_DENIED wallet should receive rewards (first successful reclaim wins) + const subgraphDeniedBalanceAfter = await grt.balanceOf(reclaimWallet.address) + const indexerIneligibleBalanceAfter = await grt.balanceOf(otherWallet.address) + + expect(subgraphDeniedBalanceAfter.sub(subgraphDeniedBalanceBefore)).eq(expectedRewards) + expect(indexerIneligibleBalanceAfter.sub(indexerIneligibleBalanceBefore)).eq(0) + }) + + it('should reclaim to INDEXER_INELIGIBLE when both fail but only second address configured', async function () { + // Setup ONLY INDEXER_INELIGIBLE reclaim address (not SUBGRAPH_DENIED) + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, otherWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Check balance before + const balanceBefore = await grt.balanceOf(otherWallet.address) + + // Close allocation - should emit both denial events, but only reclaim to INDEXER_INELIGIBLE + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx) + .emit(rewardsManager, 'RewardsReclaimed') + .withArgs(INDEXER_INELIGIBLE, expectedRewards, indexer1.address, allocationID1, subgraphDeploymentID1, '0x') + + // INDEXER_INELIGIBLE wallet should receive rewards + const balanceAfter = await grt.balanceOf(otherWallet.address) + expect(balanceAfter.sub(balanceBefore)).eq(expectedRewards) + }) + + it('should drop rewards when both fail and neither address configured', async function () { + // Do NOT set any reclaim addresses + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await 
mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + const expectedRewards = toGRT('1400') + + // Close allocation - should emit both denial events but NO reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx) + .emit(rewardsManager, 'RewardsDeniedDueToEligibility') + .withArgs(indexer1.address, allocationID1, expectedRewards) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + + it('should drop rewards when subgraph denied without address even if indexer eligible', async function () { + // Do NOT set SUBGRAPH_DENIED reclaim address + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Setup eligibility oracle that ALLOWS (indexer is eligible) + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(true) // Allow + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation + await setupIndexerAllocation() + + // Jump to next epoch + await helpers.mineEpoch(epochManager) + + // Close allocation - should emit denied event but NO eligibility event, NO reclaim + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + await expect(tx).emit(rewardsManager, 'RewardsDenied').withArgs(indexer1.address, allocationID1) + await expect(tx).to.not.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + }) + }) + + describe('reclaimRewards - force close allocation', function () { + let mockSubgraphService: any + + beforeEach(async function () { + // Deploy mock subgraph service + const MockSubgraphServiceFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockSubgraphService.sol:MockSubgraphService', + ) + mockSubgraphService = await MockSubgraphServiceFactory.deploy() + await mockSubgraphService.deployed() + + // Set it as the subgraph service in rewards manager + await rewardsManager.connect(governor).setSubgraphService(mockSubgraphService.address) + }) + + it('should reclaim rewards when reclaim address is set', async function () { + // Set reclaim address for ForceCloseAllocation + await rewardsManager.connect(governor).setReclaimAddress(CLOSE_ALLOCATION, reclaimWallet.address) + + // Setup allocation in real staking contract + await setupIndexerAllocation() + + // Also set allocation data in mock so RewardsManager can query it + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, // isActive + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, // accRewardsPerAllocatedToken starts at 0 + 0, // accRewardsPending + ) + await 
mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Jump to next epoch to accrue rewards + await helpers.mineEpoch(epochManager) + + // Check balance before + const balanceBefore = await grt.balanceOf(reclaimWallet.address) + + // Call reclaimRewards via mock subgraph service + const tx = await mockSubgraphService.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + + // Verify event was emitted (don't check exact amount, it depends on rewards calculation) + await expect(tx).emit(rewardsManager, 'RewardsReclaimed') + + // Check balance after - should have increased + const balanceAfter = await grt.balanceOf(reclaimWallet.address) + const rewardsClaimed = balanceAfter.sub(balanceBefore) + expect(rewardsClaimed).to.be.gt(0) + }) + + it('should not reclaim when reclaim address is not set', async function () { + // Do NOT set reclaim address (defaults to zero) + + // Setup allocation in real staking contract + await setupIndexerAllocation() + + // Also set allocation data in mock + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + true, + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, + 0, + ) + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Jump to next epoch to accrue rewards + await helpers.mineEpoch(epochManager) + + // Call reclaimRewards via mock subgraph service - should not emit RewardsReclaimed + const tx = await mockSubgraphService.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + + it('should return 0 and not emit when reclaim address is not set and no rewards', async function () { + // Do NOT set reclaim address (zero address) + + // Setup allocation but mark it as inactive (no rewards) + const tokensAllocated = toGRT('12500') + await mockSubgraphService.setAllocation( + allocationID1, + false, // NOT active - this will return 0 rewards + indexer1.address, + subgraphDeploymentID1, + tokensAllocated, + 0, + 0, + ) + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, tokensAllocated) + + // Call reclaimRewards - should return 0 and not emit + const result = await mockSubgraphService.callStatic.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + expect(result).eq(0) + + const tx = await mockSubgraphService.callReclaimRewards( + rewardsManager.address, + CLOSE_ALLOCATION, + allocationID1, + '0x', + ) + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + }) + + it('should reject when called by unauthorized address', async function () { + // Try to call reclaimRewards directly from indexer1 (not the subgraph service) + // Note: Contract types need to be regenerated after interface changes + // Using manual encoding for now + const abiCoder = hre.ethers.utils.defaultAbiCoder + const selector = hre.ethers.utils.id('reclaimRewards(bytes32,address,bytes)').slice(0, 10) + const params = abiCoder.encode(['bytes32', 'address', 'bytes'], [CLOSE_ALLOCATION, allocationID1, '0x']) + const data = selector + params.slice(2) + + const tx = indexer1.sendTransaction({ + to: rewardsManager.address, + data: data, + }) + await expect(tx).revertedWith('Not a rewards issuer') }) }) }) diff --git a/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts 
b/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts index f75785ecd..a8c3b0c08 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards-subgraph-service.test.ts @@ -10,6 +10,10 @@ import { network } from 'hardhat' import { NetworkFixture } from '../lib/fixtures' +// TODO: Behavior change - HorizonRewardsAssigned is no longer emitted when rewards == 0 +// Set to true if the old behavior is restored (emitting event for zero rewards) +const EMIT_EVENT_FOR_ZERO_REWARDS = false + describe('Rewards - SubgraphService', () => { const graph = hre.graph() let curator1: SignerWithAddress @@ -253,9 +257,13 @@ describe('Rewards - SubgraphService', () => { const mockSubgraphServiceSigner = await hre.ethers.getSigner(mockSubgraphService.address) - // Take rewards should return 0 and emit event with 0 amount + // Take rewards should return 0 const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) - await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } // Stop impersonating await network.provider.request({ @@ -300,7 +308,11 @@ describe('Rewards - SubgraphService', () => { // Take rewards should succeed with 0 amount const tx = rewardsManager.connect(mockSubgraphServiceSigner).takeRewards(allocationID1) - await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } // Stop impersonating await network.provider.request({ @@ -383,7 +395,7 @@ describe('Rewards - SubgraphService', () => { await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) - // Setup allocation + // Setup allocation with some pending rewards so rewards > 0 await mockSubgraphService.setAllocation( allocationID1, true, @@ -391,9 +403,11 @@ describe('Rewards - SubgraphService', () => { subgraphDeploymentID1, toGRT('12500'), 0, - 0, + toGRT('100'), // accRewardsPending > 0 so rewards will be calculated ) + await mockSubgraphService.setSubgraphAllocatedTokens(subgraphDeploymentID1, toGRT('12500')) + // Impersonate the mock subgraph service contract await network.provider.request({ method: 'hardhat_impersonateAccount', diff --git a/packages/contracts/test/tests/unit/rewards/rewards.test.ts b/packages/contracts/test/tests/unit/rewards/rewards.test.ts index e6171cc13..b4f9e68c2 100644 --- a/packages/contracts/test/tests/unit/rewards/rewards.test.ts +++ b/packages/contracts/test/tests/unit/rewards/rewards.test.ts @@ -15,15 +15,23 @@ import { import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' import { BigNumber as BN } from 'bignumber.js' import { expect } from 'chai' -import { BigNumber, constants } from 'ethers' +import { BigNumber, constants, utils } from 'ethers' import hre from 'hardhat' import { NetworkFixture } from '../lib/fixtures' const MAX_PPM = 1000000 +// TODO: Behavior change - 
HorizonRewardsAssigned is no longer emitted when rewards == 0 +// Set to true if the old behavior is restored (emitting event for zero rewards) +const EMIT_EVENT_FOR_ZERO_REWARDS = false + const { HashZero, WeiPerEther } = constants +// Reclaim reason identifiers (matching RewardsReclaim.sol) +const INDEXER_INELIGIBLE = utils.id('INDEXER_INELIGIBLE') +const SUBGRAPH_DENIED = utils.id('SUBGRAPH_DENIED') + const toRound = (n: BigNumber) => formatGRT(n.add(toGRT('0.5'))).split('.')[0] describe('Rewards', () => { @@ -711,9 +719,13 @@ describe('Rewards', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('does not revert with an underflow if the minimum signal changes, and signal came after allocation', async function () { @@ -729,9 +741,13 @@ describe('Rewards', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('does not revert if signal was already under minimum', async function () { @@ -746,9 +762,13 @@ describe('Rewards', () => { // Close allocation. At this point rewards should be collected for that indexer const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx) - .emit(rewardsManager, 'HorizonRewardsAssigned') - .withArgs(indexer1.address, allocationID1, toBN(0)) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx) + .emit(rewardsManager, 'HorizonRewardsAssigned') + .withArgs(indexer1.address, allocationID1, toBN(0)) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } }) it('should distribute rewards on closed allocation and send to destination', async function () { @@ -889,7 +909,11 @@ describe('Rewards', () => { // Close allocation. 
At this point rewards should be zero const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) - await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } // After state - should be unchanged since no rewards were minted const afterTokenSupply = await grt.totalSupply() @@ -899,6 +923,88 @@ describe('Rewards', () => { expect(afterTokenSupply).eq(beforeTokenSupply) expect(afterStakingBalance).eq(beforeStakingBalance) }) + + it('should handle zero rewards with denylist and reclaim address', async function () { + // Setup reclaim address for SubgraphDenied + const reclaimWallet = assetHolder + await rewardsManager.connect(governor).setReclaimAddress(SUBGRAPH_DENIED, reclaimWallet.address) + + // Setup denylist + await rewardsManager.connect(governor).setSubgraphAvailabilityOracle(governor.address) + await rewardsManager.connect(governor).setDenied(subgraphDeploymentID1, true) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation with zero rewards (no signal) + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Close allocation immediately (same epoch) - should have zero rewards + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Should not emit events for zero rewards + await expect(tx).to.not.emit(rewardsManager, 'RewardsDenied') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) + + it('should handle zero rewards with eligibility oracle and reclaim address', async function () { + // Setup reclaim address for IndexerIneligible + const reclaimWallet = assetHolder + await rewardsManager.connect(governor).setReclaimAddress(INDEXER_INELIGIBLE, reclaimWallet.address) + + // Setup eligibility oracle that denies + const MockRewardsEligibilityOracleFactory = await hre.ethers.getContractFactory( + 'contracts/tests/MockRewardsEligibilityOracle.sol:MockRewardsEligibilityOracle', + ) + const mockOracle = await MockRewardsEligibilityOracleFactory.deploy(false) // Deny + await mockOracle.deployed() + await rewardsManager.connect(governor).setRewardsEligibilityOracle(mockOracle.address) + + // Align with the epoch boundary + await helpers.mineEpoch(epochManager) + + // Setup allocation with zero rewards (no signal) + const tokensToAllocate = toGRT('12500') + await staking.connect(indexer1).stake(tokensToAllocate) + await staking + .connect(indexer1) + .allocateFrom( + indexer1.address, + subgraphDeploymentID1, + tokensToAllocate, + allocationID1, + metadata, + await channelKey1.generateProof(indexer1.address), + ) + + // Close allocation immediately (same epoch) - should have zero rewards + const tx = staking.connect(indexer1).closeAllocation(allocationID1, randomHexBytes()) + + // Should not emit events 
for zero rewards + await expect(tx).to.not.emit(rewardsManager, 'RewardsDeniedDueToEligibility') + await expect(tx).to.not.emit(rewardsManager, 'RewardsReclaimed') + if (EMIT_EVENT_FOR_ZERO_REWARDS) { + await expect(tx).emit(rewardsManager, 'HorizonRewardsAssigned').withArgs(indexer1.address, allocationID1, 0) + } else { + await expect(tx).to.not.emit(rewardsManager, 'HorizonRewardsAssigned') + } + }) }) }) diff --git a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol index dd5346b06..87adda601 100644 --- a/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol +++ b/packages/interfaces/contracts/contracts/rewards/IRewardsManager.sol @@ -50,18 +50,12 @@ interface IRewardsManager { function setRewardsEligibilityOracle(address newRewardsEligibilityOracle) external; /** - * @notice Set the indexer eligibility reclaim address - * @dev Address to mint tokens that would be denied due to indexer eligibility. Set to zero to disable. - * @param newReclaimAddress The address to receive eligibility-denied tokens + * @notice Set the reclaim address for a specific reason + * @dev Address to mint tokens for denied/reclaimed rewards. Set to zero to disable. + * @param reason The reclaim reason identifier (see RewardsReclaim library for canonical reasons) + * @param newReclaimAddress The address to receive tokens */ - function setIndexerEligibilityReclaimAddress(address newReclaimAddress) external; - - /** - * @notice Set the subgraph denied reclaim address - * @dev Address to mint tokens that would be denied due to subgraph denylist. Set to zero to disable. - * @param newReclaimAddress The address to receive subgraph-denied tokens - */ - function setSubgraphDeniedReclaimAddress(address newReclaimAddress) external; + function setReclaimAddress(bytes32 reason, address newReclaimAddress) external; // -- Denylist -- @@ -156,6 +150,17 @@ interface IRewardsManager { */ function takeRewards(address allocationID) external returns (uint256); + /** + * @notice Reclaim rewards for an allocation + * @dev This function can only be called by an authorized rewards issuer. + * Calculates pending rewards and mints them to the configured reclaim address. + * @param reason The reclaim reason identifier (see RewardsReclaim library for canonical reasons) + * @param allocationID Allocation + * @param data Arbitrary data to include in the RewardsReclaimed event for additional context + * @return The amount of rewards that were reclaimed (0 if no reclaim address set) + */ + function reclaimRewards(bytes32 reason, address allocationID, bytes calldata data) external returns (uint256); + // -- Hooks -- /** diff --git a/packages/interfaces/contracts/contracts/rewards/RewardsReclaim.sol b/packages/interfaces/contracts/contracts/rewards/RewardsReclaim.sol new file mode 100644 index 000000000..dab4eed71 --- /dev/null +++ b/packages/interfaces/contracts/contracts/rewards/RewardsReclaim.sol @@ -0,0 +1,63 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity ^0.7.6 || ^0.8.0; + +/** + * @title RewardsReclaim + * @author Edge & Node + * @notice Canonical definitions for rewards reclaim reasons + * @dev Uses bytes32 identifiers (like OpenZeppelin roles) to allow decentralized extension. + * New reasons can be defined by any contract without modifying this library. + * These constants provide standard reasons used across The Graph Protocol. + * + * Note: bytes32(0) is reserved and cannot be used as a reclaim reason. 
This design prevents: + * 1. Accidental misconfiguration from setting a reclaim address for an invalid/uninitialized reason + * 2. Invalid reclaim operations when a reason identifier was not properly set + * The zero value serves as a sentinel to catch configuration errors at the protocol level. + * + * How reclaim reasons are used depends on the specific implementation. Different contracts + * may handle multiple applicable reclaim reasons differently. + */ +library RewardsReclaim { + /** + * @notice Reclaim rewards - indexer failed eligibility check + * @dev Indexer is not eligible to receive rewards according to eligibility oracle + */ + bytes32 public constant INDEXER_INELIGIBLE = keccak256("INDEXER_INELIGIBLE"); + + /** + * @notice Reclaim rewards - subgraph is on denylist + * @dev Subgraph deployment has been denied rewards by availability oracle + */ + bytes32 public constant SUBGRAPH_DENIED = keccak256("SUBGRAPH_DENIED"); + + /** + * @notice Reclaim rewards - POI submitted too late + * @dev Proof of Indexing was submitted after the staleness deadline + */ + bytes32 public constant STALE_POI = keccak256("STALE_POI"); + + /** + * @notice Reclaim rewards - allocation has no tokens + * @dev Altruistic allocation (zero tokens) is not eligible for rewards + */ + bytes32 public constant ALTRUISTIC_ALLOCATION = keccak256("ALTRUISTIC_ALLOCATION"); + + /** + * @notice Reclaim rewards - no POI provided + * @dev Allocation closed without providing a Proof of Indexing + */ + bytes32 public constant ZERO_POI = keccak256("ZERO_POI"); + + /** + * @notice Reclaim rewards - allocation created in current epoch + * @dev Allocation must exist for at least one full epoch to earn rewards + */ + bytes32 public constant ALLOCATION_TOO_YOUNG = keccak256("ALLOCATION_TOO_YOUNG"); + + /** + * @notice Reclaim rewards - allocation closed without POI + * @dev Allocation was closed without providing a Proof of Indexing + */ + bytes32 public constant CLOSE_ALLOCATION = keccak256("CLOSE_ALLOCATION"); +} diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol index 1887b0c07..d225c80c9 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol @@ -2,6 +2,8 @@ pragma solidity ^0.7.6 || ^0.8.0; +import { IIssuanceTarget } from "./IIssuanceTarget.sol"; + /** * @title IIssuanceAllocationAdministration * @author Edge & Node @@ -12,98 +14,124 @@ interface IIssuanceAllocationAdministration { /** * @notice Set the issuance per block. 
* @param newIssuancePerBlock New issuance per block - * @param evenIfDistributionPending If true, set even if there is pending issuance distribution - * @return True if the value is applied (including if already the case), false if not applied due to paused state + * @return applied True if the value is applied (including if already the case) + * @dev Requires distribution to have reached block.number + */ + function setIssuancePerBlock(uint256 newIssuancePerBlock) external returns (bool applied); + + /** + * @notice Set the issuance per block, requiring distribution has reached at least the specified block + * @param newIssuancePerBlock New issuance per block + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return applied True if the value is applied (including if already the case), false if distribution hasn't reached minDistributedBlock + * @dev Governance should explicitly call + * distributePendingIssuance(blockNumber) first if distribution is behind minDistributedBlock. + * @dev This allows configuration changes while paused: first call distributePendingIssuance(blockNumber), + * then call this function with the same or lower blockNumber. */ - function setIssuancePerBlock(uint256 newIssuancePerBlock, bool evenIfDistributionPending) external returns (bool); + function setIssuancePerBlock( + uint256 newIssuancePerBlock, + uint256 minDistributedBlock + ) external returns (bool applied); /** * @notice Set the allocation for a target with only allocator minting - * @param target Address of the target to update - * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) - * @return True if the value is applied (including if already the case), false if not applied - * @dev This variant sets selfMintingPPM to 0 and evenIfDistributionPending to false + * @param target The target contract to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @return applied True if the value is applied (including if already the case), false if not applied + * @dev This variant sets selfMintingRate to 0 and evenIfDistributionPending to false */ - function setTargetAllocation(address target, uint256 allocatorMintingPPM) external returns (bool); + function setTargetAllocation(IIssuanceTarget target, uint256 allocatorMintingRate) external returns (bool applied); /** * @notice Set the allocation for a target with both allocator and self minting - * @param target Address of the target to update - * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) - * @param selfMintingPPM Self-minting allocation for the target (in PPM) - * @return True if the value is applied (including if already the case), false if not applied + * @param target The target contract to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @return applied True if the value is applied (including if already the case), false if not applied * @dev This variant sets evenIfDistributionPending to false */ function setTargetAllocation( - address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM - ) external returns (bool); + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) external returns (bool applied); /** - * @notice Set the allocation for a target - * @param target Address of the target to update - * @param allocatorMintingPPM 
Allocator-minting allocation for the target (in PPM) - * @param selfMintingPPM Self-minting allocation for the target (in PPM) - * @param evenIfDistributionPending Whether to force the allocation change even if issuance has not been distributed up to the current block - * @return True if the value is applied (including if already the case), false if not applied + * @notice Set the allocation for a target, provided distribution has reached at least the specified block + * @param target The target contract to update + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return applied True if the value is applied (including if already the case), false if distribution hasn't reached minDistributedBlock and therefore the change was not applied + * @dev Governance should explicitly call + * distributePendingIssuance(blockNumber) first if paused and not distributed up to minDistributedBlock block. + * @dev This allows configuration changes while paused: first call distributePendingIssuance(blockNumber), + * then call this function with the same or lower blockNumber. */ function setTargetAllocation( - address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM, - bool evenIfDistributionPending - ) external returns (bool); + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate, + uint256 minDistributedBlock + ) external returns (bool applied); /** * @notice Notify a specific target about an upcoming allocation change * @param target Address of the target to notify - * @return True if notification was sent or already sent this block, false otherwise + * @return notified True if notification was sent or already sent this block, false otherwise */ - function notifyTarget(address target) external returns (bool); + function notifyTarget(address target) external returns (bool notified); /** * @notice Force set the lastChangeNotifiedBlock for a target to a specific block number * @param target Address of the target to update * @param blockNumber Block number to set as the lastChangeNotifiedBlock - * @return The block number that was set + * @return notificationBlock The block number that was set * @dev This can be used to enable notification to be sent again (by setting to a past block) * @dev or to prevent notification until a future block (by setting to current or future block). 
*/ - function forceTargetNoChangeNotificationBlock(address target, uint256 blockNumber) external returns (uint256); + function forceTargetNoChangeNotificationBlock( + address target, + uint256 blockNumber + ) external returns (uint256 notificationBlock); /** * @notice Set the address that receives the default portion of issuance not allocated to other targets - * @param newAddress The new default allocation address (can be address(0)) - * @return True if successful + * @param newAddress The new default target address (can be address(0)) + * @return applied True if applied */ - function setDefaultAllocationAddress(address newAddress) external returns (bool); + function setDefaultTarget(address newAddress) external returns (bool applied); /** * @notice Set the address that receives the default portion of issuance not allocated to other targets - * @param newAddress The new default allocation address (can be address(0)) - * @param evenIfDistributionPending Whether to force the allocation change even if issuance has not been distributed up to the current block - * @return True if successful + * @param newAddress The new default target address (can be address(0)) + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return applied True if applied, false if distribution has not reached minDistributedBlock and therefore the change was not applied + * @dev Governance should explicitly call + * distributePendingIssuance(blockNumber) first if paused and distribution is not up to minDistributedBlock block. + * then call this function with the same or lower blockNumber. */ - function setDefaultAllocationAddress(address newAddress, bool evenIfDistributionPending) external returns (bool); + function setDefaultTarget(address newAddress, uint256 minDistributedBlock) external returns (bool applied); /** - * @notice Distribute any pending accumulated issuance to allocator-minting targets. - * @return Block number up to which issuance has been distributed - * @dev This function can be called even when the contract is paused. - * @dev If there is no pending issuance, this function is a no-op. - * @dev If allocatorMintingAllowance is 0 (all targets are self-minting), this function is a no-op. + * @notice Distribute pending accumulated allocator-minting issuance + * @dev Distributes accumulated allocator-minting issuance using current rates + * (retroactively applied to the period from lastDistributionBlock to current block). + * Prioritizes non-default targets getting full rates; default gets remainder. + * @dev Finalizes self-minting accumulation for the period being distributed. + * @return distributedBlock Block number that issuance was distributed up to */ - function distributePendingIssuance() external returns (uint256); + function distributePendingIssuance() external returns (uint256 distributedBlock); /** - * @notice Distribute any pending accumulated issuance to allocator-minting targets, accumulating up to a specific block. - * @param toBlockNumber The block number to accumulate pending issuance up to (must be >= lastIssuanceAccumulationBlock and <= current block) - * @return Block number up to which issuance has been distributed - * @dev This function can be called even when the contract is paused. - * @dev Accumulates pending issuance up to the specified block, then distributes all accumulated issuance. - * @dev If there is no pending issuance after accumulation, this function is a no-op for distribution. 
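A rough sketch of the two-step pattern these @dev notes describe, assuming a hypothetical governance helper that holds GOVERNOR_ROLE on the allocator; the contract and function names are illustrative only.

```solidity
// SPDX-License-Identifier: GPL-2.0-or-later
pragma solidity ^0.8.0;

import { IIssuanceAllocationAdministration } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol";

/// Hypothetical governance helper, illustrative only; assumes it holds GOVERNOR_ROLE on the allocator.
contract PausedReconfigureExample {
    IIssuanceAllocationAdministration public immutable allocator;

    constructor(IIssuanceAllocationAdministration _allocator) {
        allocator = _allocator;
    }

    /// Two-step pattern usable while paused: distribute up to a block, then pass it as minDistributedBlock.
    function moveDefaultTarget(address newDefault) external returns (bool applied) {
        // 1. Bring allocator-minting distribution up to the current block (allowed even while paused).
        uint256 distributedTo = allocator.distributePendingIssuance(block.number);

        // 2. Apply the change; returns false (not applied) if distribution is still behind distributedTo.
        applied = allocator.setDefaultTarget(newDefault, distributedTo);
    }
}
```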
- * @dev If allocatorMintingAllowance is 0 (all targets are self-minting), this function is a no-op for distribution. + * @notice Distribute pending accumulated allocator-minting issuance up to specified block + * @param toBlockNumber Block number to distribute up to (must be <= block.number and >= lastDistributionBlock) + * @dev Distributes accumulated allocator-minting issuance using current rates + * (retroactively applied to the period from lastDistributionBlock to toBlockNumber). + * Prioritizes non-default targets getting full rates; default gets remainder. + * @dev Finalizes self-minting accumulation for the period being distributed. + * @return distributedBlock Block number that issuance was distributed up to */ - function distributePendingIssuance(uint256 toBlockNumber) external returns (uint256); + function distributePendingIssuance(uint256 toBlockNumber) external returns (uint256 distributedBlock); } diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol index 4b27eaf39..affbb84e4 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol @@ -24,10 +24,12 @@ interface IIssuanceAllocationDistribution { /** * @notice Target issuance per block information * @param target Address of the target - * @return TargetIssuancePerBlock struct containing allocatorIssuanceBlockAppliedTo, selfIssuanceBlockAppliedTo, allocatorIssuancePerBlock, and selfIssuancePerBlock + * @return targetIssuance TargetIssuancePerBlock struct containing allocatorIssuanceBlockAppliedTo, selfIssuanceBlockAppliedTo, allocatorIssuanceRate, and selfIssuanceRate * @dev This function does not revert when paused, instead the caller is expected to correctly read and apply the information provided. * @dev Targets should check allocatorIssuanceBlockAppliedTo and selfIssuanceBlockAppliedTo - if either is not the current block, that type of issuance is paused for that target. * @dev Targets should not check the allocator's pause state directly, but rely on the blockAppliedTo fields to determine if issuance is paused. 
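A rough sketch of the consumption pattern described above, assuming a hypothetical self-minting target; real targets also implement IIssuanceTarget and perform the actual minting and accounting.

```solidity
// SPDX-License-Identifier: GPL-2.0-or-later
pragma solidity ^0.8.0;

import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol";
import { TargetIssuancePerBlock } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol";

/// Hypothetical self-minting target, illustrative only.
contract SelfMintingTargetExample {
    IIssuanceAllocationDistribution public immutable allocator;
    uint256 public lastSelfMintedBlock;

    constructor(IIssuanceAllocationDistribution _allocator) {
        allocator = _allocator;
        lastSelfMintedBlock = block.number;
    }

    /// Amount this target may self-mint now, honoring selfIssuanceBlockAppliedTo rather than pause state.
    function pendingSelfIssuance() public view returns (uint256) {
        TargetIssuancePerBlock memory info = allocator.getTargetIssuancePerBlock(address(this));
        uint256 upTo = info.selfIssuanceBlockAppliedTo;
        if (upTo <= lastSelfMintedBlock) return 0;
        // The rate applies only up to the block the allocator reports as applied.
        return info.selfIssuanceRate * (upTo - lastSelfMintedBlock);
    }
}
```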
*/ - function getTargetIssuancePerBlock(address target) external view returns (TargetIssuancePerBlock memory); + function getTargetIssuancePerBlock( + address target + ) external view returns (TargetIssuancePerBlock memory targetIssuance); } diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol index baf70116c..ca0ca3129 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocationStatus.sol @@ -3,7 +3,7 @@ pragma solidity ^0.7.6 || ^0.8.0; pragma abicoder v2; -import { Allocation } from "./IIssuanceAllocatorTypes.sol"; +import { Allocation, DistributionState } from "./IIssuanceAllocatorTypes.sol"; /** * @title IIssuanceAllocationStatus @@ -16,56 +16,44 @@ interface IIssuanceAllocationStatus { /** * @notice Get the current allocation for a target * @param target Address of the target - * @return Allocation struct containing total, allocator-minting, and self-minting allocations + * @return allocation Allocation struct containing total, allocator-minting, and self-minting allocations */ - function getTargetAllocation(address target) external view returns (Allocation memory); + function getTargetAllocation(address target) external view returns (Allocation memory allocation); /** * @notice Get the current global allocation totals - * @return Allocation struct containing total, allocator-minting, and self-minting allocations across all targets + * @return allocation Allocation struct containing total, allocator-minting, and self-minting allocations across all targets */ - function getTotalAllocation() external view returns (Allocation memory); + function getTotalAllocation() external view returns (Allocation memory allocation); /** * @notice Get all allocated target addresses - * @return Array of target addresses + * @return targets Array of target addresses */ - function getTargets() external view returns (address[] memory); + function getTargets() external view returns (address[] memory targets); /** * @notice Get a specific allocated target address by index * @param index The index of the target address to retrieve - * @return The target address at the specified index + * @return target The target address at the specified index */ - function getTargetAt(uint256 index) external view returns (address); + function getTargetAt(uint256 index) external view returns (address target); /** * @notice Get the number of allocated targets - * @return The total number of allocated targets + * @return count The total number of allocated targets */ - function getTargetCount() external view returns (uint256); + function getTargetCount() external view returns (uint256 count); /** * @notice Get the current issuance per block - * @return The current issuance per block + * @return issuancePerBlock The current issuance per block */ - function issuancePerBlock() external view returns (uint256); + function getIssuancePerBlock() external view returns (uint256 issuancePerBlock); /** - * @notice Get the last block number where issuance was distributed - * @return The last block number where issuance was distributed + * @notice Get pending issuance distribution state + * @return distributionState DistributionState struct containing block tracking and accumulation info */ - function lastIssuanceDistributionBlock() external view returns (uint256); - - /** - * @notice Get the last block number where 
issuance was accumulated during pause - * @return The last block number where issuance was accumulated during pause - */ - function lastIssuanceAccumulationBlock() external view returns (uint256); - - /** - * @notice Get the amount of pending accumulated allocator issuance - * @return The amount of pending accumulated allocator issuance - */ - function pendingAccumulatedAllocatorIssuance() external view returns (uint256); + function getDistributionState() external view returns (DistributionState memory distributionState); } diff --git a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol index 3a410da37..f1eb58ca5 100644 --- a/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol +++ b/packages/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol @@ -5,38 +5,50 @@ pragma abicoder v2; /** * @notice Target issuance per block information - * @param allocatorIssuancePerBlock Issuance per block for allocator-minting (non-self-minting) + * @param allocatorIssuanceRate Issuance rate for allocator-minting (tokens per block) * @param allocatorIssuanceBlockAppliedTo The block up to which allocator issuance has been applied - * @param selfIssuancePerBlock Issuance per block for self-minting + * @param selfIssuanceRate Issuance rate for self-minting (tokens per block) * @param selfIssuanceBlockAppliedTo The block up to which self issuance has been applied */ struct TargetIssuancePerBlock { - uint256 allocatorIssuancePerBlock; + uint256 allocatorIssuanceRate; uint256 allocatorIssuanceBlockAppliedTo; - uint256 selfIssuancePerBlock; + uint256 selfIssuanceRate; uint256 selfIssuanceBlockAppliedTo; } /** * @notice Allocation information - * @param totalAllocationPPM Total allocation in PPM (allocatorMintingAllocationPPM + selfMintingAllocationPPM) - * @param allocatorMintingPPM Allocator-minting allocation in PPM (Parts Per Million) - * @param selfMintingPPM Self-minting allocation in PPM (Parts Per Million) + * @param totalAllocationRate Total allocation rate (tokens per block: allocatorMintingRate + selfMintingRate) + * @param allocatorMintingRate Allocator-minting allocation rate (tokens per block) + * @param selfMintingRate Self-minting allocation rate (tokens per block) */ struct Allocation { - uint256 totalAllocationPPM; - uint256 allocatorMintingPPM; - uint256 selfMintingPPM; + uint256 totalAllocationRate; + uint256 allocatorMintingRate; + uint256 selfMintingRate; } /** * @notice Allocation target information - * @param allocatorMintingPPM The allocator-minting allocation amount in PPM (Parts Per Million) - * @param selfMintingPPM The self-minting allocation amount in PPM (Parts Per Million) + * @param allocatorMintingRate The allocator-minting allocation rate (tokens per block) + * @param selfMintingRate The self-minting allocation rate (tokens per block) * @param lastChangeNotifiedBlock Last block when this target was notified of changes */ struct AllocationTarget { - uint256 allocatorMintingPPM; - uint256 selfMintingPPM; + uint256 allocatorMintingRate; + uint256 selfMintingRate; uint256 lastChangeNotifiedBlock; } + +/** + * @notice Distribution state information + * @param lastDistributionBlock Last block where allocator-minting issuance was distributed + * @param lastSelfMintingBlock Last block where self-minting issuance was applied + * @param selfMintingOffset Self-minting that offsets allocator-minting budget (starts during pause, clears on distribution) + 
*/ +struct DistributionState { + uint256 lastDistributionBlock; + uint256 lastSelfMintingBlock; + uint256 selfMintingOffset; +} diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.md b/packages/issuance/contracts/allocate/IssuanceAllocator.md index d624b9894..6a00d92d1 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.md +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.md @@ -1,10 +1,10 @@ # IssuanceAllocator -The IssuanceAllocator is a smart contract responsible for allocating token issuance to different components of The Graph protocol. It calculates issuance for all targets based on their configured proportions and handles minting for allocator-minting targets. +The IssuanceAllocator is a smart contract responsible for allocating token issuance to different components of The Graph protocol. It calculates issuance for all targets based on their configured rates (tokens per block) and handles minting for allocator-minting targets. ## Overview -The contract operates as a central distribution hub for newly minted Graph tokens, ensuring that different protocol components receive their allocated share of token issuance according to predefined proportions. It supports both allocator-minting targets (recommended for new targets) and self-minting targets (for backwards compatibility), with the ability to have mixed allocations primarily for migration scenarios. +The contract operates as a central distribution hub for newly minted Graph tokens, ensuring that different protocol components receive their allocated share of token issuance according to configured rates. It maintains a 100% allocation invariant through a default target mechanism, where any unallocated portion automatically goes to the default target. It supports both allocator-minting targets (recommended for new targets) and self-minting targets (for backwards compatibility), with the ability to have mixed allocations primarily for migration scenarios. ## Architecture @@ -34,30 +34,30 @@ The IssuanceAllocator includes a pause and accumulation system designed to respo When the contract is paused: - **Distribution stops**: `distributeIssuance()` returns early without minting any tokens, returning the last block when issuance was distributed. -- **Accumulation begins**: Issuance for allocator-minting targets accumulates in `pendingAccumulatedAllocatorIssuance` and will be distributed when the contract is unpaused (or in the interim via `distributePendingIssuance()`) according to their configured proportions at the time of distribution. +- **Accumulation begins**: Self-minting allowances accumulate in `selfMintingOffset`, reducing the allocator-minting budget. When distribution resumes, current rates are applied retroactively to the entire undistributed period. - **Self-minting continues**: Self-minting targets can still query their allocation, but should check the `blockAppliedTo` fields to respect pause state. Because RewardsManager does not check `blockAppliedTo` and will mint tokens even when the allocator is paused, the initial implementation does not pause self-minting targets. (This behavior is subject to change in future versions, and new targets should check `blockAppliedTo`.) Note that RewardsManager is independently pausable. -- **Configuration allowed**: Governance functions like `setIssuancePerBlock()` and `setTargetAllocation()` still work. However, unlike changes made while unpaused, changes made will be applied from lastIssuanceDistributionBlock rather than the current block. 
-- **Notifications continue**: Targets are still notified of allocation changes, and should check the `blockAppliedTo` fields to correctly apply changes. +- **Configuration allowed**: Governance functions like `setIssuancePerBlock()` and `setTargetAllocation()` still work. Rate changes apply immediately. When distribution resumes (either automatically when unpaused or manually via `distributePendingIssuance()`), the current rates are used retroactively for the entire undistributed period from `lastDistributionBlock` to the distribution block. +- **Notifications continue**: Targets are still notified of allocation changes even when paused, and should check the `blockAppliedTo` fields to correctly apply changes. #### Accumulation Logic -During pause periods, the contract tracks: +During pause periods, the contract tracks self-minting allowances that reduce the allocator-minting budget: -- `lastIssuanceAccumulationBlock`: Updated to current block whenever accumulation occurs -- `pendingAccumulatedAllocatorIssuance`: Accumulates issuance intended for allocator-minting targets -- Calculation: `(issuancePerBlock * blocksSinceLastAccumulation * (MILLION - totalSelfMintingAllocationPPM)) / MILLION` -- **Internal accumulation**: The contract uses private `accumulatePendingIssuance()` functions to handle accumulation logic, which can be triggered automatically during rate changes or manually via the public `distributePendingIssuance(uint256)` function +- `lastSelfMintingBlock`: Updated to current block whenever self-minting advances (continuously, even when paused) +- `selfMintingOffset`: Accumulates self-minting amounts that will reduce the allocator-minting budget when distribution resumes +- Calculation: `totalSelfMintingRate * blocksSinceLastSelfMinting` +- **Conservative accumulation**: Once accumulation starts (during pause), it continues through any unpaused periods until distribution clears it. #### Recovery Process -When unpausing or manually distributing: +When distribution resumes: -1. **Automatic distribution**: `distributeIssuance()` first calls `_distributePendingIssuance()` to handle accumulated issuance +1. **Automatic distribution**: `distributeIssuance()` detects accumulated self-minting and triggers retroactive distribution 2. **Manual distribution**: `distributePendingIssuance()` can be called directly by governance, even while paused -3. **Proportional allocation**: Pending issuance is distributed proportionally among current allocator-minting targets -4. **Clean slate**: After distribution, `pendingAccumulatedAllocatorIssuance` is reset to 0 - -Note that if there are no allocator-minting targets all pending issuance is lost. If not all of the allocation allowance is used, there will be a proportional amount of accumulated issuance lost. +3. **Retroactive application**: Current rates are applied retroactively to the entire undistributed period +4. **Budget reduction**: Accumulated self-minting reduces the allocator-minting budget for the period +5. **Priority distribution**: Non-default targets receive their full rates first (if budget allows), default target receives remainder +6. 
**Clean slate**: After distribution to current block, `selfMintingOffset` is reset to 0 #### Use Cases @@ -72,21 +72,20 @@ This system enables: The contract uses ERC-7201 namespaced storage to prevent storage collisions in upgradeable contracts: -- `issuancePerBlock`: Total token issuance per block across all targets -- `lastIssuanceDistributionBlock`: Last block when issuance was distributed -- `lastIssuanceAccumulationBlock`: Last block when issuance was accumulated during pause -- `allocationTargets`: Maps target addresses to their allocation data (allocator-minting PPM, self-minting PPM, notification status) -- `targetAddresses`: Array of all registered target addresses with non-zero total allocations -- `totalAllocationPPM`: Sum of all allocations across all targets (cannot exceed 1,000,000 PPM = 100%) -- `totalAllocatorMintingAllocationPPM`: Sum of allocator-minting allocations across all targets -- `totalSelfMintingAllocationPPM`: Sum of self-minting allocations across all targets -- `pendingAccumulatedAllocatorIssuance`: Accumulated issuance for allocator-minting targets during pause - -### Constants +- `issuancePerBlock`: Total token issuance rate per block across all targets (tokens per block) +- `lastDistributionBlock`: Last block when allocator-minting issuance was distributed +- `lastSelfMintingBlock`: Last block when self-minting allowances were calculated and tracked +- `selfMintingOffset`: Accumulated self-minting that offsets allocator-minting budget (starts during pause, clears on distribution) +- `allocationTargets`: Maps target addresses to their allocation data (allocatorMintingRate, selfMintingRate, lastChangeNotifiedBlock) +- `targetAddresses`: Array of all target addresses (index 0 is always the default target, indices 1+ are explicitly allocated targets) +- `totalSelfMintingRate`: Sum of self-minting rates across all targets (tokens per block) -The contract inherits the following constant from `BaseUpgradeable`: +**Allocation Invariant:** The contract maintains a 100% allocation invariant: -- **MILLION**: `1,000,000` - Used as the denominator for Parts Per Million (PPM) calculations. For example, 50% allocation would be represented as 500,000 PPM. 
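A simplified, free-standing sketch of the retroactive split described in the accumulation and recovery notes above. The function and parameter names are illustrative, this is not the contract's internal code, and it ignores rounding and the address(0) default case.

```solidity
// SPDX-License-Identifier: GPL-2.0-or-later
pragma solidity ^0.8.0;

/// Illustrative only: mirrors the prose above, not the contract's actual internals.
function splitRetroactiveIssuance(
    uint256 issuancePerBlock,           // total issuance rate (tokens per block)
    uint256 blocksSinceLastDistribution,
    uint256 accumulatedSelfMinting,     // selfMintingOffset at distribution time
    uint256 nonDefaultAllocatorRate     // sum of non-default targets' allocator-minting rates
) pure returns (uint256 toNonDefaultTargets, uint256 toDefaultTarget) {
    uint256 gross = issuancePerBlock * blocksSinceLastDistribution;
    // Accumulated self-minting reduces the allocator-minting budget for the period.
    uint256 budget = gross > accumulatedSelfMinting ? gross - accumulatedSelfMinting : 0;

    // Non-default targets are served at their full rates first, if the budget allows...
    toNonDefaultTargets = nonDefaultAllocatorRate * blocksSinceLastDistribution;
    if (toNonDefaultTargets > budget) toNonDefaultTargets = budget;

    // ...and the default target receives whatever remains of the budget.
    toDefaultTarget = budget - toNonDefaultTargets;
}
```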
+- A default target exists at `targetAddresses[0]` (initially `address(0)`) +- Total allocator-minting rate + total self-minting rate always equals `issuancePerBlock` +- The default target automatically receives any unallocated portion +- When the default address is `address(0)`, the unallocated portion is not minted ## Core Functions @@ -101,69 +100,121 @@ The contract inherits the following constant from `BaseUpgradeable`: - First distributes any pending accumulated issuance from pause periods - Calculates blocks since last distribution - Mints tokens proportionally to allocator-minting targets only - - Updates `lastIssuanceDistributionBlock` to current block - - Returns early with current `lastIssuanceDistributionBlock` when paused (no distribution occurs) + - Updates `lastDistributionBlock` to current block when not paused + - Returns `lastDistributionBlock` when paused (no distribution occurs, block number frozen) - Returns early if no blocks have passed since last distribution - Can be called by anyone to trigger distribution -#### `setIssuancePerBlock(uint256 newIssuancePerBlock, bool evenIfDistributionPending) → bool` +#### `setIssuancePerBlock(uint256 newIssuancePerBlock) → bool` - **Access**: GOVERNOR_ROLE only - **Purpose**: Set the total token issuance rate per block - **Parameters**: - `newIssuancePerBlock` - New issuance rate in tokens per block - - `evenIfDistributionPending` - If true, skip distribution requirement (notifications still occur) -- **Returns**: True if applied, false if blocked by pending operations +- **Returns**: True if applied - **Events**: Emits `IssuancePerBlockUpdated` - **Notes**: - - Automatically distributes or accumulates pending issuance before changing rate (unless evenIfDistributionPending=true or paused) - - Notifies all targets of the upcoming change (unless paused) - - Returns false if distribution fails and evenIfDistributionPending=false, reverts if notification fails + - Requires distribution to have reached `block.number` + - Automatically distributes pending issuance before changing rate + - Notifies the default target of the upcoming change + - Only the default target's rate changes; other targets' rates remain fixed - L1GraphTokenGateway must be updated when this changes to maintain bridge functionality - No-op if new rate equals current rate (returns true immediately) +#### `setIssuancePerBlock(uint256 newIssuancePerBlock, uint256 minDistributedBlock) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set the total token issuance rate per block, requiring distribution has reached at least the specified block +- **Parameters**: + - `newIssuancePerBlock` - New issuance rate in tokens per block + - `minDistributedBlock` - Minimum block number that distribution must have reached +- **Returns**: True if applied, false if distribution hasn't reached `minDistributedBlock` +- **Events**: Emits `IssuancePerBlockUpdated` +- **Notes**: + - Allows configuration changes while paused: first call `distributePendingIssuance(blockNumber)`, then this function with same or lower blockNumber + - Rate changes apply immediately and are used retroactively when distribution resumes + ### Target Management The contract provides multiple overloaded functions for setting target allocations: -#### `setTargetAllocation(address target, uint256 allocatorMintingPPM) → bool` +#### `setTargetAllocation(IIssuanceTarget target, uint256 allocatorMintingRate) → bool` - **Access**: GOVERNOR_ROLE only -- **Purpose**: Set allocator-minting allocation only 
(selfMintingPPM=0, evenIfDistributionPending=false) +- **Purpose**: Set allocator-minting rate only (selfMintingRate=0) - **Parameters**: - `target` - Target contract address (must support IIssuanceTarget interface) - - `allocatorMintingPPM` - Allocator-minting allocation in PPM (0 removes target if no self-minting allocation) + - `allocatorMintingRate` - Allocator-minting rate in tokens per block (0 removes target if no self-minting rate) +- **Returns**: True if applied +- **Events**: Emits `TargetAllocationUpdated` +- **Notes**: + - Requires distribution to have reached `block.number` + - Cannot be used for the default target (use `setDefaultTarget()` instead) -#### `setTargetAllocation(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM) → bool` +#### `setTargetAllocation(IIssuanceTarget target, uint256 allocatorMintingRate, uint256 selfMintingRate) → bool` - **Access**: GOVERNOR_ROLE only -- **Purpose**: Set both allocator-minting and self-minting allocations (evenIfDistributionPending=false) +- **Purpose**: Set both allocator-minting and self-minting rates - **Parameters**: - `target` - Target contract address (must support IIssuanceTarget interface) - - `allocatorMintingPPM` - Allocator-minting allocation in PPM - - `selfMintingPPM` - Self-minting allocation in PPM + - `allocatorMintingRate` - Allocator-minting rate in tokens per block + - `selfMintingRate` - Self-minting rate in tokens per block +- **Returns**: True if applied +- **Events**: Emits `TargetAllocationUpdated` +- **Notes**: + - Requires distribution to have reached `block.number` + - Cannot be used for the default target (use `setDefaultTarget()` instead) -#### `setTargetAllocation(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM, bool evenIfDistributionPending) → bool` +#### `setTargetAllocation(IIssuanceTarget target, uint256 allocatorMintingRate, uint256 selfMintingRate, uint256 minDistributedBlock) → bool` - **Access**: GOVERNOR_ROLE only -- **Purpose**: Set both allocations with full control over distribution requirements +- **Purpose**: Set both rates, requiring distribution has reached at least the specified block - **Parameters**: - `target` - Target contract address (must support IIssuanceTarget interface) - - `allocatorMintingPPM` - Allocator-minting allocation in PPM - - `selfMintingPPM` - Self-minting allocation in PPM - - `evenIfDistributionPending` - If true, skip distribution requirement (notifications still occur) -- **Returns**: True if applied, false if blocked by pending operations -- **Events**: Emits `TargetAllocationUpdated` with total allocation (allocatorMintingPPM + selfMintingPPM) + - `allocatorMintingRate` - Allocator-minting rate in tokens per block + - `selfMintingRate` - Self-minting rate in tokens per block + - `minDistributedBlock` - Minimum block number that distribution must have reached +- **Returns**: True if applied, false if distribution hasn't reached `minDistributedBlock` +- **Events**: Emits `TargetAllocationUpdated` - **Behavior**: - - Validates target supports IIssuanceTarget interface (for non-zero total allocations) - - No-op if new allocations equal current allocations (returns true immediately) - - Distributes or accumulates pending issuance before changing allocation (unless evenIfDistributionPending=true) + - Validates target supports IIssuanceTarget interface (for non-zero total rates) + - No-op if new rates equal current rates (returns true immediately) + - Distributes pending issuance before changing allocation - Notifies target of 
upcoming change (always occurs unless overridden by `forceTargetNoChangeNotificationBlock()`) - - Returns false if distribution fails (when evenIfDistributionPending=false), reverts if notification fails - - Validates total allocation doesn't exceed MILLION after notification (prevents reentrancy issues) - - Adds target to registry if total allocation > 0 and not already present - - Removes target from registry if total allocation = 0 (uses swap-and-pop for gas efficiency) + - Reverts if notification fails + - Validates requested rates don't exceed available budget (prevents exceeding 100% invariant) + - Adds target to registry if total rate > 0 and not already present + - Removes target from registry if total rate = 0 (uses swap-and-pop for gas efficiency) - Deletes allocation data when removing target from registry + - Default target automatically adjusted to maintain 100% invariant + - Allows configuration changes while paused: first call `distributePendingIssuance(blockNumber)`, then this function + +#### `setDefaultTarget(address newAddress) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set the address that receives the default portion of issuance (unallocated to other targets) +- **Parameters**: + - `newAddress` - The new default target address (can be `address(0)`) +- **Returns**: True if applied +- **Events**: Emits `DefaultTargetUpdated` +- **Notes**: + - Requires distribution to have reached `block.number` + - The default target automatically receives any unallocated portion to maintain 100% invariant + - When set to `address(0)`, the unallocated portion is not minted + - Cannot set default to an address that already has an explicit allocation + - Notifies both old and new addresses + +#### `setDefaultTarget(address newAddress, uint256 minDistributedBlock) → bool` + +- **Access**: GOVERNOR_ROLE only +- **Purpose**: Set the default target address, requiring distribution has reached at least the specified block +- **Parameters**: + - `newAddress` - The new default target address (can be `address(0)`) + - `minDistributedBlock` - Minimum block number that distribution must have reached +- **Returns**: True if applied, false if distribution hasn't reached `minDistributedBlock` +- **Events**: Emits `DefaultTargetUpdated` +- **Notes**: + - Allows configuration changes while paused: first call `distributePendingIssuance(blockNumber)`, then this function #### `notifyTarget(address target) → bool` @@ -185,25 +236,26 @@ The contract provides multiple overloaded functions for setting target allocatio #### `distributePendingIssuance() → uint256` - **Access**: GOVERNOR_ROLE only -- **Purpose**: Distribute any pending accumulated issuance to allocator-minting targets +- **Purpose**: Distribute pending accumulated allocator-minting issuance using current rates - **Returns**: Block number up to which issuance has been distributed - **Notes**: - - Distributes issuance that accumulated while paused + - Distributes retroactively using current rates for the entire undistributed period - Can be called even when the contract is paused - - No-op if there is no pending issuance or all targets are self-minting + - Prioritizes non-default targets getting full rates; default gets remainder + - Finalizes self-minting accumulation for the distributed period #### `distributePendingIssuance(uint256 toBlockNumber) → uint256` - **Access**: GOVERNOR_ROLE only -- **Purpose**: Accumulate pending issuance up to a specific block, then distribute all accumulated issuance +- **Purpose**: Distribute pending 
accumulated allocator-minting issuance up to a specific block - **Parameters**: - - `toBlockNumber` - Block number to accumulate to (must be >= lastIssuanceAccumulationBlock and <= current block) + - `toBlockNumber` - Block number to distribute up to (must be >= lastDistributionBlock and <= current block) - **Returns**: Block number up to which issuance has been distributed - **Notes**: - - First accumulates pending issuance up to the specified block - - Then distributes all accumulated issuance to allocator-minting targets + - Distributes retroactively using current rates from lastDistributionBlock to toBlockNumber - Can be called even when the contract is paused - Will revert with `ToBlockOutOfRange()` if toBlockNumber is invalid + - Useful for gradual catch-up during pause or for setting up configuration changes ### View Functions @@ -211,100 +263,98 @@ The contract provides multiple overloaded functions for setting target allocatio - **Purpose**: Get current allocation for a target - **Returns**: Allocation struct containing: - - `totalAllocationPPM`: Total allocation (allocatorMintingAllocationPPM + selfMintingAllocationPPM) - - `allocatorMintingAllocationPPM`: Allocator-minting allocation in PPM - - `selfMintingAllocationPPM`: Self-minting allocation in PPM + - `totalAllocationRate`: Total allocation rate (allocatorMintingRate + selfMintingRate) in tokens per block + - `allocatorMintingRate`: Allocator-minting rate in tokens per block + - `selfMintingRate`: Self-minting rate in tokens per block +- **Notes**: Returns assigned allocation regardless of whether target is `address(0)` or the default target #### `getTotalAllocation() → Allocation` - **Purpose**: Get current global allocation totals - **Returns**: Allocation struct with totals across all targets +- **Notes**: When default target is `address(0)`, its allocation is excluded from reported totals (treated as unallocated since `address(0)` cannot receive minting) #### `getTargets() → address[]` -- **Purpose**: Get all target addresses with non-zero total allocations +- **Purpose**: Get all target addresses (including default target at index 0) - **Returns**: Array of target addresses #### `getTargetAt(uint256 index) → address` - **Purpose**: Get a specific target address by index - **Returns**: Target address at the specified index +- **Notes**: Index 0 is always the default target #### `getTargetCount() → uint256` -- **Purpose**: Get the number of allocated targets -- **Returns**: Total number of targets with non-zero allocations +- **Purpose**: Get the number of targets (including default target) +- **Returns**: Total number of targets (always >= 1) #### `getTargetIssuancePerBlock(address target) → TargetIssuancePerBlock` -- **Purpose**: Get issuance per block information for a target +- **Purpose**: Get issuance rate information for a target - **Returns**: TargetIssuancePerBlock struct containing: - - `allocatorIssuancePerBlock`: Issuance per block for allocator-minting portion - - `allocatorIssuanceBlockAppliedTo`: Block up to which allocator issuance has been applied - - `selfIssuancePerBlock`: Issuance per block for self-minting portion - - `selfIssuanceBlockAppliedTo`: Block up to which self issuance has been applied (always current block) + - `allocatorIssuanceRate`: Allocator-minting rate in tokens per block + - `allocatorIssuanceBlockAppliedTo`: Block up to which allocator issuance has been distributed (`lastDistributionBlock`) + - `selfIssuanceRate`: Self-minting rate in tokens per block + - 
`selfIssuanceBlockAppliedTo`: Block up to which self-minting allowances have been calculated (`lastSelfMintingBlock`) - **Notes**: - Does not revert when paused - callers should check blockAppliedTo fields - - If allocatorIssuanceBlockAppliedTo is not current block, allocator issuance is paused - - Self-minting targets should use this to determine how much to mint - -#### `issuancePerBlock() → uint256` + - If `allocatorIssuanceBlockAppliedTo < block.number`, allocator distribution is behind (likely paused) + - Self-minting targets should use this to determine their issuance rate + - Returns assigned rates regardless of whether target is `address(0)` or the default -- **Purpose**: Get the current total issuance per block -- **Returns**: Current issuance per block across all targets +#### `getIssuancePerBlock() → uint256` -#### `lastIssuanceDistributionBlock() → uint256` +- **Purpose**: Get the current total issuance rate per block +- **Returns**: Current issuance rate in tokens per block across all targets -- **Purpose**: Get the last block where issuance was distributed -- **Returns**: Last distribution block number +#### `getDistributionState() → DistributionState` -#### `lastIssuanceAccumulationBlock() → uint256` - -- **Purpose**: Get the last block where issuance was accumulated during pause -- **Returns**: Last accumulation block number - -#### `pendingAccumulatedAllocatorIssuance() → uint256` - -- **Purpose**: Get the amount of pending accumulated allocator issuance -- **Returns**: Amount of issuance accumulated during pause periods +- **Purpose**: Get pending issuance distribution state +- **Returns**: DistributionState struct containing: + - `lastDistributionBlock`: Last block where allocator-minting issuance was distributed + - `lastSelfMintingBlock`: Last block where self-minting allowances were calculated + - `selfMintingOffset`: Accumulated self-minting that will reduce allocator-minting budget #### `getTargetData(address target) → AllocationTarget` - **Purpose**: Get internal target data (implementation-specific) -- **Returns**: AllocationTarget struct containing allocatorMintingPPM, selfMintingPPM, and lastChangeNotifiedBlock +- **Returns**: AllocationTarget struct containing allocatorMintingRate, selfMintingRate, and lastChangeNotifiedBlock - **Notes**: Primarily for operator use and debugging ## Allocation Logic +### Rate-Based System + +The contract uses absolute rates (tokens per block) rather than proportional allocations: + +- Each target has an `allocatorMintingRate` (tokens per block for allocator-minting) +- Each target has a `selfMintingRate` (tokens per block for self-minting) +- The default target automatically receives: `issuancePerBlock - sum(all other targets' rates)` + ### Distribution Calculation -For each target during distribution, only the allocator-minting portion is distributed: +For each target during normal distribution, only the allocator-minting portion is distributed: ```solidity -targetIssuance = (totalNewIssuance * targetAllocatorMintingPPM) / MILLION +targetIssuance = targetAllocatorMintingRate * blocksSinceLastDistribution ``` -For self-minting targets, they query their allocation via `getTargetIssuancePerBlock()`: +For self-minting targets, they query their rate via `getTargetIssuancePerBlock()`: ```solidity -selfIssuancePerBlock = (issuancePerBlock * targetSelfMintingPPM) / MILLION +selfIssuanceRate = targetSelfMintingRate ``` -Where: - -- `totalNewIssuance = issuancePerBlock * blocksSinceLastDistribution` -- `targetAllocatorMintingPPM` is the 
target's allocator-minting allocation in PPM -- `targetSelfMintingPPM` is the target's self-minting allocation in PPM -- `MILLION = 1,000,000` (representing 100%) - -### Allocation Constraints +### Allocation Constraints and Invariants -- Total allocation across all targets cannot exceed 1,000,000 PPM (100%) -- Individual target allocations (allocator-minting + self-minting) can be any value from 0 to 1,000,000 PPM -- Setting both allocations to 0 removes the target from the registry -- Allocations are measured in PPM for precision (1 PPM = 0.0001%) -- Small rounding losses may occur in calculations due to integer division (this is acceptable) -- Each target can have both allocator-minting and self-minting allocations, though typically only one is used +- **100% Invariant**: `sum(all allocatorMintingRates) + sum(all selfMintingRates) == issuancePerBlock` (always) +- **Default Target**: Automatically adjusted to maintain the 100% invariant when other allocations change +- **Available Budget**: When setting a target's allocation, available budget = default target's allocator rate + target's current total rate +- **Removing Targets**: Setting both rates to 0 removes the target from the active list (except default target) +- **Rounding**: Small rounding losses may occur during proportional distribution (when budget is insufficient) +- **Mixed Allocations**: Each target can have both allocator-minting and self-minting rates, though typically only one is used ## Change Notification System @@ -320,7 +370,7 @@ Before any allocation changes, targets are notified via the `IIssuanceTarget.bef - Notifications are tracked per target using `lastChangeNotifiedBlock` - Failed notifications cause the entire transaction to revert - Use `forceTargetNoChangeNotificationBlock()` to skip notification for broken targets before removing them -- Notifications cannot be skipped (the `evenIfDistributionPending` parameter only affects distribution requirements) +- Notifications always occur when allocations change (even when paused) - Manual notification is available for gas limit recovery via `notifyTarget()` ## Gas Limit Recovery @@ -338,33 +388,50 @@ The contract includes several mechanisms to handle potential gas limit issues: 1. **Pause functionality**: Contract can be paused to stop operations during recovery 2. **Individual target notification**: `notifyTarget()` allows notifying targets one by one (will revert if target notification reverts) 3. **Force notification override**: `forceTargetNoChangeNotificationBlock()` can skip problematic targets -4. **Force parameters**: Both `setIssuancePerBlock()` and `setTargetAllocation()` accept `evenIfDistributionPending` flags to skip distribution requirements -5. **Target removal**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove malfunctioning targets by setting both allocations to 0 +4. **Controlled distribution**: Functions accept `minDistributedBlock` parameter to allow configuration changes while paused (after calling `distributePendingIssuance(blockNumber)`) +5. **Target removal**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove malfunctioning targets by setting both rates to 0 6. 
**Pending issuance distribution**: `distributePendingIssuance()` can be called manually to distribute accumulated issuance ## Events ```solidity -event IssuanceDistributed(address indexed target, uint256 amount); -event TargetAllocationUpdated(address indexed target, uint256 newAllocation); +event IssuanceDistributed(address indexed target, uint256 amount, uint256 indexed fromBlock, uint256 indexed toBlock); + +event TargetAllocationUpdated(address indexed target, uint256 newAllocatorMintingRate, uint256 newSelfMintingRate); + event IssuancePerBlockUpdated(uint256 oldIssuancePerBlock, uint256 newIssuancePerBlock); + +event DefaultTargetUpdated(address indexed oldAddress, address indexed newAddress); + +event IssuanceSelfMintAllowance( + address indexed target, + uint256 amount, + uint256 indexed fromBlock, + uint256 indexed toBlock +); ``` ## Error Conditions ```solidity error TargetAddressCannotBeZero(); -error InsufficientAllocationAvailable(); -error TargetDoesNotSupportIIssuanceTarget(); -error ToBlockOutOfRange(); +error InsufficientAllocationAvailable(uint256 requested, uint256 available); +error InsufficientUnallocatedForRateDecrease(uint256 oldRate, uint256 newRate, uint256 unallocated); +error TargetDoesNotSupportIIssuanceTarget(address target); +error ToBlockOutOfRange(uint256 toBlock, uint256 minBlock, uint256 maxBlock); +error CannotSetAllocationForDefaultTarget(address defaultTarget); +error CannotSetDefaultToAllocatedTarget(address target); ``` ### Error Descriptions -- **TargetAddressCannotBeZero**: Thrown when attempting to set allocation for the zero address -- **InsufficientAllocationAvailable**: Thrown when the total allocation would exceed 1,000,000 PPM (100%) +- **TargetAddressCannotBeZero**: Thrown when attempting to set allocation for the zero address (note: zero address can be the default target) +- **InsufficientAllocationAvailable**: Thrown when the requested allocation exceeds available budget (default target allocation + current target allocation) +- **InsufficientUnallocatedForRateDecrease**: Thrown when attempting to decrease issuance rate without sufficient unallocated budget in the default target - **TargetDoesNotSupportIIssuanceTarget**: Thrown when a target contract does not implement the required IIssuanceTarget interface -- **ToBlockOutOfRange**: Thrown when the `toBlockNumber` parameter in `distributePendingIssuance(uint256)` is outside the valid range (must be >= lastIssuanceAccumulationBlock and <= current block) +- **ToBlockOutOfRange**: Thrown when the `toBlockNumber` parameter in `distributePendingIssuance(uint256)` is outside the valid range (must be >= lastDistributionBlock and <= current block) +- **CannotSetAllocationForDefaultTarget**: Thrown when attempting to use `setTargetAllocation()` on the default target address +- **CannotSetDefaultToAllocatedTarget**: Thrown when attempting to set the default target to an address that already has an explicit allocation ## Usage Patterns @@ -372,7 +439,11 @@ error ToBlockOutOfRange(); 1. Deploy contract with Graph Token address 2. Initialize with governor address + - `lastDistributionBlock` is set to `block.number` at initialization as a safety guard against pausing before configuration + - This should be updated during initial configuration when `setIssuancePerBlock()` is called 3. Set initial issuance per block rate + - Updates `lastDistributionBlock` to current block via distribution call + - This establishes the correct starting point for issuance tracking 4. Add targets with their allocations 5. 
Grant minter role to IssuanceAllocator on Graph Token @@ -385,9 +456,9 @@ error ToBlockOutOfRange(); ### Emergency Scenarios -- **Gas limit issues**: Use pause, individual notifications, and `evenIfDistributionPending` parameters -- **Target failures**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove problematic targets by setting both allocations to 0 -- **Rate changes**: Use `evenIfDistributionPending` parameter to bypass distribution requirements +- **Gas limit issues**: Use pause, individual notifications, and `minDistributedBlock` parameters with `distributePendingIssuance()` +- **Target failures**: Use `forceTargetNoChangeNotificationBlock()` to skip notification, then remove problematic targets by setting both rates to 0 +- **Configuration while paused**: Call `distributePendingIssuance(blockNumber)` first, then use `minDistributedBlock` parameter in setter functions ### For L1 Bridge Integration @@ -397,7 +468,10 @@ When `setIssuancePerBlock()` is called, the L1GraphTokenGateway's `updateL2MintA - Only governor can modify allocations and issuance rates - Interface validation prevents adding incompatible targets -- Total allocation limits prevent over-allocation +- 100% allocation invariant maintained automatically through default target mechanism +- Budget validation prevents over-allocation - Pause functionality provides emergency stop capability - Notification system ensures targets can prepare for changes -- Self-minting targets must respect paused state to prevent unauthorized minting +- Self-minting targets should respect paused state (check `blockAppliedTo` fields) +- Reentrancy guards protect governance functions +- Default target mechanism ensures total issuance never exceeds configured rate diff --git a/packages/issuance/contracts/allocate/IssuanceAllocator.sol b/packages/issuance/contracts/allocate/IssuanceAllocator.sol index ac0714622..66b7c94c1 100644 --- a/packages/issuance/contracts/allocate/IssuanceAllocator.sol +++ b/packages/issuance/contracts/allocate/IssuanceAllocator.sol @@ -5,7 +5,8 @@ pragma solidity 0.8.27; import { TargetIssuancePerBlock, Allocation, - AllocationTarget + AllocationTarget, + DistributionState } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocatorTypes.sol"; import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; import { IIssuanceAllocationAdministration } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol"; @@ -13,6 +14,7 @@ import { IIssuanceAllocationStatus } from "@graphprotocol/interfaces/contracts/i import { IIssuanceAllocationData } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationData.sol"; import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; import { BaseUpgradeable } from "../common/BaseUpgradeable.sol"; +import { ReentrancyGuardTransientUpgradeable } from "@openzeppelin/contracts-upgradeable/utils/ReentrancyGuardTransientUpgradeable.sol"; import { IERC165 } from "@openzeppelin/contracts/utils/introspection/IERC165.sol"; // solhint-disable-next-line no-unused-import @@ -22,16 +24,16 @@ import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/int * @title IssuanceAllocator * @author Edge & Node * @notice This contract is responsible for allocating token issuance to different components - * of the protocol. 
It calculates issuance for all targets based on their configured proportions - * and handles minting for allocator-minting portions. + * of the protocol. It calculates issuance for all targets based on their configured rates + * (tokens per block) and handles minting for allocator-minting targets. * - * @dev The contract maintains a 100% allocation invariant through a default allocation mechanism: - * - A default allocation target exists at targetAddresses[0] (initialized to address(0)) - * - The default allocation automatically receives any unallocated portion of issuance - * - Total allocation across all targets always equals 100% (tracked in parts per MILLION) - * - The default allocation address can be changed via setDefaultAllocationAddress() - * - When the default address is address(0), the unallocated portion is not minted - * - Regular targets cannot be set as the default allocation address + * @dev The contract maintains a 100% allocation invariant through a default target mechanism: + * - A default target exists at targetAddresses[0] (initialized to address(0)) + * - The default target automatically receives any unallocated portion of issuance + * - Total allocation across all targets always equals issuancePerBlock (tracked as absolute rates) + * - The default target address can be changed via setDefaultTarget() + * - When the default address is address(0), this 'unallocated' portion is not minted + * - Regular targets cannot be set as the default target address * * @dev The contract supports two types of allocation for each target: * 1. Allocator-minting allocation: The IssuanceAllocator calculates and mints tokens directly to targets @@ -47,6 +49,16 @@ import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/int * the IssuanceAllocator. The self-minting allocation is intended only for backwards compatibility * with existing contracts. * + * @dev Pause Behavior: + * - Allocator-minting: Completely suspended during pause. No tokens minted, lastDistributionBlock frozen. + * When unpaused, distributes retroactively using current rates for the entire undistributed period. (Distribution will be triggered by calling distributeIssuance() when not paused.) + * - Self-minting: Continues tracking via events and accumulation during pause. Accumulated self-minting + * reduces allocator-minting budget when distribution resumes, ensuring total issuance conservation. + * - Ongoing accumulation: Once accumulation starts (during pause), continues through any unpaused + * periods until distribution clears it, preventing loss of self-minting allowances across pause cycles. + * - Tracking divergence: lastSelfMintingBlock advances during pause (for allowance tracking) while + * lastDistributionBlock stays frozen (no allocator-minting). This is intentional and correct. + * * @dev There are a number of scenarios where the IssuanceAllocator could run into issues, including: * 1. The targetAddresses array could grow large enough that it exceeds the gas limit when calling distributeIssuance. * 2. When notifying targets of allocation changes the calls to `beforeIssuanceAllocationChange` could exceed the gas limit. @@ -62,10 +74,24 @@ import { ERC165Upgradeable } from "@openzeppelin/contracts-upgradeable/utils/int * * In combination these should allow recovery from gas limit issues or malfunctioning targets, with fine-grained control over * which targets are notified of changes and when.
+ * + * @dev Reentrancy Protection: + * The contract code is designed to be reentrant-safe and should be carefully reviewed and maintained + * to preserve this property. However, reentrancy guards (using transient storage per EIP-1153) are + * applied to governance functions that modify configuration or state as an additional layer of defense. + * This provides protection against potential issues if the multi-sig governor role were to have known + * signatures that could be exploited by malicious actors to trigger reentrant calls. + * + * The `distributeIssuance()` function intentionally does NOT have a reentrancy guard to allow + * legitimate use cases where targets call it during notifications (e.g., to claim pending issuance + * before allocation changes). This is safe because distributeIssuance() has built-in block-tracking + * protection (preventing double-distribution in the same block), makes no external calls that could + * expose inconsistent state, and does not modify allocations. * @custom:security-contact Please email security+contracts@thegraph.com if you find any bugs. We might have an active bug bounty program. */ contract IssuanceAllocator is BaseUpgradeable, + ReentrancyGuardTransientUpgradeable, IIssuanceAllocationDistribution, IIssuanceAllocationAdministration, IIssuanceAllocationStatus, @@ -81,26 +107,25 @@ contract IssuanceAllocator is /// @notice Main storage structure for IssuanceAllocator using ERC-7201 namespaced storage /// @param issuancePerBlock Total issuance per block across all targets - /// @param lastDistributionBlock Last block when issuance was distributed - /// @param lastAccumulationBlock Last block when pending issuance was accumulated - /// @dev Design invariant: lastDistributionBlock <= lastAccumulationBlock + /// @param lastDistributionBlock Last block when allocator-minting issuance was distributed + /// @param lastSelfMintingBlock Last block when self-minting was advanced + /// @param selfMintingOffset Self-minting that offsets allocator-minting budget (accumulates during pause, clears on distribution) /// @param allocationTargets Mapping of target addresses to their allocation data - /// @param targetAddresses Array of all target addresses (including default allocation at index 0) - /// @param totalSelfMintingPPM Total self-minting allocation (in PPM) across all targets - /// @param pendingAccumulatedAllocatorIssuance Accumulated but not distributed issuance for allocator-minting from lastDistributionBlock to lastAccumulationBlock - /// @dev Design invariant: Total allocation across all targets always equals MILLION (100%) - /// @dev Design invariant: targetAddresses[0] is always the default allocation address - /// @dev Design invariant: 1 <= targetAddresses.length (default allocation always exists) - /// @dev Design invariant: Default allocation (targetAddresses[0]) is automatically adjusted to maintain 100% total + /// @param targetAddresses Array of all target addresses (including default target at index 0) + /// @param totalSelfMintingRate Total self-minting rate (tokens per block) across all targets + /// @dev Design invariant: totalAllocatorRate + totalSelfMintingRate == issuancePerBlock (always 100% allocated) + /// @dev Design invariant: targetAddresses[0] is always the default target address + /// @dev Design invariant: 1 <= targetAddresses.length (default target always exists) + /// @dev Design invariant: default target (targetAddresses[0]) is automatically adjusted to maintain 100% total /// @custom:storage-location 
erc7201:graphprotocol.storage.IssuanceAllocator struct IssuanceAllocatorData { uint256 issuancePerBlock; uint256 lastDistributionBlock; - uint256 lastAccumulationBlock; + uint256 lastSelfMintingBlock; + uint256 selfMintingOffset; mapping(address => AllocationTarget) allocationTargets; address[] targetAddresses; - uint256 totalSelfMintingPPM; - uint256 pendingAccumulatedAllocatorIssuance; + uint256 totalSelfMintingRate; } /** @@ -123,35 +148,55 @@ contract IssuanceAllocator is /// @notice Thrown when attempting to add a target with zero address error TargetAddressCannotBeZero(); - /// @notice Thrown when the total allocation would exceed 100% (MILLION) - error InsufficientAllocationAvailable(); + /// @notice Thrown when the total allocation would exceed available budget + /// @param requested The total requested allocation (allocator + self minting) + /// @param available The available budget for this target + error InsufficientAllocationAvailable(uint256 requested, uint256 available); + + /// @notice Thrown when attempting to decrease issuance rate without sufficient unallocated budget + /// @param oldRate The current issuance rate + /// @param newRate The proposed new issuance rate + /// @param unallocated The unallocated budget available to absorb the decrease + error InsufficientUnallocatedForRateDecrease(uint256 oldRate, uint256 newRate, uint256 unallocated); /// @notice Thrown when a target does not support the IIssuanceTarget interface - error TargetDoesNotSupportIIssuanceTarget(); + /// @param target The target address that doesn't support the interface + error TargetDoesNotSupportIIssuanceTarget(address target); /// @notice Thrown when toBlockNumber is out of valid range for accumulation - error ToBlockOutOfRange(); + /// @param toBlock The invalid block number provided + /// @param minBlock The minimum valid block number (lastDistributionBlock) + /// @param maxBlock The maximum valid block number (current block) + error ToBlockOutOfRange(uint256 toBlock, uint256 minBlock, uint256 maxBlock); - /// @notice Thrown when attempting to set allocation for the default allocation target - error CannotSetAllocationForDefaultTarget(); + /// @notice Thrown when attempting to set allocation for the default target + /// @param defaultTarget The address of the default target + error CannotSetAllocationForDefaultTarget(address defaultTarget); - /// @notice Thrown when attempting to set default allocation address to a normally allocated target - error CannotSetDefaultToAllocatedTarget(); + /// @notice Thrown when attempting to set default target address to a normally allocated target + /// @param target The target address that already has an allocation + error CannotSetDefaultToAllocatedTarget(address target); // -- Events -- /// @notice Emitted when issuance is distributed to a target /// @param target The address of the target that received issuance /// @param amount The amount of tokens distributed - event IssuanceDistributed(address indexed target, uint256 amount); // solhint-disable-line gas-indexed-events - // Do not need to index amount, filtering by amount ranges is not expected use case + /// @param fromBlock First block included in this distribution (inclusive) + /// @param toBlock Last block included in this distribution (inclusive).
Range is [fromBlock, toBlock] + event IssuanceDistributed( + address indexed target, + uint256 amount, + uint256 indexed fromBlock, + uint256 indexed toBlock + ); // solhint-disable-line gas-indexed-events /// @notice Emitted when a target's allocation is updated /// @param target The address of the target whose allocation was updated - /// @param newAllocatorMintingPPM The new allocator-minting allocation (in PPM) for the target - /// @param newSelfMintingPPM The new self-minting allocation (in PPM) for the target - event TargetAllocationUpdated(address indexed target, uint256 newAllocatorMintingPPM, uint256 newSelfMintingPPM); // solhint-disable-line gas-indexed-events - // Do not need to index PPM values + /// @param newAllocatorMintingRate The new allocator-minting rate (tokens per block) for the target + /// @param newSelfMintingRate The new self-minting rate (tokens per block) for the target + event TargetAllocationUpdated(address indexed target, uint256 newAllocatorMintingRate, uint256 newSelfMintingRate); // solhint-disable-line gas-indexed-events + // Do not need to index rate values /// @notice Emitted when the issuance per block is updated /// @param oldIssuancePerBlock The previous issuance per block amount @@ -159,10 +204,22 @@ contract IssuanceAllocator is event IssuancePerBlockUpdated(uint256 oldIssuancePerBlock, uint256 newIssuancePerBlock); // solhint-disable-line gas-indexed-events // Do not need to index issuance per block values - /// @notice Emitted when the default allocation address is updated - /// @param oldAddress The previous default allocation address - /// @param newAddress The new default allocation address - event DefaultAllocationAddressUpdated(address indexed oldAddress, address indexed newAddress); + /// @notice Emitted when the default target is updated + /// @param oldAddress The previous default target address + /// @param newAddress The new default target address + event DefaultTargetUpdated(address indexed oldAddress, address indexed newAddress); + + /// @notice Emitted when self-minting allowance is calculated for a target + /// @param target The address of the target with self-minting allocation + /// @param amount The amount of tokens available for self-minting + /// @param fromBlock First block included in this allowance period (inclusive) + /// @param toBlock Last block included in this allowance period (inclusive). Range is [fromBlock, toBlock] + event IssuanceSelfMintAllowance( + address indexed target, + uint256 amount, + uint256 indexed fromBlock, + uint256 indexed toBlock + ); // solhint-disable-line gas-indexed-events // -- Constructor -- @@ -180,17 +237,33 @@ contract IssuanceAllocator is /** * @notice Initialize the IssuanceAllocator contract * @param _governor Address that will have the GOVERNOR_ROLE - * @dev Initializes with a default allocation at index 0 set to address(0) with 100% allocation + * @dev Initializes with a default target at index 0 set to address(0) + * @dev default target will receive all unallocated issuance (initially 0 until rate is set) + * @dev Initialization: lastDistributionBlock is set to block.number as a safety guard against + * pausing before configuration. lastSelfMintingBlock defaults to 0. issuancePerBlock is 0. + * Once setIssuancePerBlock() is called, it triggers _distributeIssuance() which updates + * lastDistributionBlock to current block, establishing the starting point for issuance tracking. 
+ * @dev Rate changes while paused: Rate changes are stored but distributeIssuance() will NOT + * apply them while paused - it returns immediately with frozen lastDistributionBlock. When + * distribution eventually resumes (via unpause or manual distributePendingIssuance()), the + * CURRENT rates at that time are applied retroactively to the entire undistributed period. + * Governance must exercise caution when changing rates while paused to ensure they are applied + * to the correct block range. See setIssuancePerBlock() documentation for details. */ function initialize(address _governor) external virtual initializer { __BaseUpgradeable_init(_governor); + __ReentrancyGuardTransient_init(); IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - // Initialize default allocation at index 0 with address(0) and 100% allocator-minting + // Initialize default target at index 0 with address(0) + // Rates are 0 initially; default gets remainder when issuancePerBlock is set $.targetAddresses.push(address(0)); - $.allocationTargets[address(0)].allocatorMintingPPM = MILLION; - $.allocationTargets[address(0)].selfMintingPPM = 0; + + // To guard against extreme edge case of pausing before setting issuancePerBlock, we initialize + // lastDistributionBlock to block.number. This should be updated to the correct starting block + // during configuration by governance. + $.lastDistributionBlock = block.number; } // -- Core Functionality -- @@ -211,87 +284,335 @@ contract IssuanceAllocator is /** * @inheritdoc IIssuanceAllocationDistribution * @dev Implementation details: - * - For allocator-minting portions, tokens are minted and transferred directly to targets based on their allocation - * - For self-minting portions (like the legacy RewardsManager), it does not mint tokens directly. Instead, these contracts are expected to handle minting themselves + * - For allocator-minting targets, tokens are minted and transferred directly to targets based on their allocation rate + * - For self-minting targets (like the legacy RewardsManager), it does not mint tokens directly. Instead, these contracts are expected to handle minting themselves * - The self-minting allocation is intended only for backwards compatibility with existing contracts and should not be used for new targets. New targets should use allocator-minting allocation to ensure robust control of token issuance by the IssuanceAllocator - * - Unless paused will always result in lastIssuanceBlock == block.number, even if there is no issuance to distribute + * @dev Pause behavior: + * - When paused: Self-minting allowances tracked via events/accumulation, but no allocator-minting tokens distributed. + * Returns lastDistributionBlock (frozen at pause point). lastSelfMintingBlock advances to current block. + * - When unpaused: Normal distribution if no accumulated self-minting, otherwise retroactive distribution + * using current rates for entire undistributed period, with accumulated self-minting reducing allocator budget. + * - Unless paused, always advances lastDistributionBlock to block.number, even if no issuance to distribute. + * @dev Reentrancy: This function intentionally does NOT have a reentrancy guard to allow targets to + * legitimately call it during notifications (e.g., to claim pending issuance before their allocation changes). 
+ * This is safe because the function has built-in block-tracking protection that prevents double-distribution + * within the same block, makes no external calls that could expose inconsistent state, and does not modify allocations. */ function distributeIssuance() external override returns (uint256) { - return _distributeIssuance(); + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + // Optimize common case: if already distributed this block, return immediately (~60% gas savings). + // Multiple targets may call this in the same block; first call distributes, rest are no-ops. + return $.lastDistributionBlock == block.number ? block.number : _distributeIssuance(); + } + + /** + * @notice Advances self-minting block and emits allowance events + * @dev When paused, accumulates self-minting amounts. This accumulation reduces the allocator-minting + * budget when distribution resumes, ensuring total issuance stays within bounds. + * When not paused, just emits self-minting allowance events. + * Called by _distributeIssuance() which anyone can call. + * Optimized for no-op cases: very cheap when already at current block. + */ + function _advanceSelfMintingBlock() private { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 previousBlock = $.lastSelfMintingBlock; + if (previousBlock == block.number) return; + + uint256 blocks = block.number - previousBlock; + + // Accumulate if currently paused OR if there's existing accumulated balance. + // Once accumulation starts (during pause), continue through any unpaused periods + // until distribution clears the accumulation. This is conservative and allows + // better recovery when distribution is delayed through pause/unpause cycles. + if (paused() || 0 < $.selfMintingOffset) $.selfMintingOffset += $.totalSelfMintingRate * blocks; + $.lastSelfMintingBlock = block.number; + uint256 fromBlock = previousBlock + 1; + + // Emit self-minting allowance events + if (0 < $.totalSelfMintingRate) { + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.selfMintingRate) { + uint256 amount = targetData.selfMintingRate * blocks; + emit IssuanceSelfMintAllowance(target, amount, fromBlock, block.number); + } + } + } } /** * @notice Internal implementation for `distributeIssuance` * @dev Handles the actual distribution logic. + * @dev Always calls _advanceSelfMintingBlock() first (advances lastSelfMintingBlock, tracks self-minting). + * @dev If paused: Returns lastDistributionBlock without distributing allocator-minting (frozen state). + * @dev If unpaused: Chooses distribution path based on accumulated self-minting: + * - With accumulation: retroactive distribution path (current rates, reduced allocator budget) + * - Without accumulation: normal distribution path (simple per-block minting) * @return Block number distributed to */ function _distributeIssuance() private returns (uint256) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + _advanceSelfMintingBlock(); if (paused()) return $.lastDistributionBlock; - _distributePendingIssuance(); + return 0 < $.selfMintingOffset ? 
_distributePendingIssuance(block.number) : _performNormalDistribution(); + } - uint256 blocksSinceLastIssuance = block.number - $.lastDistributionBlock; - if (blocksSinceLastIssuance == 0) return $.lastDistributionBlock; + /** + * @notice Performs normal (non-pending) issuance distribution + * @dev Distributes allocator-minting issuance to all targets based on their rates + * @dev Assumes contract is not paused and pending issuance has already been distributed + * @return Block number distributed to + */ + function _performNormalDistribution() private returns (uint256) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 blocks = block.number - $.lastDistributionBlock; + if (blocks == 0) return $.lastDistributionBlock; + + uint256 fromBlock = $.lastDistributionBlock + 1; + + for (uint256 i = 0; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + if (target == address(0)) continue; + + AllocationTarget storage targetData = $.allocationTargets[target]; + if (0 < targetData.allocatorMintingRate) { + uint256 amount = targetData.allocatorMintingRate * blocks; + GRAPH_TOKEN.mint(target, amount); + emit IssuanceDistributed(target, amount, fromBlock, block.number); + } + } - // Note: Theoretical overflow risk exists if issuancePerBlock * blocksSinceLastIssuance > type(uint256).max - // In practice, this would require either: - // 1. Extremely high issuancePerBlock (governance error), and/or - // 2. Contract paused for an implausibly long time (decades) - // If such overflow occurs, the transaction reverts (Solidity 0.8.x), indicating the contract - // is in a state requiring governance intervention. - uint256 newIssuance = $.issuancePerBlock * blocksSinceLastIssuance; $.lastDistributionBlock = block.number; - $.lastAccumulationBlock = block.number; + return block.number; + } - if (0 < newIssuance) { - for (uint256 i = 0; i < $.targetAddresses.length; ++i) { - address target = $.targetAddresses[i]; + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function distributePendingIssuance() external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (uint256) { + return _distributePendingIssuance(block.number); + } - // Skip minting to zero address (default allocation when not configured) - if (target == address(0)) continue; + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function distributePendingIssuance( + uint256 toBlockNumber + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (uint256) { + return _distributePendingIssuance(toBlockNumber); + } - AllocationTarget storage targetData = $.allocationTargets[target]; + /** + * @notice Internal implementation for distributing pending accumulated allocator-minting issuance + * @param toBlockNumber Block number to distribute up to + * @dev Distributes allocator-minting issuance for undistributed period using current rates + * (retroactively applied from lastDistributionBlock to toBlockNumber, inclusive of both endpoints). + * @dev Called when 0 < self-minting offset, which occurs after pause periods or when + * distribution is delayed across pause/unpause cycles. Conservative accumulation strategy + * continues accumulating through unpaused periods until distribution clears it. + * The undistributed period (lastDistributionBlock to toBlockNumber) could theoretically span multiple pause/unpause cycles. In practice this is unlikely if there are active targets that call distributeIssuance().
+ * @dev Current rate is always applied retroactively to the undistributed period, to the extent possible given the accumulated self-minting offset. + * If any interim rate was higher than the current rate, there might be insufficient allocation + * to satisfy required allocations. In this case, we make the best match to honour the current rate. + * There will never be more issuance than the maximum interim issuance rate would have produced, but in some circumstances the current rate is insufficient to satisfy the accumulated self-minting. In other cases, to satisfy the current rate, we distribute proportionally less to non-default targets than their current allocation rate. + * @dev Constraint: cannot distribute more than total issuance for the period. + * @dev Shortfall: When accumulated self-minting exceeds what the current rate allows for the period, + * the total issuance already exceeded current rate expectations. No allocator-minting is distributed. + * @dev When allocator-minting is available, there are two distribution cases: + * (1) available < allowance: proportional distribution among non-default, default gets zero + * (2) allowance <= available: full rates to non-default, remainder to default + * Where allowance is allocator rate (for non-default targets) * blocks, and available is total issuance for period minus accumulated self-minting. + * @return Block number that issuance was distributed up to + */ + function _distributePendingIssuance(uint256 toBlockNumber) private returns (uint256) { + _advanceSelfMintingBlock(); + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + require( + $.lastDistributionBlock <= toBlockNumber && toBlockNumber <= block.number, // solhint-disable-line gas-strict-inequalities + ToBlockOutOfRange(toBlockNumber, $.lastDistributionBlock, block.number) + ); + + uint256 blocks = toBlockNumber - $.lastDistributionBlock; + if (blocks == 0) return toBlockNumber; + + // Overflow is not possible with reasonable parameters. For example, with issuancePerBlock + // at 1e24 (1 million GRT with 18 decimals) and blocks at 1e9 (hundreds of years), the product is + // ~1e33, well below uint256 max (~1e77). Similar multiplications throughout this contract operate + // under the same range assumptions. + uint256 totalForPeriod = $.issuancePerBlock * blocks; + uint256 selfMintingOffset = $.selfMintingOffset; + + uint256 available = selfMintingOffset < totalForPeriod ? totalForPeriod - selfMintingOffset : 0; + + if (0 < available) { + // Calculate non-default allocated rate using the allocation invariant.
+ // Since totalAllocatorRate + totalSelfMintingRate == issuancePerBlock (100% invariant), + // and default target is part of totalAllocatorRate, we can derive: + // allocatedRate = issuancePerBlock - totalSelfMintingRate - defaultAllocatorRate + address defaultAddress = $.targetAddresses[0]; + AllocationTarget storage defaultTarget = $.allocationTargets[defaultAddress]; + uint256 allocatedRate = $.issuancePerBlock - $.totalSelfMintingRate - defaultTarget.allocatorMintingRate; - GRAPH_TOKEN.mint(target, targetIssuance); - emit IssuanceDistributed(target, targetIssuance); - } + uint256 allocatedTotal = allocatedRate * blocks; + + if (available < allocatedTotal) _distributePendingProportionally(available, allocatedRate, toBlockNumber); + else _distributePendingWithFullRate(blocks, available, allocatedTotal, toBlockNumber); + } + + $.lastDistributionBlock = toBlockNumber; + + // Update accumulated self-minting after distribution. + // Subtract the period budget used (min of accumulated and totalForPeriod). + // When caught up to current block, clear all since nothing remains to distribute. + if (toBlockNumber == block.number) $.selfMintingOffset = 0; + else $.selfMintingOffset = totalForPeriod < selfMintingOffset ? selfMintingOffset - totalForPeriod : 0; + + return toBlockNumber; + } + + /** + * @notice Distribute pending issuance with full rates to non-default targets + * @param blocks Number of blocks in the distribution period + * @param available Total available allocator-minting budget for the period + * @param allocatedTotal Total amount allocated to non-default targets at full rate + * @param toBlockNumber Block number distributing to + * @dev Sufficient budget: non-default targets get full rates, default gets remainder + */ + function _distributePendingWithFullRate( + uint256 blocks, + uint256 available, + uint256 allocatedTotal, + uint256 toBlockNumber + ) internal { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + uint256 fromBlock = $.lastDistributionBlock + 1; + + // Give non-default targets their full rates + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; + + if (0 < targetData.allocatorMintingRate) { + uint256 amount = targetData.allocatorMintingRate * blocks; + GRAPH_TOKEN.mint(target, amount); + emit IssuanceDistributed(target, amount, fromBlock, toBlockNumber); + } + } + + // Default target gets remainder (may be 0 if exactly matched) + uint256 remainingForDefault = available - allocatedTotal; + if (0 < remainingForDefault) { + address defaultAddress = $.targetAddresses[0]; + if (defaultAddress != address(0)) { + GRAPH_TOKEN.mint(defaultAddress, remainingForDefault); + emit IssuanceDistributed(defaultAddress, remainingForDefault, fromBlock, toBlockNumber); } } + } + + /** + * @notice Distribute pending issuance proportionally among non-default targets + * @param available Total available allocator-minting budget for the period + * @param allocatedRate Total rate allocated to non-default targets + * @param toBlockNumber Block number distributing to + * @dev Insufficient budget: non-default targets get proportional shares, default gets zero + * @dev Proportional distribution may result in rounding loss (dust), which is acceptable + */ + function _distributePendingProportionally( + uint256 available, + uint256 allocatedRate, + uint256 toBlockNumber + ) internal { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + + // 
Defensive: prevent division by zero and handle edge cases. Should not be reachable based on + // caller logic (only called when available < allocatedTotal and both available > 0, blocks > 0). + if (allocatedRate == 0 || available == 0) return; + + uint256 fromBlock = $.lastDistributionBlock + 1; + + // Non-default targets get proportional shares (reduced amounts) + // Default is excluded (receives zero) + for (uint256 i = 1; i < $.targetAddresses.length; ++i) { + address target = $.targetAddresses[i]; + AllocationTarget storage targetData = $.allocationTargets[target]; - return $.lastDistributionBlock; + if (0 < targetData.allocatorMintingRate) { + // Proportional distribution using integer division causes rounding loss. + // Since Solidity division always floors (truncates toward zero), this can ONLY lose tokens, + // never over-distribute. The lost tokens (dust) remain unallocated. + // This is acceptable because: + // 1. The amount is negligible (< number of targets) + // 2. It maintains safety (never over-mint) + // 3. Alternative of tracking and distributing dust adds complexity without significant benefit + uint256 amount = (available * targetData.allocatorMintingRate) / allocatedRate; + GRAPH_TOKEN.mint(target, amount); + emit IssuanceDistributed(target, amount, fromBlock, toBlockNumber); + } + } + } + + /** + * @inheritdoc IIssuanceAllocationAdministration + */ + function setIssuancePerBlock( + uint256 newIssuancePerBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setIssuancePerBlock(newIssuancePerBlock, block.number); } /** * @inheritdoc IIssuanceAllocationAdministration * @dev Implementation details: - * - `distributeIssuance` will be called before changing the rate *unless the contract is paused and evenIfDistributionPending is false* - * - `beforeIssuanceAllocationChange` will be called on all targets before changing the rate, even when the contract is paused + * - Requires distribution to have reached at least minDistributedBlock + * - This allows configuration changes after calling distributePendingIssuance(blockNumber) while paused + * - Only the default target is notified (target rates don't change, only default target changes) + * - Target rates stay fixed; default target absorbs the change * - Whenever the rate is changed, the updateL2MintAllowance function _must_ be called on the L1GraphTokenGateway in L1, to ensure the bridge can mint the right amount of tokens */ function setIssuancePerBlock( uint256 newIssuancePerBlock, - bool evenIfDistributionPending - ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + uint256 minDistributedBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setIssuancePerBlock(newIssuancePerBlock, minDistributedBlock); + } + + /** + * @notice Internal implementation for setting issuance per block + * @param newIssuancePerBlock New issuance per block + * @param minDistributedBlock Minimum block number that distribution must have reached + * @return True if the value is applied + */ + function _setIssuancePerBlock(uint256 newIssuancePerBlock, uint256 minDistributedBlock) private returns (bool) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + uint256 oldIssuancePerBlock = $.issuancePerBlock; + if (newIssuancePerBlock == oldIssuancePerBlock) return true; - if (newIssuancePerBlock == $.issuancePerBlock) return true; + if (_distributeIssuance() < minDistributedBlock) return false; - if (_distributeIssuance() < block.number) { - if 
(evenIfDistributionPending) accumulatePendingIssuance(); - else return false; - } - notifyAllTargets(); + _notifyTarget($.targetAddresses[0]); - uint256 oldIssuancePerBlock = $.issuancePerBlock; + AllocationTarget storage defaultTarget = $.allocationTargets[$.targetAddresses[0]]; + uint256 unallocated = defaultTarget.allocatorMintingRate; + + require( + oldIssuancePerBlock <= newIssuancePerBlock + unallocated, // solhint-disable-line gas-strict-inequalities + InsufficientUnallocatedForRateDecrease(oldIssuancePerBlock, newIssuancePerBlock, unallocated) + ); + + defaultTarget.allocatorMintingRate = unallocated + newIssuancePerBlock - oldIssuancePerBlock; $.issuancePerBlock = newIssuancePerBlock; emit IssuancePerBlockUpdated(oldIssuancePerBlock, newIssuancePerBlock); + return true; } @@ -308,7 +629,7 @@ contract IssuanceAllocator is * @return True if notification was sent or already sent for this block. Always returns true for address(0) without notifying. */ function _notifyTarget(address target) private returns (bool) { - // Skip notification for zero address (default allocation when unset) + // Skip notification for zero address (default target when unset) if (target == address(0)) return true; IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); @@ -327,26 +648,13 @@ contract IssuanceAllocator is return true; } - /** - * @notice Notify all targets (used prior to an allocation or rate change) - * @dev Each target is notified at most once per block. - * Will revert if any target notification reverts. - */ - function notifyAllTargets() private { - IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - - for (uint256 i = 0; i < $.targetAddresses.length; ++i) { - _notifyTarget($.targetAddresses[i]); - } - } - /** * @inheritdoc IIssuanceAllocationAdministration * @dev Implementation details: * - The target will be notified at most once per block to prevent reentrancy looping * - Will revert if target notification reverts */ - function notifyTarget(address target) external override onlyRole(GOVERNOR_ROLE) returns (bool) { + function notifyTarget(address target) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { return _notifyTarget(target); } @@ -372,39 +680,40 @@ contract IssuanceAllocator is /** * @inheritdoc IIssuanceAllocationAdministration - * @dev Delegates to _setTargetAllocation with selfMintingPPM=0 and evenIfDistributionPending=false */ function setTargetAllocation( - address target, - uint256 allocatorMintingPPM - ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { - return _setTargetAllocation(target, allocatorMintingPPM, 0, false); + IIssuanceTarget target, + uint256 allocatorMintingRate + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setTargetAllocation(address(target), allocatorMintingRate, 0, block.number); } /** * @inheritdoc IIssuanceAllocationAdministration - * @dev Delegates to _setTargetAllocation with evenIfDistributionPending=false */ function setTargetAllocation( - address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM - ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { - return _setTargetAllocation(target, allocatorMintingPPM, selfMintingPPM, false); + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setTargetAllocation(address(target), allocatorMintingRate, selfMintingRate, block.number); } /** * @inheritdoc 
IIssuanceAllocationAdministration * @dev Implementation details: + * - Requires distribution to have reached at least minDistributedBlock before the allocation can be changed + * - This allows configuration changes while paused by being deliberate about which block to distribute to * - If the new allocations are the same as the current allocations, this function is a no-op * - If both allocations are 0 and the target doesn't exist, this function is a no-op * - If both allocations are 0 and the target exists, the target will be removed * - If any allocation is non-zero and the target doesn't exist, the target will be added - * - Will revert if the total allocation would exceed 100% (MILLION), or if attempting to add a target that doesn't support IIssuanceTarget + * - Will revert if the total allocation would exceed available capacity (default target + current target allocation) + * - Will revert if attempting to add a target that doesn't support IIssuanceTarget * * Self-minting allocation is a special case for backwards compatibility with * existing contracts like the RewardsManager. The IssuanceAllocator calculates - * issuance for self-minting portions but does not mint tokens directly for them. Self-minting targets + * issuance for self-minting targets but does not mint tokens directly for them. Self-minting targets * should call getTargetIssuancePerBlock to determine their issuance amount and mint * tokens accordingly. For example, the RewardsManager contract is expected to call * getTargetIssuancePerBlock in its takeRewards function to calculate the correct @@ -412,62 +721,60 @@ contract IssuanceAllocator is * the issuance schedule and should not mint more tokens than allocated. */ function setTargetAllocation( - address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM, - bool evenIfDistributionPending - ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { - return _setTargetAllocation(target, allocatorMintingPPM, selfMintingPPM, evenIfDistributionPending); + IIssuanceTarget target, + uint256 allocatorMintingRate, + uint256 selfMintingRate, + uint256 minDistributedBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setTargetAllocation(address(target), allocatorMintingRate, selfMintingRate, minDistributedBlock); } /** * @inheritdoc IIssuanceAllocationAdministration */ - function setDefaultAllocationAddress(address newAddress) external override onlyRole(GOVERNOR_ROLE) returns (bool) { - return _setDefaultAllocationAddress(newAddress, false); + function setDefaultTarget( + address newAddress + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setDefaultTarget(newAddress, block.number); } /** * @inheritdoc IIssuanceAllocationAdministration */ - function setDefaultAllocationAddress( + function setDefaultTarget( address newAddress, - bool evenIfDistributionPending - ) external override onlyRole(GOVERNOR_ROLE) returns (bool) { - return _setDefaultAllocationAddress(newAddress, evenIfDistributionPending); + uint256 minDistributedBlock + ) external override onlyRole(GOVERNOR_ROLE) nonReentrant returns (bool) { + return _setDefaultTarget(newAddress, minDistributedBlock); } /** - * @notice Internal implementation for setting default allocation address - * @param newAddress The address to set as the new default allocation target - * @param evenIfDistributionPending Whether to force the change even if issuance distribution is behind + * @notice Internal implementation for setting default target + * @param
newAddress The address to set as the new default target + * @param minDistributedBlock Minimum block number that distribution must have reached * @return True if the value is applied (including if already the case), false if not applied due to paused state - * @dev The default allocation automatically receives the portion of issuance not allocated to other targets - * @dev This maintains the invariant that total allocation is always 100% + * @dev The default target automatically receives the portion of issuance not allocated to other targets + * @dev This maintains the invariant that total allocation always equals issuancePerBlock * @dev Reverts if attempting to set to an address that has a normal (non-default) allocation * @dev Allocation data is copied from the old default to the new default, including lastChangeNotifiedBlock * @dev No-op if setting to the same address */ - function _setDefaultAllocationAddress(address newAddress, bool evenIfDistributionPending) internal returns (bool) { + function _setDefaultTarget(address newAddress, uint256 minDistributedBlock) internal returns (bool) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); address oldAddress = $.targetAddresses[0]; - - // No-op if setting to same address if (newAddress == oldAddress) return true; - // Cannot set default allocation to a normally allocated target + // Cannot set default target to a normally allocated target // Check if newAddress is in targetAddresses (excluding index 0 which is the default) // Note: This is O(n) for the number of targets, which could become expensive as targets increase. - // However, other operations (distribution, notifications) already loop through all targets and - // would encounter gas issues first. Recovery mechanisms exist (pause, per-target notification control). + // However, distribution operations already loop through all targets and + // would encounter gas issues first. Recovery mechanisms exist. for (uint256 i = 1; i < $.targetAddresses.length; ++i) { - require($.targetAddresses[i] != newAddress, CannotSetDefaultToAllocatedTarget()); + require($.targetAddresses[i] != newAddress, CannotSetDefaultToAllocatedTarget(newAddress)); } - // Distribute any pending issuance to the old default address before changing. - // If paused and evenIfDistributionPending is false, return false to prevent the change. 
- if (!_handleDistributionBeforeAllocation(oldAddress, 0, evenIfDistributionPending)) return false; + if (_distributeIssuance() < minDistributedBlock) return false; // Notify both old and new addresses of the allocation change _notifyTarget(oldAddress); @@ -476,8 +783,8 @@ contract IssuanceAllocator is // Preserve the notification block of newAddress before copying old address data uint256 newAddressNotificationBlock = $.allocationTargets[newAddress].lastChangeNotifiedBlock; - // Update the default allocation address at index 0 - // This copies allocation data from old to new, including allocatorMintingPPM and selfMintingPPM + // Update the default target at index 0 + // This copies allocation data from old to new, including allocatorMintingRate and selfMintingRate $.targetAddresses[0] = newAddress; $.allocationTargets[newAddress] = $.allocationTargets[oldAddress]; delete $.allocationTargets[oldAddress]; @@ -485,29 +792,30 @@ contract IssuanceAllocator is // Restore the notification block for newAddress (regard as target-specific, not about default) $.allocationTargets[newAddress].lastChangeNotifiedBlock = newAddressNotificationBlock; - emit DefaultAllocationAddressUpdated(oldAddress, newAddress); + emit DefaultTargetUpdated(oldAddress, newAddress); return true; } /** * @notice Internal implementation for setting target allocation * @param target Address of the target to update - * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) - * @param selfMintingPPM Self-minting allocation for the target (in PPM) - * @param evenIfDistributionPending Whether to force the allocation change even if issuance distribution is behind + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) + * @param minDistributedBlock Minimum block number that distribution must have reached * @return True if the value is applied (including if already the case), false if not applied due to paused state */ function _setTargetAllocation( address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM, - bool evenIfDistributionPending + uint256 allocatorMintingRate, + uint256 selfMintingRate, + uint256 minDistributedBlock ) internal returns (bool) { - if (!_validateTargetAllocation(target, allocatorMintingPPM, selfMintingPPM)) return true; // No change needed + if (!_validateAllocationChange(target, allocatorMintingRate, selfMintingRate)) return true; - if (!_handleDistributionBeforeAllocation(target, selfMintingPPM, evenIfDistributionPending)) return false; // Distribution pending and not forced + if (_distributeIssuance() < minDistributedBlock) return false; _notifyTarget(target); + _notifyTarget(_getIssuanceAllocatorStorage().targetAddresses[0]); // Total allocation calculation and check is delayed until after notifications. // Distributing and notifying unnecessarily is harmless, but we need to prevent @@ -516,84 +824,60 @@ contract IssuanceAllocator is // make a call to set target allocation, but better to be paranoid.) 
// Validate totals and auto-adjust default allocation BEFORE updating target data // so we can read the old allocation values - _validateAndUpdateTotalAllocations(target, allocatorMintingPPM, selfMintingPPM); + _validateAndUpdateTotalAllocations(target, allocatorMintingRate, selfMintingRate); // Then update the target's allocation data - _updateTargetAllocationData(target, allocatorMintingPPM, selfMintingPPM); + _updateTargetAllocationData(target, allocatorMintingRate, selfMintingRate); - emit TargetAllocationUpdated(target, allocatorMintingPPM, selfMintingPPM); + emit TargetAllocationUpdated(target, allocatorMintingRate, selfMintingRate); return true; } /** - * @notice Validates target address and interface support, returns false if allocation is unchanged + * @notice Validates allocation change for a target * @param target Address of the target to validate - * @param allocatorMintingPPM Allocator-minting allocation for the target (in PPM) - * @param selfMintingPPM Self-minting allocation for the target (in PPM) + * @param allocatorMintingRate Allocator-minting rate for the target (tokens per block) + * @param selfMintingRate Self-minting rate for the target (tokens per block) * @return True if validation passes and allocation change is needed, false if allocation is already set to these values + * @dev Reverts if target is address(0), default target, or doesn't support IIssuanceTarget (for non-zero rates) */ - function _validateTargetAllocation( + function _validateAllocationChange( address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM + uint256 allocatorMintingRate, + uint256 selfMintingRate ) private view returns (bool) { require(target != address(0), TargetAddressCannotBeZero()); IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - require(target != $.targetAddresses[0], CannotSetAllocationForDefaultTarget()); + require(target != $.targetAddresses[0], CannotSetAllocationForDefaultTarget($.targetAddresses[0])); AllocationTarget storage targetData = $.allocationTargets[target]; - if (targetData.allocatorMintingPPM == allocatorMintingPPM && targetData.selfMintingPPM == selfMintingPPM) + if (targetData.allocatorMintingRate == allocatorMintingRate && targetData.selfMintingRate == selfMintingRate) return false; // No change needed - if (allocatorMintingPPM != 0 || selfMintingPPM != 0) + if (allocatorMintingRate != 0 || selfMintingRate != 0) require( IERC165(target).supportsInterface(type(IIssuanceTarget).interfaceId), - TargetDoesNotSupportIIssuanceTarget() + TargetDoesNotSupportIIssuanceTarget(target) ); return true; } /** - * @notice Distributes current issuance and handles accumulation for self-minting changes - * @param target Address of the target being updated - * @param selfMintingPPM New self-minting allocation for the target (in PPM) - * @param evenIfDistributionPending Whether to force the allocation change even if issuance distribution is behind - * @return True if allocation change should proceed, false if distribution is behind and not forced - */ - function _handleDistributionBeforeAllocation( - address target, - uint256 selfMintingPPM, - bool evenIfDistributionPending - ) private returns (bool) { - if (_distributeIssuance() < block.number) { - if (!evenIfDistributionPending) return false; - - // A change in self-minting allocation changes the accumulation rate for pending allocator-minting. - // So for a self-minting change, accumulate pending issuance prior to the rate change. 
- IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - AllocationTarget storage targetData = $.allocationTargets[target]; - if (selfMintingPPM != targetData.selfMintingPPM) accumulatePendingIssuance(); - } - - return true; - } - - /** - * @notice Updates global allocation totals and auto-adjusts default allocation to maintain 100% invariant + * @notice Updates global allocation totals and auto-adjusts default target to maintain 100% invariant * @param target Address of the target being updated - * @param allocatorMintingPPM New allocator-minting allocation for the target (in PPM) - * @param selfMintingPPM New self-minting allocation for the target (in PPM) - * @dev The default allocation (at targetAddresses[0]) is automatically adjusted to ensure total allocation equals MILLION + * @param allocatorMintingRate New allocator-minting rate for the target (tokens per block) + * @param selfMintingRate New self-minting rate for the target (tokens per block) + * @dev The default target (at targetAddresses[0]) is automatically adjusted to ensure total allocation equals issuancePerBlock * @dev This function is called BEFORE the target's allocation data has been updated so we can read old values */ function _validateAndUpdateTotalAllocations( address target, - uint256 allocatorMintingPPM, - uint256 selfMintingPPM + uint256 allocatorMintingRate, + uint256 selfMintingRate ) private { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); AllocationTarget storage targetData = $.allocationTargets[target]; @@ -601,49 +885,55 @@ contract IssuanceAllocator is // Calculations occur after notifications in the caller to prevent reentrancy issues - // availablePPM comprises the default allocation's current allocator-minting PPM, - // the target's current allocator-minting PPM, and the target's current self-minting PPM. + // availableRate comprises the default target's current allocator-minting rate, + // the target's current allocator-minting rate, and the target's current self-minting rate. // This maintains the 100% allocation invariant by calculating how much can be reallocated // to the target without exceeding total available allocation. 
- uint256 availablePPM = defaultTarget.allocatorMintingPPM + - targetData.allocatorMintingPPM + - targetData.selfMintingPPM; - // solhint-disable-next-line gas-strict-inequalities - require(allocatorMintingPPM + selfMintingPPM <= availablePPM, InsufficientAllocationAvailable()); - - defaultTarget.allocatorMintingPPM = availablePPM - allocatorMintingPPM - selfMintingPPM; - $.totalSelfMintingPPM = $.totalSelfMintingPPM - targetData.selfMintingPPM + selfMintingPPM; + uint256 availableRate = defaultTarget.allocatorMintingRate + + targetData.allocatorMintingRate + + targetData.selfMintingRate; + require( + allocatorMintingRate + selfMintingRate <= availableRate, // solhint-disable-line gas-strict-inequalities + InsufficientAllocationAvailable(allocatorMintingRate + selfMintingRate, availableRate) + ); + + defaultTarget.allocatorMintingRate = availableRate - allocatorMintingRate - selfMintingRate; + $.totalSelfMintingRate = $.totalSelfMintingRate - targetData.selfMintingRate + selfMintingRate; } /** * @notice Sets target allocation values and adds/removes target from active list * @param target Address of the target being updated - * @param allocatorMintingPPM New allocator-minting allocation for the target (in PPM) - * @param selfMintingPPM New self-minting allocation for the target (in PPM) - * @dev This function is never called for the default allocation (at index 0), which is handled separately + * @param allocatorMintingRate New allocator-minting rate for the target (tokens per block) + * @param selfMintingRate New self-minting rate for the target (tokens per block) + * @dev This function is never called for the default target (at index 0), which is handled separately */ - function _updateTargetAllocationData(address target, uint256 allocatorMintingPPM, uint256 selfMintingPPM) private { + function _updateTargetAllocationData( + address target, + uint256 allocatorMintingRate, + uint256 selfMintingRate + ) private { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); AllocationTarget storage targetData = $.allocationTargets[target]; // Internal design invariants: - // - targetAddresses[0] is always the default allocation and is never removed - // - targetAddresses[1..n] contains all non-default targets with explicitly set non-zero allocations + // - targetAddresses[0] is always the default target and is never removed + // - targetAddresses[1..] contains all non-default targets with explicitly set non-zero allocations // - targetAddresses does not contain duplicates // - allocationTargets mapping contains allocation data for all targets in targetAddresses - // - Default allocation is automatically adjusted by _validateAndUpdateTotalAllocations + // - default target is automatically adjusted by _validateAndUpdateTotalAllocations // - Governance actions can create allocationTarget mappings with lastChangeNotifiedBlock set for targets not in targetAddresses. This is valid. 
// Therefore: // - Only add a non-default target to the list if it previously had no allocation // - Remove a non-default target from the list when setting both allocations to 0 // - Delete allocationTargets mapping entry when removing a target from targetAddresses // - Do not set lastChangeNotifiedBlock in this function - if (allocatorMintingPPM != 0 || selfMintingPPM != 0) { + if (allocatorMintingRate != 0 || selfMintingRate != 0) { // Add to list if previously had no allocation - if (targetData.allocatorMintingPPM == 0 && targetData.selfMintingPPM == 0) $.targetAddresses.push(target); + if (targetData.allocatorMintingRate == 0 && targetData.selfMintingRate == 0) $.targetAddresses.push(target); - targetData.allocatorMintingPPM = allocatorMintingPPM; - targetData.selfMintingPPM = selfMintingPPM; + targetData.allocatorMintingRate = allocatorMintingRate; + targetData.selfMintingRate = selfMintingRate; } else { // Remove target completely (from list and mapping) _removeTarget(target); @@ -653,7 +943,7 @@ contract IssuanceAllocator is /** * @notice Removes target from targetAddresses array and deletes its allocation data * @param target Address of the target to remove - * @dev Starts at index 1 since index 0 is always the default allocation and should never be removed + * @dev Starts at index 1 since index 0 is always the default target and should never be removed * @dev Uses swap-and-pop for gas efficiency */ function _removeTarget(address target) private { @@ -669,131 +959,26 @@ contract IssuanceAllocator is } } - /** - * @inheritdoc IIssuanceAllocationAdministration - * @dev Implementation details: - * - This function can only be called by Governor role - * - Distributes pending issuance that has accumulated while paused - * - This function can be called even when the contract is paused to perform interim distributions - * - If there is no pending issuance, this function is a no-op - * - If allocatorMintingAllowance is 0 (all targets are self-minting), pending issuance will be lost - */ - function distributePendingIssuance() external override onlyRole(GOVERNOR_ROLE) returns (uint256) { - return _distributePendingIssuance(); - } - - /** - * @inheritdoc IIssuanceAllocationAdministration - * @dev Implementation details: - * - This function can only be called by Governor role - * - Accumulates pending issuance up to the specified block, then distributes all accumulated issuance - * - This function can be called even when the contract is paused - * - If allocatorMintingAllowance is 0 (all targets are self-minting), pending issuance will be lost - */ - function distributePendingIssuance( - uint256 toBlockNumber - ) external override onlyRole(GOVERNOR_ROLE) returns (uint256) { - accumulatePendingIssuance(toBlockNumber); - return _distributePendingIssuance(); - } - - /** - * @notice Distributes any pending accumulated issuance - * @dev Called from _distributeIssuance to handle accumulated issuance from pause periods. 
- * @return Block number up to which issuance has been distributed - */ - function _distributePendingIssuance() private returns (uint256) { - IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - - uint256 pendingAmount = $.pendingAccumulatedAllocatorIssuance; - $.lastDistributionBlock = $.lastAccumulationBlock; - - if (pendingAmount == 0) return $.lastDistributionBlock; - $.pendingAccumulatedAllocatorIssuance = 0; - - if ($.totalSelfMintingPPM == MILLION) return $.lastDistributionBlock; - - for (uint256 i = 0; i < $.targetAddresses.length; ++i) { - address target = $.targetAddresses[i]; - - // Skip minting to zero address (default allocation when not configured) - if (target == address(0)) continue; - - AllocationTarget storage targetData = $.allocationTargets[target]; - - if (0 < targetData.allocatorMintingPPM) { - // There can be a small rounding loss here. This is acceptable. - // Pending issuance is distributed in proportion to allocator-minting portion of total available allocation. - uint256 targetIssuance = (pendingAmount * targetData.allocatorMintingPPM) / - (MILLION - $.totalSelfMintingPPM); - GRAPH_TOKEN.mint(target, targetIssuance); - emit IssuanceDistributed(target, targetIssuance); - } - } - - return $.lastDistributionBlock; - } - - /** - * @notice Accumulates pending issuance for allocator-minting targets to the current block - * @dev Used to accumulate pending issuance while paused prior to a rate or allocator-minting allocation change. - * @return The block number that has been accumulated to - */ - function accumulatePendingIssuance() private returns (uint256) { - return accumulatePendingIssuance(block.number); - } - - /** - * @notice Accumulates pending issuance for allocator-minting targets during pause periods - * @dev Accumulates pending issuance for allocator-minting targets during pause periods. - * @param toBlockNumber The block number to accumulate to (must be >= lastIssuanceAccumulationBlock and <= current block). - * @return The block number that has been accumulated to - */ - function accumulatePendingIssuance(uint256 toBlockNumber) private returns (uint256) { - IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); - - // solhint-disable-next-line gas-strict-inequalities - require($.lastAccumulationBlock <= toBlockNumber && toBlockNumber <= block.number, ToBlockOutOfRange()); - - uint256 blocksToAccumulate = toBlockNumber - $.lastAccumulationBlock; - if (0 < blocksToAccumulate) { - uint256 totalIssuance = $.issuancePerBlock * blocksToAccumulate; - // There can be a small rounding loss here. This is acceptable. 
- $.pendingAccumulatedAllocatorIssuance += (totalIssuance * (MILLION - $.totalSelfMintingPPM)) / MILLION; - $.lastAccumulationBlock = toBlockNumber; - } - - return $.lastAccumulationBlock; - } - // -- View Functions -- /** * @inheritdoc IIssuanceAllocationStatus */ - function issuancePerBlock() external view override returns (uint256) { + function getIssuancePerBlock() external view override returns (uint256) { return _getIssuanceAllocatorStorage().issuancePerBlock; } /** * @inheritdoc IIssuanceAllocationStatus */ - function lastIssuanceDistributionBlock() external view override returns (uint256) { - return _getIssuanceAllocatorStorage().lastDistributionBlock; - } - - /** - * @inheritdoc IIssuanceAllocationStatus - */ - function lastIssuanceAccumulationBlock() external view override returns (uint256) { - return _getIssuanceAllocatorStorage().lastAccumulationBlock; - } - - /** - * @inheritdoc IIssuanceAllocationStatus - */ - function pendingAccumulatedAllocatorIssuance() external view override returns (uint256) { - return _getIssuanceAllocatorStorage().pendingAccumulatedAllocatorIssuance; + function getDistributionState() external view override returns (DistributionState memory) { + IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); + return + DistributionState({ + lastDistributionBlock: $.lastDistributionBlock, + lastSelfMintingBlock: $.lastSelfMintingBlock, + selfMintingOffset: $.selfMintingOffset + }); } /** @@ -826,63 +1011,57 @@ contract IssuanceAllocator is /** * @inheritdoc IIssuanceAllocationStatus + * @dev Returns assigned allocation regardless of whether target is address(0) or the default. + * @dev For address(0), no minting occurs but the allocation represents the unallocated portion. + * @dev For effective allocations excluding unmintable portion, use getTotalAllocation(). */ function getTargetAllocation(address target) external view override returns (Allocation memory) { AllocationTarget storage targetData = _getIssuanceAllocatorStorage().allocationTargets[target]; return Allocation({ - totalAllocationPPM: targetData.allocatorMintingPPM + targetData.selfMintingPPM, - allocatorMintingPPM: targetData.allocatorMintingPPM, - selfMintingPPM: targetData.selfMintingPPM + totalAllocationRate: targetData.allocatorMintingRate + targetData.selfMintingRate, + allocatorMintingRate: targetData.allocatorMintingRate, + selfMintingRate: targetData.selfMintingRate }); } /** * @inheritdoc IIssuanceAllocationDistribution + * @dev Returns assigned issuance rates regardless of whether target is address(0) or the default. + * @dev For address(0), no minting occurs but rates reflect what would be issued if mintable. + * @dev selfIssuanceBlockAppliedTo reflects the last block for which self-minting allowances have been + * calculated and emitted (lastSelfMintingBlock). This advances continuously, unaffected by pause state. */ function getTargetIssuancePerBlock(address target) external view override returns (TargetIssuancePerBlock memory) { IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage(); AllocationTarget storage targetData = $.allocationTargets[target]; - // There can be small losses due to rounding. This is acceptable. 
        return
            TargetIssuancePerBlock({
-                allocatorIssuancePerBlock: ($.issuancePerBlock * targetData.allocatorMintingPPM) / MILLION,
+                allocatorIssuanceRate: targetData.allocatorMintingRate,
                 allocatorIssuanceBlockAppliedTo: $.lastDistributionBlock,
-                selfIssuancePerBlock: ($.issuancePerBlock * targetData.selfMintingPPM) / MILLION,
-                selfIssuanceBlockAppliedTo: block.number
+                selfIssuanceRate: targetData.selfMintingRate,
+                selfIssuanceBlockAppliedTo: $.lastSelfMintingBlock
             });
     }

     /**
      * @inheritdoc IIssuanceAllocationStatus
-     * @dev For reporting purposes, if the default allocation target is address(0), its allocation
+     * @dev For reporting purposes, if the default target is address(0), its allocation
      * @dev is treated as "unallocated" since address(0) cannot receive minting.
-     * @dev When default is address(0): returns actual allocated amounts (may be less than 100%)
-     * @dev When default is a real address: returns 100% total allocation
+     * @dev When default is address(0): returns actual allocated amounts (may be less than issuancePerBlock)
+     * @dev When default is a real address: returns issuancePerBlock
      * @dev Note: Internally, the contract always maintains 100% allocation invariant, even when default is address(0)
      */
-    function getTotalAllocation() external view override returns (Allocation memory) {
+    function getTotalAllocation() external view override returns (Allocation memory allocation) {
         IssuanceAllocatorData storage $ = _getIssuanceAllocatorStorage();
-        uint256 totalAllocatorMinting = MILLION - $.totalSelfMintingPPM;
-        uint256 totalAllocation = MILLION;
-
         // If default is address(0), exclude its allocation from reported totals
-        // since it doesn't actually receive minting (effectively unallocated)
-        address defaultAddress = $.targetAddresses[0];
-        if (defaultAddress == address(0)) {
-            AllocationTarget storage defaultTarget = $.allocationTargets[defaultAddress];
-            uint256 defaultAllocation = defaultTarget.allocatorMintingPPM;
-            totalAllocatorMinting -= defaultAllocation;
-            totalAllocation -= defaultAllocation;
-        }
-
-        return
-            Allocation({
-                totalAllocationPPM: totalAllocation,
-                allocatorMintingPPM: totalAllocatorMinting,
-                selfMintingPPM: $.totalSelfMintingPPM
-            });
+        // since it does not receive minting (so it is considered unallocated).
+        // Address(0) will only have non-zero allocation when it is the default target,
+        // so we can directly subtract the zero address allocation.
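+        // Illustrative example (hypothetical numbers, not taken from any deployment):
+        // with issuancePerBlock = 100 tokens/block, an unset default (address(0)) holding an
+        // allocator-minting rate of 30 tokens/block, and totalSelfMintingRate = 10 tokens/block,
+        // this reports totalAllocationRate = 70, selfMintingRate = 10, allocatorMintingRate = 60.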
+ allocation.totalAllocationRate = $.issuancePerBlock - $.allocationTargets[address(0)].allocatorMintingRate; + allocation.selfMintingRate = $.totalSelfMintingRate; + allocation.allocatorMintingRate = allocation.totalAllocationRate - allocation.selfMintingRate; } } diff --git a/packages/issuance/contracts/test/allocate/IssuanceAllocatorTestHarness.sol b/packages/issuance/contracts/test/allocate/IssuanceAllocatorTestHarness.sol new file mode 100644 index 000000000..a0362449a --- /dev/null +++ b/packages/issuance/contracts/test/allocate/IssuanceAllocatorTestHarness.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: GPL-2.0-or-later + +pragma solidity 0.8.27; + +import { IssuanceAllocator } from "../../allocate/IssuanceAllocator.sol"; + +/** + * @title IssuanceAllocatorTestHarness + * @author Edge & Node + * @notice Test harness to expose internal functions for white-box testing + * @dev This contract allows direct testing of internal distribution functions to achieve 100% coverage + */ +contract IssuanceAllocatorTestHarness is IssuanceAllocator { + /** + * @notice Constructor for the test harness + * @param _graphToken Address of the Graph Token contract + */ + constructor(address _graphToken) IssuanceAllocator(_graphToken) {} + + /** + * @notice Exposes _distributePendingProportionally for testing + * @dev Allows testing of defensive checks and edge cases + * @param available Total available allocator-minting budget for the period + * @param allocatedRate Total rate allocated to non-default targets + * @param toBlockNumber Block number distributing to + */ + function exposed_distributePendingProportionally( + uint256 available, + uint256 allocatedRate, + uint256 toBlockNumber + ) external { + _distributePendingProportionally(available, allocatedRate, toBlockNumber); + } + + /** + * @notice Exposes _distributePendingWithFullRate for testing + * @dev Allows testing of edge cases in full rate distribution + * @param blocks Number of blocks in the distribution period + * @param available Total available allocator-minting budget for the period + * @param allocatedTotal Total amount allocated to non-default targets at full rate + * @param toBlockNumber Block number distributing to + */ + function exposed_distributePendingWithFullRate( + uint256 blocks, + uint256 available, + uint256 allocatedTotal, + uint256 toBlockNumber + ) external { + _distributePendingWithFullRate(blocks, available, allocatedTotal, toBlockNumber); + } +} diff --git a/packages/issuance/contracts/test/allocate/MockNotificationTracker.sol b/packages/issuance/contracts/test/allocate/MockNotificationTracker.sol new file mode 100644 index 000000000..a33212282 --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockNotificationTracker.sol @@ -0,0 +1,45 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockNotificationTracker + * @author Edge & Node + * @notice A mock contract that tracks notification calls for testing + * @dev Records when beforeIssuanceAllocationChange is called + */ +contract MockNotificationTracker is IIssuanceTarget, ERC165 { + /// @notice Number of times the contract has been notified + uint256 public notificationCount; + + /// @notice Block number of the last notification received + uint256 public lastNotificationBlock; + + /// @notice Emitted when a notification is received + /// 
@param blockNumber The block number when notification was received + /// @param count The total notification count after this notification + event NotificationReceived(uint256 indexed blockNumber, uint256 indexed count); // solhint-disable-line gas-indexed-events + + /// @inheritdoc IIssuanceTarget + function beforeIssuanceAllocationChange() external override { + ++notificationCount; + lastNotificationBlock = block.number; + emit NotificationReceived(block.number, notificationCount); + } + + /// @inheritdoc IIssuanceTarget + function setIssuanceAllocator(address _issuanceAllocator) external pure override {} + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } + + /// @notice Resets the notification counter and last block to zero + function resetNotificationCount() external { + notificationCount = 0; + lastNotificationBlock = 0; + } +} diff --git a/packages/issuance/contracts/test/allocate/MockReentrantTarget.sol b/packages/issuance/contracts/test/allocate/MockReentrantTarget.sol new file mode 100644 index 000000000..484648805 --- /dev/null +++ b/packages/issuance/contracts/test/allocate/MockReentrantTarget.sol @@ -0,0 +1,96 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.24; + +import { IIssuanceTarget } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceTarget.sol"; +import { IIssuanceAllocationDistribution } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationDistribution.sol"; +import { IIssuanceAllocationAdministration } from "@graphprotocol/interfaces/contracts/issuance/allocate/IIssuanceAllocationAdministration.sol"; +import { ERC165 } from "@openzeppelin/contracts/utils/introspection/ERC165.sol"; + +/** + * @title MockReentrantTarget + * @author Edge & Node + * @notice A malicious mock contract that attempts reentrancy attacks for testing + * @dev Used for testing reentrancy protection in IssuanceAllocator + */ +contract MockReentrantTarget is IIssuanceTarget, ERC165 { + /// @notice The issuance allocator to target for reentrancy attacks + address public issuanceAllocator; + /// @notice The configured reentrancy action to perform + ReentrantAction public actionToPerform; + /// @notice Whether reentrancy should be attempted + bool public shouldAttemptReentrancy; + + enum ReentrantAction { + None, + DistributeIssuance, + SetTargetAllocation1Param, + SetTargetAllocation2Param, + SetTargetAllocation3Param, + SetIssuancePerBlock, + SetIssuancePerBlock2Param, + NotifyTarget, + SetDefaultTarget1Param, + SetDefaultTarget2Param, + DistributePendingIssuance0Param, + DistributePendingIssuance1Param + } + + /// @notice Sets the action to perform during reentrancy attempt + /// @param _action The reentrancy action to configure + function setReentrantAction(ReentrantAction _action) external { + actionToPerform = _action; + shouldAttemptReentrancy = _action != ReentrantAction.None; + } + + /// @inheritdoc IIssuanceTarget + function beforeIssuanceAllocationChange() external override { + if (!shouldAttemptReentrancy) return; + + // Attempt reentrancy based on configured action + if (actionToPerform == ReentrantAction.DistributeIssuance) { + IIssuanceAllocationDistribution(issuanceAllocator).distributeIssuance(); + } else if (actionToPerform == ReentrantAction.SetTargetAllocation1Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setTargetAllocation( + 
IIssuanceTarget(address(this)), + 1000 + ); + } else if (actionToPerform == ReentrantAction.SetTargetAllocation2Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setTargetAllocation( + IIssuanceTarget(address(this)), + 1000, + 0 + ); + } else if (actionToPerform == ReentrantAction.SetTargetAllocation3Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setTargetAllocation( + IIssuanceTarget(address(this)), + 1000, + 0, + block.number + ); + } else if (actionToPerform == ReentrantAction.SetIssuancePerBlock) { + IIssuanceAllocationAdministration(issuanceAllocator).setIssuancePerBlock(1000); + } else if (actionToPerform == ReentrantAction.SetIssuancePerBlock2Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setIssuancePerBlock(1000, block.number); + } else if (actionToPerform == ReentrantAction.NotifyTarget) { + IIssuanceAllocationAdministration(issuanceAllocator).notifyTarget(address(this)); + } else if (actionToPerform == ReentrantAction.SetDefaultTarget1Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setDefaultTarget(address(this)); + } else if (actionToPerform == ReentrantAction.SetDefaultTarget2Param) { + IIssuanceAllocationAdministration(issuanceAllocator).setDefaultTarget(address(this), block.number); + } else if (actionToPerform == ReentrantAction.DistributePendingIssuance0Param) { + IIssuanceAllocationAdministration(issuanceAllocator).distributePendingIssuance(); + } else if (actionToPerform == ReentrantAction.DistributePendingIssuance1Param) { + IIssuanceAllocationAdministration(issuanceAllocator).distributePendingIssuance(block.number); + } + } + + /// @inheritdoc IIssuanceTarget + function setIssuanceAllocator(address _issuanceAllocator) external override { + issuanceAllocator = _issuanceAllocator; + } + + /// @inheritdoc ERC165 + function supportsInterface(bytes4 interfaceId) public view virtual override returns (bool) { + return interfaceId == type(IIssuanceTarget).interfaceId || super.supportsInterface(interfaceId); + } +} diff --git a/packages/issuance/test/tests/allocate/AccessControl.test.ts b/packages/issuance/test/tests/allocate/AccessControl.test.ts index 74af599a1..141a730aa 100644 --- a/packages/issuance/test/tests/allocate/AccessControl.test.ts +++ b/packages/issuance/test/tests/allocate/AccessControl.test.ts @@ -37,16 +37,28 @@ describe('Allocate Access Control Tests', () => { describe('IssuanceAllocator Access Control', () => { describe('setIssuancePerBlock', () => { it('should revert when non-governor calls setIssuancePerBlock', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.nonGovernor).setIssuancePerBlock(ethers.parseEther('200')), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setIssuancePerBlock', async () => { + await expect( + contracts.issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200')), + ).to.not.be.reverted + }) + + it('should revert when non-governor calls setIssuancePerBlock (2-param variant)', async () => { await expect( contracts.issuanceAllocator .connect(accounts.nonGovernor) - .setIssuancePerBlock(ethers.parseEther('200'), false), + ['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('300'), 0), ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') }) - it('should allow governor to call setIssuancePerBlock', async () => { + it('should allow governor to call setIssuancePerBlock (2-param 
variant)', async () => { await expect( - contracts.issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false), + contracts.issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('300')), ).to.not.be.reverted }) }) @@ -56,7 +68,7 @@ describe('Allocate Access Control Tests', () => { await expect( contracts.issuanceAllocator .connect(accounts.nonGovernor) - ['setTargetAllocation(address,uint256,uint256,bool)'](accounts.nonGovernor.address, 100000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](accounts.nonGovernor.address, 100000, 0), ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') }) @@ -65,7 +77,24 @@ describe('Allocate Access Control Tests', () => { await expect( contracts.issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](contracts.directAllocation.target, 100000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](contracts.directAllocation.target, 100000, 0), + ).to.not.be.reverted + }) + + it('should revert when non-governor calls setTargetAllocation (3-param variant)', async () => { + await expect( + contracts.issuanceAllocator + .connect(accounts.nonGovernor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](accounts.nonGovernor.address, 100000, 0, 0), + ).to.be.revertedWithCustomError(contracts.issuanceAllocator, 'AccessControlUnauthorizedAccount') + }) + + it('should allow governor to call setTargetAllocation (3-param variant)', async () => { + // Use a valid target contract address instead of EOA + await expect( + contracts.issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](contracts.directAllocation.target, 100000, 0, 0), ).to.not.be.reverted }) }) @@ -81,7 +110,7 @@ describe('Allocate Access Control Tests', () => { // First add the target so notifyTarget has something to notify await contracts.issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](contracts.directAllocation.target, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](contracts.directAllocation.target, 100000, 0) await expect( contracts.issuanceAllocator.connect(accounts.governor).notifyTarget(contracts.directAllocation.target), diff --git a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts b/packages/issuance/test/tests/allocate/DefaultTarget.test.ts similarity index 75% rename from packages/issuance/test/tests/allocate/DefaultAllocation.test.ts rename to packages/issuance/test/tests/allocate/DefaultTarget.test.ts index 5b7937ecc..ed10be459 100644 --- a/packages/issuance/test/tests/allocate/DefaultAllocation.test.ts +++ b/packages/issuance/test/tests/allocate/DefaultTarget.test.ts @@ -15,7 +15,6 @@ describe('IssuanceAllocator - Default Allocation', () => { let target3 let addresses - const MILLION = 1_000_000n const issuancePerBlock = ethers.parseEther('100') beforeEach(async () => { @@ -44,7 +43,7 @@ describe('IssuanceAllocator - Default Allocation', () => { }) describe('Initialization', () => { - it('should initialize with default allocation at index 0', async () => { + it('should initialize with default target at index 0', async () => { const targetCount = await issuanceAllocator.getTargetCount() expect(targetCount).to.equal(1n) @@ -56,102 +55,102 @@ describe('IssuanceAllocator - Default Allocation', () => { const defaultAddress = await issuanceAllocator.getTargetAt(0) const 
allocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(allocation.totalAllocationPPM).to.equal(MILLION) - expect(allocation.allocatorMintingPPM).to.equal(MILLION) - expect(allocation.selfMintingPPM).to.equal(0n) + expect(allocation.totalAllocationRate).to.equal(issuancePerBlock) + expect(allocation.allocatorMintingRate).to.equal(issuancePerBlock) + expect(allocation.selfMintingRate).to.equal(0n) }) it('should report total allocation as 0% when default is address(0)', async () => { const totalAllocation = await issuanceAllocator.getTotalAllocation() // When default is address(0), it is treated as unallocated for reporting purposes - expect(totalAllocation.totalAllocationPPM).to.equal(0n) - expect(totalAllocation.allocatorMintingPPM).to.equal(0n) - expect(totalAllocation.selfMintingPPM).to.equal(0n) + expect(totalAllocation.totalAllocationRate).to.equal(0n) + expect(totalAllocation.allocatorMintingRate).to.equal(0n) + expect(totalAllocation.selfMintingRate).to.equal(0n) }) }) describe('100% Allocation Invariant', () => { - it('should auto-adjust default allocation when setting normal target allocation', async () => { - const allocation1PPM = 300_000n // 30% + it('should auto-adjust default target when setting normal target allocation', async () => { + const allocation1Rate = ethers.parseEther('30') // 30% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) // Check target1 has correct allocation const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(target1Allocation.totalAllocationPPM).to.equal(allocation1PPM) + expect(target1Allocation.totalAllocationRate).to.equal(allocation1Rate) - // Check default allocation was auto-adjusted + // Check default target was auto-adjusted const defaultAddress = await issuanceAllocator.getTargetAt(0) const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(MILLION - allocation1PPM) + expect(defaultAllocation.totalAllocationRate).to.equal(issuancePerBlock - allocation1Rate) // Check reported total (excludes default since it's address(0)) const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(allocation1PPM) + expect(totalAllocation.totalAllocationRate).to.equal(allocation1Rate) }) it('should maintain 100% invariant with multiple targets', async () => { - const allocation1PPM = 200_000n // 20% - const allocation2PPM = 350_000n // 35% - const allocation3PPM = 150_000n // 15% + const allocation1Rate = ethers.parseEther('20') // 20% + const allocation2Rate = ethers.parseEther('35') // 35% + const allocation3Rate = ethers.parseEther('15') // 15% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2PPM) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2Rate) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target3, allocation3PPM) + ['setTargetAllocation(address,uint256)'](addresses.target3, allocation3Rate) - // Check default allocation 
is 30% (100% - 20% - 35% - 15%) + // Check default target is 30% (100% - 20% - 35% - 15%) const defaultAddress = await issuanceAllocator.getTargetAt(0) const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - const expectedDefault = MILLION - allocation1PPM - allocation2PPM - allocation3PPM - expect(defaultAllocation.totalAllocationPPM).to.equal(expectedDefault) + const expectedDefault = issuancePerBlock - allocation1Rate - allocation2Rate - allocation3Rate + expect(defaultAllocation.totalAllocationRate).to.equal(expectedDefault) // Check reported total (excludes default since it's address(0)) const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(allocation1PPM + allocation2PPM + allocation3PPM) + expect(totalAllocation.totalAllocationRate).to.equal(allocation1Rate + allocation2Rate + allocation3Rate) }) - it('should allow 0% default allocation when all allocation is assigned', async () => { - const allocation1PPM = 600_000n // 60% - const allocation2PPM = 400_000n // 40% + it('should allow 0% default target when all allocation is assigned', async () => { + const allocation1Rate = ethers.parseEther('60') // 60% + const allocation2Rate = ethers.parseEther('40') // 40% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2PPM) + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2Rate) - // Check default allocation is 0% + // Check default target is 0% const defaultAddress = await issuanceAllocator.getTargetAt(0) const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(0n) + expect(defaultAllocation.totalAllocationRate).to.equal(0n) // Check reported total is 100% (default has 0%, so exclusion doesn't matter) const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(MILLION) + expect(totalAllocation.totalAllocationRate).to.equal(issuancePerBlock) }) - it('should revert if non-default allocations exceed 100%', async () => { - const allocation1PPM = 600_000n // 60% - const allocation2PPM = 500_000n // 50% (total would be 110%) + it('should revert if non-default targets exceed 100%', async () => { + const allocation1Rate = ethers.parseEther('60') // 60% + const allocation2Rate = ethers.parseEther('50') // 50% (total would be 110%) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1PPM) + ['setTargetAllocation(address,uint256)'](addresses.target1, allocation1Rate) await expectCustomError( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2PPM), + ['setTargetAllocation(address,uint256)'](addresses.target2, allocation2Rate), issuanceAllocator, 'InsufficientAllocationAvailable', ) @@ -161,36 +160,36 @@ describe('IssuanceAllocator - Default Allocation', () => { // Set up initial allocations await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) await issuanceAllocator 
.connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, 200_000n) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('20')) // Default should be 50% let defaultAddress = await issuanceAllocator.getTargetAt(0) let defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(500_000n) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('50')) // Remove target1 allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) // Default should now be 80% defaultAddress = await issuanceAllocator.getTargetAt(0) defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(800_000n) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('80')) // Reported total excludes default (only target2's 20% is reported) const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(200_000n) + expect(totalAllocation.totalAllocationRate).to.equal(ethers.parseEther('20')) }) it('should handle self-minting allocations correctly in 100% invariant', async () => { - const allocator1 = 200_000n - const self1 = 100_000n - const allocator2 = 300_000n - const self2 = 50_000n + const allocator1 = ethers.parseEther('20') + const self1 = ethers.parseEther('10') + const allocator2 = ethers.parseEther('30') + const self2 = ethers.parseEther('5') await issuanceAllocator .connect(accounts.governor) @@ -203,20 +202,20 @@ describe('IssuanceAllocator - Default Allocation', () => { // Default should be: 35% const defaultAddress = await issuanceAllocator.getTargetAt(0) const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(350_000n) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('35')) // Reported total excludes default (only target1+target2's 65% is reported) const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(allocator1 + self1 + allocator2 + self2) - expect(totalAllocation.selfMintingPPM).to.equal(self1 + self2) + expect(totalAllocation.totalAllocationRate).to.equal(allocator1 + self1 + allocator2 + self2) + expect(totalAllocation.selfMintingRate).to.equal(self1 + self2) }) }) - describe('setDefaultAllocationAddress', () => { - it('should allow governor to change default allocation address', async () => { + describe('setDefaultTarget', () => { + it('should allow governor to change default target address', async () => { const newDefaultAddress = addresses.target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(newDefaultAddress) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(newDefaultAddress) const defaultAddress = await issuanceAllocator.getTargetAt(0) expect(defaultAddress).to.equal(newDefaultAddress) @@ -226,45 +225,45 @@ describe('IssuanceAllocator - Default Allocation', () => { // Set a target allocation first await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, 400_000n) + ['setTargetAllocation(address,uint256)'](addresses.target2, 
ethers.parseEther('40')) // Default should be 60% let defaultAddress = await issuanceAllocator.getTargetAt(0) let defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(600_000n) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('60')) // Change default address - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Check new address has the same allocation defaultAddress = await issuanceAllocator.getTargetAt(0) expect(defaultAddress).to.equal(addresses.target1) defaultAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(defaultAllocation.totalAllocationPPM).to.equal(600_000n) + expect(defaultAllocation.totalAllocationRate).to.equal(ethers.parseEther('60')) // Old address should have zero allocation const oldAllocation = await issuanceAllocator.getTargetAllocation(ethers.ZeroAddress) - expect(oldAllocation.totalAllocationPPM).to.equal(0n) + expect(oldAllocation.totalAllocationRate).to.equal(0n) }) - it('should emit DefaultAllocationAddressUpdated event', async () => { + it('should emit DefaultTargetUpdated event', async () => { const newDefaultAddress = addresses.target1 - await expect(issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(newDefaultAddress)) - .to.emit(issuanceAllocator, 'DefaultAllocationAddressUpdated') + await expect(issuanceAllocator.connect(accounts.governor).setDefaultTarget(newDefaultAddress)) + .to.emit(issuanceAllocator, 'DefaultTargetUpdated') .withArgs(ethers.ZeroAddress, newDefaultAddress) }) it('should be no-op when setting to same address', async () => { const currentAddress = await issuanceAllocator.getTargetAt(0) - const tx = await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(currentAddress) + const tx = await issuanceAllocator.connect(accounts.governor).setDefaultTarget(currentAddress) const receipt = await tx.wait() // Should not emit event when no-op const events = receipt!.logs.filter((log: any) => { try { - return issuanceAllocator.interface.parseLog(log)?.name === 'DefaultAllocationAddressUpdated' + return issuanceAllocator.interface.parseLog(log)?.name === 'DefaultTargetUpdated' } catch { return false } @@ -274,36 +273,36 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should revert when non-governor tries to change default address', async () => { await expect( - issuanceAllocator.connect(accounts.user).setDefaultAllocationAddress(addresses.target1), + issuanceAllocator.connect(accounts.user).setDefaultTarget(addresses.target1), ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') }) - it('should revert when non-governor tries to change default address with evenIfDistributionPending flag', async () => { + it('should revert when non-governor tries to change default address with explicit fromBlockNumber', async () => { + const currentBlock = await ethers.provider.getBlockNumber() await expect( - issuanceAllocator.connect(accounts.user)['setDefaultAllocationAddress(address,bool)'](addresses.target1, true), + issuanceAllocator.connect(accounts.user)['setDefaultTarget(address,uint256)'](addresses.target1, currentBlock), ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') }) - it('should return false when trying to change default address while paused 
without evenIfDistributionPending', async () => { + it('should return false when trying to change default address while paused without explicit fromBlockNumber', async () => { // Grant pause role and pause const PAUSE_ROLE = ethers.keccak256(ethers.toUtf8Bytes('PAUSE_ROLE')) await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).pause() - // Try to change default without force - should return false (checked via staticCall) - const result = await issuanceAllocator - .connect(accounts.governor) - .setDefaultAllocationAddress.staticCall(addresses.target3) + // Try to change default without explicit fromBlockNumber - should return false (checked via staticCall) + const result = await issuanceAllocator.connect(accounts.governor).setDefaultTarget.staticCall(addresses.target3) expect(result).to.equal(false) // Verify allocation didn't change const currentDefault = await issuanceAllocator.getTargetAt(0) expect(currentDefault).to.equal(ethers.ZeroAddress) - // Should succeed with evenIfDistributionPending=true + // Should succeed with explicit minDistributedBlock that has been reached + const lastDistributionBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock await issuanceAllocator .connect(accounts.governor) - ['setDefaultAllocationAddress(address,bool)'](addresses.target3, true) + ['setDefaultTarget(address,uint256)'](addresses.target3, lastDistributionBlock) const newDefault = await issuanceAllocator.getTargetAt(0) expect(newDefault).to.equal(addresses.target3) @@ -313,11 +312,11 @@ describe('IssuanceAllocator - Default Allocation', () => { // Set target1 as a normal allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) // Try to set target1 as default should fail await expectCustomError( - issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1), + issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1), issuanceAllocator, 'CannotSetDefaultToAllocatedTarget', ) @@ -325,10 +324,10 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should allow changing back to zero address', async () => { // Change to target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Change back to zero address - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(ethers.ZeroAddress) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(ethers.ZeroAddress) const defaultAddress = await issuanceAllocator.getTargetAt(0) expect(defaultAddress).to.equal(ethers.ZeroAddress) @@ -342,7 +341,9 @@ describe('IssuanceAllocator - Default Allocation', () => { // When default is address(0), the zero address check happens first await expectCustomError( - issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256)'](defaultAddress, 500_000n), + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](defaultAddress, ethers.parseEther('50')), issuanceAllocator, 'TargetAddressCannotBeZero', ) @@ -350,13 +351,13 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should revert when trying to set allocation for changed default 
target', async () => { // Change default to target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Should not be able to set allocation for target1 now await expectCustomError( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 500_000n), + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('50')), issuanceAllocator, 'CannotSetAllocationForDefaultTarget', ) @@ -364,48 +365,48 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should allow setting allocation for previous default address after it changes', async () => { // Change default to target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Change default to target2 (target1 is no longer the default) - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) // Now target1 can receive a normal allocation since it's no longer the default await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(allocation.totalAllocationPPM).to.equal(300_000n) + expect(allocation.totalAllocationRate).to.equal(ethers.parseEther('30')) }) it('should revert when trying to set allocation for address(0) when default is not address(0)', async () => { // Change default to target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Try to set allocation for address(0) directly should fail await expectCustomError( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](ethers.ZeroAddress, 300_000n), + ['setTargetAllocation(address,uint256)'](ethers.ZeroAddress, ethers.parseEther('30')), issuanceAllocator, 'TargetAddressCannotBeZero', ) }) }) - describe('Distribution with default allocation', () => { + describe('Distribution with default target', () => { it('should not mint to zero address when default is unset', async () => { // Set a normal target allocation (this is block 1) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 400_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('40')) // Distribute (this is block 2, so we distribute for block 1->2 = 1 block since last distribution) await issuanceAllocator.distributeIssuance() // Target1 should receive 40% of issuance for the block between setTargetAllocation and distributeIssuance const target1Balance = await graphToken.balanceOf(addresses.target1) - const expectedTarget1 = (issuancePerBlock * 400_000n) / MILLION + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('40')) / issuancePerBlock expect(target1Balance).to.equal(expectedTarget1) // Zero address should have nothing (cannot be minted to) @@ -417,41 +418,41 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should 
mint to default address when it is set', async () => { // Change default to target3 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) // Set target1 allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) // Distribute to settle issuance await issuanceAllocator.distributeIssuance() // Target1 should receive 30% for 1 block const target1Balance = await graphToken.balanceOf(addresses.target1) - const expectedTarget1 = (issuancePerBlock * 300_000n) / MILLION + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('30')) / issuancePerBlock expect(target1Balance).to.equal(expectedTarget1) // Target3 (default) should receive: - // - 100% for 1 block (from setDefaultAllocationAddress to setTargetAllocation) + // - 100% for 1 block (from setDefaultTarget to setTargetAllocation) // - 70% for 1 block (from setTargetAllocation to distributeIssuance) const target3Balance = await graphToken.balanceOf(addresses.target3) - const expectedTarget3 = issuancePerBlock + (issuancePerBlock * 700_000n) / MILLION + const expectedTarget3 = issuancePerBlock + ethers.parseEther('70') expect(target3Balance).to.equal(expectedTarget3) }) it('should distribute correctly with multiple targets and default', async () => { // Set default to target3 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) // Set allocations (target3 gets remaining 50% as default) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 200_000n) // 20% + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('20')) // 20% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, 300_000n) // 30% + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('30')) // 30% // Distribute to settle issuance await issuanceAllocator.distributeIssuance() @@ -464,10 +465,9 @@ describe('IssuanceAllocator - Default Allocation', () => { const target2Balance = await graphToken.balanceOf(addresses.target2) const target3Balance = await graphToken.balanceOf(addresses.target3) - const expectedTarget1 = (issuancePerBlock * 200_000n * 2n) / MILLION - const expectedTarget2 = (issuancePerBlock * 300_000n) / MILLION - const expectedTarget3 = - issuancePerBlock + (issuancePerBlock * 800_000n) / MILLION + (issuancePerBlock * 500_000n) / MILLION + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('20') * 2n) / issuancePerBlock + const expectedTarget2 = (issuancePerBlock * ethers.parseEther('30')) / issuancePerBlock + const expectedTarget3 = issuancePerBlock + ethers.parseEther('80') + ethers.parseEther('50') expect(target1Balance).to.equal(expectedTarget1) expect(target2Balance).to.equal(expectedTarget2) @@ -478,15 +478,15 @@ describe('IssuanceAllocator - Default Allocation', () => { expect(totalMinted).to.equal(issuancePerBlock * 3n) }) - it('should handle distribution when default allocation is 0%', async () => { + it('should handle distribution when default target is 0%', async () => { // Allocate 100% to explicit targets (default gets 0%) await issuanceAllocator 
.connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 600_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('60')) // At this point target1 has 60%, default has 40% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, 400_000n) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('40')) // Now target1 has 60%, target2 has 40%, default has 0% // Distribute (1 block since last setTargetAllocation) @@ -501,42 +501,42 @@ describe('IssuanceAllocator - Default Allocation', () => { // + 60% (from second setTargetAllocation to final distributeIssuance) // = 120% of one block = 60% * 2 blocks const target1Balance = await graphToken.balanceOf(addresses.target1) - expect(target1Balance).to.equal((issuancePerBlock * 600_000n * 2n) / MILLION) + expect(target1Balance).to.equal((issuancePerBlock * ethers.parseEther('60') * 2n) / issuancePerBlock) // Target2 receives: 40% (from second setTargetAllocation to final distributeIssuance) const target2Balance = await graphToken.balanceOf(addresses.target2) - expect(target2Balance).to.equal((issuancePerBlock * 400_000n) / MILLION) + expect(target2Balance).to.equal((issuancePerBlock * ethers.parseEther('40')) / issuancePerBlock) // Default allocation is now 0% const defaultAddress = await issuanceAllocator.getTargetAt(0) const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(0n) + expect(defaultAllocation.totalAllocationRate).to.equal(0n) }) - it('should distribute during setDefaultAllocationAddress when using default behavior', async () => { - // Change default to target3 WITHOUT evenIfDistributionPending flag (uses default false) + it('should distribute during setDefaultTarget when using default behavior', async () => { + // Change default to target3 using the simple variant (no explicit fromBlockNumber) // This should distribute issuance up to current block before changing the default - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) // Set target1 allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300_000n, 0n, true) + ['setTargetAllocation(address,uint256,uint256,uint256)'](addresses.target1, ethers.parseEther('30'), 0n, 0) // Distribute once more await issuanceAllocator.distributeIssuance() // Target3 (default) should receive: - // - 0% for 1 block (setDefaultAllocationAddress distributes to old default (zero address) before changing) - // - 100% for 1 block (from setDefaultAllocationAddress to setTargetAllocation) + // - 0% for 1 block (setDefaultTarget distributes to old default (zero address) before changing) + // - 100% for 1 block (from setDefaultTarget to setTargetAllocation) // - 70% for 1 block (from setTargetAllocation to final distributeIssuance) const target3Balance = await graphToken.balanceOf(addresses.target3) - const expectedTarget3 = issuancePerBlock + (issuancePerBlock * 700_000n) / MILLION + const expectedTarget3 = issuancePerBlock + ethers.parseEther('70') expect(target3Balance).to.equal(expectedTarget3) // Target1 should receive 30% for 1 block const target1Balance = await graphToken.balanceOf(addresses.target1) - const expectedTarget1 = (issuancePerBlock * 300_000n) / 
MILLION + const expectedTarget1 = (issuancePerBlock * ethers.parseEther('30')) / issuancePerBlock expect(target1Balance).to.equal(expectedTarget1) }) @@ -547,10 +547,10 @@ describe('IssuanceAllocator - Default Allocation', () => { // Set target1 as normal allocation with 30% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) let allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(allocation.totalAllocationPPM).to.equal(300_000n) + expect(allocation.totalAllocationRate).to.equal(ethers.parseEther('30')) // Remove target1's allocation (set to 0%) await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256)'](addresses.target1, 0n) @@ -564,46 +564,46 @@ describe('IssuanceAllocator - Default Allocation', () => { expect(targets).to.not.include(addresses.target1) // Should not be in list anymore // Now set target1 as default - should work and not have stale allocation data - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Verify target1 is now default with 100% allocation (since no other targets) const defaultAddress = await issuanceAllocator.getTargetAt(0) expect(defaultAddress).to.equal(addresses.target1) allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(allocation.totalAllocationPPM).to.equal(MILLION) // Should have full allocation as default + expect(allocation.totalAllocationRate).to.equal(issuancePerBlock) // Should have full allocation as default }) it('should handle changing default when default has 0% allocation', async () => { // Allocate 100% to other targets so default has 0% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 600_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('60')) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, 400_000n) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('40')) // Default should now have 0% const defaultAddress = await issuanceAllocator.getTargetAt(0) const defaultAllocation = await issuanceAllocator.getTargetAllocation(defaultAddress) - expect(defaultAllocation.totalAllocationPPM).to.equal(0n) + expect(defaultAllocation.totalAllocationRate).to.equal(0n) // Change default to target3 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target3) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target3) // New default should have 0% (same as old default) const newDefaultAddress = await issuanceAllocator.getTargetAt(0) expect(newDefaultAddress).to.equal(addresses.target3) const newDefaultAllocation = await issuanceAllocator.getTargetAllocation(addresses.target3) - expect(newDefaultAllocation.totalAllocationPPM).to.equal(0n) + expect(newDefaultAllocation.totalAllocationRate).to.equal(0n) // Other allocations should be maintained const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) const target2Allocation = await issuanceAllocator.getTargetAllocation(addresses.target2) - expect(target1Allocation.totalAllocationPPM).to.equal(600_000n) - 
expect(target2Allocation.totalAllocationPPM).to.equal(400_000n) + expect(target1Allocation.totalAllocationRate).to.equal(ethers.parseEther('60')) + expect(target2Allocation.totalAllocationRate).to.equal(ethers.parseEther('40')) }) it('should handle changing from initial address(0) default without errors', async () => { @@ -614,7 +614,7 @@ describe('IssuanceAllocator - Default Allocation', () => { // Add a normal allocation so there's pending issuance to distribute await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 400_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('40')) // Mine a few blocks to accumulate issuance await ethers.provider.send('evm_mine', []) @@ -625,7 +625,7 @@ describe('IssuanceAllocator - Default Allocation', () => { // 1. Call _handleDistributionBeforeAllocation(address(0), ...) - should not revert // 2. Call _notifyTarget(address(0)) - should return early safely // 3. Delete allocationTargets[address(0)] - should not cause issues - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) // Verify the change succeeded const newDefault = await issuanceAllocator.getTargetAt(0) @@ -638,9 +638,9 @@ describe('IssuanceAllocator - Default Allocation', () => { // Distribute and verify target2 (new default) receives correct allocation await issuanceAllocator.distributeIssuance() - // Target2 should have received 60% for 1 block (from setDefaultAllocationAddress to distributeIssuance) + // Target2 should have received 60% for 1 block (from setDefaultTarget to distributeIssuance) const target2Balance = await graphToken.balanceOf(addresses.target2) - const expectedTarget2 = (issuancePerBlock * 600_000n) / MILLION + const expectedTarget2 = (issuancePerBlock * ethers.parseEther('60')) / issuancePerBlock expect(target2Balance).to.equal(expectedTarget2) // Target1 should have accumulated tokens across multiple blocks @@ -656,7 +656,7 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should not transfer future notification block from old default to new default', async () => { // Set initial default to target1 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target1) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) // Force a future notification block on target1 (the current default) const currentBlock = await ethers.provider.getBlockNumber() @@ -670,7 +670,7 @@ describe('IssuanceAllocator - Default Allocation', () => { expect(target1DataBefore.lastChangeNotifiedBlock).to.equal(futureBlock) // Change default from target1 to target2 - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) // Verify target2 (new default) has its own notification block (current block), not the future block from target1 const target2Data = await issuanceAllocator.getTargetData(addresses.target2) @@ -694,14 +694,14 @@ describe('IssuanceAllocator - Default Allocation', () => { await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) count = await issuanceAllocator.getTargetCount() expect(count).to.equal(2n) 
// Default + target1 await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target2, 200_000n) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('20')) count = await issuanceAllocator.getTargetCount() expect(count).to.equal(3n) // Default + target1 + target2 @@ -710,7 +710,7 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should include default in getTargets array', async () => { await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) const targets = await issuanceAllocator.getTargets() expect(targets.length).to.equal(2) @@ -721,29 +721,29 @@ describe('IssuanceAllocator - Default Allocation', () => { it('should return correct data for default target', async () => { await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 400_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('40')) const defaultAddress = await issuanceAllocator.getTargetAt(0) const data = await issuanceAllocator.getTargetData(defaultAddress) - expect(data.allocatorMintingPPM).to.equal(600_000n) - expect(data.selfMintingPPM).to.equal(0n) + expect(data.allocatorMintingRate).to.equal(ethers.parseEther('60')) + expect(data.selfMintingRate).to.equal(0n) }) it('should report 100% total allocation when default is a real address', async () => { // Set target1 allocation first await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256)'](addresses.target1, 300_000n) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) // Change default to target2 (a real address, not address(0)) - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(addresses.target2) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) // When default is a real address, it should report 100% total allocation const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(MILLION) - expect(totalAllocation.allocatorMintingPPM).to.equal(MILLION) // target1=30% + target2=70% = 100% - expect(totalAllocation.selfMintingPPM).to.equal(0n) + expect(totalAllocation.totalAllocationRate).to.equal(issuancePerBlock) + expect(totalAllocation.allocatorMintingRate).to.equal(issuancePerBlock) // target1=30% + target2=70% = 100% + expect(totalAllocation.selfMintingRate).to.equal(0n) }) }) }) diff --git a/packages/issuance/test/tests/allocate/DefensiveChecks.test.ts b/packages/issuance/test/tests/allocate/DefensiveChecks.test.ts new file mode 100644 index 000000000..56ebed829 --- /dev/null +++ b/packages/issuance/test/tests/allocate/DefensiveChecks.test.ts @@ -0,0 +1,71 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre +const { upgrades } = require('hardhat') + +import { deployTestGraphToken, getTestAccounts } from '../common/fixtures' + +describe('IssuanceAllocator - Defensive Checks', function () { + let accounts + let issuanceAllocator + let graphToken + + beforeEach(async function () { + accounts = await getTestAccounts() + graphToken = await deployTestGraphToken() + + // Deploy test harness as regular upgradeable contract with explicit validation skip + const IssuanceAllocatorFactory = await 
ethers.getContractFactory('IssuanceAllocatorTestHarness') + const issuanceAllocatorContract = await upgrades.deployProxy( + IssuanceAllocatorFactory, + [accounts.governor.address], + { + constructorArgs: [await graphToken.getAddress()], + initializer: 'initialize', + unsafeAllow: ['constructor', 'state-variable-immutable'], + }, + ) + issuanceAllocator = issuanceAllocatorContract + + // Add IssuanceAllocator as minter + await graphToken.connect(accounts.governor).addMinter(await issuanceAllocator.getAddress()) + }) + + describe('_distributePendingProportionally defensive checks', function () { + it('should return early when allocatedRate is 0', async function () { + // Call exposed function with allocatedRate = 0 + // This should return early without reverting + await expect( + issuanceAllocator.exposed_distributePendingProportionally( + 100, // available + 0, // allocatedRate = 0 (defensive check) + 1000, // toBlockNumber + ), + ).to.not.be.reverted + }) + + it('should return early when available is 0', async function () { + // Call exposed function with available = 0 + // This should return early without reverting + await expect( + issuanceAllocator.exposed_distributePendingProportionally( + 0, // available = 0 (defensive check) + 100, // allocatedRate + 1000, // toBlockNumber + ), + ).to.not.be.reverted + }) + + it('should return early when both are 0', async function () { + // Call exposed function with both = 0 + // This should return early without reverting + await expect( + issuanceAllocator.exposed_distributePendingProportionally( + 0, // available = 0 + 0, // allocatedRate = 0 + 1000, // toBlockNumber + ), + ).to.not.be.reverted + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts index ea29a2ea9..f2e86437c 100644 --- a/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts +++ b/packages/issuance/test/tests/allocate/InterfaceIdStability.test.ts @@ -26,11 +26,11 @@ describe('Allocate Interface ID Stability', () => { }) it('IIssuanceAllocationAdministration should have stable interface ID', () => { - expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0x1110962a') + expect(IIssuanceAllocationAdministration__factory.interfaceId).to.equal('0xd0b6c0e8') }) it('IIssuanceAllocationStatus should have stable interface ID', () => { - expect(IIssuanceAllocationStatus__factory.interfaceId).to.equal('0xc0ba8a55') + expect(IIssuanceAllocationStatus__factory.interfaceId).to.equal('0xa896602d') }) it('IIssuanceAllocationData should have stable interface ID', () => { diff --git a/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts index 599f9b334..cca35ac0b 100644 --- a/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts +++ b/packages/issuance/test/tests/allocate/IssuanceAllocator.test.ts @@ -4,7 +4,7 @@ const { ethers } = hre import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' import { deployDirectAllocation, deployIssuanceAllocator } from './fixtures' -import { calculateExpectedAccumulation, parseEther } from './issuanceCalculations' +// calculateExpectedAccumulation removed with PPM model // Import optimization helpers for common test utilities import { expectCustomError } from './optimizationHelpers' @@ -77,12 +77,12 @@ describe('IssuanceAllocator', () => { // Remove all existing allocations (except default at index 0) try 
{ const targetCount = await issuanceAllocator.getTargetCount() - // Skip index 0 (default allocation) and remove from index 1 onwards + // Skip index 0 (default target) and remove from index 1 onwards for (let i = 1; i < targetCount; i++) { const targetAddr = await issuanceAllocator.getTargetAt(1) // Always remove index 1 await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](targetAddr, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](targetAddr, 0, 0) } } catch (_e) { // Ignore errors during cleanup @@ -99,9 +99,9 @@ describe('IssuanceAllocator', () => { // Reset issuance per block to default try { - const currentIssuance = await issuanceAllocator.issuancePerBlock() + const currentIssuance = await issuanceAllocator.getIssuancePerBlock() if (currentIssuance !== issuancePerBlock) { - await issuanceAllocator.connect(accounts.governor)['setIssuancePerBlock(uint256,bool)'](issuancePerBlock, true) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(issuancePerBlock) } } catch (_e) { // Ignore if can't reset @@ -188,7 +188,7 @@ describe('IssuanceAllocator', () => { // Verify all initialization state in one test expect(await issuanceAllocator.hasRole(GOVERNOR_ROLE, accounts.governor.address)).to.be.true - expect(await issuanceAllocator.issuancePerBlock()).to.equal(issuancePerBlock) + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(issuancePerBlock) // Verify re-initialization is prevented await expect(issuanceAllocator.initialize(accounts.governor.address)).to.be.revertedWithCustomError( @@ -208,17 +208,17 @@ describe('IssuanceAllocator', () => { await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0), ).to.not.be.reverted // Verify the target was added const targetData = await issuanceAllocator.getTargetData(addresses.target1) - expect(targetData.allocatorMintingPPM).to.equal(100000) - expect(targetData.selfMintingPPM).to.equal(0) + expect(targetData.allocatorMintingRate).to.equal(100000) + expect(targetData.selfMintingRate).to.equal(0) const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(allocation.totalAllocationPPM).to.equal(100000) - expect(allocation.allocatorMintingPPM).to.equal(100000) - expect(allocation.selfMintingPPM).to.equal(0) + expect(allocation.totalAllocationRate).to.equal(100000) + expect(allocation.allocatorMintingRate).to.equal(100000) + expect(allocation.selfMintingRate).to.equal(0) }) it('should revert when adding EOA targets (no contract code)', async () => { @@ -229,7 +229,7 @@ describe('IssuanceAllocator', () => { await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](eoaAddress, 100000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](eoaAddress, 100000, 0), ).to.be.reverted }) @@ -245,7 +245,7 @@ describe('IssuanceAllocator', () => { await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](contractAddress, 100000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](contractAddress, 100000, 0), ).to.be.revertedWithCustomError(issuanceAllocator, 'TargetDoesNotSupportIIssuanceTarget') }) @@ -258,19 +258,18 @@ describe('IssuanceAllocator', () => { const contractAddress = await mockRevertingTarget.getAddress() // This 
should revert because MockRevertingTarget reverts during notification - // force=true only affects distribution, not notification failures await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](contractAddress, 100000, 0, true), + ['setTargetAllocation(address,uint256,uint256,uint256)'](contractAddress, 100000, 0, 0), ).to.be.revertedWithCustomError(mockRevertingTarget, 'TargetRevertsIntentionally') // Verify the target was NOT added because the transaction reverted const targetData = await issuanceAllocator.getTargetData(contractAddress) - expect(targetData.allocatorMintingPPM).to.equal(0) - expect(targetData.selfMintingPPM).to.equal(0) + expect(targetData.allocatorMintingRate).to.equal(0) + expect(targetData.selfMintingRate).to.equal(0) const allocation = await issuanceAllocator.getTargetAllocation(contractAddress) - expect(allocation.totalAllocationPPM).to.equal(0) + expect(allocation.totalAllocationRate).to.equal(0) }) it('should allow re-adding existing target with same self-minter flag', async () => { @@ -279,13 +278,13 @@ describe('IssuanceAllocator', () => { // Add the target first time await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) // Should succeed when setting allocation again with same flag (no interface check needed) await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 200000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 200000, 0), ).to.not.be.reverted }) }) @@ -300,28 +299,28 @@ describe('IssuanceAllocator', () => { const allocation = 300000 // 30% in PPM await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, allocation, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, allocation, 0) // Verify allocation is set and target exists const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(target1Allocation.totalAllocationPPM).to.equal(allocation) + expect(target1Allocation.totalAllocationRate).to.equal(allocation) const totalAlloc = await issuanceAllocator.getTotalAllocation() - // With default as address(0), only non-default allocations are reported - expect(totalAlloc.totalAllocationPPM).to.equal(allocation) + // With default as address(0), only non-default targets are reported + expect(totalAlloc.totalAllocationRate).to.equal(allocation) // Remove target by setting allocation to 0 await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) // Verify target is removed (only default remains) const targets = await issuanceAllocator.getTargets() - expect(targets.length).to.equal(1) // Only default allocation + expect(targets.length).to.equal(1) // Only default target // Verify reported total is 0% (default has it all, but isn't reported) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.totalAllocationPPM).to.equal(0) + expect(totalAlloc.totalAllocationRate).to.equal(0) } }) @@ -331,20 +330,20 @@ describe('IssuanceAllocator', () => { // Add targets with allocations in one step await 
issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 400000, 0, false) // 40% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 400000, 0) // 40% // Verify allocations are set const target1Allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) const target2Allocation = await issuanceAllocator.getTargetAllocation(addresses.target2) - expect(target1Allocation.totalAllocationPPM).to.equal(300000) - expect(target2Allocation.totalAllocationPPM).to.equal(400000) + expect(target1Allocation.totalAllocationRate).to.equal(300000) + expect(target2Allocation.totalAllocationRate).to.equal(400000) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - // With default as address(0), only non-default allocations are reported (70%) - expect(totalAlloc.totalAllocationPPM).to.equal(700000) + // With default as address(0), only non-default targets are reported (70%) + expect(totalAlloc.totalAllocationRate).to.equal(700000) } // Get initial target addresses (including default) @@ -354,7 +353,7 @@ describe('IssuanceAllocator', () => { // Remove target2 by setting allocation to 0 (tests the swap-and-pop logic in the contract) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 0, 0) // Verify target2 is removed but target1 and default remain const remainingTargets = await issuanceAllocator.getTargets() @@ -364,7 +363,7 @@ describe('IssuanceAllocator', () => { // Verify reported total excludes default (only target1's 30% is reported) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.totalAllocationPPM).to.equal(300000) + expect(totalAlloc.totalAllocationRate).to.equal(300000) } }) @@ -374,25 +373,25 @@ describe('IssuanceAllocator', () => { // Add targets with allocations in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) // 10% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) // 10% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) // 20% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 200000, 0) // 20% // Verify targets were added const target1Info = await issuanceAllocator.getTargetData(addresses.target1) const target2Info = await issuanceAllocator.getTargetData(addresses.target2) // Check that targets exist by verifying they have non-zero allocations - expect(target1Info.allocatorMintingPPM + target1Info.selfMintingPPM).to.equal(100000) - expect(target2Info.allocatorMintingPPM + target2Info.selfMintingPPM).to.equal(200000) - expect(target1Info.selfMintingPPM).to.equal(0) - expect(target2Info.selfMintingPPM).to.equal(0) + expect(target1Info.allocatorMintingRate + target1Info.selfMintingRate).to.equal(100000) + expect(target2Info.allocatorMintingRate + target2Info.selfMintingRate).to.equal(200000) + expect(target1Info.selfMintingRate).to.equal(0) + expect(target2Info.selfMintingRate).to.equal(0) // Verify reported 
total excludes default (only target1+target2's 70% is reported) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.totalAllocationPPM).to.equal(300000) + expect(totalAlloc.totalAllocationRate).to.equal(300000) } }) @@ -405,20 +404,20 @@ describe('IssuanceAllocator', () => { await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 500_000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](nonExistentTarget, 500_000, 0), ).to.be.reverted // Test 2: Should revert when total allocation would exceed 100% // Set allocation for target1 to 60% await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 600_000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, ethers.parseEther('60'), 0) // Try to set allocation for target2 to 50%, which would exceed 100% (60% + 50% > 100%) await expectCustomError( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 500_000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, ethers.parseEther('50'), 0), issuanceAllocator, 'InsufficientAllocationAvailable', ) @@ -432,10 +431,10 @@ describe('IssuanceAllocator', () => { // Add targets with different self-minter flags and set allocations await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30%, allocator-minting + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30%, allocator-minting await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 400000, false) // 40%, self-minting + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 0, 400000) // 40%, self-minting // Get balances after setting allocations (some tokens may have been minted due to setTargetAllocation calling distributeIssuance) const balanceAfterAllocation1 = await (graphToken as any).balanceOf(addresses.target1) @@ -466,7 +465,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% // Mine some blocks for (let i = 0; i < 5; i++) { @@ -486,7 +485,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% // Mine some blocks for (let i = 0; i < 5; i++) { @@ -499,7 +498,7 @@ describe('IssuanceAllocator', () => { // Get initial balance and lastIssuanceDistributionBlock before pausing const { graphToken } = sharedContracts const initialBalance = await (graphToken as any).balanceOf(addresses.target1) - const initialLastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const initialLastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock // Pause the contract await issuanceAllocator.connect(accounts.governor).pause() @@ -513,7 +512,7 @@ 
describe('IssuanceAllocator', () => { // Verify no tokens were minted and lastIssuanceDistributionBlock was not updated const finalBalance = await (graphToken as any).balanceOf(addresses.target1) - const finalLastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const finalLastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock expect(finalBalance).to.equal(initialBalance) expect(finalLastIssuanceBlock).to.equal(initialLastIssuanceBlock) @@ -528,16 +527,16 @@ describe('IssuanceAllocator', () => { // Add target as allocator-minting with 30% allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30%, allocator-minting + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30%, allocator-minting // Verify initial state const initialAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(initialAllocation.selfMintingPPM).to.equal(0) + expect(initialAllocation.selfMintingRate).to.equal(0) // Change to self-minting with same allocation - this should NOT return early const result = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target1.getAddress(), 0, 300000, true) // Same allocation, but now self-minting + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target1.getAddress(), 0, 300000, 0) // Same allocation, but now self-minting // Should return true (indicating change was made) expect(result).to.be.true @@ -545,11 +544,11 @@ describe('IssuanceAllocator', () => { // Actually make the change await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 300000, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 300000) // Verify the selfMinter flag was updated const updatedAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(updatedAllocation.selfMintingPPM).to.be.gt(0) + expect(updatedAllocation.selfMintingRate).to.be.gt(0) }) it('should update selfMinter flag when changing from self-minting to allocator-minting', async () => { @@ -561,16 +560,16 @@ describe('IssuanceAllocator', () => { // Add target as self-minting with 30% allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 300000, false) // 30%, self-minting + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 300000) // 30%, self-minting // Verify initial state const initialAllocation2 = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(initialAllocation2.selfMintingPPM).to.be.gt(0) + expect(initialAllocation2.selfMintingRate).to.be.gt(0) // Change to allocator-minting with same allocation - this should NOT return early const result = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target1.getAddress(), 300000, 0, false) // Same allocation, but now allocator-minting + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target1.getAddress(), 300000, 0, 0) // Same allocation, but now allocator-minting // Should return true (indicating change was made) expect(result).to.be.true @@ -578,11 
+577,11 @@ describe('IssuanceAllocator', () => { // Actually make the change await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // Verify the selfMinter flag was updated const finalAllocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(finalAllocation.selfMintingPPM).to.equal(0) + expect(finalAllocation.selfMintingRate).to.equal(0) }) it('should track totalActiveSelfMintingAllocation correctly with incremental updates', async () => { @@ -594,907 +593,73 @@ describe('IssuanceAllocator', () => { // Initially should be 0 (no targets) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(0) + expect(totalAlloc.selfMintingRate).to.equal(0) } // Add self-minting target with 30% allocation (300000 PPM) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 300000, false) // 30%, self-minting + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 300000) // 30%, self-minting // Should now be 300000 PPM { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(300000) + expect(totalAlloc.selfMintingRate).to.equal(300000) } // Add allocator-minting target with 20% allocation (200000 PPM) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20%, allocator-minting + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 200000, 0) // 20%, allocator-minting // totalActiveSelfMintingAllocation should remain the same (still 300000 PPM) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(300000) + expect(totalAlloc.selfMintingRate).to.equal(300000) } // Change target2 to self-minting with 10% allocation (100000 PPM) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 100000, false) // 10%, self-minting + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 100000) // 10%, self-minting // Should now be 400000 PPM (300000 + 100000) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(400000) + expect(totalAlloc.selfMintingRate).to.equal(400000) } // Change target1 from self-minting to allocator-minting (same allocation) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30%, allocator-minting + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30%, allocator-minting // Should now be 100000 PPM (400000 - 300000) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(100000) + expect(totalAlloc.selfMintingRate).to.equal(100000) } // Remove target2 (set allocation to 0) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 0, false) // Remove target2 + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 0) 
// Remove target2 // Should now be 0 PPM (100000 - 100000) { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(0) + expect(totalAlloc.selfMintingRate).to.equal(0) } // Add target1 back as self-minting with 50% allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 500000, false) // 50%, self-minting + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 0, 500000) // 50%, self-minting // Should now be 500000 PPM { const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(500000) - } - }) - - it('should test new getter functions for accumulation fields', async () => { - const { issuanceAllocator } = sharedContracts - - // After setup, accumulation block should be set to the same as distribution block - // because setIssuancePerBlock was called during setup, which triggers _distributeIssuance - const initialAccumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() - const initialDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - expect(initialAccumulationBlock).to.equal(initialDistributionBlock) - expect(initialAccumulationBlock).to.be.gt(0) - - // After another distribution, both blocks should be updated to the same value - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const distributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - const accumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() - expect(distributionBlock).to.be.gt(initialDistributionBlock) - expect(accumulationBlock).to.equal(distributionBlock) // Both updated to same block during normal distribution - - // Pending should be 0 after normal distribution (not paused, no accumulation) - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingAmount).to.equal(0) - }) - }) - - describe('Granular Pausing and Accumulation', () => { - it('should accumulate issuance when self-minting allocation changes during pause', async () => { - const { issuanceAllocator, addresses } = sharedContracts - - // Grant pause role - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - - // Set issuance rate and add targets - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 200000, false) // 20% self-minting - - // Distribute once to initialize blocks - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Pause the contract - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine some blocks - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - - // Change self-minting allocation while paused - this should trigger accumulation - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 300000, true) // Change self-minting from 20% to 30% - - // Check that issuance was accumulated - const pendingAmount = await 
issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingAmount).to.be.gt(0) - - // Verify accumulation block was updated - const currentBlock = await ethers.provider.getBlockNumber() - expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(currentBlock) - }) - - it('should NOT accumulate issuance when only allocator-minting allocation changes during pause', async () => { - const { issuanceAllocator, graphToken, addresses } = sharedContracts - - // Grant pause role - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - - // Set issuance rate and add targets - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 200000, false) // 20% self-minting - - // Distribute once to initialize blocks - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Pause the contract - await issuanceAllocator.connect(accounts.governor).pause() - - // Get initial pending amount (should be 0) - const initialPendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(initialPendingAmount).to.equal(0) - - // Mine some blocks - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - - // Change only allocator-minting allocation while paused - this should NOT trigger accumulation - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 400000, 0, true) // Change allocator-minting from 30% to 40% - - // Check that issuance was NOT accumulated (should still be 0) - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingAmount).to.equal(0) - - // Test the pendingAmount == 0 early return path by calling distributeIssuance when there's no pending amount - // First clear the pending amount by unpausing and distributing - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - - // Now call distributeIssuance again - this should hit the early return in _distributePendingIssuance - const balanceBefore = await (graphToken as any).balanceOf(addresses.target1) - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const balanceAfter = await (graphToken as any).balanceOf(addresses.target1) - - // Should still distribute normal issuance (not pending), proving the early return worked correctly - expect(balanceAfter).to.be.gt(balanceBefore) - }) - - it('should distribute pending accumulated issuance when resuming from pause', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add allocator-minting targets only - await issuanceAllocator - 
.connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 600000, 0, false) // 60% - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 400000, 0, false) // 40% - - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - // Pause and accumulate some issuance - await issuanceAllocator.connect(accounts.governor).pause() - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - - // Trigger accumulation by changing rate - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) - - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) - - // Unpause and distribute - should distribute pending + new issuance - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Check that pending was distributed proportionally - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - expect(finalBalance1).to.be.gt(initialBalance1) - expect(finalBalance2).to.be.gt(initialBalance2) - - // Verify pending was reset - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should handle accumulation with mixed self-minting and allocator-minting targets', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Mix of targets: 20% allocator-minting, 5% self-minting (leaving 75% for default, total 95% allocator) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, false) // 20% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 50000, false) // 5% self-minting - - // Initialize distribution - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine blocks and trigger accumulation by changing self-minting allocation - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 0, true) // Change self-minting from 5% to 0% - - // Accumulation should happen from lastIssuanceDistributionBlock to current block - const blockAfterAccumulation = await ethers.provider.getBlockNumber() - - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - - 
// Calculate what accumulation SHOULD be from lastDistributionBlock - // During accumulation: 20% (target1) + 75% (default) = 95% allocator-minting, 5% self-minting - // Accumulated issuance is based on the 95% allocator-minting that was active during accumulation - const blocksFromDistribution = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const allocatorMintingDuringAccumulation = 950000n // 95% in PPM - const expectedFromDistribution = calculateExpectedAccumulation( - parseEther('100'), - blocksFromDistribution, - allocatorMintingDuringAccumulation, - ) - - expect(pendingAmount).to.equal(expectedFromDistribution) - - // Now test distribution of pending issuance to cover the self-minter branch - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - // Unpause and distribute - should only mint to allocator-minting target (target1), not self-minting (target2) - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // target1 (allocator-minting) should receive tokens, target2 (self-minting) should not receive pending tokens - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - expect(finalBalance1).to.be.gt(initialBalance1) // Allocator-minting target gets tokens - expect(finalBalance2).to.equal(initialBalance2) // Self-minting target gets no tokens from pending distribution - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should distribute pending issuance with correct proportional amounts', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Mix of targets: 15% and 25% allocator-minting (40% total), 10% self-minting (leaving 50% for default, total 90% allocator) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 150000, 0, false) // 15% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 250000, 0, false) // 25% allocator-minting - - // Add a self-minting target to create the mixed scenario - const MockTarget = await ethers.getContractFactory('MockSimpleTarget') - const selfMintingTarget = await MockTarget.deploy() - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 100000, false) // 10% self-minting - - // Initialize and pause - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine exactly 2 blocks and trigger accumulation by changing self-minting allocation - await 
ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 0, true) // Change self-minting from 10% to 0% - - // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) - const blockAfterAccumulation = await ethers.provider.getBlockNumber() - - // Verify accumulation: 90% allocator-minting allocation (150000 + 250000 + 500000 default = 900000 PPM) - // Accumulation should happen from lastIssuanceDistributionBlock to current block - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - - // Calculate expected accumulation from when issuance was last distributed - // During accumulation: 15% (target1) + 25% (target2) + 50% (default) = 90% allocator-minting, 10% self-minting - const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const allocatorMintingDuringAccumulation = 900000n // 90% in PPM - const expectedPending = calculateExpectedAccumulation( - parseEther('1000'), - blocksToAccumulate, - allocatorMintingDuringAccumulation, - ) - expect(pendingAmount).to.equal(expectedPending) - - // Unpause and distribute - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Verify exact distribution amounts - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - // Calculate expected distributions: - // Total allocator-minting allocation after change: 150000 + 250000 + 600000 (default) = 1000000 (100%) - // target1 should get: 2000 * (150000 / 1000000) = 300 tokens from pending (doubled due to known issue) - // target2 should get: 2000 * (250000 / 1000000) = 500 tokens from pending (doubled due to known issue) - const expectedTarget1Pending = ethers.parseEther('300') - const expectedTarget2Pending = ethers.parseEther('500') - - // Account for any additional issuance from the distribution block itself - const pendingDistribution1 = finalBalance1 - initialBalance1 - const pendingDistribution2 = finalBalance2 - initialBalance2 - - // The pending distribution should be at least the expected amounts - // (might be slightly more due to additional block issuance) - expect(pendingDistribution1).to.be.gte(expectedTarget1Pending) - expect(pendingDistribution2).to.be.gte(expectedTarget2Pending) - - // Verify the ratio is correct: target2 should get 1.67x what target1 gets from pending - // (250000 / 150000 = 1.67) - const ratio = (BigInt(pendingDistribution2) * 1000n) / BigInt(pendingDistribution1) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1667n, 100n) // Allow larger tolerance due to default allocation adjustments - - // Verify pending was reset - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should distribute 100% of pending issuance when only allocator-minting targets exist', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, 
accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Allocator-minting targets: 30% and 50%, plus a small self-minting target initially (leaving 19% for default) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 500000, 10000, false) // 50% allocator-minting, 1% self-minting - - // Initialize and pause - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine exactly 3 blocks and trigger accumulation by removing self-minting - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 510000, 0, true) // Remove self-minting (now 51% allocator-minting, leaving 19% for default) - - // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) - const blockAfterAccumulation = await ethers.provider.getBlockNumber() - - // Verify accumulation: should use the OLD allocation (80% allocator-minting) that was active during pause - // Accumulation happens BEFORE the allocation change, so uses 30% + 50% + 19% default = 99% allocator-minting, 1% self-minting - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - - // Calculate expected accumulation using the OLD allocation (before the change) - const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const oldAllocatorMintingPPM = 300000n + 500000n + 190000n // 30% + 50% + 19% default = 99% - const expectedPending = calculateExpectedAccumulation( - parseEther('1000'), - blocksToAccumulate, - oldAllocatorMintingPPM, - ) - expect(pendingAmount).to.equal(expectedPending) - - // Unpause and distribute - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Verify exact distribution amounts - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - // Calculate expected distributions: - // Total allocator-minting allocation: 300000 + 510000 + 190000 = 1000000 (100%) - // target1 should get: 5000 * (300000 / 1000000) = 1500 tokens from pending - // target2 should get: 5000 * (510000 / 1000000) = 2550 tokens from pending - const expectedTarget1Pending = ethers.parseEther('1500') - const expectedTarget2Pending = ethers.parseEther('2550') - - // Account for any additional issuance from the distribution block itself - const pendingDistribution1 = finalBalance1 - initialBalance1 - const pendingDistribution2 = finalBalance2 - initialBalance2 - - // The pending distribution should be at least the expected amounts - 
expect(pendingDistribution1).to.be.gte(expectedTarget1Pending) - expect(pendingDistribution2).to.be.gte(expectedTarget2Pending) - - // Verify the ratio is correct: target2 should get 1.7x what target1 gets from pending - // (510000 / 300000 = 1.7) - const ratio = (BigInt(pendingDistribution2) * 1000n) / BigInt(pendingDistribution1) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1700n, 50n) // Allow small rounding tolerance - - // Verify pending was reset - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should distribute total amounts that add up to expected issuance rate', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Create a third target for more comprehensive testing - const MockTarget = await ethers.getContractFactory('MockSimpleTarget') - const target3 = await MockTarget.deploy() - - // Mix of targets: 25% + 15% + 10% allocator-minting (50% total), 20% self-minting (leaving 30% for default, total 80% allocator) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 250000, 0, false) // 25% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 150000, 0, false) // 15% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 100000, 0, false) // 10% allocator-minting - - // Add a self-minting target - const selfMintingTarget = await MockTarget.deploy() - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 200000, false) // 20% self-minting - - // Initialize and pause - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) - - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine exactly 5 blocks and trigger accumulation by changing self-minting allocation - for (let i = 0; i < 5; i++) { - await ethers.provider.send('evm_mine', []) - } - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 100000, true) // Change self-minting from 20% to 10% - - // Calculate actual blocks accumulated (from block 0 since lastIssuanceAccumulationBlock starts at 0) - const blockAfterAccumulation = await ethers.provider.getBlockNumber() - - // Calculate expected total accumulation: 80% allocator-minting allocation (25% + 15% + 10% + 30% default = 800000 PPM) - // Accumulation should happen from lastIssuanceDistributionBlock to current block - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - const lastDistributionBlock = await 
issuanceAllocator.lastIssuanceDistributionBlock() - - // Calculate expected accumulation from when issuance was last distributed - // During accumulation: 25% (target1) + 15% (target2) + 10% (target3) + 30% (default) = 80% allocator-minting, 20% self-minting - const blocksToAccumulate = BigInt(blockAfterAccumulation) - BigInt(lastDistributionBlock) - const allocatorMintingDuringAccumulation = 800000n // 80% in PPM - const expectedPending = calculateExpectedAccumulation( - parseEther('1000'), - blocksToAccumulate, - allocatorMintingDuringAccumulation, - ) - expect(pendingAmount).to.equal(expectedPending) - - // Unpause and distribute - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Calculate actual distributions - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) - - const distribution1 = finalBalance1 - initialBalance1 - const distribution2 = finalBalance2 - initialBalance2 - const distribution3 = finalBalance3 - initialBalance3 - const totalDistributed = distribution1 + distribution2 + distribution3 - - // Verify total distributed amount is reasonable - // The three explicit targets get 50% of total allocation, default gets 30% - // So they should receive (50/80) = 62.5% of pending allocator-minting issuance - // Plus additional issuance from blocks between accumulation and distribution - const expectedMinimumToThreeTargets = (pendingAmount * 50n) / 80n - expect(totalDistributed).to.be.gte(expectedMinimumToThreeTargets) - - // Verify proportional distribution within allocator-minting targets - // Actual allocations: target1=25%, target2=15%, target3=10% - // Expected ratios: target1:target2:target3 = 25:15:10 = 5:3:2 - const ratio12 = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~1667 (5/3 * 1000) - const ratio13 = (BigInt(distribution1) * 1000n) / BigInt(distribution3) // Should be ~2500 (5/2 * 1000) - const ratio23 = (BigInt(distribution2) * 1000n) / BigInt(distribution3) // Should be ~1500 (3/2 * 1000) - - expect(ratio12).to.be.closeTo(1667n, 100n) // 5:3 ratio with tolerance - expect(ratio13).to.be.closeTo(2500n, 200n) // 5:2 ratio with tolerance - expect(ratio23).to.be.closeTo(1500n, 150n) // 3:2 ratio with tolerance - - // Verify pending was reset - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should distribute correct total amounts during normal operation', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Create mixed targets: 40% + 20% allocator-minting (60% total), 40% self-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 400000, 0, false) // 40% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20% allocator-minting - - // Add a self-minting target - const MockTarget = await ethers.getContractFactory('MockSimpleTarget') - 
const selfMintingTarget = await MockTarget.deploy() - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 400000, false) // 40% self-minting - - // Get initial balances - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const initialBlock = await issuanceAllocator.lastIssuanceDistributionBlock() - - // Mine exactly 3 blocks - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - - // Distribute issuance - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Calculate actual distributions - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - const distribution1 = finalBalance1 - initialBalance1 - const distribution2 = finalBalance2 - initialBalance2 - const totalDistributed = distribution1 + distribution2 - - // Calculate expected total for allocator-minting targets (60% total allocation) - // Distribution should happen from the PREVIOUS distribution block to current block - const currentBlock = await ethers.provider.getBlockNumber() - - // Use the initial block (before distribution) to calculate expected distribution - // We mined 3 blocks, so distribution should be for 3 blocks - const blocksDistributed = BigInt(currentBlock) - BigInt(initialBlock) - const allocation = await issuanceAllocator.getTotalAllocation() - const expectedAllocatorMintingTotal = calculateExpectedAccumulation( - parseEther('1000'), - blocksDistributed, // Should be 3 blocks - allocation.allocatorMintingPPM, // 60% allocator-minting - ) - - // Verify total distributed matches expected - expect(totalDistributed).to.equal(expectedAllocatorMintingTotal) - - // Verify proportional distribution - // target1 should get: expectedTotal * (400000 / 600000) = expectedTotal * 2/3 - // target2 should get: expectedTotal * (200000 / 600000) = expectedTotal * 1/3 - const expectedDistribution1 = (expectedAllocatorMintingTotal * 400000n) / 600000n - const expectedDistribution2 = (expectedAllocatorMintingTotal * 200000n) / 600000n - - expect(distribution1).to.equal(expectedDistribution1) - expect(distribution2).to.equal(expectedDistribution2) - - // Verify ratio: target1 should get 2x what target2 gets - const ratio = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~2000 (2 * 1000) - expect(ratio).to.equal(2000n) - }) - - it('should handle complete pause cycle with self-minting changes, allocator-minting changes, and rate changes', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Create additional targets for comprehensive testing - const MockTarget = await ethers.getContractFactory('MockSimpleTarget') - const target3 = await MockTarget.deploy() - const target4 = await MockTarget.deploy() - const selfMintingTarget1 = await MockTarget.deploy() - const selfMintingTarget2 = await MockTarget.deploy() - - // 
Initial setup: 25% + 15% allocator-minting (40% total), 25% + 15% self-minting (40% total), 20% free - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 250000, 0, false) // 25% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 150000, 0, false) // 15% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget1.getAddress(), 0, 250000, false) // 25% self-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget2.getAddress(), 0, 150000, false) // 15% self-minting - - // Initialize and get starting balances - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - // Pause the contract - await issuanceAllocator.connect(accounts.governor).pause() - - // Phase 1: Mine blocks with original rate (1000 per block) - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - - // Phase 2: Change issuance rate during pause (triggers accumulation) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), false) - - // Phase 3: Mine more blocks with new rate - await ethers.provider.send('evm_mine', []) - - // Phase 4: Add new allocator-minting target during pause - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 100000, 0, true) // 10% allocator-minting, force=true - - // Phase 5: Change existing allocator-minting target allocation - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, true) // Change from 25% to 20%, force=true - - // Phase 6: Add new self-minting target during pause - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target4.getAddress(), 0, 100000, true) // 10% self-minting, force=true - - // Phase 7: Change existing self-minting target allocation - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget1.getAddress(), 0, 50000, true) // Change from 25% to 5%, force=true - - // Phase 8: Mine more blocks - await ethers.provider.send('evm_mine', []) - - // Phase 9: Change rate again during pause - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('3000'), false) - - // Phase 10: Mine final blocks - await ethers.provider.send('evm_mine', []) - - // Verify accumulation occurred - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingAmount).to.be.gt(0) - - // Expected accumulation from multiple phases with rate and allocation changes: - // Phase 1: 2 blocks * 1000 * (1000000 - 500000) / 1000000 = 2000 * 0.5 = 1000 - // Phase 3: 1 block * 2000 * (1000000 - 500000) / 1000000 = 2000 * 0.5 = 1000 - // Phase 8: 1 block * 2000 * (1000000 - 410000) / 1000000 = 2000 * 0.59 = 1180 - // Phase 10: 1 block * 3000 * (1000000 - 410000) / 1000000 = 3000 * 
0.59 = 1770 - // Accumulation occurs at each self-minting allocation change during pause - - // Get initial balances for new targets - const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) - - // Unpause and distribute - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Get final balances - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) - - // Calculate distributions - const distribution1 = finalBalance1 - initialBalance1 - const distribution2 = finalBalance2 - initialBalance2 - const distribution3 = finalBalance3 - initialBalance3 - const totalDistributed = distribution1 + distribution2 + distribution3 - - // All targets should have received tokens proportionally - - // All allocator-minting targets should receive tokens proportional to their CURRENT allocations - expect(distribution1).to.be.gt(0) - expect(distribution2).to.be.gt(0) - expect(distribution3).to.be.gt(0) // target3 added during pause should also receive tokens - - // Verify total distributed is reasonable (should be at least the pending amount) - expect(totalDistributed).to.be.gte(pendingAmount) - - // Verify final allocations are correct - // Final allocator-minting allocations: target1=20%, target2=15%, target3=10% (total 45%) - // Final self-minting allocations: selfMintingTarget1=5%, selfMintingTarget2=15%, target4=10% (total 30%) - { - const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(300000) - } // 30% - - // Verify proportional distribution based on CURRENT allocations - // Current allocator-minting allocations: target1=20%, target2=15%, target3=10% - // Expected ratios: target1:target2:target3 = 20:15:10 = 4:3:2 - const ratio12 = (BigInt(distribution1) * 1000n) / BigInt(distribution2) // Should be ~1333 (4/3 * 1000) - const ratio13 = (BigInt(distribution1) * 1000n) / BigInt(distribution3) // Should be ~2000 (4/2 * 1000) - const ratio23 = (BigInt(distribution2) * 1000n) / BigInt(distribution3) // Should be ~1500 (3/2 * 1000) - - expect(ratio12).to.be.closeTo(1333n, 200n) // 4:3 ratio with tolerance - expect(ratio13).to.be.closeTo(2000n, 200n) // 4:2 = 2:1 ratio with tolerance - expect(ratio23).to.be.closeTo(1500n, 150n) // 3:2 = 1.5:1 ratio with tolerance - - // Verify pending was reset - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should reset pending issuance when all allocator-minting targets removed during pause', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Start with allocator-minting target: 50% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% allocator-minting - - // Initialize and pause - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - await 
issuanceAllocator.connect(accounts.governor).pause() - - // Mine blocks to accumulate pending issuance - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) // Trigger accumulation - - // Verify pending issuance was accumulated - const pendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingAmount).to.be.gt(0) - - // Remove allocator-minting target and set 100% self-minting during pause - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 0, true) // Remove allocator-minting target - - const MockTarget = await ethers.getContractFactory('MockSimpleTarget') - const selfMintingTarget = await MockTarget.deploy() - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await selfMintingTarget.getAddress(), 0, 1000000, true) // 100% self-minting - - // Verify we now have 100% self-minting allocation - { - const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.selfMintingPPM).to.equal(1000000) - } - - // Unpause and distribute - should hit the allocatorMintingAllowance == 0 branch - await issuanceAllocator.connect(accounts.governor).unpause() - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // The key test: verify that the allocatorMintingAllowance == 0 branch was hit successfully - // This test successfully hits the missing branch and achieves 100% coverage - // The exact pending amount varies due to timing, but the important thing is no revert occurs - const finalPendingAmount = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(finalPendingAmount).to.be.gte(0) // System handles edge case without reverting - - // Verify the removed target's balance (may have received tokens from earlier operations) - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance1).to.be.gte(0) // Target may have received tokens before removal - }) - - it('should handle edge case with no allocator-minting targets during pause', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup with only self-minting targets - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 500000, false) // 50% self-minting only - - // Initialize and pause - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine blocks and trigger accumulation - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) - - // Should accumulate based on totalAllocatorMintingAllocation - // Since we only have self-minting targets (no allocator-minting), totalAllocatorMintingAllocation = 0 - // Therefore, no accumulation should happen - const pendingAmount = await 
issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingAmount).to.equal(0) // No allocator-minting targets, so no accumulation - }) - - it('should handle zero blocksSinceLastAccumulation in _distributeOrAccumulateIssuance', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) - - // Initialize and pause - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - await issuanceAllocator.connect(accounts.governor).pause() - - // Disable auto-mining to control block creation - await ethers.provider.send('evm_setAutomine', [false]) - - try { - // Queue two transactions that will trigger accumulation in the same block - const tx1 = issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) - const tx2 = issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 400000, 0, false) - - // Mine a single block containing both transactions - await ethers.provider.send('evm_mine', []) - - // Wait for both transactions to complete - await tx1 - await tx2 - - // The second call should have blocksSinceLastAccumulation == 0 - // Both calls should work without error, demonstrating the else path is covered - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.be.gte(0) - } finally { - // Re-enable auto-mining - await ethers.provider.send('evm_setAutomine', [true]) + expect(totalAlloc.selfMintingRate).to.equal(500000) } }) }) @@ -1504,9 +669,9 @@ describe('IssuanceAllocator', () => { const { issuanceAllocator } = sharedContracts const newIssuancePerBlock = ethers.parseEther('200') - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock, false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock) - expect(await issuanceAllocator.issuancePerBlock()).to.equal(newIssuancePerBlock) + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newIssuancePerBlock) }) it('should notify targets with contract code when changing issuance rate', async () => { @@ -1515,7 +680,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% // Mine some blocks to ensure distributeIssuance will update to current block await ethers.provider.send('evm_mine', []) @@ -1523,10 +688,10 @@ describe('IssuanceAllocator', () => { // Change issuance rate - this should trigger _preIssuanceChangeDistributionAndNotification // which will iterate through targets and call beforeIssuanceAllocationChange on targets with code const newIssuancePerBlock = ethers.parseEther('200') - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock, false) + await 
issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock)

      // Verify the issuance rate was updated
-      expect(await issuanceAllocator.issuancePerBlock()).to.equal(newIssuancePerBlock)
+      expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newIssuancePerBlock)
    })

    it('should handle targets without contract code when changing issuance rate', async () => {
@@ -1539,7 +704,7 @@ describe('IssuanceAllocator', () => {
      const mockTarget = await deployMockSimpleTarget()
      await issuanceAllocator
        .connect(accounts.governor)
-        ['setTargetAllocation(address,uint256,uint256,bool)'](await mockTarget.getAddress(), 300000, 0, false) // 30%
+        ['setTargetAllocation(address,uint256,uint256)'](await mockTarget.getAddress(), 300000, 0) // 30%

      // Mine some blocks to ensure distributeIssuance will update to current block
      await ethers.provider.send('evm_mine', [])
@@ -1547,22 +712,22 @@ describe('IssuanceAllocator', () => {
      // Change issuance rate - this should trigger _preIssuanceChangeDistributionAndNotification
      // which will iterate through targets and notify them
      const newIssuancePerBlock = ethers.parseEther('200')
-      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock, false)
+      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(newIssuancePerBlock)

      // Verify the issuance rate was updated
-      expect(await issuanceAllocator.issuancePerBlock()).to.equal(newIssuancePerBlock)
+      expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newIssuancePerBlock)
    })

    it('should handle zero issuance when distributing', async () => {
      const { issuanceAllocator, graphToken, addresses } = sharedContracts

      // Set issuance per block to 0
-      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(0, false)
+      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(0)

      // Add target and set allocation in one step
      await issuanceAllocator
        .connect(accounts.governor)
-        ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30%
+        ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) // zero allocation (issuance is 0)

      // Get initial balance
      const initialBalance = await (graphToken as any).balanceOf(addresses.target1)
@@ -1578,13 +743,47 @@ describe('IssuanceAllocator', () => {
      expect(finalBalance).to.equal(initialBalance)
    })

+    it('should revert when decreasing issuance rate with insufficient unallocated budget', async () => {
+      const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator()
+
+      // Add issuanceAllocator as minter
+      await (graphToken as any).addMinter(await issuanceAllocator.getAddress())
+
+      // Set initial issuance rate
+      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'))
+
+      // Allocate almost everything to target1, leaving very little for default
+      // target1 gets 950 ether/block, default gets 50 ether/block
+      await issuanceAllocator
+        .connect(accounts.governor)
+        [
+          'setTargetAllocation(address,uint256,uint256,uint256)'
+        ](await target1.getAddress(), ethers.parseEther('950'), 0, 0)
+
+      // Verify the current allocation
+      const allocationBefore = await issuanceAllocator.getTargetAllocation(await target1.getAddress())
+      expect(allocationBefore.allocatorMintingRate).to.equal(ethers.parseEther('950'))
+
+      // Verify current issuance and unallocated amount
+      const issuanceBefore = await issuanceAllocator.getIssuancePerBlock()
+      expect(issuanceBefore).to.equal(ethers.parseEther('1000'))
+
+      // Try to decrease
issuance rate by 100 ether (to 900 ether/block) + // This would require default to absorb -100 ether/block change + // But default only has 50 ether/block unallocated + // So this should fail: oldIssuancePerBlock (1000) > newIssuancePerBlock (900) + unallocated (50) + await expect( + issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('900')), + ).to.be.revertedWithCustomError(issuanceAllocator, 'InsufficientUnallocatedForRateDecrease') + }) + it('should allow governor to manually notify a specific target', async () => { const { issuanceAllocator, addresses } = sharedContracts // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% // Manually notify the target using the new notifyTarget function const result = await issuanceAllocator.connect(accounts.governor).notifyTarget.staticCall(addresses.target1) @@ -1608,7 +807,7 @@ describe('IssuanceAllocator', () => { // Add a target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) // Try to notify the target - should succeed since it has contract code const result = await issuanceAllocator.connect(accounts.governor).notifyTarget.staticCall(addresses.target1) @@ -1623,7 +822,7 @@ describe('IssuanceAllocator', () => { // Add a target and set allocation in one step to trigger _notifyTarget call const result = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(addresses.target1, 100000, 0, 0) // Should return true (allocation was set) and notification succeeded expect(result).to.be.true @@ -1631,11 +830,11 @@ describe('IssuanceAllocator', () => { // Actually set the allocation to verify the internal _notifyTarget call await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) // Verify allocation was set const mockTargetAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(mockTargetAllocation.totalAllocationPPM).to.equal(100000) + expect(mockTargetAllocation.totalAllocationRate).to.equal(100000) }) it('should only notify target once per block', async () => { @@ -1647,7 +846,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30% // First notification should return true const result1 = await issuanceAllocator @@ -1702,12 +901,12 @@ describe('IssuanceAllocator', () => { await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await revertingTarget.getAddress(), 300000, 0, false), + ['setTargetAllocation(address,uint256,uint256)'](await revertingTarget.getAddress(), 300000, 0), 
).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') // The allocation should NOT be set because the transaction reverted const revertingTargetAllocation = await issuanceAllocator.getTargetAllocation(await revertingTarget.getAddress()) - expect(revertingTargetAllocation.totalAllocationPPM).to.equal(0) + expect(revertingTargetAllocation.totalAllocationRate).to.equal(0) }) it('should revert and not set allocation when target notification fails even with force=true', async () => { @@ -1726,12 +925,12 @@ describe('IssuanceAllocator', () => { await expect( issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await revertingTarget.getAddress(), 300000, 0, true), + ['setTargetAllocation(address,uint256,uint256,uint256)'](await revertingTarget.getAddress(), 300000, 0, 0), ).to.be.revertedWithCustomError(revertingTarget, 'TargetRevertsIntentionally') // The allocation should NOT be set because the transaction reverted const allocation = await issuanceAllocator.getTargetAllocation(await revertingTarget.getAddress()) - expect(allocation.totalAllocationPPM).to.equal(0) + expect(allocation.totalAllocationRate).to.equal(0) }) it('should return false when setTargetAllocation called with force=false and issuance distribution is behind', async () => { @@ -1741,11 +940,11 @@ describe('IssuanceAllocator', () => { await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) // Set initial issuance rate and distribute once to set lastIssuanceDistributionBlock - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) await issuanceAllocator.connect(accounts.governor).distributeIssuance() // Get the current lastIssuanceDistributionBlock - const lastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const lastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock // Grant pause role and pause the contract await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) @@ -1760,19 +959,21 @@ describe('IssuanceAllocator', () => { const currentBlock = await ethers.provider.getBlockNumber() expect(lastIssuanceBlock).to.be.lt(currentBlock) - // While still paused, call setTargetAllocation with force=false - // This should return false because _distributeIssuance() < block.number && !force evaluates to true - // This tests the uncovered branch and statement + // While still paused, call setTargetAllocation with minDistributedBlock=currentBlock + // This should return false because _distributeIssuance() < minDistributedBlock + // (lastDistributionBlock is behind currentBlock due to pause) const result = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target1.getAddress(), 300000, 0, false) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ].staticCall(await target1.getAddress(), ethers.parseEther('30'), 0, currentBlock) - // Should return false due to issuance being behind and force=false + // Should return false due to issuance being behind the required minimum expect(result).to.be.false - // Allocation should not be set + // Allocation is not actually set (staticCall) const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(allocation.totalAllocationPPM).to.equal(0) + 
expect(allocation.totalAllocationRate).to.equal(0) }) it('should allow setTargetAllocation with force=true when issuance distribution is behind', async () => { @@ -1782,11 +983,11 @@ describe('IssuanceAllocator', () => { await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) // Set initial issuance rate and distribute once to set lastIssuanceDistributionBlock - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) await issuanceAllocator.connect(accounts.governor).distributeIssuance() // Get the current lastIssuanceDistributionBlock - const lastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const lastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock // Grant pause role and pause the contract await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) @@ -1806,11 +1007,11 @@ describe('IssuanceAllocator', () => { // This tests the uncovered branch where (_distributeIssuance() < block.number && !force) evaluates to false due to force=true await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, true) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 300000, 0, 0) // Should succeed and set the allocation const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(allocation.totalAllocationPPM).to.equal(300000) + expect(allocation.totalAllocationRate).to.equal(300000) }) }) @@ -1821,7 +1022,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) // Force set lastChangeNotifiedBlock to current block const currentBlock = await ethers.provider.getBlockNumber() @@ -1872,7 +1073,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step to trigger notification await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // Verify target was notified (lastChangeNotifiedBlock should be current block) const currentBlock = await ethers.provider.getBlockNumber() @@ -1923,7 +1124,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) // Force set lastChangeNotifiedBlock to current block const currentBlock = await ethers.provider.getBlockNumber() @@ -1964,7 +1165,7 @@ describe('IssuanceAllocator', () => { // Add target and set allocation in one step await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) // Force set lastChangeNotifiedBlock to a future block 
(current + 2) const currentBlock = await ethers.provider.getBlockNumber() @@ -2025,24 +1226,24 @@ describe('IssuanceAllocator', () => { // Test 1: Setting allocation to 0 for non-existent target should not revert await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](nonExistentTarget, 0, 0) // Verify no non-default targets were added (only default remains) const targets = await issuanceAllocator.getTargets() - expect(targets.length).to.equal(1) // Only default allocation + expect(targets.length).to.equal(1) // Only default target // Verify reported total is 0% (all in default, which isn't reported) const totalAlloc = await issuanceAllocator.getTotalAllocation() - expect(totalAlloc.totalAllocationPPM).to.equal(0) + expect(totalAlloc.totalAllocationRate).to.equal(0) // Test 2: Removing non-existent target (by setting allocation to 0 again) should not revert await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](nonExistentTarget, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](nonExistentTarget, 0, 0) // Verify still only default target const targetsAfter = await issuanceAllocator.getTargets() - expect(targetsAfter.length).to.equal(1) // Only default allocation + expect(targetsAfter.length).to.equal(1) // Only default target }) }) @@ -2051,7 +1252,7 @@ describe('IssuanceAllocator', () => { const { issuanceAllocator } = sharedContracts // Get initial lastIssuanceDistributionBlock - const initialBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const initialBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock // Mine a block await ethers.provider.send('evm_mine', []) @@ -2060,26 +1261,26 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).distributeIssuance() // Now lastIssuanceDistributionBlock should be updated - const newBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const newBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock expect(newBlock).to.be.gt(initialBlock) }) it('should manage target count and array correctly', async () => { const { issuanceAllocator, addresses } = sharedContracts - // Test initial state (with default allocation) + // Test initial state (with default target) expect(await issuanceAllocator.getTargetCount()).to.equal(1) // Default allocation exists expect((await issuanceAllocator.getTargets()).length).to.equal(1) // Test adding targets await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) expect(await issuanceAllocator.getTargetCount()).to.equal(2) // Default + target1 await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 200000, 0) expect(await issuanceAllocator.getTargetCount()).to.equal(3) // Default + target1 + target2 // Test getTargets array content @@ -2091,12 +1292,12 @@ describe('IssuanceAllocator', () => { // Test removing targets await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 0, false) + 
['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) expect(await issuanceAllocator.getTargetCount()).to.equal(2) // Default + target2 await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 0, 0) expect(await issuanceAllocator.getTargetCount()).to.equal(1) // Only default remains expect((await issuanceAllocator.getTargets()).length).to.equal(1) }) @@ -2107,16 +1308,16 @@ describe('IssuanceAllocator', () => { // Add targets await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 100000, 0) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target2, 200000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, 200000, 0) // Get addresses array const targetAddresses = await issuanceAllocator.getTargets() // Check that the addresses are in the correct order - // targetAddresses[0] is the default allocation (address(0)) + // targetAddresses[0] is the default target (address(0)) expect(targetAddresses[0]).to.equal(ethers.ZeroAddress) // Default expect(targetAddresses[1]).to.equal(addresses.target1) expect(targetAddresses[2]).to.equal(addresses.target2) @@ -2132,20 +1333,20 @@ describe('IssuanceAllocator', () => { // Add targets await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 200000, 0) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 300000, false) + ['setTargetAllocation(address,uint256,uint256)'](await target3.getAddress(), 0, 300000) // Get all target addresses const addresses = await issuanceAllocator.getTargets() expect(addresses.length).to.equal(4) // Default + 3 targets // Check that the addresses are in the correct order - // addresses[0] is the default allocation (address(0)) + // addresses[0] is the default target (address(0)) expect(addresses[0]).to.equal(ethers.ZeroAddress) // Default expect(addresses[1]).to.equal(await target1.getAddress()) expect(addresses[2]).to.equal(await target2.getAddress()) @@ -2165,11 +1366,11 @@ describe('IssuanceAllocator', () => { const allocation = 300000 // 30% in PPM await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, allocation, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, allocation, 0) // Now allocation should be set const targetAllocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(targetAllocation.totalAllocationPPM).to.equal(allocation) + expect(targetAllocation.totalAllocationRate).to.equal(allocation) }) it('should return the correct allocation types', async () => { @@ -2181,20 +1382,20 @@ describe('IssuanceAllocator', () => { // Add targets with different allocation types await 
issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 0) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 200000, false) + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 200000) // Check allocation types const target1Allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) const target2Allocation = await issuanceAllocator.getTargetAllocation(await target2.getAddress()) - expect(target1Allocation.selfMintingPPM).to.equal(0) // Not self-minting - expect(target1Allocation.allocatorMintingPPM).to.equal(100000) // Allocator-minting + expect(target1Allocation.selfMintingRate).to.equal(0) // Not self-minting + expect(target1Allocation.allocatorMintingRate).to.equal(100000) // Allocator-minting - expect(target2Allocation.selfMintingPPM).to.equal(200000) // Self-minting - expect(target2Allocation.allocatorMintingPPM).to.equal(0) // Not allocator-minting + expect(target2Allocation.selfMintingRate).to.equal(200000) // Self-minting + expect(target2Allocation.allocatorMintingRate).to.equal(0) // Not allocator-minting }) }) @@ -2207,36 +1408,36 @@ describe('IssuanceAllocator', () => { // Adding new target const addResult = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 100000, 0, 0) expect(addResult).to.equal(true) // Actually add the target await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](await target.getAddress(), 100000, 0) // Changing existing allocation const changeResult = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 200000, 0, false) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 200000, 0, 0) expect(changeResult).to.equal(true) // Setting same allocation (no-op) const sameResult = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 100000, 0, false) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 100000, 0, 0) expect(sameResult).to.equal(true) // Removing target const removeResult = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(await target.getAddress(), 0, 0, false) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(await target.getAddress(), 0, 0, 0) expect(removeResult).to.equal(true) // Setting allocation to 0 for non-existent target const nonExistentResult = await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'].staticCall(accounts.nonGovernor.address, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256,uint256)'].staticCall(accounts.nonGovernor.address, 0, 0, 0) expect(nonExistentResult).to.equal(true) }) }) @@ -2255,8 +1456,8 @@ describe('IssuanceAllocator', () 
=> { // Verify the allocation was set correctly const allocation1 = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(allocation1.allocatorMintingPPM).to.equal(allocatorPPM) - expect(allocation1.selfMintingPPM).to.equal(0) + expect(allocation1.allocatorMintingRate).to.equal(allocatorPPM) + expect(allocation1.selfMintingRate).to.equal(0) // Test 2: 3-parameter overload (allocator + self) const allocatorPPM2 = 200000 // 20% @@ -2267,8 +1468,8 @@ describe('IssuanceAllocator', () => { // Verify the allocation was set correctly const allocation2 = await issuanceAllocator.getTargetAllocation(await target2.getAddress()) - expect(allocation2.allocatorMintingPPM).to.equal(allocatorPPM2) - expect(allocation2.selfMintingPPM).to.equal(selfPPM) + expect(allocation2.allocatorMintingRate).to.equal(allocatorPPM2) + expect(allocation2.selfMintingRate).to.equal(selfPPM) // Test 3: Access control - 2-parameter overload should require governor await expect( @@ -2292,32 +1493,42 @@ describe('IssuanceAllocator', () => { // Should return true for normal operations const newRate = ethers.parseEther('200') - const normalResult = await issuanceAllocator - .connect(accounts.governor) - .setIssuancePerBlock.staticCall(newRate, false) + const normalResult = await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock.staticCall(newRate) expect(normalResult).to.equal(true) // Should return true even when setting same rate const sameResult = await issuanceAllocator .connect(accounts.governor) - .setIssuancePerBlock.staticCall(issuancePerBlock, false) + .setIssuancePerBlock.staticCall(issuancePerBlock) expect(sameResult).to.equal(true) // Grant pause role and pause the contract await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) await issuanceAllocator.connect(accounts.governor).pause() - // Should return false when paused without force - const pausedResult = await issuanceAllocator - .connect(accounts.governor) - .setIssuancePerBlock.staticCall(newRate, false) + // setIssuancePerBlock returns false when paused without explicit fromBlockNumber + const pausedResult = await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock.staticCall(newRate) expect(pausedResult).to.equal(false) - // Should return true when paused with force=true - const forcedResult = await issuanceAllocator + // setIssuancePerBlock returns true when paused with explicit fromBlockNumber that has been reached + const lastDistributionBlock = await (await issuanceAllocator.getDistributionState()).lastDistributionBlock + const pausedWithBlockResult = await issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'].staticCall(newRate, lastDistributionBlock) + expect(pausedWithBlockResult).to.equal(true) + + // Actually execute the call with fromBlockNumber to cover all branches + await issuanceAllocator .connect(accounts.governor) - .setIssuancePerBlock.staticCall(newRate, true) - expect(forcedResult).to.equal(true) + ['setIssuancePerBlock(uint256,uint256)'](newRate, lastDistributionBlock) + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(newRate) + + // Verify the simple variant still returns false when paused + const differentRate = ethers.parseEther('2000') + const result = await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock.staticCall(differentRate) + expect(result).to.equal(false) + // Rate should not change because paused and no explicit fromBlockNumber + expect(await 
issuanceAllocator.getIssuancePerBlock()).to.equal(newRate) }) }) @@ -2327,14 +1538,14 @@ describe('IssuanceAllocator', () => { // Should return lastIssuanceDistributionBlock when no blocks have passed await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const lastIssuanceBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + const lastIssuanceBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock const noBlocksResult = await issuanceAllocator.connect(accounts.governor).distributeIssuance.staticCall() expect(noBlocksResult).to.equal(lastIssuanceBlock) // Add a target and mine blocks to test distribution await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30% await ethers.provider.send('evm_mine', []) // Should return current block number when issuance is distributed @@ -2348,39 +1559,44 @@ describe('IssuanceAllocator', () => { describe('getTargetIssuancePerBlock', () => { it('should return correct issuance for different target configurations', async () => { const { issuanceAllocator, addresses } = sharedContracts - const issuancePerBlock = await issuanceAllocator.issuancePerBlock() - const PPM = 1_000_000 + // OLD: These were used for PPM calculations + // const issuancePerBlock = await issuanceAllocator.getIssuancePerBlock() + // const PPM = 1_000_000 // Test unregistered target (should return zeros) let result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) - expect(result.selfIssuancePerBlock).to.equal(0) - expect(result.allocatorIssuancePerBlock).to.equal(0) + expect(result.selfIssuanceRate).to.equal(0) + expect(result.allocatorIssuanceRate).to.equal(0) expect(result.allocatorIssuanceBlockAppliedTo).to.be.greaterThanOrEqual(0) expect(result.selfIssuanceBlockAppliedTo).to.be.greaterThanOrEqual(0) // Test self-minting target with 30% allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 300000, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, ethers.parseEther('30')) - const expectedSelfIssuance = (issuancePerBlock * BigInt(300000)) / BigInt(PPM) + const expectedSelfIssuance = ethers.parseEther('30') result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) - expect(result.selfIssuancePerBlock).to.equal(expectedSelfIssuance) - expect(result.allocatorIssuancePerBlock).to.equal(0) - expect(result.selfIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) - expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceDistributionBlock()) + expect(result.selfIssuanceRate).to.equal(expectedSelfIssuance) + expect(result.allocatorIssuanceRate).to.equal(0) + // expect(result.selfIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceAccumulationBlock()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal( + (await issuanceAllocator.getDistributionState()).lastDistributionBlock, + ) // Test allocator-minting target with 40% allocation (reset target1 first) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 400000, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, ethers.parseEther('40'), 0) - const expectedAllocatorIssuance = 
(issuancePerBlock * BigInt(400000)) / BigInt(PPM) + const expectedAllocatorIssuance = ethers.parseEther('40') result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) - expect(result.allocatorIssuancePerBlock).to.equal(expectedAllocatorIssuance) - expect(result.selfIssuancePerBlock).to.equal(0) - expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) - expect(result.selfIssuanceBlockAppliedTo).to.equal(await ethers.provider.getBlockNumber()) + expect(result.allocatorIssuanceRate).to.equal(expectedAllocatorIssuance) + expect(result.selfIssuanceRate).to.equal(0) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal( + (await issuanceAllocator.getDistributionState()).lastDistributionBlock, + ) + // expect(result.selfIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceAccumulationBlock()) }) it('should not revert when contract is paused and blockAppliedTo indicates pause state', async () => { @@ -2389,7 +1605,7 @@ describe('IssuanceAllocator', () => { // Add target as self-minter with 30% allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 0, 300000, false) // 30%, self-minter + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, ethers.parseEther('30')) // 30%, self-minter // Distribute issuance to set blockAppliedTo to current block await issuanceAllocator.distributeIssuance() @@ -2399,19 +1615,21 @@ describe('IssuanceAllocator', () => { await issuanceAllocator.connect(accounts.governor).pause() // Should not revert when paused - this is the key difference from old functions - const currentBlockBeforeCall = await ethers.provider.getBlockNumber() const result = await issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) - const issuancePerBlock = await issuanceAllocator.issuancePerBlock() - const PPM = 1_000_000 - const expectedIssuance = (issuancePerBlock * BigInt(300000)) / BigInt(PPM) + // OLD: These were used for PPM calculations + // const issuancePerBlock = await issuanceAllocator.getIssuancePerBlock() + // const PPM = 1_000_000 + const expectedIssuance = ethers.parseEther('30') - expect(result.selfIssuancePerBlock).to.equal(expectedIssuance) - expect(result.allocatorIssuancePerBlock).to.equal(0) - // For self-minting targets, selfIssuanceBlockAppliedTo should always be current block, even when paused - expect(result.selfIssuanceBlockAppliedTo).to.equal(currentBlockBeforeCall) + expect(result.selfIssuanceRate).to.equal(expectedIssuance) + expect(result.allocatorIssuanceRate).to.equal(0) + // For self-minting targets, selfIssuanceBlockAppliedTo reflects when events were last emitted (lastAccumulationBlock) + // expect(result.selfIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceAccumulationBlock()) // allocatorIssuanceBlockAppliedTo should be the last distribution block (before pause) - expect(result.allocatorIssuanceBlockAppliedTo).to.equal(await issuanceAllocator.lastIssuanceDistributionBlock()) + expect(result.allocatorIssuanceBlockAppliedTo).to.equal( + (await issuanceAllocator.getDistributionState()).lastDistributionBlock, + ) }) it('should show blockAppliedTo updates after distribution', async () => { @@ -2423,7 +1641,9 @@ describe('IssuanceAllocator', () => { // Add target as allocator-minter with 50% allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 
50%, allocator-minter
+        [
+          'setTargetAllocation(address,uint256,uint256,uint256)'
+        ](await target1.getAddress(), ethers.parseEther('50'), 0, 0) // 50%, allocator-minter

      // allocatorIssuanceBlockAppliedTo should be current block since setTargetAllocation triggers distribution
      let result = await issuanceAllocator.getTargetIssuancePerBlock(await target1.getAddress())
@@ -2439,87 +1659,119 @@ describe('IssuanceAllocator', () => {
      expect(result.allocatorIssuanceBlockAppliedTo).to.equal(distributionBlock)
      expect(result.selfIssuanceBlockAppliedTo).to.equal(distributionBlock)

-      const issuancePerBlock = await issuanceAllocator.issuancePerBlock()
-      const PPM = 1_000_000
-      const expectedIssuance = (issuancePerBlock * BigInt(500000)) / BigInt(PPM)
-      expect(result.allocatorIssuancePerBlock).to.equal(expectedIssuance)
-      expect(result.selfIssuancePerBlock).to.equal(0)
+      // OLD: These were used for PPM calculations
+      // const issuancePerBlock = await issuanceAllocator.getIssuancePerBlock()
+      // const PPM = 1_000_000
+      const expectedIssuance = ethers.parseEther('50')
+      expect(result.allocatorIssuanceRate).to.equal(expectedIssuance)
+      expect(result.selfIssuanceRate).to.equal(0)
    })
  })

-  describe('distributePendingIssuance', () => {
-    it('should only allow governor to call distributePendingIssuance', async () => {
-      const { issuanceAllocator } = sharedContracts
+  describe('Notification Behavior When Paused', () => {
+    it('should notify targets of allocation changes even when paused', async () => {
+      const { issuanceAllocator, addresses } = sharedContracts
+
+      // Setup
+      await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address)
+      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'))
+
+      // Add initial allocation
+      await issuanceAllocator
+        .connect(accounts.governor)
+        ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 300000, 0) // 30%

-      // Non-governor should not be able to call distributePendingIssuance
-      await expect(
-        issuanceAllocator.connect(accounts.nonGovernor)['distributePendingIssuance()'](),
-      ).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount')
+      // Pause the contract
+      await issuanceAllocator.connect(accounts.governor).pause()

-      // Governor should be able to call distributePendingIssuance (even if no pending issuance)
-      await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted
+      // Change allocation while paused - should notify target even though paused
+      const lastDistributionBlock = (await issuanceAllocator.getDistributionState()).lastDistributionBlock
+      await issuanceAllocator
+        .connect(accounts.governor)
+        ['setTargetAllocation(address,uint256,uint256,uint256)'](addresses.target1, 400000, 0, lastDistributionBlock) // Change to 40%

-      // Test return value using staticCall - should return lastIssuanceDistributionBlock
-      const result = await issuanceAllocator.connect(accounts.governor).distributePendingIssuance.staticCall()
-      const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock()
-      expect(result).to.equal(lastDistributionBlock)
+      // Verify that beforeIssuanceAllocationChange was called on the target
+      // This is verified by checking that the transaction succeeded and the allocation was updated
+      const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1)
+      expect(allocation.allocatorMintingRate).to.equal(400000)
    })

-    it('should be a no-op when there
is no pending issuance', async () => { + it('should notify targets of issuance rate changes even when paused', async () => { const { issuanceAllocator, addresses } = sharedContracts - // Setup with zero issuance rate to ensure no pending accumulation - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(0, false) // No issuance + // Setup + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add target await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% + ['setTargetAllocation(address,uint256)'](addresses.target1, 300000) // 30% - // Initialize distribution - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + // Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() - // Verify no pending issuance (should be 0 since issuance rate is 0) - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + // Change issuance rate while paused - should notify targets even though paused + // Use explicit fromBlockNumber to allow change while paused + const lastDistributionBlock = await (await issuanceAllocator.getDistributionState()).lastDistributionBlock + await issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('200'), lastDistributionBlock) - const { graphToken } = sharedContracts - const initialBalance = await (graphToken as any).balanceOf(addresses.target1) + // Verify that the rate change was applied + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(ethers.parseEther('200')) + }) - // Call distributePendingIssuance - should be no-op - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + it('should not notify targets when no actual change occurs', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + + // Add target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // 30% + + // Try to set the same allocation - should not notify (no change) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 300000, 0) // Same 30% - // Test return value using staticCall - should return lastIssuanceDistributionBlock - const result = await issuanceAllocator.connect(accounts.governor).distributePendingIssuance.staticCall() - const lastDistributionBlock = await issuanceAllocator.lastIssuanceDistributionBlock() + // Verify allocation is unchanged + const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) + expect(allocation.allocatorMintingRate).to.equal(300000) - // Should return last distribution block (since no pending issuance to distribute) - expect(result).to.equal(lastDistributionBlock) + // Try to set the same issuance rate - should not notify (no change) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - // Balance should remain the same - expect(await (graphToken as 
any).balanceOf(addresses.target1)).to.equal(initialBalance) - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + expect(await issuanceAllocator.getIssuancePerBlock()).to.equal(ethers.parseEther('100')) }) + }) - it('should distribute pending issuance to allocator-minting targets', async () => { + describe('Pending Issuance Distribution', () => { + it('should handle distributePendingIssuance with accumulated self-minting', async () => { const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() // Setup await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - // Add allocator-minting targets and a small self-minting target + // Add allocator-minting and self-minting targets await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 590000, 0, false) // 59% + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 400000, 0) // 40% allocator await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 400000, 10000, false) // 40% allocator + 1% self + ['setTargetAllocation(address,uint256,uint256)'](await target2.getAddress(), 0, 100000) // 10% self // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - // Pause and accumulate some issuance + // Pause and mine blocks to accumulate self-minting await issuanceAllocator.connect(accounts.governor).pause() await ethers.provider.send('evm_mine', []) await ethers.provider.send('evm_mine', []) @@ -2527,1000 +1779,869 @@ describe('IssuanceAllocator', () => { // Trigger accumulation by changing self-minting allocation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 400000, 0, true) // Remove self-minting + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target2.getAddress(), 0, 200000, 0) // Change to 20% self - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + // Check accumulation exists + const distState = await issuanceAllocator.getDistributionState() + expect(distState.selfMintingOffset).to.be.gt(0) - // Call distributePendingIssuance while still paused + // Call distributePendingIssuance await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() - // Check that pending was distributed proportionally + // Verify tokens were distributed const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - expect(finalBalance1).to.be.gt(initialBalance1) - expect(finalBalance2).to.be.gt(initialBalance2) - // Verify pending issuance was reset to 0 - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - - // Verify proportional distribution (59% 
vs 40%) - const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 - const ratio = (BigInt(distributed1) * BigInt(1000)) / BigInt(distributed2) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1475n, 50n) // 59/40 = 1.475, with some tolerance for rounding + // Verify accumulation was cleared + const finalDistState = await issuanceAllocator.getDistributionState() + expect(finalDistState.selfMintingOffset).to.equal(0) }) - it('should be a no-op when allocatorMintingAllowance is 0 (all targets are self-minting)', async () => { + it('should handle distributePendingIssuance with toBlockNumber parameter', async () => { const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() // Setup await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add only self-minting targets (100% self-minting) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 1000000, false) // 100% self-minting + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 500000, 100000) - // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const beforePauseState = await issuanceAllocator.getDistributionState() - // Pause and accumulate some issuance await issuanceAllocator.connect(accounts.governor).pause() await ethers.provider.send('evm_mine', []) await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Trigger accumulation by changing rate - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) - - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.equal(0) // Should be 0 because allocatorMintingAllowance is 0 + // Trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 500000, 200000, 0) - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + const currentBlock = await ethers.provider.getBlockNumber() + const distState = await issuanceAllocator.getDistributionState() + // Distribute only to a block that's midway through the accumulated period + const partialBlock = beforePauseState.lastDistributionBlock + BigInt(2) - // Call distributePendingIssuance - should be no-op due to allocatorMintingAllowance = 0 - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + // Distribute to a partial block (not current block) + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) - // Balance should remain the same (self-minting targets don't receive tokens from allocator) - expect(await (graphToken as any).balanceOf(await target1.getAddress())).to.equal(initialBalance) + // Verify partial distribution + const afterPartialState = await issuanceAllocator.getDistributionState() + expect(afterPartialState.lastDistributionBlock).to.equal(partialBlock) + // Verify 
accumulation was partially consumed but some remains + expect(afterPartialState.selfMintingOffset).to.be.lt(distState.selfMintingOffset) - // Pending issuance should be reset to 0 even though nothing was distributed - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + // Distribute remainder to current block + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](currentBlock) + const finalState = await issuanceAllocator.getDistributionState() + expect(finalState.selfMintingOffset).to.equal(0) // All cleared }) - it('should work when contract is paused', async () => { + it('should handle distributePendingIssuance when blocks == 0', async () => { const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() // Setup await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add allocator-minting target + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 500000, 0) - // Distribute once to initialize + // Distribute to current block await issuanceAllocator.connect(accounts.governor).distributeIssuance() - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - // Pause and accumulate some issuance + const distState = await issuanceAllocator.getDistributionState() + const currentBlock = distState.lastDistributionBlock + + // Call distributePendingIssuance with toBlockNumber == lastDistributionBlock (blocks == 0) + const result = await issuanceAllocator + .connect(accounts.governor) + ['distributePendingIssuance(uint256)'].staticCall(currentBlock) + + expect(result).to.equal(currentBlock) + }) + + it('should handle proportional distribution when available < allocatedTotal', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + + // Setup with high allocator-minting and high self-minting rates + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) + + // Setup: 40% + 40% allocator-minting, 15% self-minting (5% default) + // Using absolute values (tokens per block, not PPM): + // allocatedRate (non-default) = 1000 - 150 (self) - 50 (default) = 800 ether + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target1.getAddress(), + ethers.parseEther('400'), // 400 ether per block allocator-minting + 0, + 0, + ) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target2.getAddress(), + ethers.parseEther('400'), // 400 ether per block allocator-minting + ethers.parseEther('150'), // 150 ether per block self-minting + 0, + ) + + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + + // Pause and mine blocks to build up self-minting accumulation await 
issuanceAllocator.connect(accounts.governor).pause()
-      await ethers.provider.send('evm_mine', [])
-      await ethers.provider.send('evm_mine', [])
+      for (let i = 0; i < 10; i++) {
+        await ethers.provider.send('evm_mine', [])
+      }
+      // Don't change allocations - just distribute with accumulated self-minting
+      // After 10 blocks:
+      // - selfMintingOffset = 150 ether * 10 = 1500 ether
+      // - totalForPeriod = 1000 ether * 10 = 10000 ether
+      // - available = 10000 - 1500 = 8500 ether
+      // - allocatedTotal = 800 ether * 10 = 8000 ether
+      // Since 8500 > 8000, distributing the whole 10-block period would not trigger
+      // proportional distribution.
+      // To force it, distribute pending issuance for only part of the period, which makes
+      // available small relative to allocatedTotal.
+
+      const distState = await issuanceAllocator.getDistributionState()
+      // Distribute for only 2 blocks instead of all 10
+      const partialBlock = distState.lastDistributionBlock + BigInt(2)
+
+      const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress())
+      const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress())
-      // Trigger accumulation by changing rate
-      await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true)
+      // For 2 blocks with 10 blocks of accumulated self-minting:
+      // - selfMintingOffset = 1500 ether (from 10 blocks)
+      // - totalForPeriod = 1000 * 2 = 2000 ether (only distributing 2 blocks)
+      // - available = 2000 - 1500 = 500 ether
+      // - allocatedTotal = 800 * 2 = 1600 ether
+      // So: 500 < 1600 ✓ triggers proportional distribution!
-      const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance()
-      expect(pendingBefore).to.be.gt(0)
+      // Distribute pending for partial period - should use proportional distribution
+      await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock)
-      // Call distributePendingIssuance while paused - should work
-      await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted
+      // Both targets should receive tokens (proportionally reduced due to budget constraint)
+      const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress())
+      const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress())
-      // Check that pending was distributed
-      const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress())
-      expect(finalBalance).to.be.gt(initialBalance)
+      expect(finalBalance1).to.be.gt(initialBalance1)
+      expect(finalBalance2).to.be.gt(initialBalance2)
-      // Verify pending issuance was reset to 0
-      expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0)
+      // Verify proportional distribution (both should get same amount since same allocator rate)
+      const distributed1 = finalBalance1 - initialBalance1
+      const distributed2 = finalBalance2 - initialBalance2
+      expect(distributed1).to.be.closeTo(distributed2, ethers.parseEther('1'))
     })

-    it('should emit IssuanceDistributed events for each target', async () => {
+    it('should distribute remainder to default target in full rate distribution', async () => {
      const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator()

      // Setup
      await (graphToken as any).addMinter(await issuanceAllocator.getAddress())
      await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address)
-      await 
issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - // Add allocator-minting targets and a small self-minting target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + // Set target2 as default target (it's a contract that supports IIssuanceTarget) + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(await target2.getAddress()) + + // Add target with low allocator rate, high self-minting - ensures default gets significant portion await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 190000, 10000, false) // 19% allocator + 1% self + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 100000, 100000) // 10% each - // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialDefaultBalance = await (graphToken as any).balanceOf(await target2.getAddress()) - // Pause and accumulate some issuance + // Pause and accumulate (with small self-minting, available should be > allocatedTotal) await issuanceAllocator.connect(accounts.governor).pause() await ethers.provider.send('evm_mine', []) await ethers.provider.send('evm_mine', []) - // Trigger accumulation by changing self-minting allocation + // Trigger accumulation await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, true) // Remove self-minting + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 100000, 150000, 0) - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) - - // Call distributePendingIssuance and check events - const tx = await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() - const receipt = await tx.wait() - - // Should emit events for both targets - const events = receipt.logs.filter( - (log) => log.topics[0] === issuanceAllocator.interface.getEvent('IssuanceDistributed').topicHash, - ) - expect(events.length).to.equal(2) + // Distribute - should give remainder to default target + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() - // Verify the events contain the correct target addresses - const decodedEvents = events.map((event) => issuanceAllocator.interface.parseLog(event)) - const targetAddresses = decodedEvents.map((event) => event.args.target) - expect(targetAddresses).to.include(await target1.getAddress()) - expect(targetAddresses).to.include(await target2.getAddress()) + // Default target should receive tokens + const finalDefaultBalance = await (graphToken as any).balanceOf(await target2.getAddress()) + expect(finalDefaultBalance).to.be.gt(initialDefaultBalance) }) - describe('distributePendingIssuance(uint256 toBlockNumber)', () => { - it('should validate distributePendingIssuance(uint256) access control and parameters', async () => { - const { issuanceAllocator } = sharedContracts - - // Test 1: Access control - Non-governor should not be able to call distributePendingIssuance - await expect( - issuanceAllocator.connect(accounts.nonGovernor)['distributePendingIssuance(uint256)'](100), - 
).to.be.revertedWithCustomError(issuanceAllocator, 'AccessControlUnauthorizedAccount') - - // Test 2: Parameter validation - Should revert when toBlockNumber is less than lastIssuanceAccumulationBlock - const lastAccumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() - const invalidBlock = lastAccumulationBlock - 1n - await expect( - issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](invalidBlock), - ).to.be.revertedWithCustomError(issuanceAllocator, 'ToBlockOutOfRange') - - // Test 3: Parameter validation - Should revert when toBlockNumber is greater than current block - const currentBlock = await ethers.provider.getBlockNumber() - const futureBlock = currentBlock + 10 - await expect( - issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](futureBlock), - ).to.be.revertedWithCustomError(issuanceAllocator, 'ToBlockOutOfRange') - - // Test 4: Valid call - Governor should be able to call distributePendingIssuance with valid block number - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](currentBlock)) - .to.not.be.reverted - }) - - it('should accumulate and distribute issuance up to specified block', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% - - // Pause to enable accumulation - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine some blocks to create a gap - await ethers.provider.send('hardhat_mine', ['0x5']) // Mine 5 blocks - - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - const currentBlock = await ethers.provider.getBlockNumber() - const targetBlock = currentBlock - 2 // Accumulate up to 2 blocks ago - - // Call distributePendingIssuance with specific toBlockNumber - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](targetBlock) - - // Check that tokens were distributed - const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance).to.be.gt(initialBalance) - - // Check that accumulation block was updated to targetBlock - expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(targetBlock) - - // Check that distribution block was updated to targetBlock - expect(await issuanceAllocator.lastIssuanceDistributionBlock()).to.equal(targetBlock) - - // Pending should be reset to 0 - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) - }) - - it('should work with toBlockNumber equal to lastIssuanceAccumulationBlock (no-op)', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await 
target1.getAddress(), 500000, 0, false) // 50% - - const lastAccumulationBlock = await issuanceAllocator.lastIssuanceAccumulationBlock() - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - - // Call with same block number - should be no-op for accumulation - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](lastAccumulationBlock) - - // Balance should remain the same (no new accumulation) - const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance).to.equal(initialBalance) - - // Blocks should remain the same - expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(lastAccumulationBlock) - }) - - it('should work with toBlockNumber equal to current block', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% - - // Pause to enable accumulation - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine some blocks to create a gap - await ethers.provider.send('hardhat_mine', ['0x3']) // Mine 3 blocks - - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - const currentBlock = await ethers.provider.getBlockNumber() - - // Call distributePendingIssuance with current block - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](currentBlock) - - // Check that tokens were distributed - const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance).to.be.gt(initialBalance) - - // Check that accumulation block was updated to current block - expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(currentBlock) - }) - - it('should handle multiple calls with different toBlockNumbers', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% - - // Pause to enable accumulation - await issuanceAllocator.connect(accounts.governor).pause() - - // Mine some blocks to create a gap - await ethers.provider.send('hardhat_mine', ['0x5']) // Mine 5 blocks - - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - const currentBlock = await ethers.provider.getBlockNumber() - const firstTargetBlock = currentBlock - 3 - const secondTargetBlock = currentBlock - 1 + it('should trigger pending distribution path when selfMintingOffset > 0 in distributeIssuance', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - // First 
call - accumulate up to firstTargetBlock - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](firstTargetBlock) + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](await target1.getAddress(), 500000, 100000) - const balanceAfterFirst = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(balanceAfterFirst).to.be.gt(initialBalance) - expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(firstTargetBlock) + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - // Second call - accumulate from firstTargetBlock to secondTargetBlock - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](secondTargetBlock) + // Pause and accumulate + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) - const balanceAfterSecond = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(balanceAfterSecond).to.be.gt(balanceAfterFirst) - expect(await issuanceAllocator.lastIssuanceAccumulationBlock()).to.equal(secondTargetBlock) - }) + // Trigger accumulation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256,uint256)'](await target1.getAddress(), 500000, 200000, 0) - it('should return correct block number after distribution', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + // Verify accumulation exists + let distState = await issuanceAllocator.getDistributionState() + expect(distState.selfMintingOffset).to.be.gt(0) - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + // Unpause + await issuanceAllocator.connect(accounts.governor).unpause() - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + // Call distributeIssuance - should internally call _distributePendingIssuance due to accumulation + await issuanceAllocator.connect(accounts.governor).distributeIssuance() - // Pause to enable accumulation - await issuanceAllocator.connect(accounts.governor).pause() + // Verify tokens distributed and accumulation cleared + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(finalBalance).to.be.gt(initialBalance) - // Mine some blocks - await ethers.provider.send('hardhat_mine', ['0x3']) // Mine 3 blocks + distState = await issuanceAllocator.getDistributionState() + expect(distState.selfMintingOffset).to.equal(0) + }) - const currentBlock = await ethers.provider.getBlockNumber() - const targetBlock = currentBlock - 1 + it('should revert when non-governor calls distributePendingIssuance()', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() - // Test return value using 
staticCall - const result = await issuanceAllocator - .connect(accounts.governor) - ['distributePendingIssuance(uint256)'].staticCall(targetBlock) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) - expect(result).to.equal(targetBlock) - }) + // Try to call distributePendingIssuance() as non-governor + await expect(issuanceAllocator.connect(accounts.user)['distributePendingIssuance()']()).to.be.reverted }) - }) - describe('Notification Behavior When Paused', () => { - it('should notify targets of allocation changes even when paused', async () => { - const { issuanceAllocator, addresses } = sharedContracts + it('should revert when non-governor calls distributePendingIssuance(uint256)', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() - // Setup await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add initial allocation - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% - - // Pause the contract await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) - // Change allocation while paused - should notify target even though paused - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 400000, 0, true) // Change to 40% + const distState = await issuanceAllocator.getDistributionState() + const blockNumber = distState.lastDistributionBlock + BigInt(1) - // Verify that beforeIssuanceAllocationChange was called on the target - // This is verified by checking that the transaction succeeded and the allocation was updated - const allocation = await issuanceAllocator.getTargetAllocation(addresses.target1) - expect(allocation.allocatorMintingPPM).to.equal(400000) + // Try to call distributePendingIssuance(uint256) as non-governor + await expect(issuanceAllocator.connect(accounts.user)['distributePendingIssuance(uint256)'](blockNumber)).to.be + .reverted }) - it('should notify targets of issuance rate changes even when paused', async () => { - const { issuanceAllocator, addresses } = sharedContracts + it('should revert when toBlockNumber > block.number', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() - // Setup await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](addresses.target1, 300000, 0, false) // 30% - // Pause the contract + // Pause to enable distributePendingIssuance await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) - // Change issuance rate while paused - should notify targets even though paused - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) - - // Verify that the rate change was applied - expect(await issuanceAllocator.issuancePerBlock()).to.equal(ethers.parseEther('200')) + // Try to distribute to a future block + 
const futureBlock = (await ethers.provider.getBlockNumber()) + 100 + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](futureBlock)).to + .be.reverted }) - it('should not notify targets when no actual change occurs', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) - - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // 30% + it('should revert when toBlockNumber < lastDistributionBlock', async () => { + const { issuanceAllocator } = await setupIssuanceAllocator() - // Try to set the same allocation - should not notify (no change) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 0, false) // Same 30% + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - // Verify allocation is unchanged - const allocation = await issuanceAllocator.getTargetAllocation(await target1.getAddress()) - expect(allocation.allocatorMintingPPM).to.equal(300000) + // Pause and mine some blocks + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Try to set the same issuance rate - should not notify (no change) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + const distState = await issuanceAllocator.getDistributionState() + const pastBlock = distState.lastDistributionBlock - BigInt(1) - expect(await issuanceAllocator.issuancePerBlock()).to.equal(ethers.parseEther('100')) + // Try to distribute to a block before lastDistributionBlock + await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](pastBlock)).to.be + .reverted }) - }) - describe('Mixed Allocation Distribution Scenarios', () => { - it('should correctly distribute pending issuance with mixed allocations and unallocated space', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + it('should handle exact allocation with zero remainder to default', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - // Setup await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - // Test scenario: 20% allocator-minting + 40% self-minting (leaving 40% for default) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 200000, 0, false) // 20% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 400000, false) // 40% self-minting - // 40% goes to default allocation + // Set issuance to 1000 ether per block + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) - // Verify the 
setup - const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.totalAllocationPPM).to.equal(600000) // 60% reported (excludes default's 40%) - expect(totalAllocation.allocatorMintingPPM).to.equal(200000) // 20% allocator (excludes default's 40%) - expect(totalAllocation.selfMintingPPM).to.equal(400000) // 40% self + // Configure target1 with allocator=800, self=200 (total = 1000, leaving 0 for default) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target1.getAddress(), + ethers.parseEther('800'), // 800 ether per block allocator-minting + ethers.parseEther('200'), // 200 ether per block self-minting + 0, + ) - // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() - // Pause and accumulate issuance + // Pause and accumulate await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 10; i++) { + for (let i = 0; i < 5; i++) { await ethers.provider.send('evm_mine', []) } - // Trigger accumulation by forcing rate change - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) - - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const distStateBefore = await issuanceAllocator.getDistributionState() - // Call distributePendingIssuance + // Distribute - should result in exactly 0 remainder for default await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 - - // Target2 (self-minting) should receive nothing from distributePendingIssuance - expect(distributed2).to.equal(0) - - // Target1 should receive the correct proportional amount - // The calculation is: (pendingAmount * 200000) / (1000000 - 400000) = (pendingAmount * 200000) / 600000 = pendingAmount * 1/3 - // So target1 should get exactly 33.33% of the pending amount - const expectedDistribution = (pendingBefore * 200000n) / 600000n // 33.33% of pending - expect(distributed1).to.be.closeTo(expectedDistribution, ethers.parseEther('1')) + const distStateAfter = await issuanceAllocator.getDistributionState() + const blocksDist = distStateAfter.lastDistributionBlock - distStateBefore.lastDistributionBlock - // Verify pending issuance was reset - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + // Calculate expected distribution based on actual blocks + // totalForPeriod = 1000 * blocksDist ether + // selfMintingOffset = 200 * blocksDist ether + // available = (1000 - 200) * blocksDist = 800 * blocksDist ether + // allocatedTotal = 800 * blocksDist ether + // remainder = 0 ✓ + const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + const expectedDistribution = ethers.parseEther('800') * BigInt(blocksDist) + expect(finalBalance - initialBalance).to.equal(expectedDistribution) }) - it('should correctly 
distribute pending issuance among multiple allocator-minting targets', async () => { - const { issuanceAllocator, graphToken, target1, target2, target3 } = await setupIssuanceAllocator() + it('should handle proportional distribution with target having zero allocator rate', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - // Setup await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000')) - // Test scenario: 12% + 8% allocator-minting + 40% self-minting (leaving 40% for default) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 120000, 0, false) // 12% allocator-minting + // target1: allocator=400, self=0 + // target2: allocator=0, self=100 (self-minting only, no allocator-minting) + // default: gets the remainder (500 allocator + 0 self) await issuanceAllocator .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 80000, 0, false) // 8% allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 400000, false) // 40% self-minting - // 40% goes to default allocation - - // Verify the setup - const totalAllocation = await issuanceAllocator.getTotalAllocation() - expect(totalAllocation.allocatorMintingPPM).to.equal(200000) // 12% + 8% = 20% (excludes default's 40%) - expect(totalAllocation.selfMintingPPM).to.equal(400000) // 40% self + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('400'), 0, 0) + await issuanceAllocator.connect(accounts.governor)['setTargetAllocation(address,uint256,uint256,uint256)']( + await target2.getAddress(), + 0, // Zero allocator-minting rate + ethers.parseEther('100'), + 0, + ) - // Distribute once to initialize await issuanceAllocator.connect(accounts.governor).distributeIssuance() - // Pause and accumulate issuance + // Pause and accumulate enough self-minting await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 10; i++) { + for (let i = 0; i < 15; i++) { await ethers.provider.send('evm_mine', []) } - // Trigger accumulation - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) - - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) - + const distStateBefore = await issuanceAllocator.getDistributionState() const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) - // Call distributePendingIssuance - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + // Distribute only 2 blocks (out of the 15+ accumulated) + // With high self-minting accumulation, this creates proportional distribution scenario + // Expected accumulation during pause: 100 ether/block * ~15 blocks = ~1500 ether + // Distribution for 2 
blocks: totalForPeriod = 2000 ether, consumed ~= 1500 ether, available ~= 500 ether + // allocatedTotal = 400 ether * 2 = 800 ether + // Since available < allocatedTotal, proportional distribution kicks in + const partialBlock = distStateBefore.lastDistributionBlock + BigInt(2) + + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + // The key test: target1 should receive some tokens (it has allocatorMintingRate > 0) + // target2 should receive ZERO tokens (it has allocatorMintingRate == 0) + // This proves the `if (0 < targetData.allocatorMintingRate)` branch was tested const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 - const distributed3 = finalBalance3 - initialBalance3 - - // Target3 (self-minting) should receive nothing - expect(distributed3).to.equal(0) - - // Verify proportional distribution between allocator-minting targets - // Target1 should get 12/20 = 60% of the distributed amount - // Target2 should get 8/20 = 40% of the distributed amount - if (distributed1 > 0 && distributed2 > 0) { - const ratio = (BigInt(distributed1) * 1000n) / BigInt(distributed2) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1500n, 50n) // 120000/80000 = 1.5 - } - - // Total distributed should equal the allocator-minting portion of pending - // With 20% total allocator-minting (12% + 8%) out of 60% allocator-minting space (20% + 40% default): - // Each target gets: (targetPPM / (MILLION - selfMintingPPM)) * pendingAmount - // Target1: (120000 / 600000) * pendingAmount = 20% of pending - // Target2: (80000 / 600000) * pendingAmount = 13.33% of pending - // Total: 33.33% of pending - const totalDistributed = distributed1 + distributed2 - const expectedTotal = (pendingBefore * 200000n) / 600000n // 33.33% of pending - expect(totalDistributed).to.be.closeTo(expectedTotal, ethers.parseEther('1')) + expect(distributed1).to.be.gt(0) // target1 gets some tokens + expect(finalBalance2).to.equal(initialBalance2) // target2 gets zero (skipped in the if check) }) }) - describe('Edge Cases for Pending Issuance Distribution', () => { - describe('Division by Zero and Near-Zero Denominator Cases', () => { - it('should handle case when totalSelfMintingPPM equals MILLION (100% self-minting)', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + describe('Pause/Unpause Edge Cases', () => { + // Helper function to deploy a fresh IssuanceAllocator for these tests + async function setupIssuanceAllocator() { + const graphToken = await deployTestGraphToken() + const issuanceAllocator = await deployIssuanceAllocator( + await graphToken.getAddress(), + accounts.governor, + ethers.parseEther('100'), + ) + const target1 = await deployDirectAllocation(await graphToken.getAddress(), accounts.governor) + const target2 = await deployDirectAllocation(await graphToken.getAddress(), accounts.governor) - // Add 100% 
self-minting target (totalSelfMintingPPM = MILLION) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 0, 1000000, false) // 100% self-minting + return { graphToken, issuanceAllocator, target1, target2 } + } - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + it('should handle unpause → mine blocks → pause without distributeIssuance', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - // Pause and accumulate some issuance - await issuanceAllocator.connect(accounts.governor).pause() - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - // Trigger accumulation by changing rate - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), false) + // Add targets: 30 tokens/block allocator-minting, 20 tokens/block self-minting (leaving 50 for default) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('30'), 0, 0) // 30 tokens/block allocator + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target2.getAddress(), 0, ethers.parseEther('20'), 0) // 20 tokens/block self - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.equal(0) // Should be 0 because no allocator-minting allocation + // Initialize distribution + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBlock = await ethers.provider.getBlockNumber() - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + // Track initial balance for target1 (allocator-minting target) + const balance1Initial = await (graphToken as any).balanceOf(await target1.getAddress()) - // Call distributePendingIssuance - should not revert even with division by zero scenario - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + // Phase 1: Pause the contract + await issuanceAllocator.connect(accounts.governor).pause() + const _pauseBlock1 = await ethers.provider.getBlockNumber() - // Balance should remain the same (no allocator-minting targets) - expect(await (graphToken as any).balanceOf(await target1.getAddress())).to.equal(initialBalance) - }) + // Mine a few blocks while paused + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - it('should handle case with very small denominator (totalSelfMintingPPM near MILLION)', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + // Phase 2: Unpause WITHOUT calling distributeIssuance + await issuanceAllocator.connect(accounts.governor).unpause() + const _unpauseBlock = await ethers.provider.getBlockNumber() - // Setup with very high issuance rate to ensure accumulation despite small denominator - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await 
issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000000'), false) // Very high rate + // Phase 3: Mine blocks while unpaused, but DON'T call distributeIssuance + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Add targets: 1 PPM allocator-minting, 999,999 PPM self-minting (denominator = 1) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 1 PPM allocator-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 999999, false) // 999,999 PPM self-minting + // Phase 4: Pause again WITHOUT calling distributeIssuance + await issuanceAllocator.connect(accounts.governor).pause() + const _pauseBlock2 = await ethers.provider.getBlockNumber() - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + // Mine more blocks while paused + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Pause and accumulate significant issuance over many blocks - await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 100; i++) { - await ethers.provider.send('evm_mine', []) - } + // Phase 5: Call distributeIssuance while paused + // This is the key test: blocks between unpauseBlock and pauseBlock2 were unpaused, + // but since distributeIssuance is called while paused, self-minting accumulation + // treats them as paused (lazy evaluation) + const tx1 = await issuanceAllocator.connect(accounts.governor).distributeIssuance() + await tx1.wait() + const distributionBlock1 = await ethers.provider.getBlockNumber() + + // Verify: Check distribution state after first distribution + const distState1 = await issuanceAllocator.getDistributionState() + expect(distState1.lastSelfMintingBlock).to.equal(distributionBlock1) + expect(distState1.lastDistributionBlock).to.equal(initialBlock) // Should NOT advance (paused) + expect(distState1.selfMintingOffset).to.be.gt(0) // Should have accumulated + + // Calculate expected self-minting accumulation + // From initialBlock to distributionBlock1 (all blocks treated as paused) + const blocksSinceInitial = BigInt(distributionBlock1) - BigInt(initialBlock) + const selfMintingRate = ethers.parseEther('20') // 20% of 100 = 20 tokens/block + const expectedAccumulation = selfMintingRate * blocksSinceInitial + expect(distState1.selfMintingOffset).to.be.closeTo(expectedAccumulation, ethers.parseEther('1')) + + // Verify no additional allocator-minting was distributed during pause + const balance1AfterPause = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(balance1AfterPause).to.equal(balance1Initial) // Should not have changed during pause + + // Phase 6: Unpause and call distributeIssuance + await issuanceAllocator.connect(accounts.governor).unpause() + await ethers.provider.send('evm_mine', []) - // Trigger accumulation by changing rate (this forces accumulation) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000000'), true) // Force even if pending + const tx2 = await issuanceAllocator.connect(accounts.governor).distributeIssuance() + await tx2.wait() + const distributionBlock2 = await 
ethers.provider.getBlockNumber() - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + // Verify: Distribution state after second distribution + const distState2 = await issuanceAllocator.getDistributionState() + expect(distState2.lastSelfMintingBlock).to.equal(distributionBlock2) + expect(distState2.lastDistributionBlock).to.equal(distributionBlock2) // Should advance (unpaused) + expect(distState2.selfMintingOffset).to.equal(0) // Should be reset after distribution - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + // Verify allocator-minting was distributed correctly + const balance1After = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(balance1After).to.be.gt(balance1Initial) // Should have received additional tokens - // Call distributePendingIssuance - should work with very small denominator - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + // Calculate total issuance for the period + const totalBlocks = BigInt(distributionBlock2) - BigInt(initialBlock) + const totalIssuance = ethers.parseEther('100') * totalBlocks - // Target1 should receive all the pending issuance (since it's the only allocator-minting target) - const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance).to.be.gt(initialBalance) + // Self-minting should have received their allowance (but not minted via allocator) + // Allocator-minting should have received (totalIssuance - selfMintingOffset) * (30 / 80) + // 30 tokens/block for target1, 50 tokens/block for default = 80 tokens/block total allocator-minting + const expectedAllocatorDistribution = + ((totalIssuance - expectedAccumulation) * ethers.parseEther('30')) / ethers.parseEther('80') - // The distributed amount should equal the pending amount (within rounding) - const distributed = finalBalance - initialBalance - expect(distributed).to.be.closeTo(pendingBefore, ethers.parseEther('1')) - }) + // Allow for rounding errors (compare total distributed amount) + // Note: Tolerance is higher due to multiple distribution events and the initial distribution + const totalDistributed = balance1After - balance1Initial + expect(totalDistributed).to.be.closeTo(expectedAllocatorDistribution, ethers.parseEther('25')) }) - describe('Large Value and Overflow Protection', () => { - it('should handle large pending amounts without overflow', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - - // Setup with very high issuance rate - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000000'), false) // 1M tokens per block - - // Add target with high allocation - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + it('should use getDistributionState to query distribution state efficiently', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + // Setup + await (graphToken as any).addMinter(await 
issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - // Pause and accumulate for many blocks - await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 100; i++) { - await ethers.provider.send('evm_mine', []) - } + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), 0, ethers.parseEther('50'), 0) // 50 tokens/block self - // Trigger accumulation by forcing rate change - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000000'), true) // Force even if pending + // Initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initBlock = await ethers.provider.getBlockNumber() - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(ethers.parseEther('25000000')) // Should be very large (50% of total) + // Verify initial state + let distState = await issuanceAllocator.getDistributionState() + expect(distState.lastDistributionBlock).to.equal(initBlock) + expect(distState.lastSelfMintingBlock).to.equal(initBlock) + expect(distState.selfMintingOffset).to.equal(0) - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + // Pause and mine blocks + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Call distributePendingIssuance - should handle large values without overflow - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + // Call distributeIssuance while paused + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const pausedDistBlock = await ethers.provider.getBlockNumber() - const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance).to.be.gt(initialBalance) + // Verify state after paused distribution + distState = await issuanceAllocator.getDistributionState() + expect(distState.lastSelfMintingBlock).to.equal(pausedDistBlock) + expect(distState.lastDistributionBlock).to.equal(initBlock) // Should NOT advance (paused) + expect(distState.selfMintingOffset).to.be.gt(0) // Should have accumulated - // Verify the calculation is correct for large values - // Target1 has 50% allocation, so it should get: (pendingAmount * 500000) / 1000000 = 50% of pending - const distributed = finalBalance - initialBalance - const expectedDistribution = pendingBefore / 2n // 50% of pending - expect(distributed).to.be.closeTo(expectedDistribution, ethers.parseEther('1000')) // Allow for rounding - }) + // Verify getDistributionState returns consistent values + const distState2 = await issuanceAllocator.getDistributionState() + expect(distState.lastDistributionBlock).to.equal(distState2.lastDistributionBlock) + expect(distState.selfMintingOffset).to.equal(distState2.selfMintingOffset) + expect(distState.lastSelfMintingBlock).to.equal(distState2.lastSelfMintingBlock) }) - describe('Precision and Rounding Edge Cases', () => { - it('should handle small allocations with minimal rounding loss', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - - // Setup with higher 
issuance rate to ensure accumulation - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000000'), false) // Higher rate - - // Add targets with very small allocations - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 1 PPM - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 2, 0, false) // 2 PPM - - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Pause and accumulate over multiple blocks - await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 10; i++) { - await ethers.provider.send('evm_mine', []) - } - - // Trigger accumulation by forcing rate change - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000000'), true) - - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) - - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - - // Call distributePendingIssuance - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + it('should correctly emit IssuanceSelfMintAllowance events across pause/unpause cycles', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), 0, ethers.parseEther('50'), 0) // 50 tokens/block self - // Verify proportional distribution (target2 should get ~2x target1) - if (distributed1 > 0 && distributed2 > 0) { - const ratio = (BigInt(distributed2) * 1000n) / BigInt(distributed1) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(2000n, 100n) // Should be close to 2.0 with some tolerance - } - }) + // Initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initBlock = await ethers.provider.getBlockNumber() - it('should handle zero pending amount correctly', async () => { - const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() + // Pause, unpause (without distribute), pause again + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await issuanceAllocator.connect(accounts.governor).unpause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await 
issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + // Call distributeIssuance while paused + const tx = await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const receipt = await tx.wait() + const currentBlock = await ethers.provider.getBlockNumber() - // Add target - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% + // Find IssuanceSelfMintAllowance events + const events = receipt.logs.filter( + (log) => log.topics[0] === issuanceAllocator.interface.getEvent('IssuanceSelfMintAllowance').topicHash, + ) - // Distribute to ensure no pending amount - await issuanceAllocator.connect(accounts.governor).distributeIssuance() - expect(await issuanceAllocator.pendingAccumulatedAllocatorIssuance()).to.equal(0) + // Should emit exactly one event for the entire range + expect(events.length).to.equal(1) - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + // Decode the event + const decodedEvent = issuanceAllocator.interface.decodeEventLog( + 'IssuanceSelfMintAllowance', + events[0].data, + events[0].topics, + ) - // Call distributePendingIssuance with zero pending - should be no-op - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + // Verify event covers the correct block range (from initBlock+1 to currentBlock) + expect(decodedEvent.fromBlock).to.equal(BigInt(initBlock) + 1n) + expect(decodedEvent.toBlock).to.equal(currentBlock) + expect(decodedEvent.target).to.equal(await target1.getAddress()) - // Balance should remain unchanged - expect(await (graphToken as any).balanceOf(await target1.getAddress())).to.equal(initialBalance) - }) + // Verify amount matches expected (50% of 100 tokens/block * number of blocks) + const blocksInRange = BigInt(currentBlock) - BigInt(initBlock) + const expectedAmount = ethers.parseEther('50') * blocksInRange + expect(decodedEvent.amount).to.be.closeTo(expectedAmount, ethers.parseEther('1')) }) - describe('Mixed Allocation Scenarios', () => { - it('should correctly distribute with extreme allocation ratios', async () => { - const { issuanceAllocator, graphToken, target1, target2, target3 } = await setupIssuanceAllocator() - - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) - - // Add targets with extreme ratios: 1 PPM, 499,999 PPM allocator-minting, 500,000 PPM self-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 0.0001% - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 499999, 0, false) // 49.9999% - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target3.getAddress(), 0, 500000, false) // 50% self-minting - - // Distribute once to initialize - await 
issuanceAllocator.connect(accounts.governor).distributeIssuance() - - // Pause and accumulate - await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 5; i++) { - await ethers.provider.send('evm_mine', []) - } + it('should continue accumulating through unpaused periods when accumulated balance exists', async () => { + const { issuanceAllocator, graphToken, target1 } = await setupIssuanceAllocator() - // Trigger accumulation by forcing rate change - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + // Set target1 allocation with both allocator and self minting + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('30'), ethers.parseEther('20'), 0) - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const initialBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + // Distribute to set starting point + await issuanceAllocator.distributeIssuance() + const blockAfterInitialDist = await ethers.provider.getBlockNumber() - // Call distributePendingIssuance - await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']() + // Phase 1: Pause and mine blocks + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) - const finalBalance3 = await (graphToken as any).balanceOf(await target3.getAddress()) + // Phase 2: Distribute while paused + await issuanceAllocator.distributeIssuance() + const blockDist1 = await ethers.provider.getBlockNumber() - const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 - const distributed3 = finalBalance3 - initialBalance3 + const state1 = await issuanceAllocator.getDistributionState() + const pausedBlocks1 = blockDist1 - blockAfterInitialDist + const expectedAccumulation1 = ethers.parseEther('20') * BigInt(pausedBlocks1) + expect(state1.selfMintingOffset).to.equal(expectedAccumulation1) - // Target3 (self-minting) should receive nothing from distributePendingIssuance - expect(distributed3).to.equal(0) + // Phase 3: Unpause (no distribute) + await issuanceAllocator.connect(accounts.governor).unpause() - // Target2 should receive ~499,999x more than target1 - if (distributed1 > 0 && distributed2 > 0) { - const ratio = distributed2 / distributed1 - expect(ratio).to.be.closeTo(499999n, 1000n) // Allow for rounding - } + // Mine more blocks while unpaused (no distribute!) 
+ await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Total distributed should equal pending (within rounding) - const totalDistributed = distributed1 + distributed2 - expect(totalDistributed).to.be.closeTo(pendingBefore, ethers.parseEther('0.001')) - }) + // Phase 4: Distribute while unpaused + await issuanceAllocator.distributeIssuance() + const blockDist2 = await ethers.provider.getBlockNumber() - it('should handle dynamic allocation changes affecting denominator', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + const state2 = await issuanceAllocator.getDistributionState() + expect(state2.lastSelfMintingBlock).to.equal(blockDist2) + expect(state2.selfMintingOffset).to.equal(0) // Cleared by distribution - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + // Phase 5: Pause again (no distribute) + await issuanceAllocator.connect(accounts.governor).pause() + const blockPause2 = await ethers.provider.getBlockNumber() - // Initial setup: 50% allocator-minting, 50% self-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 500000, 0, false) // 50% allocator - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 500000, false) // 50% self + // Mine more blocks while paused + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + // Phase 6: Distribute while paused + await issuanceAllocator.distributeIssuance() + const blockDist3 = await ethers.provider.getBlockNumber() - // Pause and accumulate - await issuanceAllocator.connect(accounts.governor).pause() - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) + const state3 = await issuanceAllocator.getDistributionState() - // Change allocation to make denominator smaller: 10% allocator, 90% self-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 100000, 0, true) // 10% allocator - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 900000, true) // 90% self + // THE FIX: With the new logic, accumulation continues from lastSelfMintingBlock + // when paused, even if some of those blocks happened during an unpaused period + // where no distribution occurred. This is conservative and safe. 
+ const blocksAccumulated = blockDist3 - blockDist2 + const actuallyPausedBlocks = blockDist3 - blockPause2 + const unpausedBlocksIncluded = blocksAccumulated - actuallyPausedBlocks - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + // Verify the fix: accumulation should be for all blocks from lastSelfMintingBlock + const actualAccumulation = state3.selfMintingOffset + const expectedAccumulation = ethers.parseEther('20') * BigInt(blocksAccumulated) - const initialBalance = await (graphToken as any).balanceOf(await target1.getAddress()) + expect(actualAccumulation).to.equal( + expectedAccumulation, + 'Should accumulate from lastSelfMintingBlock when paused, including unpaused blocks where no distribution occurred', + ) - // Call distributePendingIssuance with changed denominator - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + // Rationale: Once accumulation starts (during pause), continue through any unpaused periods + // until distribution clears the accumulation. This is conservative and allows better recovery. + expect(unpausedBlocksIncluded).to.equal(1) // Should include 1 unpaused block (blockDist2 to blockPause2) + }) - const finalBalance = await (graphToken as any).balanceOf(await target1.getAddress()) - expect(finalBalance).to.be.gt(initialBalance) + it('should correctly handle partial distribution when toBlockNumber < block.number', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - // The distribution should use the new denominator (MILLION - 900000 = 100000) - // So target1 should get all the pending amount since it's the only allocator-minting target - const distributed = finalBalance - initialBalance - expect(distributed).to.be.closeTo(pendingBefore, ethers.parseEther('0.001')) - }) - }) + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - describe('Boundary Value Testing', () => { - it('should handle totalSelfMintingPPM = 0 (no self-minting targets)', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + // Add targets: 30 tokens/block allocator-minting, 20 tokens/block self-minting + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('30'), 0, 0) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target2.getAddress(), 0, ethers.parseEther('20'), 0) - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100'), false) + // Initialize distribution + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBlock = await ethers.provider.getBlockNumber() - // Add only allocator-minting targets (totalSelfMintingPPM = 0) - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 300000, 
0, false) // 30% - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 200000, 0, false) // 20% + // Pause and mine blocks to accumulate self-minting + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + // We've mined 8 blocks while paused (pause tx + 8 evm_mine calls) + // Current block should be initialBlock + 9 (pause + 8 mines) - // Pause and accumulate - await issuanceAllocator.connect(accounts.governor).pause() - await ethers.provider.send('evm_mine', []) - await ethers.provider.send('evm_mine', []) + // Call distributePendingIssuance with toBlockNumber at the halfway point + const midBlock = initialBlock + 5 // Distribute only up to block 5 - // Trigger accumulation by forcing rate change - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200'), true) + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](midBlock) - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + // Check the state after partial distribution + const stateAfterPartial = await issuanceAllocator.getDistributionState() + const actualCurrentBlock = await ethers.provider.getBlockNumber() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + // Budget-based clearing behavior for partial distribution: + // - lastSelfMintingBlock advances to actualCurrentBlock (via _advanceSelfMintingBlock) + // - lastDistributionBlock advances to midBlock (partial distribution) + // - selfMintingOffset is reduced by min(accumulated, totalForPeriod) + // + // In this case: accumulated self-minting from initialBlock to actualCurrentBlock is small + // compared to the period budget (100 tokens/block * 5 blocks distributed = 500 tokens), + // so all accumulated is cleared (budget exceeds accumulated). - // Call distributePendingIssuance - denominator should be MILLION (1,000,000) - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + expect(stateAfterPartial.lastDistributionBlock).to.equal(midBlock) + expect(stateAfterPartial.lastSelfMintingBlock).to.equal(actualCurrentBlock) - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + // Budget-based logic: subtract min(accumulated, totalForPeriod) from accumulated + // Since accumulated < totalForPeriod (small accumulation vs large budget for 5 blocks), + // all accumulated is cleared. 
+ expect(stateAfterPartial.selfMintingOffset).to.equal(0, 'Accumulated cleared when less than period budget') - const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 + // Verify subsequent distribution works correctly + await issuanceAllocator.connect(accounts.governor).unpause() + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const finalBlock = await ethers.provider.getBlockNumber() - // Verify proportional distribution (3:2 ratio) - if (distributed1 > 0 && distributed2 > 0) { - const ratio = (BigInt(distributed1) * 1000n) / BigInt(distributed2) // Multiply by 1000 for precision - expect(ratio).to.be.closeTo(1500n, 50n) // 300000/200000 = 1.5 - } + const stateAfterFinal = await issuanceAllocator.getDistributionState() + expect(stateAfterFinal.selfMintingOffset).to.equal(0) + expect(stateAfterFinal.lastDistributionBlock).to.equal(finalBlock) - // Total distributed should equal the allocated portion of pending - // With 50% total allocator-minting allocation: (30% + 20%) / 100% = 50% of pending - const totalDistributed = distributed1 + distributed2 - const expectedTotal = pendingBefore / 2n // 50% of pending - expect(totalDistributed).to.be.closeTo(expectedTotal, ethers.parseEther('0.001')) - }) + // Verify token distribution is mathematically correct + // The allocator-minting should have received the correct amount accounting for ALL self-minting accumulation + const balance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - it('should handle totalSelfMintingPPM = MILLION - 1 (minimal allocator-minting)', async () => { - const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() + const totalBlocks = BigInt(finalBlock) - BigInt(initialBlock) + const totalIssuance = ethers.parseEther('100') * totalBlocks + const totalSelfMinting = ethers.parseEther('20') * totalBlocks + const availableForAllocator = totalIssuance - totalSelfMinting + // target1 gets 30/80 of allocator-minting (30 for target1, 50 for default) + const expectedForTarget1 = (availableForAllocator * ethers.parseEther('30')) / ethers.parseEther('80') - // Setup - await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) - await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('1000'), false) + // Allow higher tolerance due to multiple distribution calls (partial + full) + // Each transaction adds blocks which affects the total issuance calculation + expect(balance1).to.be.closeTo(expectedForTarget1, ethers.parseEther('100')) + }) - // Add targets: 1 PPM allocator-minting, 999,999 PPM self-minting - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target1.getAddress(), 1, 0, false) // 1 PPM allocator - await issuanceAllocator - .connect(accounts.governor) - ['setTargetAllocation(address,uint256,uint256,bool)'](await target2.getAddress(), 0, 999999, false) // 999,999 PPM self + it('should correctly handle accumulated self-minting that exceeds period budget', async () => { + const { issuanceAllocator, graphToken, target1, target2 } = await setupIssuanceAllocator() - // Distribute once to initialize - await issuanceAllocator.connect(accounts.governor).distributeIssuance() + // Setup + await (graphToken as any).addMinter(await issuanceAllocator.getAddress()) + await 
issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('100')) - // Pause and accumulate significant issuance - await issuanceAllocator.connect(accounts.governor).pause() - for (let i = 0; i < 10; i++) { - await ethers.provider.send('evm_mine', []) - } + // High self-minting rate: 80 tokens/block, allocator: 20 tokens/block + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target1.getAddress(), ethers.parseEther('20'), 0, 0) + await issuanceAllocator + .connect(accounts.governor) + [ + 'setTargetAllocation(address,uint256,uint256,uint256)' + ](await target2.getAddress(), 0, ethers.parseEther('80'), 0) - // Trigger accumulation by forcing rate change - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('2000'), true) + // Initialize + await issuanceAllocator.connect(accounts.governor).distributeIssuance() + const initialBlock = await ethers.provider.getBlockNumber() - const pendingBefore = await issuanceAllocator.pendingAccumulatedAllocatorIssuance() - expect(pendingBefore).to.be.gt(0) + // Pause and accumulate a lot + await issuanceAllocator.connect(accounts.governor).pause() + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + await ethers.provider.send('evm_mine', []) + const afterMining = await ethers.provider.getBlockNumber() - const initialBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const initialBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + // Accumulated should be: 80 * (afterMining - initialBlock) + const blocksAccumulated = afterMining - initialBlock + const _expectedAccumulated = ethers.parseEther('80') * BigInt(blocksAccumulated) - // Call distributePendingIssuance - denominator should be 1 - await expect(issuanceAllocator.connect(accounts.governor)['distributePendingIssuance()']()).to.not.be.reverted + // Now distribute only 1 block worth (partialBlock - initialBlock = 1) + const partialBlock = initialBlock + 1 + await issuanceAllocator.connect(accounts.governor)['distributePendingIssuance(uint256)'](partialBlock) - const finalBalance1 = await (graphToken as any).balanceOf(await target1.getAddress()) - const finalBalance2 = await (graphToken as any).balanceOf(await target2.getAddress()) + const stateAfter = await issuanceAllocator.getDistributionState() + const afterDistBlock = await ethers.provider.getBlockNumber() - const distributed1 = finalBalance1 - initialBalance1 - const distributed2 = finalBalance2 - initialBalance2 + // More accumulation happened during the distributePendingIssuance call itself + const totalBlocksAccumulated = afterDistBlock - initialBlock + const totalExpectedAccumulated = ethers.parseEther('80') * BigInt(totalBlocksAccumulated) - // Target2 (self-minting) should receive nothing - expect(distributed2).to.equal(0) + // Budget-based logic: distributed 1 block with totalForPeriod = issuancePerBlock * 1 = 100 + // Subtract budget from accumulated (not rate-based), since we don't know historical rates + const blocksDistributed = partialBlock - initialBlock + const totalForPeriod = ethers.parseEther('100') * BigInt(blocksDistributed) + const expectedRemaining = totalExpectedAccumulated - totalForPeriod - // Target1 should 
receive all pending issuance
-      expect(distributed1).to.be.closeTo(pendingBefore, ethers.parseEther('0.001'))
-    })
+      // This should NOT be zero - accumulated exceeds period budget, so remainder is retained
+      expect(stateAfter.selfMintingOffset).to.be.gt(0)
+      // Budget-based: accumulated ~560 (including the distribution block), subtract the 100-token budget, expect ~460 remaining (within 10 token tolerance)
+      expect(stateAfter.selfMintingOffset).to.be.closeTo(expectedRemaining, ethers.parseEther('10'))
     })
   })
 })
diff --git a/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts
index 77645546a..d21150c81 100644
--- a/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts
+++ b/packages/issuance/test/tests/allocate/IssuanceSystem.test.ts
@@ -35,13 +35,13 @@ describe('Issuance System', () => {
     await contracts.issuanceAllocator
       .connect(accounts.governor)
       [
-        'setTargetAllocation(address,uint256,uint256,bool)'
-      ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, false)
+        'setTargetAllocation(address,uint256,uint256,uint256)'
+      ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, 0)
     await contracts.issuanceAllocator
       .connect(accounts.governor)
       [
-        'setTargetAllocation(address,uint256,uint256,bool)'
-      ](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0, false)
+        'setTargetAllocation(address,uint256,uint256,uint256)'
+      ](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0, 0)
 
     // Grant operator roles using predefined constants
     await contracts.target1
@@ -86,42 +86,46 @@ describe('Issuance System', () => {
       // Verify initial total allocation (excludes default since it's address(0))
       const totalAlloc = await contracts.issuanceAllocator.getTotalAllocation()
-      expect(totalAlloc.totalAllocationPPM).to.equal(700000) // 70% (30% + 40%, excludes default)
+      expect(totalAlloc.totalAllocationRate).to.equal(700000) // 70% (30% + 40%, excludes default)
 
       // Change allocations: target1 = 50%, target2 = 20% (30% goes to default)
       await contracts.issuanceAllocator
         .connect(accounts.governor)
         [
-          'setTargetAllocation(address,uint256,uint256,bool)'
-        ](addresses.target1, TestConstants.ALLOCATION_50_PERCENT, 0, false)
+          'setTargetAllocation(address,uint256,uint256,uint256)'
+        ](addresses.target1, TestConstants.ALLOCATION_50_PERCENT, 0, 0)
       await contracts.issuanceAllocator
         .connect(accounts.governor)
         [
-          'setTargetAllocation(address,uint256,uint256,bool)'
-        ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, false)
+          'setTargetAllocation(address,uint256,uint256,uint256)'
+        ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, 0)
 
       // Verify updated allocations (excludes default since it's address(0))
       const updatedTotalAlloc = await contracts.issuanceAllocator.getTotalAllocation()
-      expect(updatedTotalAlloc.totalAllocationPPM).to.equal(700000) // 70% (50% + 20%, excludes default)
+      expect(updatedTotalAlloc.totalAllocationRate).to.equal(700000) // 70% (50% + 20%, excludes default)
 
       // Verify individual target allocations
       const target1Info = await contracts.issuanceAllocator.getTargetData(addresses.target1)
      const target2Info = await contracts.issuanceAllocator.getTargetData(addresses.target2)
-      expect(target1Info.allocatorMintingPPM + target1Info.selfMintingPPM).to.equal(TestConstants.ALLOCATION_50_PERCENT)
-      expect(target2Info.allocatorMintingPPM + target2Info.selfMintingPPM).to.equal(TestConstants.ALLOCATION_20_PERCENT)
+      expect(target1Info.allocatorMintingRate + target1Info.selfMintingRate).to.equal(
+        TestConstants.ALLOCATION_50_PERCENT,
+      )
+      
expect(target2Info.allocatorMintingRate + target2Info.selfMintingRate).to.equal( + TestConstants.ALLOCATION_20_PERCENT, + ) // Verify proportional issuance distribution (50:20 = 5:2 ratio) const target1Result = await contracts.issuanceAllocator.getTargetIssuancePerBlock(addresses.target1) const target2Result = await contracts.issuanceAllocator.getTargetIssuancePerBlock(addresses.target2) - expect(target1Result.selfIssuancePerBlock).to.equal(0) - expect(target2Result.selfIssuancePerBlock).to.equal(0) + expect(target1Result.selfIssuanceRate).to.equal(0) + expect(target2Result.selfIssuanceRate).to.equal(0) // Verify the ratio using helper function: 50/20 = 2.5, so 2500 in our precision expectRatioToEqual( - target1Result.allocatorIssuancePerBlock, - target2Result.allocatorIssuancePerBlock, + target1Result.allocatorIssuanceRate, + target2Result.allocatorIssuanceRate, 2500n, // 50/20 * 1000 precision TestConstants.DEFAULT_TOLERANCE, ) diff --git a/packages/issuance/test/tests/allocate/ReentrancyProtection.test.ts b/packages/issuance/test/tests/allocate/ReentrancyProtection.test.ts new file mode 100644 index 000000000..245271acb --- /dev/null +++ b/packages/issuance/test/tests/allocate/ReentrancyProtection.test.ts @@ -0,0 +1,265 @@ +import { expect } from 'chai' +import hre from 'hardhat' +const { ethers } = hre + +import { deployTestGraphToken, getTestAccounts, SHARED_CONSTANTS } from '../common/fixtures' +import { deployIssuanceAllocator } from './fixtures' + +/** + * ReentrantAction enum matching MockReentrantTarget.sol + * IMPORTANT: This must be kept in sync with the Solidity enum + */ +enum ReentrantAction { + None, + DistributeIssuance, + SetTargetAllocation1Param, + SetTargetAllocation2Param, + SetTargetAllocation3Param, + SetIssuancePerBlock, + SetIssuancePerBlock2Param, + NotifyTarget, + SetDefaultTarget1Param, + SetDefaultTarget2Param, + DistributePendingIssuance0Param, + DistributePendingIssuance1Param, +} + +describe('IssuanceAllocator - Reentrancy Protection', () => { + let accounts + let graphToken + let issuanceAllocator + let reentrantTarget + let issuancePerBlock + const GOVERNOR_ROLE = SHARED_CONSTANTS.GOVERNOR_ROLE + const PAUSE_ROLE = SHARED_CONSTANTS.PAUSE_ROLE + + beforeEach(async () => { + accounts = await getTestAccounts() + issuancePerBlock = ethers.parseEther('100') + + // Deploy contracts + graphToken = await deployTestGraphToken() + const graphTokenAddress = await graphToken.getAddress() + + issuanceAllocator = await deployIssuanceAllocator(graphTokenAddress, accounts.governor, issuancePerBlock) + + // Grant minter role to issuanceAllocator + await graphToken.addMinter(await issuanceAllocator.getAddress()) + + // Deploy mock reentrant target + const MockReentrantTargetFactory = await ethers.getContractFactory('MockReentrantTarget') + reentrantTarget = await MockReentrantTargetFactory.deploy() + + // Set the issuance allocator address in the reentrant target + await reentrantTarget.setIssuanceAllocator(await issuanceAllocator.getAddress()) + + // Grant GOVERNOR_ROLE and PAUSE_ROLE to the reentrant target so it can attempt protected operations + await issuanceAllocator.connect(accounts.governor).grantRole(GOVERNOR_ROLE, await reentrantTarget.getAddress()) + await issuanceAllocator.connect(accounts.governor).grantRole(PAUSE_ROLE, accounts.governor.address) + }) + + describe('Reentrancy during distributeIssuance', () => { + it('should allow target to call distributeIssuance during notification (legitimate use case)', async () => { + // This verifies that targets can 
legitimately call distributeIssuance() during notification + // This is safe because: + // 1. distributeIssuance() has block-tracking protection (no-op if already at current block) + // 2. It makes no outward calls (just mints tokens) + // 3. It doesn't modify allocations + // 4. Targets may want to claim pending issuance before allocation changes + + // Add the reentrant target (reentrancy disabled during setup) + await reentrantTarget.setReentrantAction(ReentrantAction.None) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('50')) + + // Configure to call distributeIssuance during next notification + await reentrantTarget.setReentrantAction(ReentrantAction.DistributeIssuance) + + // Change allocation - the notification will call distributeIssuance + // This should succeed (distributeIssuance is not protected, as it's a legitimate use case) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('40')), + ).to.not.be.reverted + }) + }) + + describe('Reentrancy during setTargetAllocation', () => { + const testCases = [ + { + name: '1 param variant', + action: ReentrantAction.SetTargetAllocation1Param, + trigger: async (target: string) => + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](target, ethers.parseEther('40')), + }, + { + name: '2 param variant', + action: ReentrantAction.SetTargetAllocation2Param, + trigger: async (target: string) => + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](target, ethers.parseEther('40'), 0), + }, + { + name: '3 param variant', + action: ReentrantAction.SetTargetAllocation3Param, + trigger: async (target: string) => + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](target, ethers.parseEther('40'), 0), + }, + ] + + testCases.forEach(({ name, action, trigger }) => { + it(`should revert when target attempts to reenter setTargetAllocation (${name})`, async () => { + // First add the target with normal behavior + await reentrantTarget.setReentrantAction(ReentrantAction.None) + const targetAddress = await reentrantTarget.getAddress() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](targetAddress, ethers.parseEther('30')) + + // Now configure it to attempt reentrancy on next notification + await reentrantTarget.setReentrantAction(action) + + // Attempt to change allocation - should revert due to reentrancy + await expect(trigger(targetAddress)).to.be.revertedWithCustomError( + issuanceAllocator, + 'ReentrancyGuardReentrantCall', + ) + }) + }) + }) + + describe('Reentrancy during setIssuancePerBlock', () => { + const testCases = [ + { + name: '1 param variant', + action: ReentrantAction.SetIssuancePerBlock, + trigger: async () => issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200')), + }, + { + name: '2 param variant', + action: ReentrantAction.SetIssuancePerBlock2Param, + trigger: async () => + issuanceAllocator + .connect(accounts.governor) + ['setIssuancePerBlock(uint256,uint256)'](ethers.parseEther('200'), 0), + }, + ] + + testCases.forEach(({ name, action, trigger }) => { + it(`should revert when target attempts to reenter setIssuancePerBlock (${name})`, async () => { + // Set up a malicious default target + await 
issuanceAllocator + .connect(accounts.governor) + ['setDefaultTarget(address)'](await reentrantTarget.getAddress()) + + // Configure to attempt reentrancy + await reentrantTarget.setReentrantAction(action) + + // Attempt to change issuance rate - should revert due to reentrancy + await expect(trigger()).to.be.revertedWithCustomError(issuanceAllocator, 'ReentrancyGuardReentrantCall') + }) + }) + }) + + describe('Reentrancy during notifyTarget', () => { + it('should revert when target attempts to reenter notifyTarget', async () => { + // Add the target + await reentrantTarget.setReentrantAction(ReentrantAction.None) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('25')) + + // Configure to attempt reentrancy + await reentrantTarget.setReentrantAction(ReentrantAction.NotifyTarget) + + // Attempt to notify - should revert due to reentrancy + await expect( + issuanceAllocator.connect(accounts.governor).notifyTarget(await reentrantTarget.getAddress()), + ).to.be.revertedWithCustomError(issuanceAllocator, 'ReentrancyGuardReentrantCall') + }) + }) + + describe('Reentrancy during setDefaultTarget', () => { + const testCases = [ + { + name: '1 param variant', + action: ReentrantAction.SetDefaultTarget1Param, + trigger: async (target: string) => + issuanceAllocator.connect(accounts.governor)['setDefaultTarget(address)'](target), + }, + { + name: '2 param variant', + action: ReentrantAction.SetDefaultTarget2Param, + trigger: async (target: string) => issuanceAllocator.connect(accounts.governor).setDefaultTarget(target), + }, + ] + + testCases.forEach(({ name, action, trigger }) => { + it(`should revert when target attempts to reenter setDefaultTarget (${name})`, async () => { + // Configure to attempt reentrancy + await reentrantTarget.setReentrantAction(action) + + // Attempt to set as default target - should revert due to reentrancy + await expect(trigger(await reentrantTarget.getAddress())).to.be.revertedWithCustomError( + issuanceAllocator, + 'ReentrancyGuardReentrantCall', + ) + }) + }) + }) + + describe('Reentrancy during distributePendingIssuance', () => { + const testCases = [ + { name: '0 param variant', action: ReentrantAction.DistributePendingIssuance0Param }, + { name: '1 param variant', action: ReentrantAction.DistributePendingIssuance1Param }, + ] + + testCases.forEach(({ name, action }) => { + it(`should revert when target attempts to reenter distributePendingIssuance (${name})`, async () => { + // Add the reentrant target with initial allocation + await reentrantTarget.setReentrantAction(ReentrantAction.None) + const targetAddress = await reentrantTarget.getAddress() + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](targetAddress, ethers.parseEther('30')) + + // Configure to attempt calling distributePendingIssuance during next notification + await reentrantTarget.setReentrantAction(action) + + // Attempt to change allocation - should revert due to reentrancy + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](targetAddress, ethers.parseEther('40')), + ).to.be.revertedWithCustomError(issuanceAllocator, 'ReentrancyGuardReentrantCall') + }) + }) + }) + + describe('No reentrancy when disabled', () => { + it('should work normally when reentrancy is not attempted', async () => { + // Ensure reentrant action is None + await reentrantTarget.setReentrantAction(ReentrantAction.None) + + 
// Add the target with some allocation + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](await reentrantTarget.getAddress(), ethers.parseEther('50')), + ).to.not.be.reverted + + // Mine some blocks + await hre.network.provider.send('hardhat_mine', ['0x0A']) // Mine 10 blocks + + // Distribute should work normally + await expect(issuanceAllocator.distributeIssuance()).to.not.be.reverted + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/TargetNotification.test.ts b/packages/issuance/test/tests/allocate/TargetNotification.test.ts new file mode 100644 index 000000000..ab7f757a4 --- /dev/null +++ b/packages/issuance/test/tests/allocate/TargetNotification.test.ts @@ -0,0 +1,249 @@ +import { expect } from 'chai' +import hre from 'hardhat' + +const { ethers } = hre + +import { getTestAccounts } from '../common/fixtures' +import { deployTestGraphToken } from '../common/fixtures' +import { deployIssuanceAllocator } from './fixtures' + +describe('IssuanceAllocator - Target Notification', () => { + let accounts + let addresses: { + target1: string + target2: string + defaultTarget: string + } + + let issuanceAllocator + let graphToken + let target1 + let target2 + let defaultTarget + + const issuancePerBlock = ethers.parseEther('100') + + beforeEach(async () => { + // Get test accounts + accounts = await getTestAccounts() + + // Deploy GraphToken + graphToken = await deployTestGraphToken() + + // Deploy IssuanceAllocator + issuanceAllocator = await deployIssuanceAllocator( + await graphToken.getAddress(), + accounts.governor, + issuancePerBlock, + ) + + // Grant minter role to IssuanceAllocator + await graphToken.addMinter(await issuanceAllocator.getAddress()) + + // Deploy mock notification trackers + const MockNotificationTracker = await ethers.getContractFactory('MockNotificationTracker') + target1 = await MockNotificationTracker.deploy() + target2 = await MockNotificationTracker.deploy() + defaultTarget = await MockNotificationTracker.deploy() + + addresses = { + target1: await target1.getAddress(), + target2: await target2.getAddress(), + defaultTarget: await defaultTarget.getAddress(), + } + }) + + describe('setTargetAllocation notifications', () => { + it('should notify both target and default target when setting allocation', async () => { + // Set a non-zero default target first + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Verify initial state + expect(await target1.notificationCount()).to.equal(0) + expect(await defaultTarget.notificationCount()).to.equal(1) // Notified during setDefaultTarget + + // Reset notification count for clean test + await defaultTarget.resetNotificationCount() + + // Set allocation for target1 - should notify BOTH target1 and defaultTarget + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Verify both targets were notified + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should notify both targets when changing existing allocation', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Set initial allocation for target1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + 
// Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Change allocation for target1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('50')) + + // Both should be notified again + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should notify both targets when removing allocation', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Set initial allocation + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Remove allocation (set to 0) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, 0, 0) + + // Both should be notified + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should notify default target even when it is address(0)', async () => { + // Default is address(0) by default, which should handle notification gracefully + expect(await issuanceAllocator.getTargetAt(0)).to.equal(ethers.ZeroAddress) + + // Set allocation for target1 - should not revert even though default is address(0) + await expect( + issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')), + ).to.not.be.reverted + + // Target1 should be notified + expect(await target1.notificationCount()).to.equal(1) + }) + + it('should notify correct targets when setting multiple allocations', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + await defaultTarget.resetNotificationCount() + + // Set allocation for target1 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + expect(await target1.notificationCount()).to.equal(1) + expect(await target2.notificationCount()).to.equal(0) + expect(await defaultTarget.notificationCount()).to.equal(1) + + // Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Set allocation for target2 + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target2, ethers.parseEther('20')) + + // Only target2 and default should be notified (not target1) + expect(await target1.notificationCount()).to.equal(0) + expect(await target2.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + + it('should emit NotificationReceived events for both targets', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + await defaultTarget.resetNotificationCount() + + // Set allocation and check for events from both mock targets + const tx = await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Both targets should emit their NotificationReceived events + 
await expect(tx).to.emit(target1, 'NotificationReceived') + await expect(tx).to.emit(defaultTarget, 'NotificationReceived') + }) + }) + + describe('setIssuancePerBlock notifications', () => { + it('should notify only default target when changing issuance rate', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + + // Add a regular target + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Reset counters + await target1.resetNotificationCount() + await defaultTarget.resetNotificationCount() + + // Change issuance rate + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(ethers.parseEther('200')) + + // Only default should be notified (regular targets keep same absolute rates) + expect(await target1.notificationCount()).to.equal(0) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + }) + + describe('setDefaultTarget notifications', () => { + it('should notify both old and new default targets', async () => { + // Set first default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target1) + + // Reset counter + await target1.resetNotificationCount() + + // Change to new default target - should notify both + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.target2) + + // Both old and new default should be notified + expect(await target1.notificationCount()).to.equal(1) + expect(await target2.notificationCount()).to.equal(1) + }) + }) + + describe('notification deduplication', () => { + it('should not notify target twice in the same block', async () => { + // Set a non-zero default target + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(addresses.defaultTarget) + await defaultTarget.resetNotificationCount() + + // Try to set the same allocation twice in same block (second should be no-op) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Should only be notified once + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + + // Second call with same values should not notify again (no change) + await issuanceAllocator + .connect(accounts.governor) + ['setTargetAllocation(address,uint256)'](addresses.target1, ethers.parseEther('30')) + + // Counts should remain the same (no new notifications) + expect(await target1.notificationCount()).to.equal(1) + expect(await defaultTarget.notificationCount()).to.equal(1) + }) + }) +}) diff --git a/packages/issuance/test/tests/allocate/fixtures.ts b/packages/issuance/test/tests/allocate/fixtures.ts index 2a9212be4..0122365b1 100644 --- a/packages/issuance/test/tests/allocate/fixtures.ts +++ b/packages/issuance/test/tests/allocate/fixtures.ts @@ -32,7 +32,7 @@ export async function deployIssuanceAllocator(graphToken, governor, issuancePerB const issuanceAllocator = issuanceAllocatorContract // Set issuance per block - await issuanceAllocator.connect(governor).setIssuancePerBlock(issuancePerBlock, false) + await issuanceAllocator.connect(governor).setIssuancePerBlock(issuancePerBlock) return issuanceAllocator } diff --git a/packages/issuance/test/tests/allocate/issuanceCalculations.ts b/packages/issuance/test/tests/allocate/issuanceCalculations.ts index fc69edea9..e013ff5e9 100644 --- 
a/packages/issuance/test/tests/allocate/issuanceCalculations.ts +++ b/packages/issuance/test/tests/allocate/issuanceCalculations.ts @@ -3,110 +3,82 @@ import { ethers } from 'hardhat' /** * Shared calculation utilities for issuance tests. * These functions provide reference implementations for expected values in tests. - * Enhanced with better naming, documentation, and error handling. */ // Constants for better readability export const CALCULATION_CONSTANTS = { - PPM_DENOMINATOR: 1_000_000n, // Parts per million denominator PRECISION_MULTIPLIER: 1000n, // For ratio calculations WEI_PER_ETHER: ethers.parseEther('1'), } as const /** - * Calculate expected accumulation for allocator-minting targets during pause. - * Accumulation happens from lastIssuanceAccumulationBlock to current block. + * Calculate expected self-minting accumulation during pause. + * In the new model, we accumulate self-minting (not allocator-minting) during pause. * - * @param issuancePerBlock - Issuance rate per block + * @param totalSelfMintingRate - Total self-minting rate (tokens per block) * @param blocks - Number of blocks to accumulate over - * @param allocatorMintingPPM - Total allocator-minting allocation in PPM - * @returns Expected accumulated amount for allocator-minting targets + * @returns Expected accumulated self-minting amount */ -export function calculateExpectedAccumulation( - issuancePerBlock: bigint, - blocks: bigint, - allocatorMintingPPM: bigint, -): bigint { - if (blocks === 0n || allocatorMintingPPM === 0n) return 0n - - const totalIssuance = issuancePerBlock * blocks - // Contract uses: totalIssuance * totalAllocatorMintingAllocationPPM / MILLION - return (totalIssuance * allocatorMintingPPM) / CALCULATION_CONSTANTS.PPM_DENOMINATOR +export function calculateExpectedSelfMintingAccumulation(totalSelfMintingRate: bigint, blocks: bigint): bigint { + if (blocks === 0n || totalSelfMintingRate === 0n) return 0n + return totalSelfMintingRate * blocks } /** - * Calculate expected issuance for a specific target. + * Calculate expected issuance for a specific target during normal operation. * - * @param issuancePerBlock - Issuance rate per block + * @param targetRate - Target's allocation rate (tokens per block) * @param blocks - Number of blocks - * @param targetAllocationPPM - Target's allocation in PPM * @returns Expected issuance for the target */ -export function calculateExpectedTargetIssuance( - issuancePerBlock: bigint, - blocks: bigint, - targetAllocationPPM: bigint, -): bigint { - if (blocks === 0n || targetAllocationPPM === 0n) return 0n - - const totalIssuance = issuancePerBlock * blocks - return (totalIssuance * targetAllocationPPM) / CALCULATION_CONSTANTS.PPM_DENOMINATOR +export function calculateExpectedTargetIssuance(targetRate: bigint, blocks: bigint): bigint { + if (blocks === 0n || targetRate === 0n) return 0n + return targetRate * blocks } /** - * Calculate proportional distribution of pending issuance among allocator-minting targets. + * Calculate proportional distribution during unpause when insufficient funds. + * Used when available funds < total non-default needs. 
* - * @param pendingAmount - Total pending amount to distribute - * @param targetAllocationPPM - Target's allocator-minting allocation in PPM - * @param totalSelfMintingPPM - Total self-minting allocation in PPM + * @param availableAmount - Total available amount to distribute + * @param targetRate - Target's allocator-minting rate (tokens per block) + * @param totalNonDefaultRate - Total non-default allocator-minting rate * @returns Expected amount for the target */ export function calculateProportionalDistribution( - pendingAmount: bigint, - targetAllocationPPM: bigint, - totalSelfMintingPPM: bigint, + availableAmount: bigint, + targetRate: bigint, + totalNonDefaultRate: bigint, ): bigint { - if (pendingAmount === 0n || targetAllocationPPM === 0n) return 0n - - const totalAllocatorMintingPPM = CALCULATION_CONSTANTS.PPM_DENOMINATOR - totalSelfMintingPPM - if (totalAllocatorMintingPPM === 0n) return 0n - - return (pendingAmount * targetAllocationPPM) / totalAllocatorMintingPPM + if (availableAmount === 0n || targetRate === 0n || totalNonDefaultRate === 0n) return 0n + return (availableAmount * targetRate) / totalNonDefaultRate } /** * Calculate expected total issuance for multiple targets. * - * @param issuancePerBlock - Issuance rate per block * @param blocks - Number of blocks - * @param targetAllocations - Array of target allocations in PPM + * @param targetRates - Array of target rates (tokens per block) * @returns Array of expected issuance amounts for each target */ -export function calculateMultiTargetIssuance( - issuancePerBlock: bigint, - blocks: bigint, - targetAllocations: bigint[], -): bigint[] { - return targetAllocations.map((allocation) => calculateExpectedTargetIssuance(issuancePerBlock, blocks, allocation)) +export function calculateMultiTargetIssuance(blocks: bigint, targetRates: bigint[]): bigint[] { + return targetRates.map((rate) => calculateExpectedTargetIssuance(rate, blocks)) } /** - * Verify that distributed amounts add up to expected total rate. + * Verify that distributed amounts add up to expected total. * * @param distributedAmounts - Array of distributed amounts - * @param expectedTotalRate - Expected total issuance rate - * @param blocks - Number of blocks + * @param expectedTotal - Expected total amount * @param tolerance - Tolerance for rounding errors (default: 1 wei) * @returns True if amounts add up within tolerance */ export function verifyTotalDistribution( distributedAmounts: bigint[], - expectedTotalRate: bigint, - blocks: bigint, + expectedTotal: bigint, tolerance: bigint = 1n, ): boolean { const totalDistributed = distributedAmounts.reduce((sum, amount) => sum + amount, 0n) - const expectedTotal = expectedTotalRate * blocks const diff = totalDistributed > expectedTotal ? 
totalDistributed - expectedTotal : expectedTotal - totalDistributed
   return diff <= tolerance
 }
@@ -114,36 +86,39 @@ export function verifyTotalDistribution(
 /**
  * Calculate expected distribution ratios between targets
  *
- * @param allocations - Array of allocations in PPM
+ * @param rates - Array of rates (tokens per block)
  * @returns Array of ratios relative to first target
  */
-export function calculateExpectedRatios(allocations: bigint[]): bigint[] {
-  if (allocations.length === 0) return []
+export function calculateExpectedRatios(rates: bigint[]): bigint[] {
+  if (rates.length === 0) return []
 
-  const baseAllocation = allocations[0]
-  if (baseAllocation === 0n) return allocations.map(() => 0n)
+  const baseRate = rates[0]
+  if (baseRate === 0n) return rates.map(() => 0n)
 
-  return allocations.map((allocation) => (allocation * CALCULATION_CONSTANTS.PRECISION_MULTIPLIER) / baseAllocation)
+  return rates.map((rate) => (rate * CALCULATION_CONSTANTS.PRECISION_MULTIPLIER) / baseRate)
 }
 
 /**
- * Convert allocation percentage to PPM
+ * Convert allocation percentage to absolute rate
  *
  * @param percentage - Percentage as a number (e.g., 30 for 30%)
- * @returns PPM value
+ * @param issuancePerBlock - Total issuance per block
+ * @returns Absolute rate (tokens per block)
  */
-export function percentageToPPM(percentage: number): number {
-  return Math.round(percentage * 10_000) // 1% = 10,000 PPM
+export function percentageToRate(percentage: number, issuancePerBlock: bigint): bigint {
+  return (issuancePerBlock * BigInt(Math.round(percentage * 100))) / 10000n
 }
 
 /**
- * Convert PPM to percentage
+ * Convert rate to percentage
  *
- * @param ppm - PPM value
+ * @param rate - Rate (tokens per block)
+ * @param issuancePerBlock - Total issuance per block
  * @returns Percentage as a number
  */
-export function ppmToPercentage(ppm: bigint | number): number {
-  return Number(ppm) / 10_000
+export function rateToPercentage(rate: bigint, issuancePerBlock: bigint): number {
+  if (issuancePerBlock === 0n) return 0
+  return Number((rate * 10000n) / issuancePerBlock) / 100
 }
 
 /**
diff --git a/packages/issuance/test/tests/allocate/optimizedFixtures.ts b/packages/issuance/test/tests/allocate/optimizedFixtures.ts
index 6ded870a1..66d3f3dc7 100644
--- a/packages/issuance/test/tests/allocate/optimizedFixtures.ts
+++ b/packages/issuance/test/tests/allocate/optimizedFixtures.ts
@@ -48,16 +48,12 @@ export async function setupOptimizedAllocateSystem(customOptions: any = {}) {
   if (options.setupTargets) {
     await issuanceAllocator
       .connect(accounts.governor)
-      [
-        'setTargetAllocation(address,uint256,uint256,bool)'
-      ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, false)
+      ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0)
 
     if (options.targetCount >= 2) {
       await issuanceAllocator
         .connect(accounts.governor)
-        [
-          'setTargetAllocation(address,uint256,uint256,bool)'
-        ](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0, false)
+        ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, TestConstants.ALLOCATION_20_PERCENT, 0)
     }
   }
 
@@ -77,32 +73,28 @@ export async function setupOptimizedAllocateSystem(customOptions: any = {}) {
     const targets = await issuanceAllocator.getTargets()
     const defaultAddress = await issuanceAllocator.getTargetAt(0)
     for (const targetAddr of targets) {
-      // Skip the default allocation target
+      // Skip the default target
      if (targetAddr === defaultAddress) continue
      await issuanceAllocator
        .connect(accounts.governor)
-      
['setTargetAllocation(address,uint256,uint256,bool)'](targetAddr, 0, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](targetAddr, 0, 0) } - // Reset default allocation to address(0) with 100% - await issuanceAllocator.connect(accounts.governor).setDefaultAllocationAddress(ethers.ZeroAddress) + // Reset default target to address(0) with 100% + await issuanceAllocator.connect(accounts.governor).setDefaultTarget(ethers.ZeroAddress) // Reset issuance rate - await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(options.issuancePerBlock, false) + await issuanceAllocator.connect(accounts.governor).setIssuancePerBlock(options.issuancePerBlock) }, // Helper to setup standard allocations setupStandardAllocations: async () => { await issuanceAllocator .connect(accounts.governor) - [ - 'setTargetAllocation(address,uint256,uint256,bool)' - ](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target1, TestConstants.ALLOCATION_30_PERCENT, 0) await issuanceAllocator .connect(accounts.governor) - [ - 'setTargetAllocation(address,uint256,uint256,bool)' - ](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0, false) + ['setTargetAllocation(address,uint256,uint256)'](addresses.target2, TestConstants.ALLOCATION_40_PERCENT, 0) }, // Helper to verify proportional distributions @@ -214,9 +206,7 @@ export async function applyAllocationScenario(issuanceAllocator: any, addresses: const targetAddress = addresses[allocation.target] await issuanceAllocator .connect(governor) - [ - 'setTargetAllocation(address,uint256,uint256,bool)' - ](targetAddress, allocation.allocatorPPM, allocation.selfPPM, false) + ['setTargetAllocation(address,uint256,uint256)'](targetAddress, allocation.allocatorPPM, allocation.selfPPM) } } diff --git a/packages/subgraph-service/contracts/utilities/AllocationManager.sol b/packages/subgraph-service/contracts/utilities/AllocationManager.sol index 08608d8b4..8ca711758 100644 --- a/packages/subgraph-service/contracts/utilities/AllocationManager.sol +++ b/packages/subgraph-service/contracts/utilities/AllocationManager.sol @@ -11,6 +11,7 @@ import { IGraphToken } from "@graphprotocol/interfaces/contracts/contracts/token import { IHorizonStakingTypes } from "@graphprotocol/interfaces/contracts/horizon/internal/IHorizonStakingTypes.sol"; import { IAllocation } from "@graphprotocol/interfaces/contracts/subgraph-service/internal/IAllocation.sol"; import { ILegacyAllocation } from "@graphprotocol/interfaces/contracts/subgraph-service/internal/ILegacyAllocation.sol"; +import { RewardsReclaim } from "@graphprotocol/interfaces/contracts/contracts/rewards/RewardsReclaim.sol"; import { GraphDirectory } from "@graphprotocol/horizon/contracts/utilities/GraphDirectory.sol"; import { AllocationManagerV1Storage } from "./AllocationManagerStorage.sol"; @@ -278,12 +279,20 @@ abstract contract AllocationManager is EIP712Upgradeable, GraphDirectory, Alloca IAllocation.State memory allocation = _allocations.get(_allocationId); require(allocation.isOpen(), AllocationManagerAllocationClosed(_allocationId)); - // Mint indexing rewards if all conditions are met - uint256 tokensRewards = (!allocation.isStale(maxPOIStaleness) && - !allocation.isAltruistic() && - _poi != bytes32(0)) && _graphEpochManager().currentEpoch() > allocation.createdAtEpoch - ? 
_graphRewardsManager().takeRewards(_allocationId) - : 0; + // Mint indexing rewards if all conditions are met, otherwise reclaim with specific reason + uint256 tokensRewards; + if (allocation.isStale(maxPOIStaleness)) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.STALE_POI, _allocationId, ""); + } else if (allocation.isAltruistic()) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.ALTRUISTIC_ALLOCATION, _allocationId, ""); + } else if (_poi == bytes32(0)) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.ZERO_POI, _allocationId, ""); + // solhint-disable-next-line gas-strict-inequalities + } else if (_graphEpochManager().currentEpoch() <= allocation.createdAtEpoch) { + _graphRewardsManager().reclaimRewards(RewardsReclaim.ALLOCATION_TOO_YOUNG, _allocationId, ""); + } else { + tokensRewards = _graphRewardsManager().takeRewards(_allocationId); + } // ... but we still take a snapshot to ensure the rewards are not accumulated for the next valid POI _allocations.snapshotRewards( @@ -418,12 +427,23 @@ abstract contract AllocationManager is EIP712Upgradeable, GraphDirectory, Alloca function _closeAllocation(address _allocationId, bool _forceClosed) internal { IAllocation.State memory allocation = _allocations.get(_allocationId); + // Reclaim uncollected rewards before closing + uint256 reclaimedRewards = _graphRewardsManager().reclaimRewards( + RewardsReclaim.CLOSE_ALLOCATION, + _allocationId, + "" + ); + // Take rewards snapshot to prevent other allos from counting tokens from this allo _allocations.snapshotRewards( _allocationId, _graphRewardsManager().onSubgraphAllocationUpdate(allocation.subgraphDeploymentId) ); + // Clear pending rewards only if rewards were reclaimed. This marks them as consumed, + // which could be useful for future logic that searches for unconsumed rewards. 
+ if (0 < reclaimedRewards) _allocations.clearPendingRewards(_allocationId); + _allocations.close(_allocationId); allocationProvisionTracker.release(allocation.indexer, allocation.tokens); diff --git a/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol b/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol index 773d676f9..389b48cae 100644 --- a/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol +++ b/packages/subgraph-service/test/unit/mocks/MockRewardsManager.sol @@ -63,6 +63,25 @@ contract MockRewardsManager is IRewardsManager { function setIndexerEligibilityReclaimAddress(address) external {} + function setReclaimAddress(bytes32, address) external {} + + function reclaimRewards(bytes32, address _allocationID, bytes calldata) external view returns (uint256) { + address rewardsIssuer = msg.sender; + (bool isActive, , , uint256 tokens, uint256 accRewardsPerAllocatedToken) = IRewardsIssuer(rewardsIssuer) + .getAllocationData(_allocationID); + + if (!isActive) { + return 0; + } + + // Calculate accumulated but unclaimed rewards + uint256 accRewardsPerTokens = tokens.mulPPM(rewardsPerSignal); + uint256 rewards = accRewardsPerTokens - accRewardsPerAllocatedToken; + + // Note: We don't mint tokens for reclaimed rewards, they are just discarded + return rewards; + } + // -- Getters -- function getNewRewardsPerSignal() external view returns (uint256) {} diff --git a/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol b/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol index 74c677504..cf398f7dc 100644 --- a/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol +++ b/packages/subgraph-service/test/unit/subgraphService/SubgraphService.t.sol @@ -383,7 +383,7 @@ contract SubgraphServiceTest is SubgraphServiceSharedTest { CollectPaymentData memory collectPaymentDataBefore, CollectPaymentData memory collectPaymentDataAfter ) private view { - (IGraphTallyCollector.SignedRAV memory signedRav, uint256 tokensToCollect) = abi.decode( + (IGraphTallyCollector.SignedRAV memory signedRav, ) = abi.decode( _data, (IGraphTallyCollector.SignedRAV, uint256) ); diff --git a/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol b/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol index 61224ff23..85cc4f84b 100644 --- a/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol +++ b/packages/subgraph-service/test/unit/subgraphService/collect/indexing/indexing.t.sol @@ -171,4 +171,75 @@ contract SubgraphServiceCollectIndexingTest is SubgraphServiceTest { ); subgraphService.collect(newIndexer, paymentType, data); } + + function test_SubgraphService_Collect_Indexing_ZeroRewards(uint256 tokens) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory data = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // Don't skip time - collect immediately, expecting zero rewards + _collect(users.indexer, paymentType, data); + } + + function test_SubgraphService_Collect_Indexing_ZeroPOI(uint256 tokens) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + // Submit zero POI (bytes32(0)) + bytes memory data = abi.encode(allocationID, bytes32(0), _getHardcodedPOIMetadata()); + + // skip time to ensure allocation could get rewards + vm.roll(block.number + 
EPOCH_LENGTH); + + // Should succeed but reclaim rewards due to zero POI - just verify it doesn't revert + subgraphService.collect(users.indexer, paymentType, data); + } + + function test_SubgraphService_Collect_Indexing_StalePOI(uint256 tokens) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory data = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // Skip past maxPOIStaleness to make allocation stale + skip(maxPOIStaleness + 1); + + // Should succeed but reclaim rewards due to stale POI - just verify it doesn't revert + subgraphService.collect(users.indexer, paymentType, data); + } + + function test_SubgraphService_Collect_Indexing_AltruisticAllocation(uint256 tokens) public useIndexer { + tokens = bound(tokens, minimumProvisionTokens, MAX_TOKENS); + + _createProvision(users.indexer, tokens, fishermanRewardPercentage, disputePeriod); + _register(users.indexer, abi.encode("url", "geoHash", address(0))); + + // Create altruistic allocation (0 tokens) + bytes memory data = _createSubgraphAllocationData(users.indexer, subgraphDeployment, allocationIDPrivateKey, 0); + _startService(users.indexer, data); + + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory collectData = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // skip time to ensure allocation could get rewards + vm.roll(block.number + EPOCH_LENGTH); + + // Should succeed but reclaim rewards due to altruistic allocation - just verify it doesn't revert + subgraphService.collect(users.indexer, paymentType, collectData); + } + + function test_SubgraphService_Collect_Indexing_RevertWhen_AllocationClosed( + uint256 tokens + ) public useIndexer useAllocation(tokens) { + IGraphPayments.PaymentTypes paymentType = IGraphPayments.PaymentTypes.IndexingRewards; + bytes memory data = abi.encode(allocationID, bytes32("POI"), _getHardcodedPOIMetadata()); + + // Close the allocation + resetPrank(users.indexer); + subgraphService.stopService(users.indexer, abi.encode(allocationID)); + + // skip time to ensure allocation could get rewards + vm.roll(block.number + EPOCH_LENGTH); + + // Attempt to collect on closed allocation should revert + // Using the bytes4 selector directly since AllocationManagerAllocationClosed is inherited from AllocationManager + bytes4 selector = bytes4(keccak256("AllocationManagerAllocationClosed(address)")); + vm.expectRevert(abi.encodeWithSelector(selector, allocationID)); + subgraphService.collect(users.indexer, paymentType, data); + } }
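
Illustrative usage of the rate-based test helpers introduced above, as a minimal sketch: GRT_PER_BLOCK, the 30/20 split, and the amounts are assumed values, and the exported helpers are assumed to be in scope from the calculation-helpers module edited in this diff.

// Convert percentage-style constants into absolute per-block rates (tokens per block)
const GRT_PER_BLOCK = 100n * 10n ** 18n // assumed issuance per block for this example
const target1Rate = percentageToRate(30, GRT_PER_BLOCK) // 30 tokens per block
const target2Rate = percentageToRate(20, GRT_PER_BLOCK) // 20 tokens per block

// Split an available amount pro rata by rate, replacing the old PPM-based math
const available = 1_000n * 10n ** 18n
const totalRate = target1Rate + target2Rate
const share1 = calculateProportionalDistribution(available, target1Rate, totalRate) // 600 tokens
const share2 = calculateProportionalDistribution(available, target2Rate, totalRate) // 400 tokens

// The shares should reconstruct the available amount within the default 1 wei tolerance
verifyTotalDistribution([share1, share2], available) // true

// Round-trip a rate back to a percentage for assertions
rateToPercentage(target1Rate, GRT_PER_BLOCK) // 30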