Merge PR #927 from 'rithvikvibhu/end-airdrop'

This commit is contained in:
Nodari Chkuaselidze 2025-06-25 14:04:50 +04:00
commit dee79a3c4e
No known key found for this signature in database
GPG key ID: B018A7BB437D1F05
6 changed files with 578 additions and 35 deletions

View file

@ -5,6 +5,11 @@
**When upgrading to this version of hsd, you must pass `--chain-migrate=4`
and `--wallet-migrate=7` when you run it for the first time.**
### Network
**End Airdrop soft fork has been included. ([#927](https://github.com/handshake-org/hsd/pull/927))
Miners who want to support the soft-fork need to start signalling with `airstop` bit.**
### Wallet Changes
#### Wallet HTTP API
@ -213,6 +218,11 @@ The following methods have been deprecated:
## v6.0.0
### Network
**ICANN Lockup soft fork has been included. ([#819](https://github.com/handshake-org/hsd/pull/819), [#828](https://github.com/handshake-org/hsd/pull/828), [#834](https://github.com/handshake-org/hsd/pull/834))
Miners who want to support the soft-fork need to start signalling with `icannlockup` bit.**
### Node and Wallet HTTP API
Validation errors, request parameter errors or bad HTTP requests will no
longer return (and log) `500` status code, instead will return `400`.

View file

@ -644,6 +644,14 @@ class Chain extends AsyncEmitter {
// Airdrop proof.
if (!output.covenant.isClaim()) {
// Disable airdrop claims if airstop is activated
if (state.hasAirstop) {
throw new VerifyError(block,
'invalid',
'bad-airdrop-disabled',
100);
}
let proof;
try {
proof = AirdropProof.decode(witness.items[0]);
@ -739,6 +747,10 @@ class Chain extends AsyncEmitter {
if (await this.isActive(prev, deployments.icannlockup))
state.nameFlags |= rules.nameFlags.VERIFY_COVENANTS_LOCKUP;
// Disable airdrop claims.
if (await this.isActive(prev, deployments.airstop))
state.hasAirstop = true;
return state;
}
@ -762,6 +774,9 @@ class Chain extends AsyncEmitter {
if (!this.state.hasICANNLockup() && state.hasICANNLockup())
this.logger.warning('ICANN lockup has been activated.');
if (!this.state.hasAirstop && state.hasAirstop)
this.logger.warning('Airdrop claims has been disabled.');
this.state = state;
}
@ -4115,6 +4130,7 @@ class DeploymentState {
this.flags = Script.flags.MANDATORY_VERIFY_FLAGS;
this.lockFlags = common.MANDATORY_LOCKTIME_FLAGS;
this.nameFlags = rules.MANDATORY_VERIFY_COVENANT_FLAGS;
this.hasAirstop = false;
}
hasHardening() {

View file

@ -41,6 +41,9 @@ const Claim = require('../primitives/claim');
const AirdropProof = require('../primitives/airdropproof');
const {types} = rules;
/** @typedef {import('../types').Hash} Hash */
/** @typedef {import('../blockchain/chainentry')} ChainEntry */
/**
* Mempool
* Represents a mempool.
@ -274,6 +277,7 @@ class Mempool extends EventEmitter {
// for a now expired name. Another
// example is a stale BID for a name
// which has now reached the REVEAL state.
const prevState = this.nextState;
const state = await this.getNextState();
const hardened = state.hasHardening();
const invalid = this.contracts.invalidate(block.height, hardened);
@ -316,13 +320,18 @@ class Mempool extends EventEmitter {
}
}
// If the next block activates airstop we drop any leftover proofs,
// they can no longer be mined.
if (!prevState.hasAirstop && state.hasAirstop)
this.dropAirdrops();
this.cache.sync(block.hash);
await this.cache.flush();
this.tip = block.hash;
if (invalid.length > 0) {
if (invalid.size > 0) {
this.logger.info(
'Invalidated %d txs for block %d.',
invalid.size, block.height);
@ -375,7 +384,7 @@ class Mempool extends EventEmitter {
const proof = AirdropProof.decode(witness.items[0]);
const entry = AirdropEntry.fromAirdrop(proof, this.chain.height);
this.trackAirdrop(entry, -1);
this.trackAirdrop(entry);
continue;
}
@ -387,7 +396,7 @@ class Mempool extends EventEmitter {
const entry = ClaimEntry.fromClaim(claim, data, this.chain.height);
this.trackClaim(entry, -1);
this.trackClaim(entry);
}
let total = 0;
@ -1048,11 +1057,10 @@ class Mempool extends EventEmitter {
* fully processed.
* @method
* @param {Claim} claim
* @param {Number?} id
* @returns {Promise}
*/
async addClaim(claim, id) {
async addClaim(claim) {
if (this.chain.height + 1 < this.network.txStart) {
throw new VerifyError(claim,
'invalid',
@ -1063,7 +1071,7 @@ class Mempool extends EventEmitter {
const hash = claim.hash();
const unlock = await this.locker.lock(hash);
try {
return await this._addClaim(claim, id);
return await this._addClaim(claim);
} finally {
unlock();
}
@ -1074,15 +1082,11 @@ class Mempool extends EventEmitter {
* @method
* @private
* @param {Claim} claim
* @param {Number?} id
* @returns {Promise}
*/
async _addClaim(claim, id) {
if (id == null)
id = -1;
await this.insertClaim(claim, id);
async _addClaim(claim) {
await this.insertClaim(claim);
if (util.now() - this.lastFlush > 10) {
await this.cache.flush();
@ -1095,11 +1099,10 @@ class Mempool extends EventEmitter {
* @method
* @private
* @param {Claim} claim
* @param {Number?} id
* @returns {Promise}
*/
async insertClaim(claim, id) {
async insertClaim(claim) {
const height = this.chain.height + 1;
const tip = this.chain.tip;
const hash = claim.hash();
@ -1175,7 +1178,7 @@ class Mempool extends EventEmitter {
const entry = ClaimEntry.fromClaim(claim, data, this.chain.height);
this.trackClaim(entry, id);
this.trackClaim(entry);
// Trim size if we're too big.
if (this.limitSize(hash)) {
@ -1192,10 +1195,9 @@ class Mempool extends EventEmitter {
/**
* Track claim entry.
* @param {ClaimEntry} entry
* @param {Number} id
*/
trackClaim(entry, id) {
trackClaim(entry) {
assert(!this.claims.has(entry.hash));
assert(!this.claimNames.has(entry.nameHash));
@ -1213,7 +1215,6 @@ class Mempool extends EventEmitter {
/**
* Untrack claim entry.
* @param {ClaimEntry} entry
* @param {Number} id
*/
untrackClaim(entry) {
@ -1261,11 +1262,10 @@ class Mempool extends EventEmitter {
* fully processed.
* @method
* @param {AirdropProof} proof
* @param {Number?} id
* @returns {Promise}
*/
async addAirdrop(proof, id) {
async addAirdrop(proof) {
if (this.chain.height + 1 < this.network.txStart) {
throw new VerifyError(proof,
'invalid',
@ -1276,7 +1276,7 @@ class Mempool extends EventEmitter {
const hash = proof.hash();
const unlock = await this.locker.lock(hash);
try {
return await this._addAirdrop(proof, id);
return await this._addAirdrop(proof);
} finally {
unlock();
}
@ -1287,15 +1287,11 @@ class Mempool extends EventEmitter {
* @method
* @private
* @param {AirdropProof} proof
* @param {Number?} id
* @returns {Promise}
*/
async _addAirdrop(proof, id) {
if (id == null)
id = -1;
await this.insertAirdrop(proof, id);
async _addAirdrop(proof) {
await this.insertAirdrop(proof);
if (util.now() - this.lastFlush > 10) {
await this.cache.flush();
@ -1308,11 +1304,10 @@ class Mempool extends EventEmitter {
* @method
* @private
* @param {AirdropProof} proof
* @param {Number?} id
* @returns {Promise}
*/
async insertAirdrop(proof, id) {
async insertAirdrop(proof) {
const hash = proof.hash();
// We can maybe ignore this.
@ -1326,6 +1321,9 @@ class Mempool extends EventEmitter {
if (!proof.isSane())
throw new VerifyError(proof, 'invalid', 'bad-airdrop-proof', 100);
if (this.nextState.hasAirstop)
throw new VerifyError(proof, 'invalid', 'bad-airdrop-disabled', 0);
if (this.chain.height + 1 >= this.network.goosigStop) {
const key = proof.getKey();
@ -1365,7 +1363,7 @@ class Mempool extends EventEmitter {
const entry = AirdropEntry.fromAirdrop(proof, this.chain.height);
this.trackAirdrop(entry, id);
this.trackAirdrop(entry);
// Trim size if we're too big.
if (this.limitSize(hash)) {
@ -1382,10 +1380,9 @@ class Mempool extends EventEmitter {
/**
* Track airdrop proof entry.
* @param {AirdropEntry} entry
* @param {Number} id
*/
trackAirdrop(entry, id) {
trackAirdrop(entry) {
assert(!this.airdrops.has(entry.hash));
assert(!this.airdropIndex.has(entry.position));
@ -1402,7 +1399,6 @@ class Mempool extends EventEmitter {
/**
* Untrack airdrop proof entry.
* @param {AirdropEntry} entry
* @param {Number} id
*/
untrackAirdrop(entry) {
@ -2557,7 +2553,7 @@ class Mempool extends EventEmitter {
* Map a transaction to the mempool.
* @private
* @param {MempoolEntry} entry
* @param {CoinView} view
* @param {CoinView} [view]
*/
trackEntry(entry, view) {

View file

@ -480,6 +480,16 @@ main.deployments = {
required: false,
force: false
},
airstop: {
name: 'airstop',
bit: 2,
startTime: 1751328000, // July 1st, 2025
timeout: 1759881600, // October 8th, 2025
threshold: -1,
window: -1,
required: false,
force: false
},
testdummy: {
name: 'testdummy',
bit: 28,
@ -501,6 +511,7 @@ main.deployments = {
main.deploys = [
main.deployments.hardening,
main.deployments.icannlockup,
main.deployments.airstop,
main.deployments.testdummy
];
@ -731,6 +742,16 @@ testnet.deployments = {
required: false,
force: false
},
airstop: {
name: 'airstop',
bit: 2,
startTime: 1751328000, // July 1st, 2025
timeout: 1759881600, // October 8th, 2025
threshold: -1,
window: -1,
required: false,
force: false
},
testdummy: {
name: 'testdummy',
bit: 28,
@ -746,6 +767,7 @@ testnet.deployments = {
testnet.deploys = [
testnet.deployments.hardening,
testnet.deployments.icannlockup,
testnet.deployments.airstop,
testnet.deployments.testdummy
];
@ -885,6 +907,16 @@ regtest.deployments = {
required: false,
force: false
},
airstop: {
name: 'airstop',
bit: 2,
startTime: 1751328000, // July 1st, 2025
timeout: 1759881600, // October 8th, 2025
threshold: -1,
window: -1,
required: false,
force: false
},
testdummy: {
name: 'testdummy',
bit: 28,
@ -900,6 +932,7 @@ regtest.deployments = {
regtest.deploys = [
regtest.deployments.hardening,
regtest.deployments.icannlockup,
regtest.deployments.airstop,
regtest.deployments.testdummy
];
@ -1043,6 +1076,16 @@ simnet.deployments = {
required: false,
force: false
},
airstop: {
name: 'airstop',
bit: 2,
startTime: 1751328000, // July 1st, 2025
timeout: 1759881600, // October 8th, 2025
threshold: -1,
window: -1,
required: false,
force: false
},
testdummy: {
name: 'testdummy',
bit: 28,
@ -1058,6 +1101,7 @@ simnet.deployments = {
simnet.deploys = [
simnet.deployments.hardening,
simnet.deployments.icannlockup,
simnet.deployments.airstop,
simnet.deployments.testdummy
];

477
test/chain-airstop-test.js Normal file
View file

@ -0,0 +1,477 @@
'use strict';

const fs = require('fs');
const { resolve } = require('path');
const assert = require('bsert');
const chainCommon = require('../lib/blockchain/common');
const Network = require('../lib/protocol/network');
const AirdropProof = require('../lib/primitives/airdropproof');
const NodeContext = require('./util/node-context');

const { thresholdStates } = chainCommon;
const network = Network.get('regtest');

// Base64-encoded proof fixtures (shared with airdrop-test.js).
const AIRDROP_PROOF_FILE = resolve(__dirname, 'data', 'airdrop-proof.base64');
const FAUCET_PROOF_FILE = resolve(__dirname, 'data', 'faucet-proof.base64');

// Read a base64 fixture file and return the raw proof bytes.
const read = file => Buffer.from(fs.readFileSync(file, 'binary'), 'base64');

// Sent to:
// {
//   pub: '02a8959cc6491aed3fb96b3b684400311f2779fb092b026a4b170b35c175d48cec',
//   hash: '95cb6129c6b98179866094b2717bfbe27d9c1921',
//   addr: 'hs1qjh9kz2wxhxqhnpnqjje8z7lmuf7ecxfp6kxlly'
// }
// Same as airdrop-test.js
const rawProof = read(AIRDROP_PROOF_FILE);
const rawFaucetProof = read(FAUCET_PROOF_FILE); // hs1qmjpjjgpz7dmg37paq9uksx4yjp675690dafg3q
const airdropProof = AirdropProof.decode(rawProof);
const faucetProof = AirdropProof.decode(rawFaucetProof);

const SOFT_FORK_NAME = 'airstop';
const networkDeployments = network.deployments;

// Real deployment window, saved so the suites can temporarily rewrite
// startTime/timeout for testing and restore them afterwards.
const ACTUAL_START = networkDeployments[SOFT_FORK_NAME].startTime;
const ACTUAL_TIMEOUT = networkDeployments[SOFT_FORK_NAME].timeout;
describe('BIP-9 - Airstop (integration)', function () {
/**
 * Assert the BIP9 info object returned by `getblockchaininfo`
 * matches the expected status/bit/window for the airstop fork.
 * @param {Object} info - softfork info object from the RPC.
 * @param {Object} [expected] - expected values; startTime/timeout
 *   default to the current network deployment values.
 */
const checkBIP9Info = (info, expected) => {
  const exp = expected || {};
  const deployment = network.deployments[SOFT_FORK_NAME];

  // Use `== null` rather than `||` so an explicit 0 (these suites set
  // startTime to 0 while running) is honored instead of being replaced
  // by the network default. Also avoid mutating the caller's object.
  const startTime = exp.startTime == null ? deployment.startTime : exp.startTime;
  const timeout = exp.timeout == null ? deployment.timeout : exp.timeout;

  assert(info, 'BIP9 info should be returned');
  assert.strictEqual(info.status, exp.status);
  assert.strictEqual(info.bit, deployment.bit);
  assert.strictEqual(info.startTime, startTime);
  assert.strictEqual(info.timeout, timeout);
};
/**
 * Assert the BIP9 signalling statistics for the current window.
 * @param {Object} stats - `statistics` object from the softfork info.
 * @param {Object} [expected] - expected values; period/threshold
 *   default to the network's miner window / activation threshold.
 */
const checkBIP9Statistics = (stats, expected) => {
  const exp = expected || {};

  // `== null` (not `||`) so explicit zero values are honored, and a
  // local copy so the caller's object is never mutated.
  const period = exp.period == null ? network.minerWindow : exp.period;
  const threshold = exp.threshold == null ? network.activationThreshold : exp.threshold;

  assert.strictEqual(stats.period, period);
  assert.strictEqual(stats.threshold, threshold);
  assert.strictEqual(stats.elapsed, exp.elapsed);
  assert.strictEqual(stats.count, exp.count);
  assert.strictEqual(stats.possible, exp.possible);
};
// Walks the airstop deployment through the full BIP9 lifecycle with
// miners signalling: DEFINED -> STARTED -> LOCKED_IN -> ACTIVE, and
// verifies airdrop proofs are accepted before activation and rejected
// (mempool and consensus) afterwards. Tests are order-dependent: each
// `it` advances shared chain state.
describe('Success (integration)', function () {
  const nodeCtx = new NodeContext();

  before(async () => {
    // Open the deployment window for the whole run.
    network.deployments[SOFT_FORK_NAME].startTime = 0;
    network.deployments[SOFT_FORK_NAME].timeout = 0xffffffff;
    await nodeCtx.open();
  });

  after(async () => {
    // Restore the real deployment window.
    network.deployments[SOFT_FORK_NAME].startTime = ACTUAL_START;
    network.deployments[SOFT_FORK_NAME].timeout = ACTUAL_TIMEOUT;
    await nodeCtx.close();
  });

  afterEach(() => {
    // Keep leftover proofs from leaking between tests.
    nodeCtx.mempool.dropAirdrops();
  });

  it('should be able to add airdrop & faucet proofs to the mempool', async () => {
    await nodeCtx.mempool.addAirdrop(airdropProof);
    await nodeCtx.mempool.addAirdrop(faucetProof);
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 2);
  });

  it('should be able to mine airdrop & faucet proofs', async () => {
    await tryClaimingAirdropProofs(nodeCtx, [airdropProof, faucetProof]);
  });

  it('should be in DEFINED state', async () => {
    const state = await getForkDeploymentState(nodeCtx.chain);
    const bip9info = await getBIP9Info(nodeCtx);

    assert.strictEqual(state, thresholdStates.DEFINED);
    checkBIP9Info(bip9info, { status: 'defined' });
  });

  it('should start the soft-fork', async () => {
    await mineNBlocks(network.minerWindow - 2, nodeCtx);

    // We are now at the threshold of the window.
    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.DEFINED);
      checkBIP9Info(bip9info, { status: 'defined' });
    }

    // go into new window and change the state to started.
    await mineBlock(nodeCtx);
    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.STARTED);
      checkBIP9Info(bip9info, { status: 'started' });
      checkBIP9Statistics(bip9info.statistics, {
        elapsed: 0,
        count: 0,
        possible: true
      });
    }
  });

  it('should still be able to add airdrop & faucet proofs to the mempool', async () => {
    await nodeCtx.mempool.addAirdrop(airdropProof);
    await nodeCtx.mempool.addAirdrop(faucetProof);
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 2);
  });

  it('should still be able to mine airdrop & faucet proofs', async () => {
    await tryClaimingAirdropProofs(nodeCtx, [airdropProof, faucetProof]);
  });

  it('should lock in the soft-fork', async () => {
    // Reach the height just before the start of the next window
    await mineNBlocks(network.minerWindow - 1, nodeCtx, { signalFork: true });

    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.STARTED);
      checkBIP9Info(bip9info, { status: 'started' });
      checkBIP9Statistics(bip9info.statistics, {
        elapsed: network.minerWindow - 1,
        count: network.minerWindow - 1,
        possible: true
      });
    }

    // After this the deployment goes to LOCKED_IN state.
    await mineBlock(nodeCtx, { signalFork: true });

    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.LOCKED_IN);
      checkBIP9Info(bip9info, { status: 'locked_in' });
      // No statistics are reported once the fork is past STARTED.
      assert(!bip9info.statistics);
    }
  });

  it('should still be able to add airdrop & faucet proofs to the mempool', async () => {
    await nodeCtx.mempool.addAirdrop(airdropProof);
    await nodeCtx.mempool.addAirdrop(faucetProof);
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 2);
  });

  it('should still be able to mine airdrop & faucet proofs', async () => {
    await tryClaimingAirdropProofs(nodeCtx, [airdropProof, faucetProof]);
  });

  it('should activate the soft-fork', async () => {
    // Advance to ACTIVE state.
    await mineNBlocks(network.minerWindow - 1, nodeCtx);

    // Mine the activating block but add proofs to the mempool before
    // connecting it, so we can observe the mempool eviction below.
    const blockToAdd = await nodeCtx.miner.mineBlock(nodeCtx.chain.tip);

    await nodeCtx.mempool.addAirdrop(airdropProof);
    await nodeCtx.mempool.addAirdrop(faucetProof);
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 2);

    await nodeCtx.chain.add(blockToAdd);

    // mempool must drop airdrops if next block no longer
    // allows them.
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 0);

    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.ACTIVE);
      checkBIP9Info(bip9info, { status: 'active' });
      assert(!bip9info.statistics);
    }
  });

  it('should not be able to add airdrops to the mempool', async () => {
    let err;

    // Mempool rejection is score 0 (policy, not a ban-worthy offense).
    try {
      await nodeCtx.mempool.addAirdrop(airdropProof);
    } catch (e) {
      err = e;
    }

    assert(err);
    assert.strictEqual(err.code, 'invalid');
    assert.strictEqual(err.reason, 'bad-airdrop-disabled');
    assert.strictEqual(err.score, 0);

    err = null;

    try {
      await nodeCtx.mempool.addAirdrop(faucetProof);
    } catch (e) {
      err = e;
    }

    assert(err);
    assert.strictEqual(err.code, 'invalid');
    assert.strictEqual(err.reason, 'bad-airdrop-disabled');
    assert.strictEqual(err.score, 0);
  });

  it('should not be able to mine airdrop & faucet proofs anymore', async () => {
    let err;

    // Consensus rejection is score 100 (invalid block).
    try {
      await tryClaimingAirdropProofs(nodeCtx, [airdropProof]);
    } catch (e) {
      err = e;
    }

    assert(err);
    assert.strictEqual(err.code, 'invalid');
    assert.strictEqual(err.reason, 'bad-airdrop-disabled');
    assert.strictEqual(err.score, 100);

    nodeCtx.mempool.dropAirdrops();
    err = null;

    try {
      await tryClaimingAirdropProofs(nodeCtx, [faucetProof]);
    } catch (e) {
      err = e;
    }

    assert(err);
    assert.strictEqual(err.code, 'invalid');
    assert.strictEqual(err.reason, 'bad-airdrop-disabled');
    assert.strictEqual(err.score, 100);
  });
});
// Exercises the non-activation path: with no miner signalling, the
// deployment repeatedly stays in STARTED (window elapses with count 0)
// and airdrop proofs remain mineable throughout. Tests are
// order-dependent: each `it` advances shared chain state.
describe('Failure (integration)', function () {
  const nodeCtx = new NodeContext();

  before(async () => {
    // Open the deployment window for the whole run.
    network.deployments[SOFT_FORK_NAME].startTime = 0;
    network.deployments[SOFT_FORK_NAME].timeout = 0xffffffff;
    await nodeCtx.open();
  });

  after(async () => {
    // Restore the real deployment window.
    network.deployments[SOFT_FORK_NAME].startTime = ACTUAL_START;
    network.deployments[SOFT_FORK_NAME].timeout = ACTUAL_TIMEOUT;
    await nodeCtx.close();
  });

  afterEach(() => {
    // Keep leftover proofs from leaking between tests.
    nodeCtx.mempool.dropAirdrops();
  });

  it('should be able to add airdrop & faucet proofs to the mempool', async () => {
    await nodeCtx.mempool.addAirdrop(airdropProof);
    await nodeCtx.mempool.addAirdrop(faucetProof);
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 2);
  });

  it('should be able to mine airdrop & faucet proofs', async () => {
    await tryClaimingAirdropProofs(nodeCtx, [airdropProof, faucetProof]);
  });

  it('should be in DEFINED state', async () => {
    const state = await getForkDeploymentState(nodeCtx.chain);
    const bip9info = await getBIP9Info(nodeCtx);

    assert.strictEqual(state, chainCommon.thresholdStates.DEFINED);
    checkBIP9Info(bip9info, { status: 'defined' });
  });

  it('should start the soft-fork', async () => {
    await mineNBlocks(network.minerWindow - 2, nodeCtx);
    // We are now at the threshold of the window.
    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.DEFINED);
      checkBIP9Info(bip9info, { status: 'defined' });
    }

    // go into new window and change the state to started.
    await mineBlock(nodeCtx);
    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.STARTED);
      checkBIP9Info(bip9info, { status: 'started' });
      checkBIP9Statistics(bip9info.statistics, {
        elapsed: 0,
        count: 0,
        possible: true
      });
    }
  });

  it('should still be able to mine airdrop & faucet proofs', async () => {
    await tryClaimingAirdropProofs(nodeCtx, [airdropProof, faucetProof]);
  });

  it('should fail to lock in the soft-fork', async () => {
    // Reach the height just before the start of the next window
    await mineNBlocks(network.minerWindow - 1, nodeCtx, { signalFork: false });

    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.STARTED);
      checkBIP9Info(bip9info, { status: 'started' });
      // Whole window elapsed with zero signalling blocks: lock-in is
      // no longer possible within this window.
      checkBIP9Statistics(bip9info.statistics, {
        elapsed: network.minerWindow - 1,
        count: 0,
        possible: false
      });
    }

    // After this the deployment stays in STARTED state.
    await mineBlock(nodeCtx, { signalFork: false });

    {
      const state = await getForkDeploymentState(nodeCtx.chain);
      const bip9info = await getBIP9Info(nodeCtx);

      assert.strictEqual(state, thresholdStates.STARTED);
      checkBIP9Info(bip9info, { status: 'started' });
      // New window: counters reset and lock-in is possible again.
      checkBIP9Statistics(bip9info.statistics, {
        elapsed: 0,
        count: 0,
        possible: true
      });
    }
  });

  it('should still be able to add airdrop & faucet proofs to the mempool', async () => {
    await nodeCtx.mempool.addAirdrop(airdropProof);
    await nodeCtx.mempool.addAirdrop(faucetProof);
    assert.strictEqual(nodeCtx.mempool.airdrops.size, 2);
  });

  it('should still be able to mine airdrop & faucet proofs', async () => {
    await tryClaimingAirdropProofs(nodeCtx, [airdropProof, faucetProof]);
  });
});
});
/**
 * Attempts to mine and add a block with all provided proofs
 * and then revert the chain to the previous state.
 *
 * Throws errors if chain fails to add the block.
 *
 * @param {NodeContext} nodeCtx
 * @param {AirdropProof[]} proofs - non-empty list of proofs to claim.
 * @returns {Promise}
 */
async function tryClaimingAirdropProofs(nodeCtx, proofs) {
  assert.ok(Array.isArray(proofs) && proofs.length > 0);

  // We don't want mempool to safeguard miner. Restore the flag in a
  // finally block so a throwing addAirdrop can't leave the safeguard
  // disabled for subsequent tests.
  const bakAirstop = nodeCtx.mempool.nextState.hasAirstop;
  nodeCtx.mempool.nextState.hasAirstop = false;

  try {
    for (const proof of proofs)
      await nodeCtx.mempool.addAirdrop(proof);
  } finally {
    nodeCtx.mempool.nextState.hasAirstop = bakAirstop;
  }

  assert.strictEqual(nodeCtx.mempool.airdrops.size, proofs.length);

  const [block] = await nodeCtx.mineBlocks(1);

  // Coinbase carries one input/output per proof plus the coinbase
  // input and reward output (generalizes the previous hard-coded 3).
  assert.strictEqual(block.txs[0].inputs.length, proofs.length + 1);
  assert.strictEqual(block.txs[0].outputs.length, proofs.length + 1);
  assert.strictEqual(nodeCtx.mempool.airdrops.size, 0);

  // NOTE: reset WONT re-add proofs to the mempool.
  await nodeCtx.chain.reset(nodeCtx.height - 1);
}
/**
 * Mine N new blocks in sequence.
 * @param {number} n number of blocks to mine
 * @param {NodeContext} node
 * @param {Chain} node.chain
 * @param {Miner} node.miner
 * @param {object} opts
 * @param {boolean} opts.signalFork whether to signal the fork
 */
async function mineNBlocks(n, node, opts = {}) {
  let remaining = n;

  while (remaining > 0) {
    await mineBlock(node, opts);
    remaining -= 1;
  }
}
/**
 * Mine a single block on top of the current tip and connect it.
 * @param {NodeContext} node
 * @param {object} opts
 * @param {boolean} opts.signalFork whether to signal the fork
 * @returns {Promise} the connected block.
 */
async function mineBlock(node, opts = {}) {
  assert(node);

  const {chain, miner} = node;
  const job = await miner.cpu.createJob(chain.tip);

  // opt out of all deployments; set only our bit when signalling.
  job.attempt.version = opts.signalFork
    ? (1 << network.deployments[SOFT_FORK_NAME].bit)
    : 0;

  job.refresh();

  const block = await job.mineAsync();
  await chain.add(block);

  return block;
}
/**
 * Get deployment state (number) of the airstop fork at the tip.
 * @param {Chain} chain
 * @returns {Promise<number>}
 */
async function getForkDeploymentState(chain) {
  const deployment = network.deployments[SOFT_FORK_NAME];
  return chain.getState(chain.tip, deployment);
}
/**
 * Fetch the airstop softfork info via the node RPC.
 * @param {NodeContext} nodeCtx
 * @returns {Promise<Object>} the fork's BIP9 info object.
 */
async function getBIP9Info(nodeCtx) {
  const {softforks} = await nodeCtx.nrpc('getblockchaininfo');
  return softforks[SOFT_FORK_NAME];
}

View file

@ -849,7 +849,7 @@ describe('BIP9 - ICANN lockup (integration)', function() {
});
}
// After this it should go to the ACTIVE state.
// After this it should go to the LOCKED_IN state.
await mineBlock(node);
{