Merge PR #925 from 'nodech/wallet-migration-progress-data'

This commit is contained in:
Nodari Chkuaselidze 2025-04-16 12:30:21 +04:00
commit 5f11d622b1
No known key found for this signature in database
GPG key ID: B018A7BB437D1F05
12 changed files with 665 additions and 187 deletions

View file

@ -2,6 +2,9 @@
## Unreleased
**When upgrading to this version of hsd, you must pass `--chain-migrate=4`
and `--wallet-migrate=6` when you run it for the first time.**
### Wallet Changes
#### Wallet HTTP API

View file

@ -18,15 +18,17 @@ const CoinView = require('../coins/coinview');
const UndoCoins = require('../coins/undocoins');
const layout = require('./layout');
const AbstractMigration = require('../migrations/migration');
const migrator = require('../migrations/migrator');
const {
Migrator,
oldLayout,
types
} = require('../migrations/migrator');
} = migrator;
/** @typedef {import('../types').Hash} Hash */
/** @typedef {ReturnType<bdb.DB['batch']>} Batch */
/** @typedef {import('../migrations/migrator').types} MigrationType */
/** @typedef {migrator.types} MigrationType */
/** @typedef {migrator.MigrationContext} MigrationContext */
/**
* Switch to new migrations layout.
@ -59,14 +61,14 @@ class MigrateMigrations extends AbstractMigration {
/**
* Actual migration
* @param {Batch} b
* @param {MigrationContext} ctx
* @returns {Promise}
*/
async migrate(b) {
async migrate(b, ctx) {
this.logger.info('Migrating migrations..');
const oldLayout = this.layout.oldLayout;
const newLayout = this.layout.newLayout;
let nextMigration = 1;
const skipped = [];
@ -90,33 +92,9 @@ class MigrateMigrations extends AbstractMigration {
this.db.writeVersion(b, 2);
const rawState = this.encodeMigrationState(nextMigration, skipped);
b.put(newLayout.M.encode(), rawState);
}
/**
* @param {Number} nextMigration
* @param {Number[]} skipped
* @returns {Buffer}
*/
encodeMigrationState(nextMigration, skipped) {
let size = 4;
size += encoding.sizeVarint(nextMigration);
size += encoding.sizeVarint(skipped.length);
for (const id of skipped)
size += encoding.sizeVarint(id);
const bw = bio.write(size);
bw.writeU32(0);
bw.writeVarint(nextMigration);
bw.writeVarint(skipped.length);
for (const id of skipped)
bw.writeVarint(id);
return bw.render();
ctx.state.version = 0;
ctx.state.skipped = skipped;
ctx.state.nextMigration = nextMigration;
}
static info() {
@ -555,6 +533,45 @@ class MigrateTreeState extends AbstractMigration {
}
}
class MigrateMigrationStateV1 extends AbstractMigration {
  /**
   * Create the migration-state upgrade migration.
   * @constructor
   * @param {ChainMigratorOptions} options
   */
  constructor(options) {
    super(options);

    this.options = options;
    this.logger = options.logger.context('chain-migration-migration-state-v1');
    this.db = options.db;
    this.ldb = options.ldb;
    this.network = options.network;
  }

  /**
   * This migration always runs.
   * @returns {Promise}
   */
  async check() {
    return types.MIGRATE;
  }

  /**
   * Bump the in-memory migration state to version 1.
   * The migrator itself persists the state after migrate() returns.
   * @param {Batch} b - write batch (unused here)
   * @param {MigrationContext} ctx
   * @returns {Promise}
   */
  async migrate(b, ctx) {
    ctx.state.version = 1;
  }

  /**
   * @returns {Object} migration name and description
   */
  static info() {
    return {
      name: 'Migrate Migration State',
      description: 'Migrate migration state to v1'
    };
  }
}
/**
* Chain Migrator
* @alias module:blockchain.ChainMigrator
@ -572,8 +589,6 @@ class ChainMigrator extends Migrator {
this.logger = this.options.logger.context('chain-migrations');
this.flagError = 'Restart with `hsd --chain-migrate='
+ this.lastMigration + '`';
this._migrationsToRun = null;
}
/**
@ -695,7 +710,8 @@ ChainMigrator.migrations = {
0: MigrateMigrations,
1: MigrateChainState,
2: MigrateBlockStore,
3: MigrateTreeState
3: MigrateTreeState,
4: MigrateMigrationStateV1
};
// Expose migrations
@ -703,5 +719,6 @@ ChainMigrator.MigrateChainState = MigrateChainState;
ChainMigrator.MigrateMigrations = MigrateMigrations;
ChainMigrator.MigrateBlockStore = MigrateBlockStore;
ChainMigrator.MigrateTreeState = MigrateTreeState;
ChainMigrator.MigrateMigrationStateV1 = MigrateMigrationStateV1;
module.exports = ChainMigrator;

View file

@ -8,6 +8,7 @@
/** @typedef {import('bdb').DB} DB */
/** @typedef {ReturnType<DB['batch']>} Batch */
/** @typedef {import('./migrator').types} MigrationType */
/** @typedef {import('./migrator').MigrationContext} MigrationContext */
/**
* Abstract class for single migration.
@ -37,10 +38,11 @@ class AbstractMigration {
/**
* Run the actual migration
* @param {Batch} b
* @param {MigrationContext} ctx
* @returns {Promise}
*/
async migrate(b) {
async migrate(b, ctx) {
throw new Error('Abstract method.');
}

View file

@ -1,6 +1,7 @@
/**
* migrations/migrator.js - abstract migrator for hsd.
* Copyright (c) 2021, Nodari Chkuaselidze (MIT License)
* https://github.com/handshake-org/hsd
*/
'use strict';
@ -11,6 +12,10 @@ const bdb = require('bdb');
const MigrationState = require('../migrations/state');
/** @typedef {ReturnType<bdb.DB['batch']>} Batch */
/** @typedef {import('../blockchain/chaindb')} ChainDB */
/** @typedef {import('../wallet/walletdb')} WalletDB */
const EMPTY = Buffer.alloc(0);
/**
* This entry needs to be part of all dbs that support migrations.
@ -43,36 +48,6 @@ const types = {
FAKE_MIGRATE: 2
};
/**
* Store migration results.
* @alias module:migrations.MigrationResult
*/
// Accumulates the outcome of one migration run: which migration IDs
// actually executed and which were skipped.
class MigrationResult {
constructor() {
// IDs of migrations that ran to completion.
/** @type {Set<Number>} */
this.migrated = new Set();
// IDs of migrations that were skipped.
/** @type {Set<Number>} */
this.skipped = new Set();
}
/**
 * Record migration `id` as skipped.
 * @param {Number} id
 */
skip(id) {
this.skipped.add(id);
}
/**
 * Record migration `id` as completed.
 * @param {Number} id
 */
migrate(id) {
this.migrated.add(id);
}
}
/**
* class for migrations.
* @alias module:migrations.Migrator
@ -93,7 +68,9 @@ class Migrator {
this.migrateFlag = -1;
this.layout = migrationLayout;
/** @type {ChainDB|WalletDB|null} */
this.db = null;
/** @type {bdb.DB?} */
this.ldb = null;
this.dbVersion = 0;
@ -152,46 +129,23 @@ class Migrator {
/**
* Do the actual migrations
* @returns {Promise}
* @returns {Promise<MigrationResult>}
*/
async migrate() {
const version = await this.ldb.get(this.layout.V.encode());
const lastID = this.getLastMigrationID();
if (version === null) {
if (this.migrateFlag !== -1) {
if (this.migrateFlag !== lastID) {
throw new Error(
`Migrate flag ${this.migrateFlag} does not match last ID: ${lastID}`
);
}
this.logger.warning('Fresh start, ignoring migration flag.');
}
const state = new MigrationState();
state.nextMigration = this.getLastMigrationID() + 1;
this.logger.info('Fresh start, saving last migration id: %d',
state.lastMigration);
await this.saveState(state);
return this.pending;
}
if (version === null)
return this.initialize();
await this.ensure();
await this.verifyDB();
await this.checkMigrations();
this.checkMigrateFlag();
let state = await this.getState();
if (this.migrateFlag !== -1 && this.migrateFlag !== lastID) {
throw new Error(
`Migrate flag ${this.migrateFlag} does not match last ID: ${lastID}`
);
}
this.logger.debug('Last migration %d, last available migration: %d',
state.lastMigration, lastID);
this.logger.info('There are %d migrations.', lastID - state.lastMigration);
@ -240,12 +194,15 @@ class Migrator {
this.logger.info('Migration %d in progress...', id);
const batch = this.ldb.batch();
// queue state updates first, so migration can modify the state.
const context = this.createContext(state);
await currentMigration.migrate(batch, context);
// allow migrations to increment next migration
state.nextMigration = Math.max(state.nextMigration, id + 1);
state.inProgress = false;
state.nextMigration = id + 1;
state.inProgressData = EMPTY;
this.writeState(batch, state);
await currentMigration.migrate(batch, this.pending);
await batch.write();
this.pending.migrate(id);
this.logger.info('Migration %d is done.', id);
@ -294,6 +251,45 @@ class Migrator {
this.logger.info(error);
}
/**
* Check migration flags.
* @throws {Error}
*/
checkMigrateFlag() {
if (this.migrateFlag === -1)
return;
const lastID = this.getLastMigrationID();
if (this.migrateFlag !== lastID) {
throw new Error(
`Migrate flag ${this.migrateFlag} does not match last ID: ${lastID}`);
}
}
/**
* Init fresh db.
* @returns {Promise<MigrationResult>}
*/
async initialize() {
this.checkMigrateFlag();
if (this.migrateFlag !== -1)
this.logger.warning('Fresh start, ignoring migration flag.');
const state = new MigrationState();
state.nextMigration = this.getLastMigrationID() + 1;
this.logger.info('Fresh start, saving last migration id: %d',
state.lastMigration);
await this.saveState(state);
return this.pending;
}
/**
* Do any necessary database checks
* @returns {Promise}
@ -376,9 +372,72 @@ class Migrator {
assert(data, 'State was corrupted.');
return MigrationState.decode(data);
}
/**
* Create context
* @param {MigrationState} state
* @returns {MigrationContext}
*/
createContext(state) {
return new MigrationContext(this, state, this.pending);
}
}
/**
* Store migration results.
* @alias module:migrations.MigrationResult
*/
class MigrationResult {
  /**
   * Track the outcome of a migration run: executed vs. skipped IDs.
   */
  constructor() {
    /** @type {Set<Number>} */
    this.migrated = new Set();

    /** @type {Set<Number>} */
    this.skipped = new Set();
  }

  /**
   * Mark migration `id` as skipped.
   * @param {Number} id
   */
  skip(id) {
    this.skipped.add(id);
  }

  /**
   * Mark migration `id` as completed.
   * @param {Number} id
   */
  migrate(id) {
    this.migrated.add(id);
  }
}
/**
* Migration Context.
*/
class MigrationContext {
  /**
   * Per-run context handed to each migration's migrate().
   * @param {Migrator} migrator - owning migrator, used to persist state
   * @param {MigrationState} state - mutable state shared with the migrator
   * @param {MigrationResult} pending - accumulator for run results
   */
  constructor(migrator, state, pending) {
    this.migrator = migrator;
    this.state = state;
    this.pending = pending;
  }

  /**
   * Persist the current state through the owning migrator.
   * @returns {Promise}
   */
  async saveState() {
    await this.migrator.saveState(this.state);
  }
}
exports.Migrator = Migrator;
exports.MigrationResult = MigrationResult;
exports.MigrationContext = MigrationContext;
exports.types = types;
exports.oldLayout = oldLayout;

View file

@ -10,6 +10,8 @@ const {encoding} = bio;
/** @typedef {import('../types').BufioWriter} BufioWriter */
const EMPTY = Buffer.alloc(0);
/**
* State of database migrations.
* Because migration IDs are only increasing, we only need
@ -32,10 +34,13 @@ class MigrationState extends bio.Struct {
constructor() {
super();
this.version = 1;
this.inProgress = false;
this.nextMigration = 0;
/** @type {Number[]} */
this.skipped = [];
this.inProgressData = EMPTY;
}
get lastMigration() {
@ -53,6 +58,7 @@ class MigrationState extends bio.Struct {
this.inProgress = obj.inProgress;
this.nextMigration = obj.nextMigration;
this.skipped = obj.skipped.slice();
this.inProgressData = obj.inProgressData.slice();
return this;
}
@ -62,14 +68,17 @@ class MigrationState extends bio.Struct {
*/
getSize() {
// flags + last migration number
let size = 4; // flags
let size = 2; // flags
size += 2; // version
size += encoding.sizeVarint(this.nextMigration);
size += encoding.sizeVarint(this.skipped.length);
for (const id of this.skipped)
size += encoding.sizeVarint(id);
if (this.version > 0)
size += encoding.sizeVarBytes(this.inProgressData);
return size;
}
@ -85,13 +94,17 @@ class MigrationState extends bio.Struct {
if (this.inProgress)
flags |= 1 << 0;
bw.writeU32(flags);
bw.writeU16(flags);
bw.writeU16(this.version);
bw.writeVarint(this.nextMigration);
bw.writeVarint(this.skipped.length);
for (const id of this.skipped)
bw.writeVarint(id);
if (this.version > 0)
bw.writeVarBytes(this.inProgressData);
return bw;
}
@ -102,9 +115,10 @@ class MigrationState extends bio.Struct {
*/
read(br) {
const flags = br.readU32();
const flags = br.readU16();
this.inProgress = (flags & 1) !== 0;
this.version = br.readU16();
this.nextMigration = br.readVarint();
this.skipped = [];
@ -113,6 +127,9 @@ class MigrationState extends bio.Struct {
for (let i = 0; i < skippedItems; i++)
this.skipped.push(br.readVarint());
if (this.version > 0)
this.inProgressData = br.readVarBytes();
return this;
}
}

View file

@ -26,16 +26,19 @@ const WalletKey = require('./walletkey');
const Path = require('./path');
const MapRecord = require('./records').MapRecord;
const AbstractMigration = require('../migrations/migration');
const migrator = require('../migrations/migrator');
const {
MigrationResult,
MigrationContext,
Migrator,
types,
oldLayout
} = require('../migrations/migrator');
} = migrator;
const layouts = require('./layout');
const wlayout = layouts.wdb;
/** @typedef {import('../migrations/migrator').types} MigrationType */
/** @typedef {migrator.types} MigrationType */
/** @typedef {import('../migrations/state')} MigrationState */
/** @typedef {ReturnType<bdb.DB['batch']>} Batch */
/** @typedef {ReturnType<bdb.DB['bucket']>} Bucket */
/** @typedef {import('./walletdb')} WalletDB */
@ -57,7 +60,7 @@ class MigrateMigrations extends AbstractMigration {
/** @type {WalletMigratorOptions} */
this.options = options;
this.logger = options.logger.context('wallet-migrations-migrate');
this.logger = options.logger.context('wallet-migration-migrate');
this.db = options.db;
this.ldb = options.ldb;
this.layout = MigrateMigrations.layout();
@ -74,10 +77,11 @@ class MigrateMigrations extends AbstractMigration {
/**
* Actual migration
* @param {Batch} b
* @param {WalletMigrationContext} ctx
* @returns {Promise}
*/
async migrate(b) {
async migrate(b, ctx) {
this.logger.info('Migrating migrations..');
let nextMigration = 1;
@ -87,24 +91,9 @@ class MigrateMigrations extends AbstractMigration {
}
this.db.writeVersion(b, 1);
b.put(
this.layout.newLayout.wdb.M.encode(),
this.encodeMigrationState(nextMigration)
);
}
/**
* @param {Number} nextMigration
* @returns {Buffer}
*/
encodeMigrationState(nextMigration) {
const size = 4 + 1 + 1;
const encoded = Buffer.alloc(size);
encoding.writeVarint(encoded, nextMigration, 4);
return encoded;
ctx.state.version = 0;
ctx.state.nextMigration = nextMigration;
}
static info() {
@ -147,7 +136,7 @@ class MigrateChangeAddress extends AbstractMigration {
/** @type {WalletMigratorOptions} */
this.options = options;
this.logger = options.logger.context('change-address-migration');
this.logger = options.logger.context('wallet-migration-change-address');
this.db = options.db;
this.ldb = options.ldb;
this.layout = MigrateChangeAddress.layout();
@ -166,11 +155,11 @@ class MigrateChangeAddress extends AbstractMigration {
/**
* Actual migration
* @param {Batch} b
* @param {WalletMigrationResult} [pending]
* @param {WalletMigrationContext} ctx
* @returns {Promise}
*/
async migrate(b, pending) {
async migrate(b, ctx) {
const wlayout = this.layout.wdb;
const wids = await this.ldb.keys({
gte: wlayout.W.min(),
@ -185,7 +174,7 @@ class MigrateChangeAddress extends AbstractMigration {
}
if (total > 0)
pending.rescan = true;
ctx.pending.rescan = true;
}
/**
@ -392,7 +381,7 @@ class MigrateAccountLookahead extends AbstractMigration {
/** @type {WalletMigratorOptions} */
this.options = options;
this.logger = options.logger.context('account-lookahead-migration');
this.logger = options.logger.context('wallet-migration-account-lookahead');
this.db = options.db;
this.ldb = options.ldb;
this.layout = MigrateAccountLookahead.layout();
@ -503,7 +492,7 @@ class MigrateTXDBBalances extends AbstractMigration {
/** @type {WalletMigratorOptions} */
this.options = options;
this.logger = options.logger.context('txdb-balance-migration');
this.logger = options.logger.context('wallet-migration-txdb-balance');
this.db = options.db;
this.ldb = options.ldb;
}
@ -520,12 +509,12 @@ class MigrateTXDBBalances extends AbstractMigration {
/**
* Actual migration
* @param {Batch} b
* @param {WalletMigrationResult} [pending]
* @param {WalletMigrationContext} [ctx]
* @returns {Promise}
*/
async migrate(b, pending) {
pending.recalculateTXDB = true;
async migrate(b, ctx) {
ctx.pending.recalculateTXDB = true;
}
static info() {
@ -563,7 +552,7 @@ class MigrateBidRevealEntries extends AbstractMigration {
/** @type {WalletMigratorOptions} */
this.options = options;
this.logger = options.logger.context('bid-reveal-entries-migration');
this.logger = options.logger.context('wallet-migration-bid-reveal-entries');
this.db = options.db;
this.ldb = options.ldb;
this.layout = MigrateBidRevealEntries.layout();
@ -799,7 +788,8 @@ class MigrateTXCountTimeIndex extends AbstractMigration {
/** @type {WalletMigratorOptions} */
this.options = options;
this.logger = options.logger.context('tx-count-time-index-migration');
this.logger = options.logger.context(
'wallet-migration-tx-count-time-index');
this.db = options.db;
this.ldb = options.ldb;
this.layout = MigrateTXCountTimeIndex.layout();
@ -1317,6 +1307,51 @@ class MigrateTXCountTimeIndex extends AbstractMigration {
}
}
class MigrateMigrationStateV1 extends AbstractMigration {
  /**
   * Create Migration State migration object.
   * @param {WalletMigratorOptions} options
   * @constructor
   */
  constructor(options) {
    super(options);

    /** @type {WalletMigratorOptions} */
    this.options = options;
    this.logger = options.logger.context('wallet-migration-migration-state-v1');
    this.db = options.db;
    this.ldb = options.ldb;
  }

  /**
   * We always migrate.
   * @returns {Promise<MigrationType>}
   */
  async check() {
    return types.MIGRATE;
  }

  /**
   * Raise the in-memory migration-state version to 1; the migrator
   * persists the state after this returns.
   * @param {Batch} b - write batch (unused here)
   * @param {WalletMigrationContext} ctx
   * @returns {Promise}
   */
  async migrate(b, ctx) {
    ctx.state.version = 1;
  }

  /**
   * @returns {Object} migration name and description
   */
  static info() {
    return {
      name: 'Migrate Migration State',
      description: 'Migrate migration state to v1'
    };
  }
}
/**
* Wallet migration results.
* @alias module:blockchain.WalletMigrationResult
@ -1331,6 +1366,22 @@ class WalletMigrationResult extends MigrationResult {
}
}
/**
* @alias module:blockchain.WalletMigrationContext
*/
class WalletMigrationContext extends MigrationContext {
  /**
   * Wallet-flavoured context; identical to MigrationContext except that
   * `pending` is a WalletMigrationResult (typed pass-through constructor).
   * @param {WalletMigrator} migrator
   * @param {MigrationState} state
   * @param {WalletMigrationResult} pending
   */
  constructor(migrator, state, pending) {
    super(migrator, state, pending);
  }
}
/**
* Wallet Migrator
* @alias module:blockchain.WalletMigrator
@ -1375,6 +1426,15 @@ class WalletMigrator extends Migrator {
return ids;
}
/**
* @param {MigrationState} state
* @returns {WalletMigrationContext}
*/
createContext(state) {
return new WalletMigrationContext(this, state, this.pending);
}
}
/**
@ -1475,7 +1535,8 @@ WalletMigrator.migrations = {
2: MigrateAccountLookahead,
3: MigrateTXDBBalances,
4: MigrateBidRevealEntries,
5: MigrateTXCountTimeIndex
5: MigrateTXCountTimeIndex,
6: MigrateMigrationStateV1
};
// Expose migrations
@ -1485,5 +1546,6 @@ WalletMigrator.MigrateAccountLookahead = MigrateAccountLookahead;
WalletMigrator.MigrateTXDBBalances = MigrateTXDBBalances;
WalletMigrator.MigrateBidRevealEntries = MigrateBidRevealEntries;
WalletMigrator.MigrateTXCountTimeIndex = MigrateTXCountTimeIndex;
WalletMigrator.MigrateMigrationStateV1 = MigrateMigrationStateV1;
module.exports = WalletMigrator;

View file

@ -18,13 +18,15 @@ const {
types,
oldLayout
} = require('../lib/migrations/migrator');
const migutils = require('./util/migrations');
const {
migrationError,
writeVersion,
getVersion,
fillEntries,
checkEntries
} = require('./util/migrations');
} = migutils;
const common = require('./util/common');
const {rimraf, testdir} = common;
@ -629,7 +631,6 @@ describe('Chain Migrations', function() {
for (const tcase of data.cases) {
it(`should migrate ${tcase.description}`, async () => {
const before = tcase.before;
const after = tcase.after;
const version = tcase.dbVersion;
const mustMigrate1 = tcase.migrate1;
assert(typeof version === 'number');
@ -681,7 +682,12 @@ describe('Chain Migrations', function() {
if (mustMigrate1)
assert(migrated, 'Migration 1 did not run.');
await checkEntries(ldb, after);
await checkEntries(ldb, {
before: data.before,
after: data.after,
throw: true
});
});
}
});
@ -749,7 +755,12 @@ describe('Chain Migrations', function() {
;
}
await checkEntries(ldb, data.after);
await checkEntries(ldb, {
before: data.before,
after: data.after,
throw: true
});
await chain.close();
});
});
@ -1298,7 +1309,69 @@ describe('Chain Migrations', function() {
;
}
await checkEntries(ldb, data.after);
await checkEntries(ldb, {
before: data.before,
after: data.after,
throw: true
});
await chain.close();
});
});
// Data-driven check of the migration-state v1 upgrade: seed the chain db
// with the fixture's `before` entries, run only MigrateMigrationStateV1,
// then verify the db matches the fixture's `after` entries.
describe('Migrate Migration state v1 (data)', function() {
const location = testdir('migrate-migration-state-v1');
const data = require('./data/migrations/chain-4-migrationstate-v1.json');
// Saved so after() can restore the real migration table.
const migrationsBAK = ChainMigrator.migrations;
const Migration = ChainMigrator.MigrateMigrationStateV1;
const store = BlockStore.create({
memory: true,
network
});
const chainOptions = {
prefix: location,
memory: false,
blocks: store,
logger: Logger.global,
network
};
let chain, ldb;
before(async () => {
// Disable all migrations while seeding so open() does not mutate state.
ChainMigrator.migrations = {};
chain = new Chain(chainOptions);
ldb = chain.db.db;
await fs.mkdirp(location);
await store.open();
await chain.open();
await fillEntries(ldb, data.before);
await chain.close();
await store.close();
});
after(async () => {
ChainMigrator.migrations = migrationsBAK;
await rimraf(location);
});
it('should migrate', async () => {
// Install only the migration under test, as id 0.
ChainMigrator.migrations = {
0: Migration
};
chain.options.chainMigrate = 0;
await chain.open();
// Entries must have been rewritten from `before` to `after` exactly.
await checkEntries(ldb, {
before: data.before,
after: data.after,
logErrors: true,
throw: true
});
await chain.close();
});
});

View file

@ -0,0 +1,9 @@
{
"description": "Migration state update",
"before": {
"4d": "000000000000"
},
"after": {
"4d": "00000100010000"
}
}

View file

@ -0,0 +1,9 @@
{
"description": "Migration state update",
"before": {
"4d": "000000000000"
},
"after": {
"4d": "00000100010000"
}
}

View file

@ -12,7 +12,8 @@ const {
const {
DB_FLAG_ERROR,
MockChainDB,
migrationError
migrationError,
mockLayout
} = require('./util/migrations');
const {rimraf, testdir} = require('./util/common');
@ -164,11 +165,15 @@ describe('Migrations', function() {
});
const error = migrationError(migrations, [1], DB_FLAG_ERROR);
await assert.rejects(async () => {
let err;
try {
await db.open();
}, {
message: error
});
} catch (e) {
err = e;
}
assert(err);
assert.strictEqual(err.message, error);
});
});
@ -198,19 +203,17 @@ describe('Migrations', function() {
it('should initialize fresh migration state', async () => {
await rimraf(location);
const db = new MockChainDB(defaultOptions);
const {migrations} = db;
migrations.logger = {
info: () => {},
debug: () => {},
warning: (msg) => {
throw new Error(`Unexpected warning: ${msg}`);
}
};
const db = new MockChainDB({
...defaultOptions,
logger: getLogger({
warning: (msg) => {
throw new Error(`Unexpected warning: ${msg}`);
}
})
});
await db.open();
const state = await migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, false);
assert.strictEqual(state.nextMigration, 0);
await db.close();
@ -219,26 +222,21 @@ describe('Migrations', function() {
it('should ignore migration flag on non-existent db', async () => {
await rimraf(location);
let warning = null;
const db = new MockChainDB({
...defaultOptions,
migrations: { 0: MockMigration1 },
migrateFlag: 0
migrateFlag: 0,
logger: getLogger({
warning: (msg) => {
warning = msg;
}
})
});
const {migrations} = db;
let warning = null;
migrations.logger = {
info: () => {},
debug: () => {},
warning: (msg) => {
warning = msg;
}
};
await db.open();
assert.strictEqual(warning, 'Fresh start, ignoring migration flag.');
const state = await migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, false);
assert.strictEqual(state.nextMigration, 1);
await db.close();
@ -339,11 +337,8 @@ describe('Migrations', function() {
await db.open();
const lastID = db.migrations.getLastMigrationID();
const state = await db.migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.nextMigration, 2);
assert.strictEqual(lastID, 1);
assert.strictEqual(migrated1, true);
assert.strictEqual(migrated2, true);
await db.close();
@ -395,7 +390,7 @@ describe('Migrations', function() {
// check the state is correct.
await db.db.open();
const state = await db.migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, true);
assert.strictEqual(state.nextMigration, 0);
@ -414,7 +409,7 @@ describe('Migrations', function() {
// check the state is correct.
await db.db.open();
const state = await db.migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, true);
assert.strictEqual(state.nextMigration, 1);
@ -422,7 +417,7 @@ describe('Migrations', function() {
}
await db.open();
const state = await db.migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, false);
assert.strictEqual(state.nextMigration, 2);
assert.strictEqual(migrated1, true);
@ -450,7 +445,7 @@ describe('Migrations', function() {
await db.open();
const state = await db.migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, false);
assert.strictEqual(state.nextMigration, 1);
assert.strictEqual(migrate, false);
@ -486,7 +481,7 @@ describe('Migrations', function() {
await db.open();
const state = await db.migrations.getState();
const state = await getMigrationState(db);
assert.strictEqual(state.inProgress, false);
assert.strictEqual(state.nextMigration, 1);
assert.deepStrictEqual(state.skipped, [0]);
@ -497,9 +492,9 @@ describe('Migrations', function() {
await db.close();
db.migrations.migrateFlag = -1;
db.options.migrateFlag = -1;
await db.open();
const state2 = await db.migrations.getState();
const state2 = await getMigrationState(db);
assert.strictEqual(state2.inProgress, false);
assert.strictEqual(state2.nextMigration, 1);
assert.deepStrictEqual(state2.skipped, [0]);
@ -529,6 +524,141 @@ describe('Migrations', function() {
message: 'Unknown migration type.'
});
});
// A migration may advance ctx.state.nextMigration past pending migrations;
// the migrator must then skip them instead of running them.
it('should allow nextMigration modification, skip next', async () => {
let migrated1 = false;
let migrated2 = false;
const db = new MockChainDB({
...defaultOptions,
migrateFlag: 1,
migrations: {
0: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate(b, ctx) {
migrated1 = true;
// Jump straight past migration 1.
ctx.state.nextMigration = 2;
}
},
1: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate() {
migrated2 = true;
}
}
}
});
await db.open();
assert.strictEqual(migrated1, true);
// Migration 1 never ran because nextMigration was advanced to 2.
assert.strictEqual(migrated2, false);
await db.close();
});
it('should store inProgressData in version 1 and clean up', async () => {
let thrown1 = false;
let migrated1 = false;
let data1 = null;
let thrown3 = false;
let migrated3 = false;
let data3 = null;
let migrated4 = false;
let data4 = null;
const db = new MockChainDB({
...defaultOptions,
migrateFlag: 4,
migrations: {
0: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate(b, ctx) {
ctx.state.version = 0;
}
},
1: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate(b, ctx) {
if (!thrown1) {
// This wont be saved, state.version = 0.
ctx.state.inProgressData = Buffer.from('data1');
await ctx.saveState();
thrown1 = true;
throw new Error('error1');
}
migrated1 = true;
data1 = ctx.state.inProgressData;
}
},
2: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate(b, ctx) {
ctx.state.version = 1;
}
},
3: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate(b, ctx) {
if (!thrown3) {
ctx.state.inProgressData = Buffer.from('data3');
await ctx.saveState();
thrown3 = true;
throw new Error('error3');
}
migrated3 = true;
data3 = ctx.state.inProgressData;
}
},
4: class extends AbstractMigration {
async check() {
return types.MIGRATE;
}
async migrate(b, ctx) {
migrated4 = true;
data4 = ctx.state.inProgressData;
}
}
},
logger: getLogger()
});
for (let i = 0; i < 2; i++) {
try {
await db.open();
} catch (e) {
await db.close();
}
}
await db.open();
assert.strictEqual(migrated1, true);
assert.strictEqual(data1.length, 0);
assert.strictEqual(migrated3, true);
assert.bufferEqual(data3, Buffer.from('data3'));
assert.strictEqual(migrated4, true);
assert.strictEqual(data4.length, 0);
await db.close();
});
});
describe('Options', function() {
@ -559,12 +689,35 @@ describe('Migrations', function() {
state1.inProgress = 1;
state1.nextMigration = 3;
state1.skipped = [1, 2];
state1.inProgressData = Buffer.from('data');
const state2 = state1.clone();
assert.notEqual(state2.skipped, state1.skipped);
assert.deepStrictEqual(state2, state1);
});
it('should not encode progress data if version is 0', () => {
const state = new MigrationState();
state.version = 0;
state.inProgressData = Buffer.from('data');
const encoded = state.encode();
const decoded = MigrationState.decode(encoded);
assert.strictEqual(decoded.inProgressData.length, 0);
});
it('should encode progress data if version is not 0', () => {
const state = new MigrationState();
state.version = 1;
state.inProgressData = Buffer.from('data');
const encoded = state.encode();
const decoded = MigrationState.decode(encoded);
assert.bufferEqual(decoded.inProgressData, state.inProgressData);
});
});
describe('AbstractMigration', function() {
@ -626,3 +779,29 @@ describe('Migrations', function() {
}
});
});
const nop = () => {};

/**
 * Build a minimal logger stub for these tests.
 * Top-level methods are always no-ops; `context()` returns a child logger
 * whose methods may be individually overridden through `opts`.
 * @param {Object} [opts] - optional debug/info/warning/error overrides
 * @returns {Object} logger-like object
 */
function getLogger(opts = {}) {
  const pick = (name) => opts[name] || nop;

  return {
    debug: nop,
    info: nop,
    warning: nop,
    error: nop,
    context: () => ({
      debug: pick('debug'),
      info: pick('info'),
      warning: pick('warning'),
      error: pick('error')
    })
  };
}
/**
 * Read and decode the migration state record from a mock chain db.
 * @param {MockChainDB} db
 * @returns {Promise<MigrationState>}
 */
async function getMigrationState(db) {
  const encoded = await db.db.get(mockLayout.M.encode());
  return MigrationState.decode(encoded);
}

View file

@ -58,19 +58,19 @@ class MockChainDB {
this.spv = this.options.spv;
this.prune = this.options.prune;
// This is here for testing purposes.
this.migrations = new MockChainDBMigrator({
...this.options,
db: this,
dbVersion: this.dbVersion
});
}
async open() {
this.logger.debug('Opening mock chaindb.');
await this.db.open();
await this.migrations.migrate();
// This is here for testing purposes.
const migrations = new MockChainDBMigrator({
...this.options,
db: this,
dbVersion: this.dbVersion
});
await migrations.migrate();
await this.db.verify(mockLayout.V.encode(), 'chain', this.dbVersion);
}

View file

@ -18,6 +18,7 @@ const {
types,
oldLayout
} = require('../lib/migrations/migrator');
const migutils = require('./util/migrations');
const {
migrationError,
writeVersion,
@ -25,7 +26,7 @@ const {
checkVersion,
checkEntries,
fillEntries
} = require('./util/migrations');
} = migutils;
const {rimraf, testdir} = require('./util/common');
const NETWORK = 'regtest';
@ -1059,4 +1060,51 @@ describe('Wallet Migrations', function() {
await walletDB.close();
});
});
// Wallet-side twin of the chain test: seed the wallet db with the
// fixture's `before` entries, run only MigrateMigrationStateV1, and
// verify the db matches the fixture's `after` entries.
describe('Migrate Migration state v1 (data)', function() {
const location = testdir('wallet-migrate-migration-state-v1');
const data = require('./data/migrations/wallet-6-migrationstate-v1.json');
// Saved so after() can restore the real migration table.
const migrationsBAK = WalletMigrator.migrations;
const Migration = WalletMigrator.MigrateMigrationStateV1;
const walletOptions = {
prefix: location,
memory: false,
network
};
let walletDB, ldb;
before(async () => {
// Disable all migrations while seeding so open() does not mutate state.
WalletMigrator.migrations = {};
await fs.mkdirp(location);
walletDB = new WalletDB(walletOptions);
ldb = walletDB.db;
await walletDB.open();
await fillEntries(walletDB.db, data.before);
await walletDB.close();
});
after(async () => {
WalletMigrator.migrations = migrationsBAK;
await rimraf(location);
});
it('should migrate', async () => {
// Install only the migration under test, as id 0.
WalletMigrator.migrations = {
0: Migration
};
walletDB.options.walletMigrate = 0;
await walletDB.open();
// Entries must have been rewritten from `before` to `after` exactly.
await checkEntries(ldb, {
before: data.before,
after: data.after,
throw: true
});
await walletDB.close();
});
});
});