Skip to content

Commit

Permalink
chain-test: update migrate tree state migration.
Browse files Browse the repository at this point in the history
  • Loading branch information
nodech committed Aug 26, 2024
1 parent e5d578e commit 89ce0ec
Show file tree
Hide file tree
Showing 4 changed files with 401 additions and 5 deletions.
26 changes: 21 additions & 5 deletions lib/blockchain/migrations.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ const {
types
} = require('../migrations/migrator');

/** @typedef {import('../types').Hash} Hash */

/**
* Switch to new migrations layout.
*/
Expand Down Expand Up @@ -253,7 +255,7 @@ class MigrateChainState extends AbstractMigration {
encoding.writeU64(rawState, pending.coin, 40);
encoding.writeU64(rawState, pending.value, 40 + 8);
encoding.writeU64(rawState, pending.burned, 40 + 16);
b.put(layout.R.encode(), rawState);
b.put(this.layout.R.encode(), rawState);
}

/**
Expand Down Expand Up @@ -487,6 +489,7 @@ class MigrateTreeState extends AbstractMigration {
this.db = options.db;
this.ldb = options.ldb;
this.network = options.network;
this.layout = MigrateTreeState.layout();
}

async check() {
Expand All @@ -500,10 +503,12 @@ class MigrateTreeState extends AbstractMigration {
}

const {treeInterval} = this.network.names;
const state = await this.db.getState();
const tipHeight = await this.db.getHeight(state.tip);
const rawState = await this.ldb.get(this.layout.R.encode());
const tipHash = rawState.slice(0, 32);
const rawTipHeight = await this.ldb.get(this.layout.h.encode(tipHash));
const tipHeight = rawTipHeight.readUInt32LE(0);
const lastCommitHeight = tipHeight - (tipHeight % treeInterval);
const hash = await this.ldb.get(layout.s.encode());
const hash = await this.ldb.get(this.layout.s.encode());
assert(hash && hash.length === 32);

// new tree root
Expand All @@ -513,7 +518,7 @@ class MigrateTreeState extends AbstractMigration {
encoding.writeU32(buff, lastCommitHeight, 32);

this.db.writeVersion(b, 3);
b.put(layout.s.encode(), buff);
b.put(this.layout.s.encode(), buff);
}

static info() {
Expand All @@ -522,6 +527,17 @@ class MigrateTreeState extends AbstractMigration {
description: 'Add compaction information to the tree state.'
};
}

static layout() {
return {
// R -> tip hash
R: bdb.key('R'),
// h[hash] -> height
h: bdb.key('h', ['hash256']),
// s -> tree state
s: bdb.key('s')
};
}
}

/**
Expand Down
70 changes: 70 additions & 0 deletions test/chain-migration-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -1232,5 +1232,75 @@ describe('Chain Migrations', function() {
assert.bufferEqual(state.encode(), encoded);
});
});

// Data-driven regression test for the MigrateTreeState chain migration:
// seeds an on-disk chaindb with the captured "before" entries from
// chain-3-treestate.json, runs the migration through chain.open(), and
// verifies the resulting db entries match the captured "after" snapshot.
describe('Migrate Tree State (data)', function() {
const location = testdir('migrate-treestate-data');
const data = require('./data/migrations/chain-3-treestate.json');
// Save the globally registered migrations so `after` can restore them.
const migrationsBAK = ChainMigrator.migrations;
const Migration = ChainMigrator.MigrateTreeState;
const store = BlockStore.create({
memory: true,
network
});

const chainOptions = {
prefix: location,
// On-disk chaindb so the seeded state survives the close/reopen cycle.
memory: false,
blocks: store,
logger: Logger.global,
network
};

let chain, ldb;
before(async () => {
// Disable all migrations while creating and seeding the database.
ChainMigrator.migrations = {};
await fs.mkdirp(location);
await store.open();
chain = new Chain(chainOptions);
// Write a pre-migration (v2) database.
chain.db.version = 2;
await chain.open();
ldb = chain.db.db;

// Overwrite the fresh db with the captured pre-migration entries.
await fillEntries(ldb, data.before);

await chain.close();
await store.close();
});

after(async () => {
// Restore the original migration set and remove the test directory.
ChainMigrator.migrations = migrationsBAK;
await rimraf(location);
});

beforeEach(async () => {
await fs.mkdirp(location);
await store.open();
});

afterEach(async () => {
await store.close();

if (chain.opened) {
await chain.close();
}
});

it('should migrate', async () => {
// Register only the migration under test, at id 0.
ChainMigrator.migrations = {
0: Migration
};

chain.options.chainMigrate = 0;
// Bump the expected db version so opening triggers the migration.
chain.db.version = 3;
try {
await chain.open();
} catch (e) {
// NOTE(review): open() may throw on the seeded data after migrating;
// the db contents are verified below regardless.
;
}

await checkEntries(ldb, data.after);
await chain.close();
});
});
});

207 changes: 207 additions & 0 deletions test/data/migrations/chain-3-treestate-gen.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,207 @@
'use strict';

/**
* Migration from v2 to v3 (tree state).
*/

const Logger = require('blgr');
const Network = require('../../../lib/protocol/network');
const Mempool = require('../../../lib/mempool/mempool');
const Miner = require('../../../lib/mining/miner');
const Chain = require('../../../lib/blockchain/chain');
const MemWallet = require('../../util/memwallet');
const HD = require('../../../lib/hd');
// const rules = require('../../../lib/covenants/rules');
const mutils = require('../../util/migrations');

const NETWORK = Network.get('regtest');
let blockstore = null;

try {
blockstore = require('../../../lib/blockstore');
} catch (e) {
;
}

const wallet1priv = 'rprvKE8qsHtkmUxUSPQdn2sFKFUcKyUQz9pKQhxjEWecnXg9hgJMsmJXcw'
+ 'J77SqmHT1R6mcuNqVPzgT2EoGStsXaUN92VJKhQWUB6uZdL8gAZvez';

let txID = 0;

/**
 * Build a throwaway regtest node (chain + mempool + miner + in-memory
 * wallet), mine blocks and run full name auctions on it, then return a
 * dump of the chaindb entries the tree-state migration touches
 * (see getMigrationDump).
 * @returns {Promise<Object>} hex-encoded key/value dump of the chaindb.
 */
async function dumpMigration() {
const commonOptions = {
memory: true,
network: NETWORK,
logger: Logger.global
};

let blocks = null;

// blockstore is optional: the require() at the top of this file is
// wrapped in try/catch, so only create/open it when it loaded.
if (blockstore) {
blocks = blockstore.create(commonOptions);

await blocks.open();
}

const chain = new Chain({
...commonOptions,
entryCache: 5000,
blocks
});

const mempool = new Mempool({
...commonOptions,
chain
});

const miner = new Miner({
...commonOptions,
mempool,
chain
});

// Deterministic wallet key so the generated dump is reproducible.
const master = HD.HDPrivateKey.fromBase58(wallet1priv, NETWORK);
const wallet = new MemWallet({
network: NETWORK,
master
});

const address = wallet.getAddress();
miner.addAddress(address);

// Track every mempool tx in the wallet and nudge the miner.
mempool.on('tx', (tx) => {
miner.cpu.notifyEntry();
wallet.addTX(tx);
});

// Keep mempool and wallet in sync with connected blocks.
// Errors are deliberately swallowed: this is best-effort test fixture
// generation, not consensus code.
chain.on('connect', async (entry, block, view) => {
try {
await mempool._addBlock(entry, block.txs, view);
wallet.addBlock(entry, block.txs);
} catch (e) {
;
}
});

chain.on('disconnect', async (entry, block) => {
try {
await mempool._removeBlock(entry, block.txs);
} catch (e) {
;
}
});

await chain.open();
await mempool.open();
await miner.open();

// Override block creation with the shared migration test util;
// txID gives each generated block a distinct counter value.
miner.createBlock = async (tip, address) => {
return mutils.createBlock({
txno: txID++,
chain,
miner,
tip,
address
});
};

const mineBlock = async () => {
const block = await miner.mineBlock(chain.tip, address);
await chain.add(block);
};

// Mine 20 initial blocks (coinbases pay to the wallet's address).
for (let i = 0; i < 20; i++) {
await mineBlock();
}

// full auction from start to finish.
// Names below were originally produced with grindName (kept commented
// out so the fixture stays deterministic):
// const names = [];
// for (let i = 0; i < 10; i++) {
//   names.push(rules.grindName(10, chain.tip.height + 1, NETWORK));
// }

const names = [
'hakhblkjfd',
'bnfmvwgrzs',
'zvfhqmuwog',
'ovosxkjjmu',
'qbcbwcvggr',
'flvtbrbzun',
'ovxogcczhi',
'spmkswpciv',
'wkhdfzwfmr',
'wvstfqfuyq'
];

// OPEN every name.
for (const name of names) {
const openTX = await wallet.createOpen(name);
await mempool.addTX(openTX.toTX());
}

// Advance past the open period into bidding.
for (let i = 0; i < NETWORK.names.treeInterval + 1; i++)
await mineBlock();

// Two BIDs per name.
for (const name of names) {
const bidTX1 = await wallet.createBid(name, 10000, 20000);
await mempool.addTX(bidTX1.toTX());
const bidTX2 = await wallet.createBid(name, 10000, 20000);
await mempool.addTX(bidTX2.toTX());
}

for (let i = 0; i < NETWORK.names.biddingPeriod; i++)
await mineBlock();

// REVEAL the bids.
for (const name of names) {
const reveal = await wallet.createReveal(name);
await mempool.addTX(reveal.toTX());
}

for (let i = 0; i < NETWORK.names.revealPeriod + 1; i++)
await mineBlock();

// REGISTER the won names with some data.
for (const name of names) {
const register = await wallet.createRegister(name, Buffer.from([1,2,3]));
await mempool.addTX(register.toTX());
}
await mineBlock();

// UPDATE the resource once more.
for (const name of names) {
const update = await wallet.createUpdate(name, Buffer.from([1,2,3,4]));
await mempool.addTX(update.toTX());
}
await mineBlock();

// Capture the relevant db entries before tearing everything down.
const data = await getMigrationDump(chain);

await miner.close();
await mempool.close();
await chain.close();

if (blocks)
await blocks.close();

return data;
}

// Entry point: generate the migration fixture and print it as JSON.
async function main() {
  const full = await dumpMigration();
  const json = JSON.stringify({full}, null, 2);

  console.log(json);
}

main().catch((err) => {
  console.error(err.stack);
  process.exit(1);
});

/**
 * Dump the chaindb entries relevant to the tree-state migration.
 * Prefixes: R (tip hash), h (heights), s (tree state), V (version).
 * @param {Chain} chain - open chain instance.
 * @returns {Promise<Object>} hex key/value map of matching entries.
 */

async function getMigrationDump(chain) {
  const keyPrefixes = ['R', 'h', 's', 'V'];
  const hexPrefixes = keyPrefixes.map(mutils.prefix2hex);

  return mutils.dumpChainDB(chain.db, hexPrefixes);
}
Loading

0 comments on commit 89ce0ec

Please sign in to comment.