Merge pull request #48 from frisitano/chain_fork_feature
Chain fork feature
Netherdrake authored Oct 24, 2021
2 parents 1eb0531 + 5ac9f2f commit fdc83ff
Showing 6 changed files with 3,953 additions and 1 deletion.
5 changes: 4 additions & 1 deletion .gitignore
@@ -10,4 +10,7 @@
.cargo

# The cache for chain data in container
.local
.local

**/fork/data/
**/fork/node_modules/
11 changes: 11 additions & 0 deletions Makefile
@@ -79,3 +79,14 @@ cargo-update:
cargo update --manifest-path node/Cargo.toml
make test

.PHONY: fork
fork:
npm i --prefix fork fork
ifeq (,$(wildcard fork/data))
mkdir fork/data
endif
cp target/release/reef-node fork/data/binary
cp target/release/wbuild/reef-runtime/reef_runtime.compact.wasm fork/data/runtime.wasm
cp assets/types.json fork/data/schema.json
cp assets/chain_spec_$(chain)_raw.json fork/data/genesis.json
cd fork && npm start && cd ..
15 changes: 15 additions & 0 deletions README.md
@@ -128,3 +128,18 @@ Build the wasm runtime with:
```bash
make wasm
```

### Fork reef-chain

You can create a fork of a live chain (testnet / mainnet) for development purposes.

1) Build the binary and sync it with the target chain using localhost defaults. The node must expose unsafe RPC methods (e.g. start it with `--rpc-methods=unsafe`).
2) Run the `make` target below, specifying the chain name (testnet / mainnet):
```bash
make chain=testnet fork
```
3) Now run the forked chain:
```bash
cd fork/data
./binary --chain fork.json --alice
```
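
Putting the steps together, an end-to-end session might look like the sketch below. The `--rpc-methods=unsafe` flag is the usual Substrate switch for enabling unsafe RPC and is assumed here; adjust it to your build:

```bash
# 1) Sync a local node against the target chain, exposing unsafe RPC
./target/release/reef-node --chain testnet --rpc-methods=unsafe

# 2) In a second shell, once the node is synced, generate the forked genesis
make chain=testnet fork

# 3) Run the fork
cd fork/data
./binary --chain fork.json --alice
```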
156 changes: 156 additions & 0 deletions fork/index.js
@@ -0,0 +1,156 @@
const fs = require('fs');
const path = require('path');
const chalk = require('chalk');
const cliProgress = require('cli-progress');
require("dotenv").config();
const { ApiPromise } = require('@polkadot/api');
const { HttpProvider } = require('@polkadot/rpc-provider');
const { options } = require('@reef-defi/api');
const { xxhashAsHex } = require('@polkadot/util-crypto');
const execFileSync = require('child_process').execFileSync;
const execSync = require('child_process').execSync;
const binaryPath = path.join(__dirname, 'data', 'binary');
const wasmPath = path.join(__dirname, 'data', 'runtime.wasm');
const schemaPath = path.join(__dirname, 'data', 'schema.json');
const hexPath = path.join(__dirname, 'data', 'runtime.hex');
const originalSpecPath = path.join(__dirname, 'data', 'genesis.json');
const forkedSpecPath = path.join(__dirname, 'data', 'fork.json');
const storagePath = path.join(__dirname, 'data', 'storage.json');

// Using the HTTP endpoint since Substrate's WS endpoint has a response size limit.
const provider = new HttpProvider(process.env.HTTP_RPC_ENDPOINT || 'http://localhost:9933')
// The storage download will be split into 256^chunksLevel chunks.
const chunksLevel = process.env.FORK_CHUNKS_LEVEL || 1;
const totalChunks = Math.pow(256, chunksLevel);
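// e.g. chunksLevel=1 splits the key space into 256 chunks (one per leading
// key byte); chunksLevel=2 would use 65,536 smaller chunks.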

let chunksFetched = 0;
let separator = false;
const progressBar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);

/**
* All module prefixes except those mentioned in the skippedModulesPrefix will be added to this by the script.
* If you want to add any past module or part of a skipped module, add the prefix here manually.
*
* Any storage value’s hex can be logged via console.log(api.query.<module>.<call>.key([...opt params])),
* e.g. console.log(api.query.timestamp.now.key()).
*
* If you want a map/doublemap key prefix, you can do it via .keyPrefix(),
* e.g. console.log(api.query.system.account.keyPrefix()).
*
* For module hashing, do it via xxhashAsHex,
* e.g. console.log(xxhashAsHex('System', 128)).
*/
let prefixes = ['0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9' /* System.Account */];
const skippedModulesPrefix = ['System', 'Session', 'Babe', 'Grandpa', 'GrandpaFinality', 'FinalityTracker', 'Authorship'];

async function main() {
if (!fs.existsSync(binaryPath)) {
console.log(chalk.red('Binary missing. Please copy the binary of your substrate node to the data folder and rename the binary to "binary"'));
process.exit(1);
}
execFileSync('chmod', ['+x', binaryPath]);

if (!fs.existsSync(wasmPath)) {
console.log(chalk.red('WASM missing. Please copy the WASM blob of your substrate node to the data folder and rename it to "runtime.wasm"'));
process.exit(1);
}
execSync('cat ' + wasmPath + ' | hexdump -ve \'/1 "%02x"\' > ' + hexPath);

let api;
console.log(chalk.green('We are intentionally using the HTTP endpoint. If you see any warnings about that, please ignore them.'));
if (!fs.existsSync(schemaPath)) {
console.log(chalk.yellow('Custom Schema missing, using default schema.'));
api = await ApiPromise.create(options({ provider }));
} else {
const { types, rpc } = JSON.parse(fs.readFileSync(schemaPath, 'utf8'));
api = await ApiPromise.create(options({
provider,
types,
rpc,
}));
}

if (fs.existsSync(storagePath)) {
console.log(chalk.yellow('Reusing cached storage. Delete ./data/storage.json and rerun the script if you want to fetch latest storage'));
} else {
// Download state of original chain
console.log(chalk.green('Fetching current state of the live chain. Please wait, it can take a while depending on the size of your chain.'));
progressBar.start(totalChunks, 0);
const stream = fs.createWriteStream(storagePath, { flags: 'a' });
stream.write("[");
await fetchChunks("0x", chunksLevel, stream);
stream.write("]");
stream.end();
progressBar.stop();
}

const metadata = await api.rpc.state.getMetadata();
// Populate the prefixes array
const modules = JSON.parse(metadata.asLatest.modules);
modules.forEach((module) => {
if (module.storage) {
if (!skippedModulesPrefix.includes(module.storage.prefix)) {
prefixes.push(xxhashAsHex(module.storage.prefix, 128));
}
}
});

// Generate chain spec for original and forked chains
execSync(binaryPath + ' build-spec --chain=testnet --raw > ' + originalSpecPath);
execSync(binaryPath + ' build-spec --dev --raw > ' + forkedSpecPath);

let storage = JSON.parse(fs.readFileSync(storagePath, 'utf8'));
let originalSpec = JSON.parse(fs.readFileSync(originalSpecPath, 'utf8'));
let forkedSpec = JSON.parse(fs.readFileSync(forkedSpecPath, 'utf8'));

// Modify chain name and id
forkedSpec.name = originalSpec.name + '-fork';
forkedSpec.id = originalSpec.id + '-fork';
forkedSpec.protocolId = originalSpec.protocolId;

// Grab the items to be moved, then iterate through and insert into storage
storage
.filter((i) => prefixes.some((prefix) => i[0].startsWith(prefix)))
.forEach(([key, value]) => (forkedSpec.genesis.raw.top[key] = value));

// Delete System.LastRuntimeUpgrade to ensure that the on_runtime_upgrade event is triggered
delete forkedSpec.genesis.raw.top['0x26aa394eea5630e07c48ae0c9558cef7f9cce9c888469bb1a0dceaa129672ef8'];

// Set the code to the current runtime code
forkedSpec.genesis.raw.top['0x3a636f6465'] = '0x' + fs.readFileSync(hexPath, 'utf8').trim();

// To prevent the validator set from changing mid-test, set Staking.ForceEra to ForceNone ('0x02')
forkedSpec.genesis.raw.top['0x5f3e4907f716ac89b6347d15ececedcaf7dad0317324aecae8744b87fc95f2f3'] = '0x02';

fs.writeFileSync(forkedSpecPath, JSON.stringify(forkedSpec, null, 4));

console.log('Forked genesis generated successfully. Find it at ./data/fork.json');
process.exit();
}

main();

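// Recursively partition the key space: each recursion level appends one hex
// byte to the prefix; once no levels remain, fetch every key/value pair under
// that prefix via the state_getPairs RPC and stream it into storage.json,
// inserting commas between non-empty chunks to keep the file valid JSON.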
async function fetchChunks(prefix, levelsRemaining, stream) {
if (levelsRemaining <= 0) {
const pairs = await provider.send('state_getPairs', [prefix]);
if (pairs.length > 0) {
separator ? stream.write(",") : separator = true;
stream.write(JSON.stringify(pairs).slice(1, -1));
}
progressBar.update(++chunksFetched);
return;
}

  // With QUICK_MODE set, fetch the final level's 256 chunks concurrently
if (process.env.QUICK_MODE && levelsRemaining == 1) {
let promises = [];
for (let i = 0; i < 256; i++) {
promises.push(fetchChunks(prefix + i.toString(16).padStart(2, "0"), levelsRemaining - 1, stream));
}
await Promise.all(promises);
} else {
for (let i = 0; i < 256; i++) {
await fetchChunks(prefix + i.toString(16).padStart(2, "0"), levelsRemaining - 1, stream);
}
}
}
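
The script reads its settings from the environment via `dotenv`. A minimal `fork/.env` could look like the sketch below (the values shown are the script's own defaults; `QUICK_MODE` opts into concurrent fetching of the last chunk level):

```bash
HTTP_RPC_ENDPOINT=http://localhost:9933
FORK_CHUNKS_LEVEL=1
# QUICK_MODE=1
```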
