This repository was archived by the owner on Oct 1, 2021. It is now read-only.
Migration 8 #4 (Merged)

Commits (9):
c43e20b feat: migration 8 (AuHau)
21f6a49 Merge remote-tracking branch 'origin/master' into migration/8-multiha… (achingbrain)
fd79824 chore: update deps (achingbrain)
a8c8c67 chore: fix up migration tool to work with newest datastores (achingbrain)
274e6de fix: use datastore-level in the browser (achingbrain)
3630f70 chore: formatting (achingbrain)
f1293d3 Merge remote-tracking branch 'origin/master' into migration/8-multiha… (achingbrain)
04f6774 fix: failing test (achingbrain)
f69ff38 chore: descope key encoding, only migrate block keys (achingbrain)
migrations/migration-8/blocks-to-multihash.js (new file, +79 lines)
const path = require('path')
const CID = require('cids')
const Key = require('interface-datastore').Key
const core = require('datastore-core')
const ShardingStore = core.ShardingDatastore
const base32 = require('base32.js')
const utils = require('../../src/utils')
const log = require('debug')('ipfs-repo-migrations:migration-8')

// In js-ipfs-repo the default value of options.sharding is true, so unless
// sharding is explicitly disabled this function wraps the store in a
// sharding datastore.
async function maybeWithSharding (filestore, options) {
  if (options.sharding === false) {
    return filestore
  }

  const shard = new core.shard.NextToLast(2)
  return ShardingStore.createOrOpen(filestore, shard)
}

function keyToMultihash (key) {
  // Key to CID
  const decoder = new base32.Decoder()
  const buff = decoder.finalize(key.toString().slice(1))
  const cid = new CID(Buffer.from(buff))

  // CID to multihash
  const enc = new base32.Encoder()
  return new Key('/' + enc.finalize(cid.multihash), false)
}

function keyToCid (key) {
  // Key to CID
  const decoder = new base32.Decoder()
  const buff = decoder.write(key.toString().slice(1)).finalize()
  const cid = new CID(1, 'raw', Buffer.from(buff))

  // CID to Key
  const enc = new base32.Encoder()
  return new Key('/' + enc.finalize(cid.buffer), false)
}

async function process (repoPath, options, keyFunction) {
  const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, 'blocks')

  const baseStore = new StorageBackend(path.join(repoPath, 'blocks'), storageOptions)
  const store = await maybeWithSharding(baseStore, storageOptions)

  try {
    const batch = store.batch()
    let counter = 0
    for await (const block of store.query({})) {
      const newKey = keyFunction(block.key)

      // If the key is a CIDv0 it is already a raw multihash, so nothing changes
      if (newKey.toString() !== block.key.toString()) {
        counter += 1

        log(`Migrating Block from ${block.key.toString()} to ${newKey.toString()}`)
        batch.delete(block.key)
        batch.put(newKey, block.value)
      }
    }

    log(`Changing ${counter} blocks`)
    await batch.commit()
  } finally {
    await store.close()
  }
}

exports.migrate = function blocksMigrate (repoPath, options) {
  return process(repoPath, options, keyToMultihash)
}

exports.revert = function blocksRevert (repoPath, options) {
  return process(repoPath, options, keyToCid)
}
migrations/migration-8/index.js (new file, +38 lines)
'use strict'

const keysEncoding = require('./keys-encoding')
const blocksToMultihash = require('./blocks-to-multihash')
const log = require('debug')('ipfs-repo-migrations:migration-8')

async function migrate (repoPath, options) {
  await keysEncoding.migrate(repoPath, options)

  try {
    await blocksToMultihash.migrate(repoPath, options)
  } catch (e) {
    log('An exception was raised while migrating the blockstore to multihashes! Reverting the keys part of the migration!')
    await keysEncoding.revert(repoPath, options)

    throw e
  }
}

async function revert (repoPath, options) {
  await keysEncoding.revert(repoPath, options)

  try {
    await blocksToMultihash.revert(repoPath, options)
  } catch (e) {
    log('An exception was raised while reverting the blockstore to CIDs! Re-applying the keys part of the migration!')
    await keysEncoding.migrate(repoPath, options)

    throw e
  }
}

module.exports = {
  version: 8,
  description: 'Transforms key\'s names into base32 encoding and converts Block store to use multihashes',
  migrate,
  revert
}
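The index module applies the two sub-migrations in order and rolls the keys step back if the blocks step fails. The tests below call the sub-migrations with only a repo path, so a minimal sketch of driving this module the same way could look as follows; the repo path and the error handling are illustrative assumptions, and in practice the ipfs-repo-migrations tooling calls migrate()/revert() with the configured datastore options.

```js
// Hypothetical driver for the migration module above; as in the tests, no
// explicit datastore options are passed, so the default backend is assumed.
const migration8 = require('./migrations/migration-8')

async function upgradeRepo (repoPath) {
  try {
    await migration8.migrate(repoPath)
  } catch (err) {
    // migrate() has already reverted the keys step if the blocks step threw,
    // so the repo should be left at the previous repo version.
    console.error(`migration to version ${migration8.version} failed`, err)
    throw err
  }
}

upgradeRepo('/path/to/.jsipfs').catch(() => process.exit(1)) // placeholder repo path
```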
migrations/migration-8/keys-encoding.js (new file, +62 lines)
const utils = require('../../src/utils')
const path = require('path')
const base32 = require('base32.js')
const Key = require('interface-datastore').Key
const log = require('debug')('ipfs-repo-migrations:migration-8')

const KEY_PREFIX = 'key_'

function encode (name) {
  name = Buffer.from(name)
  const encoder = new base32.Encoder({ type: 'rfc4648' })
  return (KEY_PREFIX + encoder.finalize(name)).toLowerCase()
}

function decode (name) {
  if (!name.startsWith(KEY_PREFIX)) {
    throw new Error('Unknown format of key\'s name!')
  }

  const decoder = new base32.Decoder({ type: 'rfc4648' })
  const decodedNameBuff = decoder.finalize(name.replace(KEY_PREFIX, '').toUpperCase())
  return Buffer.from(decodedNameBuff).toString()
}

async function processFolder (store, prefix, fileNameProcessor) {
  const query = {
    prefix: `/${ prefix }`
  }

  const files = store.query(query)
  for await (const file of files) {
    const name = String(file.key._buf).replace(`/${ prefix }/`, '')
    const encodedFileName = fileNameProcessor(name)
    const newKey = new Key(`${ prefix }/${ encodedFileName }`)

    await store.delete(file.key)
    log(`Translating key's name '${ file.key }' into '${ newKey }'`)
    await store.put(newKey, file.value)
  }
}

async function process (repoPath, options, processor) {
  const { StorageBackend, storageOptions } = utils.getDatastoreAndOptions(options, 'keys')

  const store = new StorageBackend(path.join(repoPath, 'keys'), storageOptions)
  try {
    const info = processFolder(store, 'info', processor)
    const data = processFolder(store, 'pkcs8', processor)

    return await Promise.all([info, data])
  } finally {
    await store.close()
  }
}

exports.migrate = async function keyEncode (repoPath, options) {
  return process(repoPath, options, encode)
}

exports.revert = async function keyDecode (repoPath, options) {
  return process(repoPath, options, decode)
}
Test module for migration 8 (new file, +131 lines)
/* eslint-env mocha */
'use strict'

const chai = require('chai')
chai.use(require('dirty-chai'))
const chaiAsPromised = require('chai-as-promised')
chai.use(chaiAsPromised)
const expect = chai.expect

const path = require('path')
const keysMigration = require('../../migrations/migration-8/keys-encoding')
const blocksMigration = require('../../migrations/migration-8/blocks-to-multihash')
const Key = require('interface-datastore').Key
const Datastore = require('datastore-fs')
const core = require('datastore-core')
const ShardingStore = core.ShardingDatastore

const keysFixtures = [
  ['aAa', 'key_mfawc'],
  ['bbb', 'key_mjrge'],
  ['self', 'key_onswyzq']
]

const blocksFixtures = [
  ['AFKREIBFG77IKIKDMBDUFDCSPK7H5TE5LNPMCSXYLPML27WSTT5YA5IUNU', 'CIQCKN76QUQUGYCHIKGFE6V6P3GJ2W26YFFPQW6YXV7NFHH3QB2RI3I']
]

async function bootstrapKeys (dir, encoded) {
  const store = new Datastore(path.join(dir, 'keys'), { extension: '.data', createIfMissing: true })
  await store.open()

  let name
  for (const keyNames of keysFixtures) {
    name = encoded ? keyNames[1] : keyNames[0]
    await store.put(new Key(`/pkcs8/${name}`), '')
    await store.put(new Key(`/info/${name}`), '')
  }

  await store.close()
}

async function validateKeys (dir, shouldBeEncoded) {
  const store = new Datastore(path.join(dir, 'keys'), { extension: '.data', createIfMissing: false })
  await store.open()

  let name
  for (const keyNames of keysFixtures) {
    name = shouldBeEncoded ? keyNames[1] : keyNames[0]
    expect(await store.has(new Key(`/pkcs8/${name}`))).to.be.true(name)
    expect(await store.has(new Key(`/info/${name}`))).to.be.true(name)
  }

  await store.close()
}

async function bootstrapBlocks (dir, encoded) {
  const baseStore = new Datastore(path.join(dir, 'blocks'), { extension: '.data', createIfMissing: true })
  const shard = new core.shard.NextToLast(2)
  const store = await ShardingStore.createOrOpen(baseStore, shard)

  let name
  for (const blocksNames of blocksFixtures) {
    name = encoded ? blocksNames[1] : blocksNames[0]
    await store.put(new Key(name), '')
  }

  await store.close()
}

async function validateBlocks (dir, shouldBeEncoded) {
  const baseStore = new Datastore(path.join(dir, 'blocks'), { extension: '.data', createIfMissing: false })
  const shard = new core.shard.NextToLast(2)
  const store = await ShardingStore.createOrOpen(baseStore, shard)

  let newName, oldName
  for (const blockNames of blocksFixtures) {
    newName = shouldBeEncoded ? blockNames[1] : blockNames[0]
    oldName = shouldBeEncoded ? blockNames[0] : blockNames[1]
    expect(await store.has(new Key(oldName))).to.be.false(oldName)
    expect(await store.has(new Key(newName))).to.be.true(newName)
  }

  await store.close()
}

module.exports = (setup, cleanup) => {
  describe('migration 8', () => {
    let dir

    beforeEach(async () => {
      dir = await setup()
    })
    afterEach(() => cleanup(dir))

    it('should migrate keys forward', async () => {
      await bootstrapKeys(dir, false)
      await keysMigration.migrate(dir)
      await validateKeys(dir, true)
    })

    it('should migrate keys backward', async () => {
      await bootstrapKeys(dir, true)
      await keysMigration.revert(dir)
      await validateKeys(dir, false)
    })

    it('should fail to migrate keys backward with invalid key name', async () => {
      const store = new Datastore(path.join(dir, 'keys'), { extension: '.data', createIfMissing: true })
      await store.open()

      await store.put(new Key('/pkcs8/mfawc'), '')
      await store.put(new Key('/info/mfawc'), '')

      await store.close()

      await expect(keysMigration.revert(dir)).to.eventually.be.rejectedWith('Unknown format of key\'s name!')
    })

    it('should migrate blocks forward', async () => {
      await bootstrapBlocks(dir, false)
      await blocksMigration.migrate(dir)
      await validateBlocks(dir, true)
    })
    //
    // it('should migrate blocks backward', async () => {
    //   await bootstrapKeys(dir, true)
    //   await blocksMigration.revert(dir)
    //   await validateKeys(dir, false)
    // })
  })
}
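The test module exports a function that expects the repository's shared test harness to supply setup and cleanup callbacks for a temporary repo directory. A rough sketch of wiring it up by hand is shown below; the require path and the os/fs-extra based helpers are assumptions, not the project's actual harness.

```js
const os = require('os')
const path = require('path')
const fs = require('fs-extra')

// Hypothetical path to the test module above
const migration8Tests = require('./test/migrations/migration-8-test')

// setup() creates a fresh temporary directory, cleanup() removes it again
const setup = () => fs.mkdtemp(path.join(os.tmpdir(), 'migration-8-'))
const cleanup = (dir) => fs.remove(dir)

// Registers the 'migration 8' describe block when run under mocha
migration8Tests(setup, cleanup)
```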
Review comment:
We can't buffer the entire contents of a repo into memory. This has to be done incrementally.
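One way to address this, sketched here rather than taken from the PR, is to commit the batch in bounded chunks inside the loop in blocks-to-multihash.js instead of accumulating a single batch over the whole blockstore. The chunk size of 1000 is arbitrary, and whether it is safe to commit while the query iterator is still open depends on the datastore backend.

```js
// A rough incremental variant of the loop in blocks-to-multihash.js: flush
// the batch every `limit` changed blocks so memory use stays bounded.
async function processIncrementally (store, keyFunction, limit = 1000) {
  let batch = store.batch()
  let pending = 0

  for await (const block of store.query({})) {
    const newKey = keyFunction(block.key)
    if (newKey.toString() === block.key.toString()) continue

    batch.delete(block.key)
    batch.put(newKey, block.value)
    pending++

    if (pending >= limit) {
      await batch.commit()
      batch = store.batch()
      pending = 0
    }
  }

  if (pending > 0) {
    await batch.commit()
  }
}
```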