fix: replace node buffers with uint8arrays #25

Merged · 2 commits · Aug 5, 2020
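In outline, every Node-specific `Buffer` call is swapped for the matching helper from the new `uint8arrays` dependency. The pairs below are an illustrative cheat-sheet written for this summary (assuming the `uint8arrays` ^1.0.0 per-function modules), not lines taken from the diff:

```js
'use strict'

// One module per helper, as used throughout this PR
const uint8ArrayToString = require('uint8arrays/to-string')
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayConcat = require('uint8arrays/concat')
const uint8ArrayCompare = require('uint8arrays/compare')
const uint8ArrayEquals = require('uint8arrays/equals')

// Buffer.from(str)      -> uint8ArrayFromString(str)   (utf8 by default)
const seven = uint8ArrayFromString('7')

// buf.toString()        -> uint8ArrayToString(buf)
console.log(uint8ArrayToString(seven)) // '7'

// Buffer.concat([a, b]) -> uint8ArrayConcat([a, b])
const joined = uint8ArrayConcat([seven, uint8ArrayFromString('!')])

// Buffer.compare(a, b)  -> uint8ArrayCompare(a, b)     (-1 / 0 / 1)
console.log(uint8ArrayCompare(seven, joined)) // -1: '7' sorts before '7!'

// a.equals(b)           -> uint8ArrayEquals(a, b)
console.log(uint8ArrayEquals(joined, uint8ArrayFromString('7!'))) // true
```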
7 changes: 4 additions & 3 deletions migrations/migration-8/index.js
@@ -7,6 +7,7 @@ const ShardingStore = core.ShardingDatastore
const mb = require('multibase')
const utils = require('../../src/utils')
const log = require('debug')('ipfs-repo-migrations:migration-8')
const uint8ArrayToString = require('uint8arrays/to-string')

// This function in js-ipfs-repo defaults to not using sharding
// but the default value of the options.sharding is true hence this
@@ -31,7 +32,7 @@ function keyToMultihash (key) {
multihash = mb.encode('base32', multihash).slice(1)

// Should be uppercase for interop with go
multihash = multihash.toString().toUpperCase()
multihash = uint8ArrayToString(multihash).toUpperCase()

return new Key(`/${multihash}`, false)
}
@@ -40,9 +41,9 @@ function keyToCid (key) {
const buf = mb.decode(`b${key.toString().slice(1)}`)

// CID to Key
const multihash = mb.encode('base32', new CID(1, 'raw', buf).buffer).slice(1)
const multihash = mb.encode('base32', new CID(1, 'raw', buf).bytes).slice(1)

return new Key(`/${multihash}`.toUpperCase(), false)
return new Key(`/${uint8ArrayToString(multihash)}`.toUpperCase(), false)
}

async function process (repoPath, options, keyFunction){
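Some context for the migration-8 hunks above: with multibase ^3.0.0, `mb.encode` returns a plain Uint8Array rather than a Buffer, and a Uint8Array's inherited `toString()` prints comma-separated byte values instead of decoding text, which is why the explicit `uint8ArrayToString` call now does the decoding. A minimal sketch of the pitfall (input bytes chosen arbitrarily):

```js
const mb = require('multibase')
const uint8ArrayToString = require('uint8arrays/to-string')

// encode prefixes the base32 text with 'b'; slice(1) drops that prefix
const encoded = mb.encode('base32', Uint8Array.from([1, 2, 3])).slice(1)

console.log(encoded.toString())          // '97,101,98,97,103' — byte values, not text
console.log(uint8ArrayToString(encoded)) // 'aebag' — the base32 string we actually want
```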
2 changes: 1 addition & 1 deletion migrations/migration-9/index.js
@@ -66,7 +73,7 @@ async function pinsToDAG (blockstore, datastore, pinstore) {
}
}

const pinRoot = new dagpb.DAGNode(Buffer.alloc(0), [
const pinRoot = new dagpb.DAGNode(new Uint8Array(), [
await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins),
await pinset.storeSet(blockstore, PinTypes.direct, directPins)
])
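For the `pinRoot` change just above: `new Uint8Array()` is a drop-in for `Buffer.alloc(0)` because the root node carries no payload and ipld-dag-pb only reads the bytes. A minimal sketch of that equivalence, assuming the `ipld-dag-pb` ^0.20.0 `DAGNode` API:

```js
const { Buffer } = require('buffer')
const { DAGNode } = require('ipld-dag-pb')

// Both construct a node with zero-length Data; links are supplied separately
const viaBuffer = new DAGNode(Buffer.alloc(0), [])
const viaU8 = new DAGNode(new Uint8Array(), [])

console.log(viaBuffer.Data.byteLength === viaU8.Data.byteLength) // true (both 0)
```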
44 changes: 21 additions & 23 deletions migrations/migration-9/pin-set.js
@@ -8,8 +8,12 @@ const dagpb = require('ipld-dag-pb')
const { DAGNode, DAGLink } = dagpb
const multicodec = require('multicodec')
const pbSchema = require('./pin.proto')
const { Buffer } = require('buffer')
const { cidToKey, DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils')
const uint8ArrayConcat = require('uint8arrays/concat')
const uint8ArrayCompare = require('uint8arrays/compare')
const uint8ArrayToString = require('uint8arrays/to-string')
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayEquals = require('uint8arrays/equals')

const pb = protobuf(pbSchema)

@@ -50,12 +54,13 @@ function readHeader (rootNode) {
}

function hash (seed, key) {
const buf = Buffer.alloc(4)
buf.writeUInt32LE(seed, 0)
const data = Buffer.concat([
buf, Buffer.from(toB58String(key))
])
return fnv1a(data.toString('binary'))
const buf = new Uint8Array(4)
const dataView = new DataView(buf.buffer)
dataView.setUint32(0, seed, true)
const encodedKey = uint8ArrayFromString(toB58String(key))
const data = uint8ArrayConcat([buf, encodedKey], buf.byteLength + encodedKey.byteLength)

return fnv1a(uint8ArrayToString(data))
}

async function * walkItems (blockstore, node) {
@@ -68,7 +73,7 @@ async function * walkItems (blockstore, node) {
// if a fanout bin is not 'empty', dig into and walk its DAGLinks
const linkHash = link.Hash

if (!EMPTY_KEY.equals(linkHash.buffer)) {
if (!uint8ArrayEquals(EMPTY_KEY, linkHash.bytes)) {
// walk the links of this fanout bin
const buf = await blockstore.get(cidToKey(linkHash))
const node = dagpb.util.deserialize(buf)
@@ -106,9 +111,9 @@ function storeItems (blockstore, items) {
fanout: DEFAULT_FANOUT,
seed: depth
})
const headerBuf = Buffer.concat([
Buffer.from(varint.encode(pbHeader.length)), pbHeader
])

const header = varint.encode(pbHeader.length)
const headerBuf = uint8ArrayConcat([header, pbHeader])
const fanoutLinks = []

for (let i = 0; i < DEFAULT_FANOUT; i++) {
@@ -120,16 +125,16 @@
.map(item => {
return ({
link: new DAGLink('', 1, item.key),
data: item.data || Buffer.alloc(0)
data: item.data || new Uint8Array()
})
})
// sorting makes any ordering of `pins` produce the same DAGNode
.sort((a, b) => Buffer.compare(a.link.Hash.buffer, b.link.Hash.buffer))
.sort((a, b) => {
return uint8ArrayCompare(a.link.Hash.bytes, b.link.Hash.bytes)
})

const rootLinks = fanoutLinks.concat(nodes.map(item => item.link))
const rootData = Buffer.concat(
[headerBuf].concat(nodes.map(item => item.data))
)
const rootData = uint8ArrayConcat([headerBuf, ...nodes.map(item => item.data)])

return new DAGNode(rootData, rootLinks)
} else {
@@ -162,13 +167,6 @@ function storeItems (blockstore, items) {
}

async function storeChild (child, binIdx) {
const opts = {
version: 0,
format: multicodec.DAG_PB,
hashAlg: multicodec.SHA2_256,
preload: false
}

const buf = dagpb.util.serialize(child)
const cid = dagpb.util.cid(buf, {
cidVersion: 0,
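The subtlest hunk in pin-set.js is `hash()`: `Buffer#writeUInt32LE` has no Uint8Array counterpart, so the rewrite goes through a `DataView` with the little-endian flag set. A standalone equivalence check, not part of the migration itself:

```js
const { Buffer } = require('buffer')
const uint8ArrayConcat = require('uint8arrays/concat')

// Old path: Buffer with an explicit little-endian write
const viaBuffer = Buffer.alloc(4)
viaBuffer.writeUInt32LE(0xcafe, 0)

// New path: DataView over the Uint8Array's backing ArrayBuffer
const viaView = new Uint8Array(4)
new DataView(viaView.buffer).setUint32(0, 0xcafe, true) // true => little-endian

console.log(viaBuffer.every((byte, i) => byte === viaView[i])) // true

// concat's optional second argument pre-sizes the output and
// saves a pass over the inputs to sum their lengths
const data = uint8ArrayConcat([viaView, viaView], viaView.byteLength * 2)
console.log(data.byteLength) // 8
```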
22 changes: 11 additions & 11 deletions package.json
@@ -44,22 +44,22 @@
"docs": "aegir docs"
},
"dependencies": {
"buffer": "^5.6.0",
"cbor": "^5.0.2",
"chalk": "^4.0.0",
"cids": "^0.8.3",
"datastore-core": "^1.1.0",
"datastore-fs": "^1.0.0",
"datastore-level": "^1.1.0",
"cids": "^1.0.0",
"datastore-core": "^2.0.0",
"datastore-fs": "^2.0.0",
"datastore-level": "^2.0.0",
"debug": "^4.1.0",
"fnv1a": "^1.0.1",
"interface-datastore": "^1.0.2",
"ipld-dag-pb": "^0.18.5",
"multibase": "^1.0.1",
"multicodec": "^1.0.3",
"multihashing-async": "^1.0.0",
"interface-datastore": "^2.0.0",
"ipld-dag-pb": "^0.20.0",
"multibase": "^3.0.0",
"multicodec": "^2.0.0",
"multihashing-async": "^2.0.0",
"proper-lockfile": "^4.1.1",
"protons": "^1.2.1",
"protons": "^2.0.0",
"uint8arrays": "^1.0.0",
"varint": "^5.0.0",
"yargs": "^15.3.1",
"yargs-promise": "^1.1.0"
4 changes: 2 additions & 2 deletions src/repo/version.js
@@ -1,9 +1,9 @@
'use strict'

const { Buffer } = require('buffer')
const errors = require('../errors')
const repoInit = require('./init')
const Datastore = require('datastore-fs')
const uint8ArrayFromString = require('uint8arrays/from-string')

const Key = require('interface-datastore').Key

@@ -43,7 +43,7 @@ async function getVersion (path) {
async function setVersion (path, version) {
const store = new Datastore(path, { extension: '', createIfMissing: false })
await store.open()
await store.put(versionKey, Buffer.from(String(version)))
await store.put(versionKey, uint8ArrayFromString(String(version)))
await store.close()
}

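The `setVersion` change is representative of most hunks in this PR: `uint8ArrayFromString(String(version))` yields the same utf8 bytes that `Buffer.from(String(version))` did, so `getVersion` keeps parsing correctly. For instance:

```js
const uint8ArrayFromString = require('uint8arrays/from-string')

// utf8 is the default encoding, mirroring Buffer.from(str)
console.log(uint8ArrayFromString(String(7))) // Uint8Array(1) [ 55 ] — same byte as Buffer.from('7')
```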
5 changes: 2 additions & 3 deletions test/browser.js
@@ -1,7 +1,6 @@
/* eslint-env mocha */
'use strict'

const { Buffer } = require('buffer')
const loadFixture = require('aegir/fixtures')
const Datastore = require('datastore-level')

@@ -24,8 +23,8 @@ async function createAndLoadRepo () {
const store = new Datastore(dir, { extension: '', createIfMissing: true })
await store.open()

await store.put(VERSION_KEY, Buffer.from(loadFixture('test/fixtures/test-repo/version')))
await store.put(CONFIG_KEY, Buffer.from(loadFixture('test/fixtures/test-repo/config')))
await store.put(VERSION_KEY, loadFixture('test/fixtures/test-repo/version'))
await store.put(CONFIG_KEY, loadFixture('test/fixtures/test-repo/config'))
Review comment:

🤷‍♂️ I'm not sure I get why Buffer.from was used in the first place, because loadFixture returns a Buffer as far as I can tell. It's probably a good idea to turn them into Uint8Arrays to ensure that put does in fact work with a Uint8Array as opposed to a Buffer.

Member Author:

I'm not sure either, TBH.

Converting them to Uint8Arrays to ensure it works seems unnecessary, as we're not testing the store implementation here.


return dir
}
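On the review exchange above: the author's call is sound because Node's `Buffer` — which `loadFixture` returns — is itself a `Uint8Array` subclass, so `put` receives a Uint8Array either way. A one-line check:

```js
const { Buffer } = require('buffer')

// Buffer subclasses Uint8Array, so any API accepting a Uint8Array accepts a Buffer
console.log(Buffer.from('fixture bytes') instanceof Uint8Array) // true
```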
6 changes: 3 additions & 3 deletions test/init-test.js
@@ -1,12 +1,12 @@
/* eslint-env mocha */
'use strict'

const { Buffer } = require('buffer')
const { expect } = require('./util')

const Datastore = require('datastore-fs')
const Key = require('interface-datastore').Key
const repoInit = require('../src/repo/init')
const uint8ArrayFromString = require('uint8arrays/from-string')

module.exports = (setup, cleanup) => {
let dir
@@ -23,8 +23,8 @@ module.exports = (setup, cleanup) => {
const configKey = new Key('config')
const store = new Datastore(dir, { extension: '', createIfMissing: false })
await store.open()
await store.put(versionKey, Buffer.from('7'))
await store.put(configKey, Buffer.from('config'))
await store.put(versionKey, uint8ArrayFromString('7'))
await store.put(configKey, uint8ArrayFromString('config'))
await store.close()

expect(await repoInit.isRepoInitialized(dir)).to.be.true()
9 changes: 5 additions & 4 deletions test/migrations/migration-9-test.js
@@ -12,13 +12,14 @@ const multicodec = require('multicodec')
const multibase = require('multibase')
const all = require('it-all')
const cbor = require('cbor')
const uint8ArrayFromString = require('uint8arrays/from-string')

const migration = require('../../migrations/migration-9')
const { createStore, cidToKey, PIN_DS_KEY, DEFAULT_FANOUT } = require('../../migrations/migration-9/utils')
const CID = require('cids')

function keyToCid (key) {
const buf = Buffer.from(multibase.encoding('base32upper').decode(key.toString().split('/').pop()))
const buf = multibase.encoding('base32upper').decode(key.toString().split('/').pop())
return new CID(buf)
}

@@ -49,19 +50,19 @@ async function bootstrapBlocks (blockstore, datastore) {
)
const bucket = new Array(DEFAULT_FANOUT).fill(0).map(() => new DAGLink('', 1, emptyBlock.Hash))
const directLinks = await putNode(
new DAGNode(Buffer.from('CggBEIACHQAAAAA=', 'base64'), bucket),
new DAGNode(uint8ArrayFromString('CggBEIACHQAAAAA=', 'base64urlpad'), bucket),
'QmbxHkprr5qdLSK8EZWdBzKFzNXGoKrxb7A4PHX3eH6JPp'
)
const recursiveLinks = await putNode(
new DAGNode(Buffer.from('CggBEIACHQAAAAA=', 'base64'), [
new DAGNode(uint8ArrayFromString('CggBEIACHQAAAAA=', 'base64urlpad'), [
...bucket,
new DAGLink('', 1, pinnedCid)
]),
'QmdEtks1KYQsrgJ8FXpP1vXygnVHSqnyFTKQ3wcWVd4D2y'
)

const pinRoot = await putNode(
new DAGNode(Buffer.alloc(0), [
new DAGNode(new Uint8Array(), [
new DAGLink('direct', directLinks.Tsize, directLinks.Hash),
new DAGLink('recursive', recursiveLinks.Tsize, recursiveLinks.Hash)
]),
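One subtlety in the test hunk above: `uint8ArrayFromString` takes a *multibase* encoding name, and multibase's `'base64'` is the unpadded variant, so the `=`-terminated fixture string needs a padded name. Since the string contains no URL-unsafe characters, `'base64pad'` and the `'base64urlpad'` used in the diff decode it identically. A quick check:

```js
const uint8ArrayFromString = require('uint8arrays/from-string')

const fixture = 'CggBEIACHQAAAAA='
const viaPad = uint8ArrayFromString(fixture, 'base64pad')
const viaUrlPad = uint8ArrayFromString(fixture, 'base64urlpad')

console.log(viaPad.length) // 11 — the protobuf-encoded pin-set header bytes
console.log(viaPad.every((byte, i) => byte === viaUrlPad[i])) // true
```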
6 changes: 3 additions & 3 deletions test/test-migrations/migration-2/index.js
@@ -1,9 +1,9 @@
'use strict'

const { Buffer } = require('buffer')
const Datastore = require('datastore-fs')
const Key = require('interface-datastore').Key
const _set = require('just-safe-set')
const uint8ArrayFromString = require('uint8arrays/from-string')

const CONFIG_KEY = new Key('config')
const NEW_API_ADDRESS = '/ip6/::/tcp/5001'
@@ -89,7 +89,7 @@ async function migrate (repoPath, options, isBrowser) {
// Modify allowed origin
_set(config, 'Gateway.HTTPHeaders.Access-Control-Allow-Origin', 'some.origin.com')

const buf = Buffer.from(JSON.stringify(config, null, 2))
const buf = uint8ArrayFromString(JSON.stringify(config, null, 2))
await store.put(CONFIG_KEY, buf)
} finally {
await store.close()
@@ -109,7 +109,7 @@ async function revert (repoPath, options, isBrowser) {
// Reset origin
_set(config, 'Gateway.HTTPHeaders.Access-Control-Allow-Origin', '*')

const buf = Buffer.from(JSON.stringify(config, null, 2))
const buf = uint8ArrayFromString(JSON.stringify(config, null, 2))
await store.put(CONFIG_KEY, buf)
} finally {
await store.close()
11 changes: 5 additions & 6 deletions test/version-test.js
@@ -1,13 +1,12 @@
/* eslint-env mocha */
'use strict'

const { Buffer } = require('buffer')
const { expect } = require('./util')

const Datastore = require('datastore-fs')
const Key = require('interface-datastore').Key
const version = require('../src/repo/version')

const uint8ArrayFromString = require('uint8arrays/from-string')
const errors = require('../src/errors')

// When new versioning mechanism is introduced in new version don't forget to update
@@ -34,8 +33,8 @@ module.exports = (setup, cleanup) => {
// Create version file
const store = new Datastore(dir, { extension: '', createIfMissing: false })
await store.open()
await store.put(new Key('config'), Buffer.from('some dummy config'))
await store.put(new Key('version'), Buffer.from('7'))
await store.put(new Key('config'), uint8ArrayFromString('some dummy config'))
await store.put(new Key('version'), uint8ArrayFromString('7'))
await store.close()

expect(await version.getVersion(dir)).to.be.equal(7)
@@ -47,8 +46,8 @@
// Create version file
const store = new Datastore(dir, { extension: '', createIfMissing: false })
await store.open()
await store.put(new Key('config'), Buffer.from('some dummy config'))
await store.put(new Key('version'), Buffer.from('5'))
await store.put(new Key('config'), uint8ArrayFromString('some dummy config'))
await store.put(new Key('version'), uint8ArrayFromString('5'))
await store.close()

await version.setVersion(dir, 7)