
chore: replace toBaseEncodedString() with toString() #135

Merged
merged 2 commits on Apr 7, 2021
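This PR is a mechanical rename: the `cids` module deprecated `CID#toBaseEncodedString()` in favour of `CID#toString()`, which takes the same optional multibase name. A minimal sketch of the before/after, assuming the js-cid (`cids`) API these packages use and reusing the empty-file CID that appears in `importer.spec.js` below (the snippet itself is illustrative, not part of the diff):

```js
const CID = require('cids')

const cid = new CID('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')

// before
cid.toBaseEncodedString()            // 'QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH'
cid.toBaseEncodedString('base58btc') // explicit base name

// after
cid.toString()                       // same default base as before
cid.toString('base58btc')            // same optional base-name argument
```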
2 changes: 1 addition & 1 deletion packages/ipfs-unixfs-exporter/src/index.js
@@ -70,7 +70,7 @@ async function * walkPath (path, ipld, options = {}) {
cid,
toResolve
} = cidAndRest(path)
- let name = cid.toBaseEncodedString()
+ let name = cid.toString()
let entryPath = name
const startingDepth = toResolve.length

2 changes: 1 addition & 1 deletion packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js
@@ -50,7 +50,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options
subObject = subObject[prop]
} else {
// cannot resolve further
- throw errCode(new Error(`No property named ${prop} found in cbor node ${cid.toBaseEncodedString()}`), 'ERR_NO_PROP')
+ throw errCode(new Error(`No property named ${prop} found in cbor node ${cid}`), 'ERR_NO_PROP')
}
}

2 changes: 1 addition & 1 deletion packages/ipfs-unixfs-exporter/src/resolvers/identity.js
@@ -34,7 +34,7 @@ const rawContent = (node) => {
*/
const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => {
if (toResolve.length) {
- throw errCode(new Error(`No link named ${path} found in raw node ${cid.toBaseEncodedString()}`), 'ERR_NOT_FOUND')
+ throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND')
}

const buf = await mh.decode(cid.multihash)
2 changes: 1 addition & 1 deletion packages/ipfs-unixfs-exporter/src/resolvers/raw.js
@@ -32,7 +32,7 @@ const rawContent = (node) => {
*/
const resolve = async (cid, name, path, toResolve, resolve, depth, ipld, options) => {
if (toResolve.length) {
- throw errCode(new Error(`No link named ${path} found in raw node ${cid.toBaseEncodedString()}`), 'ERR_NOT_FOUND')
+ throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND')
}

const buf = await ipld.get(cid, options)
@@ -50,7 +50,7 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, ipld,
let next

if (!name) {
- name = cid.toBaseEncodedString()
+ name = cid.toString()
}

try {
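Note that the resolvers above (`dag-cbor.js`, `identity.js`, `raw.js`) drop the method call entirely inside their error messages: interpolating a CID into a template literal invokes its `toString()` implicitly, so the rendered text is unchanged. A small sketch of that behaviour (not part of the diff; the message format is borrowed from `raw.js`):

```js
const CID = require('cids')

const cid = new CID('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
const path = 'lol'

// Template-literal interpolation stringifies the object via its toString(),
// so `${cid}` renders the same text that `${cid.toBaseEncodedString()}` did.
console.log(`No link named ${path} found in raw node ${cid}`)
// => No link named lol found in raw node QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH
```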
26 changes: 13 additions & 13 deletions packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js
@@ -172,59 +172,59 @@ describe('exporter sharded', function () {

it('exports one file from a sharded directory', async () => {
const dirCid = await createShard(31)
- const exported = await exporter(`/ipfs/${dirCid.toBaseEncodedString()}/file-14`, ipld)
+ const exported = await exporter(`/ipfs/${dirCid}/file-14`, ipld)

expect(exported).to.have.property('name', 'file-14')
})

it('exports one file from a sharded directory sub shard', async () => {
const dirCid = await createShard(31)
- const exported = await exporter(`/ipfs/${dirCid.toBaseEncodedString()}/file-30`, ipld)
+ const exported = await exporter(`/ipfs/${dirCid}/file-30`, ipld)

expect(exported.name).to.deep.equal('file-30')
})

it('exports one file from a shard inside a shard inside a shard', async () => {
const dirCid = await createShard(2568)
- const exported = await exporter(`/ipfs/${dirCid.toBaseEncodedString()}/file-2567`, ipld)
+ const exported = await exporter(`/ipfs/${dirCid}/file-2567`, ipld)

expect(exported.name).to.deep.equal('file-2567')
})

it('extracts a deep folder from the sharded directory', async () => {
const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`)
- const exported = await exporter(`/ipfs/${dirCid.toBaseEncodedString()}/foo/bar/baz`, ipld)
+ const exported = await exporter(`/ipfs/${dirCid}/foo/bar/baz`, ipld)

expect(exported.name).to.deep.equal('baz')
})

it('extracts an intermediate folder from the sharded directory', async () => {
const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`)
- const exported = await exporter(`/ipfs/${dirCid.toBaseEncodedString()}/foo/bar`, ipld)
+ const exported = await exporter(`/ipfs/${dirCid}/foo/bar`, ipld)

expect(exported.name).to.deep.equal('bar')
})

it('uses .path to extract all intermediate entries from the sharded directory', async () => {
const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`)
- const exported = await all(walkPath(`/ipfs/${dirCid.toBaseEncodedString()}/foo/bar/baz/file-1`, ipld))
+ const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz/file-1`, ipld))

expect(exported.length).to.equal(5)

- expect(exported[0].name).to.equal(dirCid.toBaseEncodedString())
+ expect(exported[0].name).to.equal(dirCid.toString())
expect(exported[1].name).to.equal('foo')
- expect(exported[1].path).to.equal(`${dirCid.toBaseEncodedString()}/foo`)
+ expect(exported[1].path).to.equal(`${dirCid}/foo`)
expect(exported[2].name).to.equal('bar')
- expect(exported[2].path).to.equal(`${dirCid.toBaseEncodedString()}/foo/bar`)
+ expect(exported[2].path).to.equal(`${dirCid}/foo/bar`)
expect(exported[3].name).to.equal('baz')
- expect(exported[3].path).to.equal(`${dirCid.toBaseEncodedString()}/foo/bar/baz`)
+ expect(exported[3].path).to.equal(`${dirCid}/foo/bar/baz`)
expect(exported[4].name).to.equal('file-1')
- expect(exported[4].path).to.equal(`${dirCid.toBaseEncodedString()}/foo/bar/baz/file-1`)
+ expect(exported[4].path).to.equal(`${dirCid}/foo/bar/baz/file-1`)
})

it('uses .path to extract all intermediate entries from the sharded directory as well as the contents', async () => {
const dirCid = await createShardWithFileNames(31, (index) => `/foo/bar/baz/file-${index}`)
- const exported = await all(walkPath(`/ipfs/${dirCid.toBaseEncodedString()}/foo/bar/baz`, ipld))
+ const exported = await all(walkPath(`/ipfs/${dirCid}/foo/bar/baz`, ipld))

expect(exported.length).to.equal(4)

@@ -268,7 +268,7 @@ describe('exporter sharded', function () {
hashAlg: mh.names['sha2-256']
})

- const exported = await exporter(`/ipfs/${shardNodeCid.toBaseEncodedString()}/normal-dir/shard/file-1`, ipld)
+ const exported = await exporter(`/ipfs/${shardNodeCid}/normal-dir/shard/file-1`, ipld)

expect(exported.name).to.deep.equal('file-1')
})
24 changes: 12 additions & 12 deletions packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js
@@ -44,11 +44,11 @@ describe('exporter subtree', () => {
throw new Error('Nothing imported')
}

- const exported = await exporter(`${imported.cid.toBaseEncodedString()}/level-1/200Bytes.txt`, ipld)
+ const exported = await exporter(`${imported.cid}/level-1/200Bytes.txt`, ipld)

expect(exported).to.have.property('cid')
expect(exported.name).to.equal('200Bytes.txt')
- expect(exported.path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/200Bytes.txt`)
+ expect(exported.path).to.equal(`${imported.cid}/level-1/200Bytes.txt`)

if (exported.type !== 'file') {
throw new Error('Unexpected type')
@@ -74,7 +74,7 @@ describe('exporter subtree', () => {
throw new Error('Nothing imported')
}

- const exported = await exporter(`${imported.cid.toBaseEncodedString()}/level-1`, ipld)
+ const exported = await exporter(`${imported.cid}/level-1`, ipld)

if (exported.type !== 'directory') {
throw new Error('Unexpected type')
@@ -84,10 +84,10 @@

expect(files.length).to.equal(2)
expect(files[0].name).to.equal('200Bytes.txt')
- expect(files[0].path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/200Bytes.txt`)
+ expect(files[0].path).to.equal(`${imported.cid}/level-1/200Bytes.txt`)

expect(files[1].name).to.equal('level-2')
- expect(files[1].path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/level-2`)
+ expect(files[1].path).to.equal(`${imported.cid}/level-1/level-2`)

if (files[0].type !== 'file') {
throw new Error('Unexpected type')
@@ -108,7 +108,7 @@
}

try {
- await exporter(`${imported.cid.toBaseEncodedString()}/doesnotexist`, ipld)
+ await exporter(`${imported.cid}/doesnotexist`, ipld)
} catch (err) {
expect(err.code).to.equal('ERR_NOT_FOUND')
}
@@ -134,16 +134,16 @@ describe('exporter subtree', () => {
throw new Error('Nothing imported')
}

- const exported = await all(walkPath(`${imported.cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`, ipld))
+ const exported = await all(walkPath(`${imported.cid}/level-1/level-2/200Bytes.txt`, ipld))

expect(exported.length).to.equal(4)
- expect(exported[0].path).to.equal(imported.cid.toBaseEncodedString())
- expect(exported[0].name).to.equal(imported.cid.toBaseEncodedString())
- expect(exported[1].path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1`)
+ expect(exported[0].path).to.equal(imported.cid.toString())
+ expect(exported[0].name).to.equal(imported.cid.toString())
+ expect(exported[1].path).to.equal(`${imported.cid}/level-1`)
expect(exported[1].name).to.equal('level-1')
- expect(exported[2].path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/level-2`)
+ expect(exported[2].path).to.equal(`${imported.cid}/level-1/level-2`)
expect(exported[2].name).to.equal('level-2')
- expect(exported[3].path).to.equal(`${imported.cid.toBaseEncodedString()}/level-1/level-2/200Bytes.txt`)
+ expect(exported[3].path).to.equal(`${imported.cid}/level-1/level-2/200Bytes.txt`)
expect(exported[3].name).to.equal('200Bytes.txt')
})
})
36 changes: 18 additions & 18 deletions packages/ipfs-unixfs-exporter/test/exporter.spec.js
@@ -180,7 +180,7 @@ describe('exporter', () => {
const file = await exporter(result.cid, ipld)

expect(file).to.have.property('cid')
- expect(file).to.have.property('path', result.cid.toBaseEncodedString())
+ expect(file).to.have.property('path', result.cid.toString())

if (file.type !== 'file') {
throw new Error('Unexpected type')
@@ -199,11 +199,11 @@ describe('exporter', () => {
content: asAsyncIterable(smallFile)
}], block))

- const path = `/ipfs/${files[1].cid.toBaseEncodedString()}/${fileName}`
+ const path = `/ipfs/${files[1].cid}/${fileName}`
const file = await exporter(path, ipld)

expect(file.name).to.equal(fileName)
- expect(file.path).to.equal(`${files[1].cid.toBaseEncodedString()}/${fileName}`)
+ expect(file.path).to.equal(`${files[1].cid}/${fileName}`)
})

it('small file in a directory with an square brackets in the title', async () => {
@@ -215,11 +215,11 @@ describe('exporter', () => {
content: asAsyncIterable(smallFile)
}], block))

- const path = `/ipfs/${files[1].cid.toBaseEncodedString()}/${fileName}`
+ const path = `/ipfs/${files[1].cid}/${fileName}`
const file = await exporter(path, ipld)

expect(file.name).to.equal(fileName)
- expect(file.path).to.equal(`${files[1].cid.toBaseEncodedString()}/${fileName}`)
+ expect(file.path).to.equal(`${files[1].cid}/${fileName}`)
})

it('exports a chunk of a file with no links', async () => {
@@ -338,7 +338,7 @@ describe('exporter', () => {
throw new Error('Unexpected type')
}

- expect(file).to.have.property('path', cid.toBaseEncodedString())
+ expect(file).to.have.property('path', cid.toString())
expect(file.unixfs.fileSize()).to.equal(ONE_MEG * 6)
})

@@ -354,7 +354,7 @@ describe('exporter', () => {
})

const file = await exporter(cid, ipld)
- expect(file).to.have.property('path', cid.toBaseEncodedString())
+ expect(file).to.have.property('path', cid.toString())

if (file.type !== 'file') {
throw new Error('Unexpected type')
@@ -432,9 +432,9 @@ describe('exporter', () => {
expect(
files.map((file) => file.path)
).to.be.eql([
- `${dir.cid.toBaseEncodedString()}/200Bytes.txt`,
- `${dir.cid.toBaseEncodedString()}/dir-another`,
- `${dir.cid.toBaseEncodedString()}/level-1`
+ `${dir.cid}/200Bytes.txt`,
+ `${dir.cid}/dir-another`,
+ `${dir.cid}/level-1`
])

files
@@ -480,9 +480,9 @@ describe('exporter', () => {
expect(
files.map((file) => file.path)
).to.be.eql([
- `${importedDir.cid.toBaseEncodedString()}/200Bytes.txt`,
- `${importedDir.cid.toBaseEncodedString()}/dir-another`,
- `${importedDir.cid.toBaseEncodedString()}/level-1`
+ `${importedDir.cid}/200Bytes.txt`,
+ `${importedDir.cid}/dir-another`,
+ `${importedDir.cid}/level-1`
])

expect(
@@ -942,7 +942,7 @@ describe('exporter', () => {
}, mc.DAG_CBOR)

try {
- await exporter(`${cborNodeCid.toBaseEncodedString()}/baz`, ipld)
+ await exporter(`${cborNodeCid}/baz`, ipld)
} catch (err) {
expect(err.code).to.equal('ERR_NO_PROP')
}
@@ -954,7 +954,7 @@ describe('exporter', () => {
}

const cborNodeCid = await ipld.put(node, mc.DAG_CBOR)
- const exported = await exporter(`${cborNodeCid.toBaseEncodedString()}`, ipld)
+ const exported = await exporter(`${cborNodeCid}`, ipld)

if (exported.type !== 'object') {
throw new Error('Unexpected type')
@@ -967,7 +967,7 @@ describe('exporter', () => {
const cid = new CID(1, 'git-raw', new CID('zdj7WkRPAX9o9nb9zPbXzwG7JEs78uyhwbUs8JSUayB98DWWY').multihash)

try {
- await exporter(`${cid.toBaseEncodedString()}`, ipld)
+ await exporter(`${cid}`, ipld)
} catch (err) {
expect(err.code).to.equal('ERR_NO_RESOLVER')
}
@@ -977,7 +977,7 @@ describe('exporter', () => {
const cid = await ipld.put(Uint8Array.from([0, 1, 2, 3, 4]), mc.RAW)

try {
- await exporter(`${cid.toBaseEncodedString()}/lol`, ipld)
+ await exporter(`${cid}/lol`, ipld)
} catch (err) {
expect(err.code).to.equal('ERR_NOT_FOUND')
}
@@ -1048,7 +1048,7 @@ describe('exporter', () => {
}

const exported = await all(recursive(dir.cid, ipld))
- const dirCid = dir.cid.toBaseEncodedString()
+ const dirCid = dir.cid.toString()

expect(exported[0].depth).to.equal(0)
expect(exported[0].name).to.equal(dirCid)
@@ -117,10 +117,10 @@ describe('builder: directory sharding', () => {
throw new Error('Unexpected type')
}

- const expectedHash = nonShardedHash.toBaseEncodedString()
+ const expectedHash = nonShardedHash.toString()

expect(dir.path).to.be.eql(expectedHash)
- expect(dir.cid.toBaseEncodedString()).to.be.eql(expectedHash)
+ expect(dir.cid.toString()).to.be.eql(expectedHash)
expect(files[0].path).to.be.eql(expectedHash + '/b')
expect(files[0].unixfs.fileSize()).to.be.eql(content.length)

@@ -154,10 +154,10 @@ describe('builder: directory sharding', () => {
throw new Error('Unexpected type')
}

- const expectedHash = shardedHash.toBaseEncodedString()
+ const expectedHash = shardedHash.toString()

expect(dir.path).to.be.eql(expectedHash)
- expect(dir.cid.toBaseEncodedString()).to.be.eql(expectedHash)
+ expect(dir.cid.toString()).to.be.eql(expectedHash)
expect(files[0].path).to.be.eql(expectedHash + '/b')
expect(files[0].unixfs.fileSize()).to.be.eql(content.length)

@@ -325,7 +325,7 @@ describe('builder: directory sharding', () => {
if (!index) {
// first dir
if (depth === 1) {
- expect(path).to.equal(dir.cid.toBaseEncodedString())
+ expect(path).to.equal(dir.cid.toString())
}

const entry = entries[path]
@@ -363,7 +363,7 @@ describe('builder: directory sharding', () => {
})

it('exports a big dir with subpath', async () => {
- const exportHash = rootHash.toBaseEncodedString() + '/big/big/2000'
+ const exportHash = rootHash.toString() + '/big/big/2000'

const node = await exporter(exportHash, ipld)
expect(node.path).to.equal(exportHash)
@@ -126,7 +126,7 @@ async function recursiveExport (node, path, entries = []) {
function normalizeNode (node) {
return {
path: node.path || '',
- multihash: node.cid.toBaseEncodedString()
+ multihash: node.cid.toString()
}
}

6 changes: 3 additions & 3 deletions packages/ipfs-unixfs-exporter/test/importer.spec.js
@@ -43,7 +43,7 @@ function stringifyMh (files) {
return files.map((file) => {
return {
...file,
- cid: file.cid.toBaseEncodedString()
+ cid: file.cid.toString()
}
})
}
@@ -341,7 +341,7 @@ strategies.forEach((strategy) => {
const actualFile = actualFiles[i]

expect(actualFile.path).to.equal(expectedFile.path)
- expect(actualFile.cid.toBaseEncodedString('base58btc')).to.equal(expectedFile.cid)
+ expect(actualFile.cid.toString('base58btc')).to.equal(expectedFile.cid)

if (actualFile.unixfs) {
expect(actualFile.unixfs.type).to.equal(expectedFile.type)
@@ -422,7 +422,7 @@ strategies.forEach((strategy) => {
expect(files.length).to.eql(1)

// always yield empty file node
- expect(files[0].cid.toBaseEncodedString()).to.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
+ expect(files[0].cid.toString()).to.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
})

it('supports more than one root', async () => {
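The expected hash strings in the test fixtures stay untouched because `toString()` defaults to the same encoding `toBaseEncodedString()` used (base58btc for the CIDv0 values above), and it still accepts an explicit base name where a test wants one, as in the `toString('base58btc')` assertion in `importer.spec.js`. A rough check of that assumption:

```js
const CID = require('cids')

const cid = new CID('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')

// For a CIDv0 the default base is base58btc, so both calls yield the
// original string and the existing fixtures keep matching.
console.log(cid.toString())            // QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH
console.log(cid.toString('base58btc')) // QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH
```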