From 3bf5a4f2bdf128c5cb23198bfce40645dc0281ef Mon Sep 17 00:00:00 2001
From: achingbrain
Date: Fri, 10 Feb 2023 08:36:37 +0100
Subject: [PATCH 1/5] feat!: convert to typescript

Converts this module to typescript

Since there were existing types this should be minimally disruptive but
releasing this as a major just in case.

BREAKING CHANGE: the types on this module may have changed
---
 .gitignore | 1 +
 package.json | 3 +-
 packages/ipfs-unixfs-exporter/src/index.js | 151 ----
 packages/ipfs-unixfs-exporter/src/index.ts | 192 +++++
 .../resolvers/{dag-cbor.js => dag-cbor.ts} | 20 +-
 .../resolvers/{identity.js => identity.ts} | 29 +-
 .../src/resolvers/{index.js => index.ts} | 20 +-
 .../src/resolvers/{raw.js => raw.ts} | 26 +-
 .../resolvers/unixfs-v1/content/directory.js | 32 -
 .../resolvers/unixfs-v1/content/directory.ts | 21 +
 .../unixfs-v1/content/{file.js => file.ts} | 63 +-
 ...directory.js => hamt-sharded-directory.ts} | 29 +-
 .../src/resolvers/unixfs-v1/content/raw.js | 34 -
 .../src/resolvers/unixfs-v1/content/raw.ts | 24 +
 .../unixfs-v1/{index.js => index.ts} | 73 +-
 packages/ipfs-unixfs-exporter/src/types.ts | 77 --
 ...om-block.js => extract-data-from-block.ts} | 17 +-
 ...d-cid-in-shard.js => find-cid-in-shard.ts} | 72 +-
 ...ength.js => validate-offset-and-length.ts} | 24 +-
 ...arded.spec.js => exporter-sharded.spec.ts} | 40 +-
 ...btree.spec.js => exporter-subtree.spec.ts} | 16 +-
 .../{exporter.spec.js => exporter.spec.ts} | 162 ++--
 .../test/helpers/as-async-iterable.js | 12 -
 .../test/helpers/as-async-iterable.ts | 10 +
 .../test/helpers/block.js | 48 --
 .../test/helpers/collect-leaf-cids.js | 32 -
 .../test/helpers/collect-leaf-cids.ts | 23 +
 ....js => import-export-dir-sharding.spec.ts} | 51 +-
 ...ec.js => import-export-nested-dir.spec.ts} | 29 +-
 ...t-export.spec.js => import-export.spec.ts} | 17 +-
 .../{importer.spec.js => importer.spec.ts} | 222 +++---
 packages/ipfs-unixfs-exporter/tsconfig.json | 3 +-
 packages/ipfs-unixfs-importer/package.json | 19 +-
 .../chunker/{fixed-size.js => fixed-size.ts} | 12 +-
 .../src/chunker/{rabin.js => rabin.ts} | 43 +-
 .../src/dag-builder/{dir.js => dir.ts} | 16 +-
 .../src/dag-builder/file/balanced.js | 31 -
 .../src/dag-builder/file/balanced.ts | 20 +
 ...{buffer-importer.js => buffer-importer.ts} | 17 +-
 .../src/dag-builder/file/flat.js | 10 -
 .../src/dag-builder/file/flat.ts | 6 +
 .../dag-builder/file/{index.js => index.ts} | 100 +--
 .../src/dag-builder/file/trickle.js | 199 -----
 .../src/dag-builder/file/trickle.ts | 170 +++++
 .../src/dag-builder/{index.js => index.ts} | 72 +-
 ...{validate-chunks.js => validate-chunks.ts} | 12 +-
 .../src/{dir-flat.js => dir-flat.ts} | 73 +-
 .../src/{dir-sharded.js => dir-sharded.ts} | 133 ++--
 packages/ipfs-unixfs-importer/src/dir.js | 86 ---
 packages/ipfs-unixfs-importer/src/dir.ts | 70 ++
 .../{flat-to-shard.js => flat-to-shard.ts} | 36 +-
 packages/ipfs-unixfs-importer/src/index.js | 68 --
 .../src/{types.ts => index.ts} | 62 +-
 .../src/{options.js => options.ts} | 26 +-
 .../src/{tree-builder.js => tree-builder.ts} | 51 +-
 .../src/utils/{persist.js => persist.ts} | 25 +-
 .../src/utils/to-path-components.js | 9 -
 .../src/utils/to-path-components.ts | 7 +
 .../{benchmark.spec.js => benchmark.spec.ts} | 21 +-
 ...anced.spec.js => builder-balanced.spec.ts} | 34 +-
 ...lder-flat.spec.js => builder-flat.spec.ts} | 15 +-
 ...hash.spec.js => builder-only-hash.spec.ts} | 10 +-
 ...ag.spec.js => builder-trickle-dag.spec.ts} | 44 +-
 .../test/{builder.spec.js => builder.spec.ts} | 25 +-
 ...-custom.spec.js => chunker-custom.spec.ts} | 34 +-
 ...ize.spec.js => chunker-fixed-size.spec.ts} | 12 +-
 ...er-rabin.spec.js => chunker-rabin.spec.ts} | 18 +-
 ...ec.js => hash-parity-with-go-ipfs.spec.ts} | 28 +-
 .../test/helpers/as-async-iterable.js | 12 -
 .../test/helpers/as-async-iterable.ts | 10 +
 .../test/helpers/block.js | 48 --
 ....js => finite-pseudorandom-byte-stream.ts} | 11 +-
 ...m-byte-stream.js => random-byte-stream.ts} | 11 +-
 .../test/{utils.spec.js => utils.spec.ts} | 2 +-
 packages/ipfs-unixfs-importer/tsconfig.json | 3 +-
 packages/ipfs-unixfs/.aegir.js | 2 +-
 packages/ipfs-unixfs/package.json | 29 +-
 packages/ipfs-unixfs/src/index.js | 330 --------
 packages/ipfs-unixfs/src/index.ts | 205 +++++
 packages/ipfs-unixfs/src/types.ts | 7 -
 packages/ipfs-unixfs/src/unixfs.d.ts | 238 ------
 packages/ipfs-unixfs/src/unixfs.js | 718 ------------------
 packages/ipfs-unixfs/src/unixfs.proto | 6 +-
 packages/ipfs-unixfs/src/unixfs.ts | 277 +++++++
 ...s-format.spec.js => unixfs-format.spec.ts} | 77 +-
 packages/ipfs-unixfs/tsconfig.json | 6 +-
 86 files changed, 1794 insertions(+), 3385 deletions(-)
 delete mode 100644 packages/ipfs-unixfs-exporter/src/index.js
 create mode 100644 packages/ipfs-unixfs-exporter/src/index.ts
 rename packages/ipfs-unixfs-exporter/src/resolvers/{dag-cbor.js => dag-cbor.ts} (78%)
 rename packages/ipfs-unixfs-exporter/src/resolvers/{identity.js => identity.ts} (53%)
 rename packages/ipfs-unixfs-exporter/src/resolvers/{index.js => index.ts} (63%)
 rename packages/ipfs-unixfs-exporter/src/resolvers/{raw.js => raw.ts} (55%)
 delete mode 100644 packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js
 create mode 100644 packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts
 rename packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/{file.js => file.ts} (69%)
 rename packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/{hamt-sharded-directory.js => hamt-sharded-directory.ts} (63%)
 delete mode 100644 packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js
 create mode 100644 packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.ts
 rename packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/{index.js => index.ts} (60%)
 delete mode 100644 packages/ipfs-unixfs-exporter/src/types.ts
 rename packages/ipfs-unixfs-exporter/src/utils/{extract-data-from-block.js => extract-data-from-block.ts} (60%)
 rename packages/ipfs-unixfs-exporter/src/utils/{find-cid-in-shard.js => find-cid-in-shard.ts} (58%)
 rename packages/ipfs-unixfs-exporter/src/utils/{validate-offset-and-length.js => validate-offset-and-length.ts} (54%)
 rename packages/ipfs-unixfs-exporter/test/{exporter-sharded.spec.js => exporter-sharded.spec.ts} (90%)
 rename packages/ipfs-unixfs-exporter/test/{exporter-subtree.spec.js => exporter-subtree.spec.ts} (94%)
 rename packages/ipfs-unixfs-exporter/test/{exporter.spec.js => exporter.spec.ts} (89%)
 delete mode 100644 packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js
 create mode 100644 packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.ts
 delete mode 100644 packages/ipfs-unixfs-exporter/test/helpers/block.js
 delete mode 100644 packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js
 create mode 100644 packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts
 rename packages/ipfs-unixfs-exporter/test/{import-export-dir-sharding.spec.js => import-export-dir-sharding.spec.ts} (89%)
 rename packages/ipfs-unixfs-exporter/test/{import-export-nested-dir.spec.js => import-export-nested-dir.spec.ts} (84%)
 rename packages/ipfs-unixfs-exporter/test/{import-export.spec.js => import-export.spec.ts} (72%)
 rename packages/ipfs-unixfs-exporter/test/{importer.spec.js => importer.spec.ts} (88%)
 rename packages/ipfs-unixfs-importer/src/chunker/{fixed-size.js => fixed-size.ts} (77%)
 rename packages/ipfs-unixfs-importer/src/chunker/{rabin.js => rabin.ts} (65%)
 rename packages/ipfs-unixfs-importer/src/dag-builder/{dir.js => dir.ts} (57%)
 delete mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.js
 create mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts
 rename packages/ipfs-unixfs-importer/src/dag-builder/file/{buffer-importer.js => buffer-importer.ts} (72%)
 delete mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/flat.js
 create mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts
 rename packages/ipfs-unixfs-importer/src/dag-builder/file/{index.js => index.ts} (59%)
 delete mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js
 create mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts
 rename packages/ipfs-unixfs-importer/src/dag-builder/{index.js => index.ts} (51%)
 rename packages/ipfs-unixfs-importer/src/dag-builder/{validate-chunks.js => validate-chunks.ts} (78%)
 rename packages/ipfs-unixfs-importer/src/{dir-flat.js => dir-flat.ts} (56%)
 rename packages/ipfs-unixfs-importer/src/{dir-sharded.js => dir-sharded.ts} (62%)
 delete mode 100644 packages/ipfs-unixfs-importer/src/dir.js
 create mode 100644 packages/ipfs-unixfs-importer/src/dir.ts
 rename packages/ipfs-unixfs-importer/src/{flat-to-shard.js => flat-to-shard.ts} (55%)
 delete mode 100644 packages/ipfs-unixfs-importer/src/index.js
 rename packages/ipfs-unixfs-importer/src/{types.ts => index.ts} (74%)
 rename packages/ipfs-unixfs-importer/src/{options.js => options.ts} (77%)
 rename packages/ipfs-unixfs-importer/src/{tree-builder.js => tree-builder.ts} (62%)
 rename packages/ipfs-unixfs-importer/src/utils/{persist.js => persist.ts} (51%)
 delete mode 100644 packages/ipfs-unixfs-importer/src/utils/to-path-components.js
 create mode 100644 packages/ipfs-unixfs-importer/src/utils/to-path-components.ts
 rename packages/ipfs-unixfs-importer/test/{benchmark.spec.js => benchmark.spec.ts} (79%)
 rename packages/ipfs-unixfs-importer/test/{builder-balanced.spec.js => builder-balanced.spec.ts} (72%)
 rename packages/ipfs-unixfs-importer/test/{builder-flat.spec.js => builder-flat.spec.ts} (64%)
 rename packages/ipfs-unixfs-importer/test/{builder-only-hash.spec.js => builder-only-hash.spec.ts} (76%)
 rename packages/ipfs-unixfs-importer/test/{builder-trickle-dag.spec.js => builder-trickle-dag.spec.ts} (92%)
 rename packages/ipfs-unixfs-importer/test/{builder.spec.js => builder.spec.ts} (85%)
 rename packages/ipfs-unixfs-importer/test/{chunker-custom.spec.js => chunker-custom.spec.ts} (62%)
 rename packages/ipfs-unixfs-importer/test/{chunker-fixed-size.spec.js => chunker-fixed-size.spec.ts} (85%)
 rename packages/ipfs-unixfs-importer/test/{chunker-rabin.spec.js => chunker-rabin.spec.ts} (87%)
 rename packages/ipfs-unixfs-importer/test/{hash-parity-with-go-ipfs.spec.js => hash-parity-with-go-ipfs.spec.ts} (85%)
 delete mode 100644 packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js
 create mode 100644 packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.ts
 delete mode 100644 packages/ipfs-unixfs-importer/test/helpers/block.js
 rename packages/ipfs-unixfs-importer/test/helpers/{finite-pseudorandom-byte-stream.js => finite-pseudorandom-byte-stream.ts} (72%)
rename packages/ipfs-unixfs-importer/test/helpers/{random-byte-stream.js => random-byte-stream.ts} (61%) rename packages/ipfs-unixfs-importer/test/{utils.spec.js => utils.spec.ts} (90%) delete mode 100644 packages/ipfs-unixfs/src/index.js create mode 100644 packages/ipfs-unixfs/src/index.ts delete mode 100644 packages/ipfs-unixfs/src/types.ts delete mode 100644 packages/ipfs-unixfs/src/unixfs.d.ts delete mode 100644 packages/ipfs-unixfs/src/unixfs.js create mode 100644 packages/ipfs-unixfs/src/unixfs.ts rename packages/ipfs-unixfs/test/{unixfs-format.spec.js => unixfs-format.spec.ts} (90%) diff --git a/.gitignore b/.gitignore index 910f6339..7ad9e674 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ dist node_modules package-lock.json yarn.lock +.vscode diff --git a/package.json b/package.json index b5b203a6..831f8c7a 100644 --- a/package.json +++ b/package.json @@ -29,12 +29,13 @@ "clean": "aegir run clean", "build": "aegir run build", "lint": "aegir run lint", + "generate": "aegir run generate", "docs": "NODE_OPTIONS=--max_old_space_size=4096 aegir docs", "docs:no-publish": "npm run docs -- --publish false", "dep-check": "aegir run dep-check", "release": "npm run docs:no-publish && aegir run release && npm run docs" }, - "dependencies": { + "devDependencies": { "aegir": "^38.1.2" }, "workspaces": [ diff --git a/packages/ipfs-unixfs-exporter/src/index.js b/packages/ipfs-unixfs-exporter/src/index.js deleted file mode 100644 index 8cdaf2c8..00000000 --- a/packages/ipfs-unixfs-exporter/src/index.js +++ /dev/null @@ -1,151 +0,0 @@ -import errCode from 'err-code' -import { CID } from 'multiformats/cid' -import resolve from './resolvers/index.js' -import last from 'it-last' - -/** - * @typedef {import('ipfs-unixfs').UnixFS} UnixFS - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('./types').ExporterOptions} ExporterOptions - * @typedef {import('./types').UnixFSFile} UnixFSFile - * @typedef {import('./types').UnixFSDirectory} UnixFSDirectory - * @typedef {import('./types').ObjectNode} ObjectNode - * @typedef {import('./types').RawNode} RawNode - * @typedef {import('./types').IdentityNode} IdentityNode - * @typedef {import('./types').UnixFSEntry} UnixFSEntry - */ - -const toPathComponents = (path = '') => { - // split on / unless escaped with \ - return (path - .trim() - .match(/([^\\^/]|\\\/)+/g) || []) - .filter(Boolean) -} - -/** - * @param {string|Uint8Array|CID} path - */ -const cidAndRest = (path) => { - if (path instanceof Uint8Array) { - return { - cid: CID.decode(path), - toResolve: [] - } - } - - const cid = CID.asCID(path) - if (cid) { - return { - cid, - toResolve: [] - } - } - - if (typeof path === 'string') { - if (path.indexOf('/ipfs/') === 0) { - path = path.substring(6) - } - - const output = toPathComponents(path) - - return { - cid: CID.parse(output[0]), - toResolve: output.slice(1) - } - } - - throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH') -} - -/** - * @param {string | CID} path - * @param {Blockstore} blockstore - * @param {ExporterOptions} [options] - */ -export async function * walkPath (path, blockstore, options = {}) { - let { - cid, - toResolve - } = cidAndRest(path) - let name = cid.toString() - let entryPath = name - const startingDepth = toResolve.length - - while (true) { - const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockstore, options) - - if (!result.entry && !result.next) { - throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') - } - - if 
(result.entry) { - yield result.entry - } - - if (!result.next) { - return - } - - // resolve further parts - toResolve = result.next.toResolve - cid = result.next.cid - name = result.next.name - entryPath = result.next.path - } -} - -/** - * @param {string | CID} path - * @param {Blockstore} blockstore - * @param {ExporterOptions} [options] - */ -export async function exporter (path, blockstore, options = {}) { - const result = await last(walkPath(path, blockstore, options)) - - if (!result) { - throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') - } - - return result -} - -/** - * @param {string | CID} path - * @param {Blockstore} blockstore - * @param {ExporterOptions} [options] - */ -export async function * recursive (path, blockstore, options = {}) { - const node = await exporter(path, blockstore, options) - - if (!node) { - return - } - - yield node - - if (node.type === 'directory') { - for await (const child of recurse(node, options)) { - yield child - } - } - - /** - * @param {UnixFSDirectory} node - * @param {ExporterOptions} options - * @returns {AsyncGenerator} - */ - async function * recurse (node, options) { - for await (const file of node.content(options)) { - yield file - - if (file instanceof Uint8Array) { - continue - } - - if (file.type === 'directory') { - yield * recurse(file, options) - } - } - } -} diff --git a/packages/ipfs-unixfs-exporter/src/index.ts b/packages/ipfs-unixfs-exporter/src/index.ts new file mode 100644 index 00000000..8c3b984b --- /dev/null +++ b/packages/ipfs-unixfs-exporter/src/index.ts @@ -0,0 +1,192 @@ +import errCode from 'err-code' +import { CID } from 'multiformats/cid' +import resolve from './resolvers/index.js' +import last from 'it-last' +import type { UnixFS } from 'ipfs-unixfs' +import type { PBNode } from '@ipld/dag-pb' +import type { Blockstore } from 'interface-blockstore' +import type { Bucket } from 'hamt-sharding' + +export interface ExporterOptions { + offset?: number + length?: number + signal?: AbortSignal + timeout?: number +} + +export interface Exportable { + type: 'file' | 'directory' | 'object' | 'raw' | 'identity' + name: string + path: string + cid: CID + depth: number + size: bigint + content: (options?: ExporterOptions) => AsyncIterable +} + +export interface UnixFSFile extends Exportable { + type: 'file' + unixfs: UnixFS + node: PBNode +} + +export interface UnixFSDirectory extends Exportable { + type: 'directory' + unixfs: UnixFS + node: PBNode +} + +export interface ObjectNode extends Exportable { + type: 'object' + node: Uint8Array +} + +export interface RawNode extends Exportable { + type: 'raw' + node: Uint8Array +} + +export interface IdentityNode extends Exportable { + type: 'identity' + node: Uint8Array +} + +export type UnixFSEntry = UnixFSFile | UnixFSDirectory | ObjectNode | RawNode | IdentityNode + +export interface NextResult { + cid: CID + name: string + path: string + toResolve: string[] +} + +export interface ResolveResult { + entry: UnixFSEntry + next?: NextResult +} + +export interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: Blockstore, options: ExporterOptions): Promise } +export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: Blockstore, options: ExporterOptions): Promise } + +export type UnixfsV1FileContent = AsyncIterable | Iterable +export type UnixfsV1DirectoryContent = AsyncIterable | Iterable +export type UnixfsV1Content = UnixfsV1FileContent | 
UnixfsV1DirectoryContent +export interface UnixfsV1Resolver { (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: Blockstore): (options: ExporterOptions) => UnixfsV1Content } + +export interface ShardTraversalContext { + hamtDepth: number + rootBucket: Bucket + lastBucket: Bucket +} + +const toPathComponents = (path: string = ''): string[] => { + // split on / unless escaped with \ + return (path + .trim() + .match(/([^\\^/]|\\\/)+/g) ?? []) + .filter(Boolean) +} + +const cidAndRest = (path: string | Uint8Array | CID): { cid: CID, toResolve: string[] } => { + if (path instanceof Uint8Array) { + return { + cid: CID.decode(path), + toResolve: [] + } + } + + const cid = CID.asCID(path) + if (cid != null) { + return { + cid, + toResolve: [] + } + } + + if (typeof path === 'string') { + if (path.indexOf('/ipfs/') === 0) { + path = path.substring(6) + } + + const output = toPathComponents(path) + + return { + cid: CID.parse(output[0]), + toResolve: output.slice(1) + } + } + + throw errCode(new Error(`Unknown path type ${path}`), 'ERR_BAD_PATH') +} + +export async function * walkPath (path: string | CID, blockstore: Blockstore, options: ExporterOptions = {}): AsyncGenerator { + let { + cid, + toResolve + } = cidAndRest(path) + let name = cid.toString() + let entryPath = name + const startingDepth = toResolve.length + + while (true) { + const result = await resolve(cid, name, entryPath, toResolve, startingDepth, blockstore, options) + + if (result.entry == null && result.next == null) { + throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') + } + + if (result.entry != null) { + yield result.entry + } + + if (result.next == null) { + return + } + + // resolve further parts + toResolve = result.next.toResolve + cid = result.next.cid + name = result.next.name + entryPath = result.next.path + } +} + +export async function exporter (path: string | CID, blockstore: Blockstore, options: ExporterOptions = {}): Promise { + const result = await last(walkPath(path, blockstore, options)) + + if (result == null) { + throw errCode(new Error(`Could not resolve ${path}`), 'ERR_NOT_FOUND') + } + + return result +} + +export async function * recursive (path: string | CID, blockstore: Blockstore, options: ExporterOptions = {}): AsyncGenerator { + const node = await exporter(path, blockstore, options) + + if (node == null) { + return + } + + yield node + + if (node.type === 'directory') { + for await (const child of recurse(node, options)) { + yield child + } + } + + async function * recurse (node: UnixFSDirectory, options: ExporterOptions): AsyncGenerator { + for await (const file of node.content(options)) { + yield file + + if (file instanceof Uint8Array) { + continue + } + + if (file.type === 'directory') { + yield * recurse(file, options) + } + } + } +} diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts similarity index 78% rename from packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js rename to packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts index 2188f058..13e5c2e4 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/dag-cbor.ts @@ -1,21 +1,15 @@ import { CID } from 'multiformats/cid' import errCode from 'err-code' import * as dagCbor from '@ipld/dag-cbor' +import type { Resolver } from '../index.js' -/** - * @typedef {import('../types').Resolver} Resolver - */ - -/** - * @type {Resolver} - */ 
-const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { +const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { const block = await blockstore.get(cid) - const object = dagCbor.decode(block) + const object = dagCbor.decode(block) let subObject = object let subPath = path - while (toResolve.length) { + while (toResolve.length > 0) { const prop = toResolve[0] if (prop in subObject) { @@ -24,7 +18,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, o subPath = `${subPath}/${prop}` const subObjectCid = CID.asCID(subObject[prop]) - if (subObjectCid) { + if (subObjectCid != null) { return { entry: { type: 'object', @@ -33,7 +27,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, o cid, node: block, depth, - size: block.length, + size: BigInt(block.length), content: async function * () { yield object } @@ -62,7 +56,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, o cid, node: block, depth, - size: block.length, + size: BigInt(block.length), content: async function * () { yield object } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/identity.js b/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts similarity index 53% rename from packages/ipfs-unixfs-exporter/src/resolvers/identity.js rename to packages/ipfs-unixfs-exporter/src/resolvers/identity.ts index ac1c11b5..f226dbf3 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/identity.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/identity.ts @@ -2,39 +2,26 @@ import errCode from 'err-code' import extractDataFromBlock from '../utils/extract-data-from-block.js' import validateOffsetAndLength from '../utils/validate-offset-and-length.js' import * as mh from 'multiformats/hashes/digest' +import type { ExporterOptions, Resolver } from '../index.js' -/** - * @typedef {import('../types').ExporterOptions} ExporterOptions - * @typedef {import('../types').Resolver} Resolver - */ - -/** - * @param {Uint8Array} node - */ -const rawContent = (node) => { - /** - * @param {ExporterOptions} options - */ - async function * contentGenerator (options = {}) { +const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGenerator) => { + async function * contentGenerator (options: ExporterOptions = {}): AsyncGenerator { const { offset, length } = validateOffsetAndLength(node.length, options.offset, options.length) - yield extractDataFromBlock(node, 0, offset, offset + length) + yield extractDataFromBlock(node, 0n, offset, offset + length) } return contentGenerator } -/** - * @type {Resolver} - */ -const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { - if (toResolve.length) { +const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { + if (toResolve.length > 0) { throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') } - const buf = await mh.decode(cid.multihash.bytes) + const buf = mh.decode(cid.multihash.bytes) return { entry: { @@ -44,7 +31,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, o cid, content: rawContent(buf.digest), depth, - size: buf.digest.length, + size: BigInt(buf.digest.length), node: buf.digest } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/index.js b/packages/ipfs-unixfs-exporter/src/resolvers/index.ts similarity index 63% rename from 
packages/ipfs-unixfs-exporter/src/resolvers/index.js rename to packages/ipfs-unixfs-exporter/src/resolvers/index.ts index 73a0b895..fbd5dbbb 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/index.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/index.ts @@ -9,33 +9,23 @@ import dagPbResolver from './unixfs-v1/index.js' import rawResolver from './raw.js' import dagCborResolver from './dag-cbor.js' import identifyResolver from './identity.js' +import type { Resolve, Resolver } from '../index.js' -/** - * @typedef {import('../types').Resolver} Resolver - * @typedef {import('../types').Resolve} Resolve - */ - -/** - * @type {{ [ key: string ]: Resolver }} - */ -const resolvers = { +const resolvers: Record = { [dagPb.code]: dagPbResolver, [raw.code]: rawResolver, [dagCbor.code]: dagCborResolver, [identity.code]: identifyResolver } -/** - * @type {Resolve} - */ -function resolve (cid, name, path, toResolve, depth, blockstore, options) { +const resolve: Resolve = async (cid, name, path, toResolve, depth, blockstore, options) => { const resolver = resolvers[cid.code] - if (!resolver) { + if (resolver == null) { throw errCode(new Error(`No resolver for code ${cid.code}`), 'ERR_NO_RESOLVER') } - return resolver(cid, name, path, toResolve, resolve, depth, blockstore, options) + return await resolver(cid, name, path, toResolve, resolve, depth, blockstore, options) } export default resolve diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/raw.js b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts similarity index 55% rename from packages/ipfs-unixfs-exporter/src/resolvers/raw.js rename to packages/ipfs-unixfs-exporter/src/resolvers/raw.ts index 5807d32c..f567cdcb 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/raw.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/raw.ts @@ -1,35 +1,23 @@ import errCode from 'err-code' +import type { ExporterOptions, Resolver } from '../index.js' import extractDataFromBlock from '../utils/extract-data-from-block.js' import validateOffsetAndLength from '../utils/validate-offset-and-length.js' -/** - * @typedef {import('../types').ExporterOptions} ExporterOptions - */ - -/** - * @param {Uint8Array} node - */ -const rawContent = (node) => { - /** - * @param {ExporterOptions} options - */ - async function * contentGenerator (options = {}) { +const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGenerator) => { + async function * contentGenerator (options: ExporterOptions = {}): AsyncGenerator { const { offset, length } = validateOffsetAndLength(node.length, options.offset, options.length) - yield extractDataFromBlock(node, 0, offset, offset + length) + yield extractDataFromBlock(node, 0n, offset, offset + length) } return contentGenerator } -/** - * @type {import('../types').Resolver} - */ -const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { - if (toResolve.length) { +const resolve: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { + if (toResolve.length > 0) { throw errCode(new Error(`No link named ${path} found in raw node ${cid}`), 'ERR_NOT_FOUND') } @@ -43,7 +31,7 @@ const resolve = async (cid, name, path, toResolve, resolve, depth, blockstore, o cid, content: rawContent(block), depth, - size: block.length, + size: BigInt(block.length), node: block } } diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js deleted file mode 100644 
index 677d39a6..00000000 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.js +++ /dev/null @@ -1,32 +0,0 @@ -/** - * @typedef {import('../../../types').ExporterOptions} ExporterOptions - * @typedef {import('../../../types').UnixfsV1DirectoryContent} UnixfsV1DirectoryContent - * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver - */ - -/** - * @type {UnixfsV1Resolver} - */ -const directoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { - /** - * @param {ExporterOptions} [options] - * @returns {UnixfsV1DirectoryContent} - */ - async function * yieldDirectoryContent (options = {}) { - const offset = options.offset || 0 - const length = options.length || node.Links.length - const links = node.Links.slice(offset, length) - - for (const link of links) { - const result = await resolve(link.Hash, link.Name || '', `${path}/${link.Name || ''}`, [], depth + 1, blockstore, options) - - if (result.entry) { - yield result.entry - } - } - } - - return yieldDirectoryContent -} - -export default directoryContent diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts new file mode 100644 index 00000000..dedcc8a2 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/directory.ts @@ -0,0 +1,21 @@ +import type { ExporterOptions, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js' + +const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => { + async function * yieldDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent { + const offset = options.offset ?? 0 + const length = options.length ?? node.Links.length + const links = node.Links.slice(offset, length) + + for (const link of links) { + const result = await resolve(link.Hash, link.Name ?? '', `${path}/${link.Name ?? 
''}`, [], depth + 1, blockstore, options) + + if (result.entry != null) { + yield result.entry + } + } + } + + return yieldDirectoryContent +} + +export default directoryContent diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts similarity index 69% rename from packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js rename to packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts index 4f11cb44..b3798226 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/file.ts @@ -4,31 +4,15 @@ import { UnixFS } from 'ipfs-unixfs' import errCode from 'err-code' import * as dagPb from '@ipld/dag-pb' import * as raw from 'multiformats/codecs/raw' -import { pushable } from 'it-pushable' +import { Pushable, pushable } from 'it-pushable' import parallel from 'it-parallel' import { pipe } from 'it-pipe' import map from 'it-map' import PQueue from 'p-queue' +import type { Blockstore } from 'interface-blockstore' +import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver } from '../../../index.js' -/** - * @typedef {import('../../../types').ExporterOptions} ExporterOptions - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('@ipld/dag-pb').PBNode} PBNode - * @typedef {import('@ipld/dag-pb').PBLink} PBLink - */ - -/** - * @param {Blockstore} blockstore - * @param {PBNode | Uint8Array} node - * @param {import('it-pushable').Pushable} queue - * @param {number} streamPosition - * @param {number} start - * @param {number} end - * @param {PQueue} walkQueue - * @param {ExporterOptions} options - * @returns {Promise} - */ -async function walkDAG (blockstore, node, queue, streamPosition, start, end, walkQueue, options) { +async function walkDAG (blockstore: Blockstore, node: dagPb.PBNode | Uint8Array, queue: Pushable, streamPosition: bigint, start: bigint, end: bigint, walkQueue: PQueue, options: ExporterOptions): Promise { // a `raw` node if (node instanceof Uint8Array) { queue.push(extractDataFromBlock(node, streamPosition, start, end)) @@ -40,12 +24,11 @@ async function walkDAG (blockstore, node, queue, streamPosition, start, end, wal throw errCode(new Error('no data in PBNode'), 'ERR_NOT_UNIXFS') } - /** @type {UnixFS} */ - let file + let file: UnixFS try { file = UnixFS.unmarshal(node.Data) - } catch (/** @type {any} */ err) { + } catch (err: any) { throw errCode(err, 'ERR_NOT_UNIXFS') } @@ -56,11 +39,14 @@ async function walkDAG (blockstore, node, queue, streamPosition, start, end, wal queue.push(buf) - streamPosition += buf.byteLength + streamPosition += BigInt(buf.byteLength) } - /** @type {Array<{ link: PBLink, blockStart: number }>} */ - const childOps = [] + const childOps: Array<{ link: dagPb.PBLink, blockStart: bigint }> = [] + + if (node.Links.length !== file.blockSizes.length) { + throw errCode(new Error('Inconsistent block sizes and dag links'), 'ERR_NOT_UNIXFS') + } for (let i = 0; i < node.Links.length; i++) { const childLink = node.Links[i] @@ -102,8 +88,7 @@ async function walkDAG (blockstore, node, queue, streamPosition, start, end, wal }), async (source) => { for await (const { link, block, blockStart } of source) { - /** @type {PBNode | Uint8Array} */ - let child + let child: dagPb.PBNode | Uint8Array switch (link.Hash.code) { case dagPb.code: child = dagPb.decode(block) @@ -116,7 +101,7 @@ async function walkDAG 
(blockstore, node, queue, streamPosition, start, end, wal return } - walkQueue.add(async () => { + void walkQueue.add(async () => { await walkDAG(blockstore, child, queue, blockStart, start, end, walkQueue, options) }) } @@ -124,14 +109,8 @@ async function walkDAG (blockstore, node, queue, streamPosition, start, end, wal ) } -/** - * @type {import('../').UnixfsV1Resolver} - */ -const fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { - /** - * @param {ExporterOptions} options - */ - async function * yieldFileContent (options = {}) { +const fileContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => { + async function * yieldFileContent (options: ExporterOptions = {}): UnixfsV1FileContent { const fileSize = unixfs.fileSize() if (fileSize === undefined) { @@ -143,7 +122,7 @@ const fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { length } = validateOffsetAndLength(fileSize, options.offset, options.length) - if (length === 0) { + if (length === 0n) { return } @@ -154,22 +133,22 @@ const fileContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { }) const queue = pushable() - walkQueue.add(async () => { - await walkDAG(blockstore, node, queue, 0, offset, offset + length, walkQueue, options) + void walkQueue.add(async () => { + await walkDAG(blockstore, node, queue, 0n, offset, offset + length, walkQueue, options) }) walkQueue.on('error', error => { queue.end(error) }) - let read = 0 + let read = 0n for await (const buf of queue) { if (buf == null) { continue } - read += buf.byteLength + read += BigInt(buf.byteLength) if (read === length) { queue.end() diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts similarity index 63% rename from packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js rename to packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts index 8e3e8664..54b67382 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts @@ -1,4 +1,6 @@ -import { decode } from '@ipld/dag-pb' +import { decode, PBNode } from '@ipld/dag-pb' +import type { Blockstore } from 'interface-blockstore' +import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js' /** * @typedef {import('interface-blockstore').Blockstore} Blockstore @@ -9,38 +11,21 @@ import { decode } from '@ipld/dag-pb' * @typedef {import('@ipld/dag-pb').PBNode} PBNode */ -/** - * @type {UnixfsV1Resolver} - */ -const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { - /** - * @param {ExporterOptions} options - * - */ - function yieldHamtDirectoryContent (options = {}) { +const hamtShardedDirectoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => { + function yieldHamtDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent { return listDirectory(node, path, resolve, depth, blockstore, options) } return yieldHamtDirectoryContent } -/** - * @param {PBNode} node - * @param {string} path - * @param {Resolve} resolve - * @param {number} depth - * @param {Blockstore} blockstore - * @param {ExporterOptions} options - * - * @returns {UnixfsV1DirectoryContent} - */ -async function * 
listDirectory (node, path, resolve, depth, blockstore, options) { +async function * listDirectory (node: PBNode, path: string, resolve: Resolve, depth: number, blockstore: Blockstore, options: ExporterOptions): UnixfsV1DirectoryContent { const links = node.Links for (const link of links) { const name = link.Name != null ? link.Name.substring(2) : null - if (name) { + if (name != null && name !== '') { const result = await resolve(link.Hash, name, `${path}/${name}`, [], depth + 1, blockstore, options) yield result.entry diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js deleted file mode 100644 index 5c622248..00000000 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.js +++ /dev/null @@ -1,34 +0,0 @@ -import extractDataFromBlock from '../../../utils/extract-data-from-block.js' -import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js' - -/** - * @typedef {import('../../../types').ExporterOptions} ExporterOptions - * @typedef {import('../../../types').UnixfsV1Resolver} UnixfsV1Resolver - */ - -/** - * @type {UnixfsV1Resolver} - */ -const rawContent = (cid, node, unixfs, path, resolve, depth, blockstore) => { - /** - * @param {ExporterOptions} options - */ - function * yieldRawContent (options = {}) { - if (!unixfs.data) { - throw new Error('Raw block had no data') - } - - const size = unixfs.data.length - - const { - offset, - length - } = validateOffsetAndLength(size, options.offset, options.length) - - yield extractDataFromBlock(unixfs.data, 0, offset, offset + length) - } - - return yieldRawContent -} - -export default rawContent diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.ts b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.ts new file mode 100644 index 00000000..e9f2a37e --- /dev/null +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/content/raw.ts @@ -0,0 +1,24 @@ +import type { ExporterOptions, UnixfsV1Resolver } from '../../../index.js' +import extractDataFromBlock from '../../../utils/extract-data-from-block.js' +import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js' + +const rawContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => { + function * yieldRawContent (options: ExporterOptions = {}): Generator { + if (unixfs.data == null) { + throw new Error('Raw block had no data') + } + + const size = unixfs.data.length + + const { + offset, + length + } = validateOffsetAndLength(size, options.offset, options.length) + + yield extractDataFromBlock(unixfs.data, 0n, offset, offset + length) + } + + return yieldRawContent +} + +export default rawContent diff --git a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts similarity index 60% rename from packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js rename to packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts index 1da18e6c..127694be 100644 --- a/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.js +++ b/packages/ipfs-unixfs-exporter/src/resolvers/unixfs-v1/index.ts @@ -1,33 +1,20 @@ import errCode from 'err-code' import { UnixFS } from 'ipfs-unixfs' import findShardCid from '../../utils/find-cid-in-shard.js' -import { decode } from '@ipld/dag-pb' - +import { decode, PBNode } from '@ipld/dag-pb' import contentFile from './content/file.js' import contentDirectory 
from './content/directory.js' import contentHamtShardedDirectory from './content/hamt-sharded-directory.js' +import type { CID } from 'multiformats/cid' +import type { Resolver, UnixfsV1Resolver } from '../../index.js' -/** - * @typedef {import('../../types').Resolve} Resolve - * @typedef {import('../../types').Resolver} Resolver - * @typedef {import('../../types').UnixfsV1Resolver} UnixfsV1Resolver - * @typedef {import('@ipld/dag-pb').PBNode} PBNode - */ - -/** - * @param {PBNode} node - * @param {string} name - */ -const findLinkCid = (node, name) => { +const findLinkCid = (node: PBNode, name: string): CID | undefined => { const link = node.Links.find(link => link.Name === name) - return link && link.Hash + return link?.Hash } -/** - * @type {{ [key: string]: UnixfsV1Resolver }} - */ -const contentExporters = { +const contentExporters: Record = { raw: contentFile, file: contentFile, directory: contentDirectory, @@ -40,16 +27,14 @@ const contentExporters = { } } -/** - * @type {Resolver} - */ -const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { +// @ts-expect-error types are wrong +const unixFsResolver: Resolver = async (cid, name, path, toResolve, resolve, depth, blockstore, options) => { const block = await blockstore.get(cid, options) const node = decode(block) let unixfs let next - if (!name) { + if (name == null) { name = cid.toString() } @@ -59,26 +44,26 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blocks try { unixfs = UnixFS.unmarshal(node.Data) - } catch (/** @type {any} */ err) { + } catch (err: any) { // non-UnixFS dag-pb node? It could happen. throw errCode(err, 'ERR_NOT_UNIXFS') } - if (!path) { + if (path == null) { path = name } - if (toResolve.length) { + if (toResolve.length > 0) { let linkCid - if (unixfs && unixfs.type === 'hamt-sharded-directory') { + if (unixfs?.type === 'hamt-sharded-directory') { // special case - unixfs v1 hamt shards linkCid = await findShardCid(node, toResolve[0], blockstore) } else { linkCid = findLinkCid(node, toResolve[0]) } - if (!linkCid) { + if (linkCid == null) { throw errCode(new Error('file does not exist'), 'ERR_NOT_FOUND') } @@ -89,19 +74,41 @@ const unixFsResolver = async (cid, name, path, toResolve, resolve, depth, blocks next = { cid: linkCid, toResolve, - name: nextName || '', + name: nextName ?? '', path: nextPath } } + const content = contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore) + + if (content == null) { + throw errCode(new Error('could not find content exporter'), 'ERR_NOT_FOUND') + } + + if (unixfs.isDirectory()) { + return { + entry: { + type: 'directory', + name, + path, + cid, + content, + unixfs, + depth, + node, + size: unixfs.fileSize() + }, + next + } + } + return { entry: { - type: unixfs.isDirectory() ? 
'directory' : 'file', + type: 'file', name, path, cid, - // @ts-ignore - content: contentExporters[unixfs.type](cid, node, unixfs, path, resolve, depth, blockstore), + content, unixfs, depth, node, diff --git a/packages/ipfs-unixfs-exporter/src/types.ts b/packages/ipfs-unixfs-exporter/src/types.ts deleted file mode 100644 index e1210b4b..00000000 --- a/packages/ipfs-unixfs-exporter/src/types.ts +++ /dev/null @@ -1,77 +0,0 @@ -import type { CID } from 'multiformats/cid' -import type { UnixFS } from 'ipfs-unixfs' -import type { PBNode } from '@ipld/dag-pb' -import type { Blockstore } from 'interface-blockstore' -import type { Bucket } from 'hamt-sharding' - -export interface ExporterOptions { - offset?: number - length?: number - signal?: AbortSignal - timeout?: number -} - -export interface Exportable { - type: 'file' | 'directory' | 'object' | 'raw' | 'identity' - name: string - path: string - cid: CID - depth: number - size: number - content: (options?: ExporterOptions) => AsyncIterable -} - -export interface UnixFSFile extends Exportable { - type: 'file' - unixfs: UnixFS - node: PBNode -} - -export interface UnixFSDirectory extends Exportable { - type: 'directory' - unixfs: UnixFS - node: PBNode -} - -export interface ObjectNode extends Exportable { - type: 'object' - node: Uint8Array -} - -export interface RawNode extends Exportable { - type: 'raw' - node: Uint8Array -} - -export interface IdentityNode extends Exportable { - type: 'identity' - node: Uint8Array -} - -export type UnixFSEntry = UnixFSFile | UnixFSDirectory | ObjectNode | RawNode | IdentityNode - -export interface NextResult { - cid: CID - name: string - path: string - toResolve: string[] -} - -export interface ResolveResult { - entry: UnixFSEntry - next?: NextResult -} - -export interface Resolve { (cid: CID, name: string, path: string, toResolve: string[], depth: number, blockstore: Blockstore, options: ExporterOptions): Promise } -export interface Resolver { (cid: CID, name: string, path: string, toResolve: string[], resolve: Resolve, depth: number, blockstore: Blockstore, options: ExporterOptions): Promise } - -export type UnixfsV1FileContent = AsyncIterable | Iterable -export type UnixfsV1DirectoryContent = AsyncIterable | Iterable -export type UnixfsV1Content = UnixfsV1FileContent | UnixfsV1DirectoryContent -export interface UnixfsV1Resolver { (cid: CID, node: PBNode, unixfs: UnixFS, path: string, resolve: Resolve, depth: number, blockstore: Blockstore): (options: ExporterOptions) => UnixfsV1Content } - -export interface ShardTraversalContext { - hamtDepth: number - rootBucket: Bucket - lastBucket: Bucket -} diff --git a/packages/ipfs-unixfs-exporter/src/utils/extract-data-from-block.js b/packages/ipfs-unixfs-exporter/src/utils/extract-data-from-block.ts similarity index 60% rename from packages/ipfs-unixfs-exporter/src/utils/extract-data-from-block.js rename to packages/ipfs-unixfs-exporter/src/utils/extract-data-from-block.ts index f727d92d..ab75562c 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/extract-data-from-block.js +++ b/packages/ipfs-unixfs-exporter/src/utils/extract-data-from-block.ts @@ -1,12 +1,7 @@ -/** - * @param {Uint8Array} block - * @param {number} blockStart - * @param {number} requestedStart - * @param {number} requestedEnd - */ -function extractDataFromBlock (block, blockStart, requestedStart, requestedEnd) { - const blockLength = block.length - const blockEnd = blockStart + blockLength + +function extractDataFromBlock (block: Uint8Array, blockStart: bigint, requestedStart: bigint, 
requestedEnd: bigint): Uint8Array { + const blockLength = BigInt(block.length) + const blockEnd = BigInt(blockStart + blockLength) if (requestedStart >= blockEnd || requestedEnd < blockStart) { // If we are looking for a byte range that is starts after the start of the block, @@ -16,12 +11,12 @@ function extractDataFromBlock (block, blockStart, requestedStart, requestedEnd) if (requestedEnd >= blockStart && requestedEnd < blockEnd) { // If the end byte is in the current block, truncate the block to the end byte - block = block.subarray(0, requestedEnd - blockStart) + block = block.subarray(0, Number(requestedEnd - blockStart)) } if (requestedStart >= blockStart && requestedStart < blockEnd) { // If the start byte is in the current block, skip to the start byte - block = block.subarray(requestedStart - blockStart) + block = block.subarray(Number(requestedStart - blockStart)) } return block diff --git a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts similarity index 58% rename from packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js rename to packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts index d089532d..a0e36ee1 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.js +++ b/packages/ipfs-unixfs-exporter/src/utils/find-cid-in-shard.ts @@ -1,21 +1,13 @@ -import { Bucket, createHAMT } from 'hamt-sharding' -import { decode } from '@ipld/dag-pb' +import { Bucket, BucketPosition, createHAMT } from 'hamt-sharding' +import { decode, PBLink, PBNode } from '@ipld/dag-pb' import { murmur3128 } from '@multiformats/murmur3' - -/** - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('multiformats/cid').CID} CID - * @typedef {import('../types').ExporterOptions} ExporterOptions - * @typedef {import('@ipld/dag-pb').PBNode} PBNode - * @typedef {import('@ipld/dag-pb').PBLink} PBLink - */ +import type { Blockstore } from 'interface-blockstore' +import type { ExporterOptions, ShardTraversalContext } from '../index.js' +import type { CID } from 'multiformats/cid' // FIXME: this is copy/pasted from ipfs-unixfs-importer/src/options.js -/** - * @param {Uint8Array} buf - */ -const hashFn = async function (buf) { +const hashFn = async function (buf: Uint8Array): Promise { return (await murmur3128.encode(buf)) // Murmur3 outputs 128 bit but, accidentally, IPFS Go's // implementation only uses the first 64, so we must do the same @@ -25,14 +17,9 @@ const hashFn = async function (buf) { .reverse() } -/** - * @param {PBLink[]} links - * @param {Bucket} bucket - * @param {Bucket} rootBucket - */ -const addLinksToHamtBucket = (links, bucket, rootBucket) => { - return Promise.all( - links.map(link => { +const addLinksToHamtBucket = async (links: PBLink[], bucket: Bucket, rootBucket: Bucket): Promise => { + await Promise.all( + links.map(async link => { if (link.Name == null) { // TODO(@rvagg): what do? 
this is technically possible throw new Error('Unexpected Link without a Name') @@ -40,21 +27,19 @@ const addLinksToHamtBucket = (links, bucket, rootBucket) => { if (link.Name.length === 2) { const pos = parseInt(link.Name, 16) - return bucket._putObjectAt(pos, new Bucket({ + bucket._putObjectAt(pos, new Bucket({ hash: rootBucket._options.hash, bits: rootBucket._options.bits }, bucket, pos)) + return } - return rootBucket.put(link.Name.substring(2), true) + await rootBucket.put(link.Name.substring(2), true) }) ) } -/** - * @param {number} position - */ -const toPrefix = (position) => { +const toPrefix = (position: number): string => { return position .toString(16) .toUpperCase() @@ -62,14 +47,11 @@ const toPrefix = (position) => { .substring(0, 2) } -/** - * @param {import('hamt-sharding').BucketPosition} position - */ -const toBucketPath = (position) => { +const toBucketPath = (position: BucketPosition): Array> => { let bucket = position.bucket const path = [] - while (bucket._parent) { + while (bucket._parent != null) { path.push(bucket) bucket = bucket._parent @@ -80,19 +62,9 @@ const toBucketPath = (position) => { return path.reverse() } -/** - * @typedef {import('../types').ShardTraversalContext} ShardTraversalContext - * - * @param {PBNode} node - * @param {string} name - * @param {Blockstore} blockstore - * @param {ShardTraversalContext} [context] - * @param {ExporterOptions} [options] - * @returns {Promise} - */ -const findShardCid = async (node, name, blockstore, context, options) => { - if (!context) { - const rootBucket = createHAMT({ +const findShardCid = async (node: PBNode, name: string, blockstore: Blockstore, context?: ShardTraversalContext, options?: ExporterOptions): Promise => { + if (context == null) { + const rootBucket = createHAMT({ hashFn }) @@ -128,7 +100,7 @@ const findShardCid = async (node, name, blockstore, context, options) => { return false } - if (entryName && entryName !== name) { + if (entryName !== '' && entryName !== name) { // not the entry we're looking for return false } @@ -136,8 +108,8 @@ const findShardCid = async (node, name, blockstore, context, options) => { return true }) - if (!link) { - return null + if (link == null) { + return } if (link.Name != null && link.Name.substring(2) === name) { @@ -149,7 +121,7 @@ const findShardCid = async (node, name, blockstore, context, options) => { const block = await blockstore.get(link.Hash, options) node = decode(block) - return findShardCid(node, name, blockstore, context, options) + return await findShardCid(node, name, blockstore, context, options) } export default findShardCid diff --git a/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.js b/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts similarity index 54% rename from packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.js rename to packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts index 80e0ffb8..0984aea9 100644 --- a/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.js +++ b/packages/ipfs-unixfs-exporter/src/utils/validate-offset-and-length.ts @@ -1,16 +1,14 @@ import errCode from 'err-code' -/** - * @param {number} size - * @param {number} [offset] - * @param {number} [length] - */ -const validateOffsetAndLength = (size, offset, length) => { - if (!offset) { - offset = 0 +const validateOffsetAndLength = (size: number | bigint, offset: number | bigint = 0, length: number | bigint = size): { offset: bigint, length: bigint } => { + offset = BigInt(offset ?? 
0) + length = BigInt(length ?? size) + + if (offset == null) { + offset = 0n } - if (offset < 0) { + if (offset < 0n) { throw errCode(new Error('Offset must be greater than or equal to 0'), 'ERR_INVALID_PARAMS') } @@ -18,16 +16,16 @@ const validateOffsetAndLength = (size, offset, length) => { throw errCode(new Error('Offset must be less than the file size'), 'ERR_INVALID_PARAMS') } - if (!length && length !== 0) { - length = size - offset + if (length == null) { + length = BigInt(size) - offset } - if (length < 0) { + if (length < 0n) { throw errCode(new Error('Length must be greater than or equal to 0'), 'ERR_INVALID_PARAMS') } if (offset + length > size) { - length = size - offset + length = BigInt(size) - offset } return { diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts similarity index 90% rename from packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js rename to packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts index c744ddd1..800f852d 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts @@ -1,4 +1,5 @@ /* eslint-env mocha */ + import { expect } from 'aegir/chai' import { UnixFS } from 'ipfs-unixfs' import all from 'it-all' @@ -7,49 +8,39 @@ import randomBytes from 'it-buffer-stream' import { exporter, walkPath } from '../src/index.js' import { importer } from 'ipfs-unixfs-importer' import * as dagPb from '@ipld/dag-pb' -import blockApi from './helpers/block.js' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import asAsyncIterable from './helpers/as-async-iterable.js' import { CID } from 'multiformats/cid' import { sha256 } from 'multiformats/hashes/sha2' +import { MemoryBlockstore } from 'blockstore-core' const SHARD_SPLIT_THRESHOLD = 10 describe('exporter sharded', function () { this.timeout(30000) - const block = blockApi() + const block = new MemoryBlockstore() - /** - * @param {number} numFiles - */ - const createShard = (numFiles) => { - return createShardWithFileNames(numFiles, (index) => `file-${index}`) + const createShard = async (numFiles: number): Promise => { + return await createShardWithFileNames(numFiles, (index) => `file-${index}`) } - /** - * @param {number} numFiles - * @param {(index: number) => string} fileName - */ - const createShardWithFileNames = (numFiles, fileName) => { + const createShardWithFileNames = async (numFiles: number, fileName: (index: number) => string): Promise => { const files = new Array(numFiles).fill(0).map((_, index) => ({ path: fileName(index), content: asAsyncIterable(Uint8Array.from([0, 1, 2, 3, 4, index])) })) - return createShardWithFiles(files) + return await createShardWithFiles(files) } - /** - * @param {{ path: string, content: AsyncIterable }[] } files - */ - const createShardWithFiles = async (files) => { + const createShardWithFiles = async (files: Array<{ path: string, content: AsyncIterable }>): Promise => { const result = await last(importer(files, block, { shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD, wrapWithDirectory: true })) - if (!result) { + if (result == null) { throw new Error('Failed to make shard') } @@ -57,8 +48,7 @@ describe('exporter sharded', function () { } it('exports a sharded directory', async () => { - /** @type {{ [key: string]: { content: Uint8Array, cid?: CID }}} */ - const files = {} + const files: Record = {} // needs to result in a block that is larger than SHARD_SPLIT_THRESHOLD bytes for (let 
i = 0; i < 100; i++) { @@ -77,13 +67,13 @@ describe('exporter sharded', function () { const dirCid = imported.pop()?.cid - if (!dirCid) { + if (dirCid == null) { throw new Error('No directory CID found') } // store the CIDs, we will validate them later imported.forEach(imported => { - if (!imported.path) { + if (imported.path == null) { throw new Error('Imported file did not have a path') } @@ -92,7 +82,7 @@ describe('exporter sharded', function () { const encodedBlock = await block.get(dirCid) const dir = dagPb.decode(encodedBlock) - if (!dir.Data) { + if (dir.Data == null) { throw Error('PBNode Data undefined') } const dirMetadata = UnixFS.unmarshal(dir.Data) @@ -107,7 +97,7 @@ describe('exporter sharded', function () { throw new Error('Expected directory') } - if (!exported.content) { + if (exported.content == null) { throw new Error('No content found on exported entry') } @@ -124,7 +114,7 @@ describe('exporter sharded', function () { const data = uint8ArrayConcat(await all(dirFile.content())) // validate the CID - // @ts-ignore - files[dirFile.name].cid is defined + // @ts-expect-error - files[dirFile.name].cid is defined expect(files[dirFile.name].cid.toString()).that.deep.equals(dirFile.cid.toString()) // validate the exported file content diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts similarity index 94% rename from packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js rename to packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts index 86ffc82d..284211b3 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts @@ -1,19 +1,19 @@ /* eslint-env mocha */ + import { expect } from 'aegir/chai' import { importer } from 'ipfs-unixfs-importer' import all from 'it-all' import last from 'it-last' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import randomBytes from 'it-buffer-stream' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import asAsyncIterable from './helpers/as-async-iterable.js' - import { exporter, walkPath } from './../src/index.js' const ONE_MEG = Math.pow(1024, 2) describe('exporter subtree', () => { - const block = blockApi() + const block = new MemoryBlockstore() it('exports a file 2 levels down', async () => { const content = uint8ArrayConcat(await all(randomBytes(ONE_MEG))) @@ -26,7 +26,7 @@ describe('exporter subtree', () => { content: asAsyncIterable(content) }], block)) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } @@ -56,7 +56,7 @@ describe('exporter subtree', () => { path: './level-1/level-2' }], block)) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } @@ -89,13 +89,13 @@ describe('exporter subtree', () => { content: randomBytes(ONE_MEG) }], block)) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } try { await exporter(`${imported.cid}/doesnotexist`, block) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) @@ -116,7 +116,7 @@ describe('exporter subtree', () => { content: asAsyncIterable(content) }], block)) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.js b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts similarity index 89% rename from 
packages/ipfs-unixfs-exporter/test/exporter.spec.js rename to packages/ipfs-unixfs-exporter/test/exporter.spec.ts index 426384e9..3cbf1c9e 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.js +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts @@ -14,46 +14,36 @@ import all from 'it-all' import last from 'it-last' import first from 'it-first' import randomBytes from 'it-buffer-stream' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import asAsyncIterable from './helpers/as-async-iterable.js' import delay from 'delay' +import type { PBNode } from '@ipld/dag-pb' +import type { Blockstore } from 'interface-blockstore' const ONE_MEG = Math.pow(1024, 2) -/** - * @typedef {import('@ipld/dag-pb').PBLink} PBLink - * @typedef {import('@ipld/dag-pb').PBNode} PBNode - */ - describe('exporter', () => { - const block = blockApi() - /** @type {Uint8Array} */ - let bigFile - /** @type {Uint8Array} */ - let smallFile + const block = new MemoryBlockstore() + let bigFile: Uint8Array + let smallFile: Uint8Array before(async () => { bigFile = uint8ArrayConcat(await all(randomBytes(ONE_MEG * 1.2))) smallFile = uint8ArrayConcat(await all(randomBytes(200))) }) - /** - * @param {object} [options] - * @param {string} [options.type='file'] - * @param {Uint8Array} [options.content] - * @param {PBLink[]} [options.links=[]] - */ - async function dagPut (options = {}) { - options.type = options.type || 'file' - options.content = options.content || Uint8Array.from([0x01, 0x02, 0x03]) - options.links = options.links || [] + async function dagPut (options: { type?: string, content?: Uint8Array, links?: dagPb.PBLink[] } = {}): Promise<{ file: UnixFS, node: PBNode, cid: CID }> { + options.type = options.type ?? 'file' + options.content = options.content ?? Uint8Array.from([0x01, 0x02, 0x03]) + options.links = options.links ?? [] const file = new UnixFS({ type: options.type, - data: options.content + data: options.content, + blockSizes: options.links.map(l => BigInt(l.Tsize ?? 
0)) }) const node = { Data: file.marshal(), @@ -63,18 +53,12 @@ describe('exporter', () => { const cid = CID.createV0(await sha256.digest(buf)) await block.put(cid, buf) - return { file: file, node: node, cid } + return { file, node, cid } } - /** - * @param {object} options - * @param {Uint8Array} options.file - * @param {'balanced' | 'flat' | 'trickle'} [options.strategy='balanced'] - * @param {string} [options.path='/foo'] - * @param {number} [options.maxChunkSize] - * @param {boolean} [options.rawLeaves] - */ - async function addTestFile ({ file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }) { + async function addTestFile (options: { file: Uint8Array, strategy?: 'balanced' | 'flat' | 'trickle', path?: string, maxChunkSize?: number, rawLeaves?: boolean }): Promise { + const { file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves } = options + const result = await all(importer([{ path, content: asAsyncIterable(file) @@ -87,17 +71,8 @@ describe('exporter', () => { return result[0].cid } - /** - * @param {object} options - * @param {Uint8Array} options.file - * @param {number} [options.offset] - * @param {number} [options.length] - * @param {'balanced' | 'flat' | 'trickle'} [options.strategy='balanced'] - * @param {string} [options.path='/foo'] - * @param {number} [options.maxChunkSize] - * @param {boolean} [options.rawLeaves] - */ - async function addAndReadTestFile ({ file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves }) { + async function addAndReadTestFile (options: { file: Uint8Array, offset?: number, length?: number, strategy?: 'balanced' | 'flat' | 'trickle', path?: string, maxChunkSize?: number, rawLeaves?: boolean }): Promise { + const { file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves } = options const cid = await addTestFile({ file, strategy, path, maxChunkSize, rawLeaves }) const entry = await exporter(cid, block) @@ -110,10 +85,7 @@ describe('exporter', () => { }))) } - /** - * @param {'balanced' | 'flat' | 'trickle'} strategy - */ - async function checkBytesThatSpanBlocks (strategy) { + async function checkBytesThatSpanBlocks (strategy: 'balanced' | 'flat' | 'trickle'): Promise { const bytesInABlock = 262144 const bytes = new Uint8Array(bytesInABlock + 100) @@ -131,18 +103,13 @@ describe('exporter', () => { expect(data).to.deep.equal(Uint8Array.from([1, 2, 3])) } - /** - * @param {'file' | 'directory' | 'raw'} type - * @param {Uint8Array | ArrayLike | undefined} data - * @param {{ node: PBNode, cid: CID }[]} children - */ - async function createAndPersistNode (type, data, children) { - const file = new UnixFS({ type, data: data ? Uint8Array.from(data) : undefined }) + async function createAndPersistNode (type: 'file' | 'directory' | 'raw', data: Uint8Array | ArrayLike | undefined, children: Array<{ node: PBNode, cid: CID }>): Promise<{ node: PBNode, cid: CID }> { + const file = new UnixFS({ type, data: (data != null) ? 
Uint8Array.from(data) : undefined }) const links = [] for (let i = 0; i < children.length; i++) { const child = children[i] - // @ts-ignore - we can guarantee that it's not undefined + // @ts-expect-error - we can guarantee that it's not undefined const leaf = UnixFS.unmarshal(child.node.Data) file.addBlockSize(leaf.fileSize()) @@ -173,7 +140,7 @@ describe('exporter', () => { const result = await dagPut() const encodedBlock = await block.get(result.cid) const node = dagPb.decode(encodedBlock) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const unmarsh = UnixFS.unmarshal(node.Data) @@ -235,12 +202,12 @@ describe('exporter', () => { const encodedBlock = await block.get(result.cid) const node = dagPb.decode(encodedBlock) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const unmarsh = UnixFS.unmarshal(node.Data) - if (!unmarsh.data) { + if (unmarsh.data == null) { throw new Error('Unexpected data') } @@ -284,8 +251,8 @@ describe('exporter', () => { const file = new UnixFS({ type: 'file' }) - file.addBlockSize(5) - file.addBlockSize(5) + file.addBlockSize(5n) + file.addBlockSize(5n) const fileNode = dagPb.prepare({ Data: file.marshal(), @@ -327,7 +294,7 @@ describe('exporter', () => { }] }) - if (!result.file.data) { + if (result.file.data == null) { throw new Error('Expected data') } @@ -355,13 +322,13 @@ describe('exporter', () => { maxChunkSize: 2 }) - /** @type {import('interface-blockstore').Blockstore} */ - const blockStore = { + // @ts-expect-error incomplete implementation + const blockStore: Blockstore = { ...block, - async get (cid, opts) { + async get (cid: CID) { await delay(Math.random() * 10) - return block.get(cid, opts) + return await block.get(cid) } } @@ -390,7 +357,7 @@ describe('exporter', () => { } expect(file).to.have.property('path', cid.toString()) - expect(file.unixfs.fileSize()).to.equal(ONE_MEG * 6) + expect(file.unixfs.fileSize()).to.equal(BigInt(ONE_MEG * 6)) }) it('exports a chunk of a large file > 5mb', async function () { @@ -466,7 +433,7 @@ describe('exporter', () => { path: './level-1/level-2' }], block)) - if (!importedDir) { + if (importedDir == null) { throw new Error('Nothing imported') } @@ -514,7 +481,7 @@ describe('exporter', () => { path: './level-1' }], block)) - if (!importedDir) { + if (importedDir == null) { throw new Error('Nothing imported') } @@ -618,7 +585,7 @@ describe('exporter', () => { length }) throw new Error('Should not have got this far') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.message).to.equal('Length must be greater than or equal to 0') expect(err.code).to.equal('ERR_INVALID_PARAMS') } @@ -637,7 +604,7 @@ describe('exporter', () => { offset }) throw new Error('Should not have got this far') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.message).to.equal('Offset must be greater than or equal to 0') expect(err.code).to.equal('ERR_INVALID_PARAMS') } @@ -656,7 +623,7 @@ describe('exporter', () => { offset }) throw new Error('Should not have got this far') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.message).to.equal('Offset must be less than the file size') expect(err.code).to.equal('ERR_INVALID_PARAMS') } @@ -670,7 +637,7 @@ describe('exporter', () => { rawLeaves: true })) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } @@ -698,7 +665,7 @@ describe('exporter', () => { path: 'empty' }], block)) - if (!imported) { + if (imported == null) { 
throw new Error('Nothing imported') } @@ -728,7 +695,7 @@ describe('exporter', () => { offset: -1 }) throw new Error('Should not have got this far') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.message).to.contain('Offset must be greater than or equal to 0') expect(err.code).to.equal('ERR_INVALID_PARAMS') } @@ -769,7 +736,7 @@ describe('exporter', () => { offset: 2, length: -1 }) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.message).to.contain('Length must be greater than or equal to 0') expect(err.code).to.equal('ERR_INVALID_PARAMS') } @@ -781,7 +748,7 @@ describe('exporter', () => { file: Uint8Array.from([0, 1, 2, 3, 4]), offset: 200 }) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.message).to.contain('Offset must be less than the file size') expect(err.code).to.equal('ERR_INVALID_PARAMS') } @@ -860,7 +827,7 @@ describe('exporter', () => { try { await exporter(hash, block) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) @@ -956,7 +923,7 @@ describe('exporter', () => { rawLeaves: true })) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } @@ -981,7 +948,7 @@ describe('exporter', () => { rawLeaves: true })) - if (!imported) { + if (imported == null) { throw new Error('Nothing imported') } @@ -1007,7 +974,7 @@ describe('exporter', () => { try { await exporter(`${cid}/baz`, block) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NO_PROP') } }) @@ -1026,7 +993,7 @@ describe('exporter', () => { throw new Error('Unexpected type') } - return expect(first(exported.content())).to.eventually.deep.equal(node) + return await expect(first(exported.content())).to.eventually.deep.equal(node) }) it('errors when exporting a node with no resolver', async () => { @@ -1034,7 +1001,7 @@ describe('exporter', () => { try { await exporter(`${cid}`, block) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NO_RESOLVER') } }) @@ -1046,7 +1013,7 @@ describe('exporter', () => { try { await exporter(`${cid}/lol`, block) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) @@ -1061,20 +1028,20 @@ describe('exporter', () => { try { await exporter(dagpbCid, block) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_UNIXFS') } }) it('errors we export a unixfs node that has a non-unixfs/dag-pb child', async () => { - const cborBlock = await dagCbor.encode({ foo: 'bar' }) + const cborBlock = dagCbor.encode({ foo: 'bar' }) const cborCid = CID.createV1(dagCbor.code, await sha256.digest(cborBlock)) await block.put(cborCid, cborBlock) const file = new UnixFS({ type: 'file' }) - file.addBlockSize(100) + file.addBlockSize(100n) const dagpbBuffer = dagPb.encode({ Data: file.marshal(), @@ -1095,7 +1062,7 @@ describe('exporter', () => { try { await all(exported.content()) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_UNIXFS') } }) @@ -1123,7 +1090,7 @@ describe('exporter', () => { content: asAsyncIterable(uint8ArrayFromString('hello world')) }], block)) - if (!dir) { + if (dir == null) { throw new Error('Nothing imported') } @@ -1152,7 +1119,7 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash', async () => { const data = uint8ArrayFromString('hello world') - const hash = await identity.digest(data) + const hash 
= identity.digest(data) const cid = CID.create(1, identity.code, hash) const exported = await exporter(cid, block) @@ -1169,7 +1136,7 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with an offset', async () => { const data = uint8ArrayFromString('hello world') - const hash = await identity.digest(data) + const hash = identity.digest(data) const cid = CID.create(1, identity.code, hash) const exported = await exporter(cid, block) @@ -1187,7 +1154,7 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with a length', async () => { const data = uint8ArrayFromString('hello world') - const hash = await identity.digest(data) + const hash = identity.digest(data) const cid = CID.create(1, identity.code, hash) const exported = await exporter(cid, block) @@ -1205,7 +1172,7 @@ describe('exporter', () => { it('exports a CID encoded with the identity hash with an offset and a length', async () => { const data = uint8ArrayFromString('hello world') - const hash = await identity.digest(data) + const hash = identity.digest(data) const cid = CID.create(1, identity.code, hash) const exported = await exporter(cid, block) @@ -1238,14 +1205,9 @@ describe('exporter', () => { // regular test IPLD is offline-only, we need to mimic what happens when // we try to get a block from the network const customBlock = { - /** - * - * @param {CID} cid - * @param {{ signal: AbortSignal }} options - */ - get: (cid, options) => { + get: async (cid: CID, options: { signal: AbortSignal }) => { // promise will never resolve, so reject it when the abort signal is sent - return new Promise((resolve, reject) => { + return await new Promise((resolve, reject) => { options.signal.addEventListener('abort', () => { reject(new Error(message)) }) @@ -1253,7 +1215,7 @@ describe('exporter', () => { } } - // @ts-ignore ipld implementation incomplete + // @ts-expect-error ipld implementation incomplete await expect(exporter(cid, customBlock, { signal: abortController.signal })).to.eventually.be.rejectedWith(message) diff --git a/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js b/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js deleted file mode 100644 index 23e1cdea..00000000 --- a/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.js +++ /dev/null @@ -1,12 +0,0 @@ -/** - * @param {Uint8Array | Uint8Array[]} arr - */ -async function * asAsyncIterable (arr) { - if (!Array.isArray(arr)) { - arr = [arr] - } - - yield * arr -} - -export default asAsyncIterable diff --git a/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.ts b/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.ts new file mode 100644 index 00000000..ef9b811b --- /dev/null +++ b/packages/ipfs-unixfs-exporter/test/helpers/as-async-iterable.ts @@ -0,0 +1,10 @@ + +async function * asAsyncIterable (arr: Uint8Array | Uint8Array[]): AsyncGenerator { + if (!Array.isArray(arr)) { + arr = [arr] + } + + yield * arr +} + +export default asAsyncIterable diff --git a/packages/ipfs-unixfs-exporter/test/helpers/block.js b/packages/ipfs-unixfs-exporter/test/helpers/block.js deleted file mode 100644 index 5072a6e5..00000000 --- a/packages/ipfs-unixfs-exporter/test/helpers/block.js +++ /dev/null @@ -1,48 +0,0 @@ -import errCode from 'err-code' -import { BaseBlockstore } from 'blockstore-core' -import { base58btc } from 'multiformats/bases/base58' - -/** - * @typedef {import('multiformats/cid').CID} CID - */ - -function createBlockApi () { - class MockBlockstore extends 
BaseBlockstore { - constructor () { - super() - - /** @type {{[key: string]: Uint8Array}} */ - this._blocks = {} - } - - /** - * @param {CID} cid - * @param {Uint8Array} block - * @param {any} [options] - */ - async put (cid, block, options = {}) { - this._blocks[base58btc.encode(cid.multihash.bytes)] = block - } - - /** - * @param {CID} cid - * @param {any} [options] - */ - async get (cid, options = {}) { - const bytes = this._blocks[base58btc.encode(cid.multihash.bytes)] - - if (bytes === undefined) { - throw errCode(new Error(`Could not find data for CID '${cid}'`), 'ERR_NOT_FOUND') - } - - return bytes - } - } - - /** @type {import('interface-blockstore').Blockstore} */ - const bs = new MockBlockstore() - - return bs -} - -export default createBlockApi diff --git a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js deleted file mode 100644 index bc7fe5c7..00000000 --- a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.js +++ /dev/null @@ -1,32 +0,0 @@ -import * as dagPb from '@ipld/dag-pb' - -/** - * @typedef {import('@ipld/dag-pb').PBLink} PBLink - */ - -/** - * @param {import('multiformats/cid').CID} cid - * @param {import('interface-blockstore').Blockstore} blockstore - */ -export default function (cid, blockstore) { - /** - * @param {import('multiformats/cid').CID} cid - */ - async function * traverse (cid) { - const block = await blockstore.get(cid) - const node = dagPb.decode(block) - - if (node instanceof Uint8Array || !node.Links.length) { - yield { - node, - cid - } - - return - } - - node.Links.forEach(link => traverse(link.Hash)) - } - - return traverse(cid) -} diff --git a/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts new file mode 100644 index 00000000..564ead38 --- /dev/null +++ b/packages/ipfs-unixfs-exporter/test/helpers/collect-leaf-cids.ts @@ -0,0 +1,23 @@ +import * as dagPb from '@ipld/dag-pb' +import type { Blockstore } from 'interface-blockstore' +import type { CID } from 'multiformats/cid' + +export default function (cid: CID, blockstore: Blockstore): AsyncGenerator<{ node: Uint8Array | dagPb.PBNode, cid: CID }, void, undefined> { + async function * traverse (cid: CID): AsyncGenerator<{ node: dagPb.PBNode, cid: CID }, void, unknown> { + const block = await blockstore.get(cid) + const node = dagPb.decode(block) + + if (node instanceof Uint8Array || (node.Links.length === 0)) { + yield { + node, + cid + } + + return + } + + node.Links.forEach(link => traverse(link.Hash)) + } + + return traverse(cid) +} diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts similarity index 89% rename from packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js rename to packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts index 09f499eb..e5d9bd34 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts @@ -1,23 +1,19 @@ /* eslint-env mocha */ -import { importer } from 'ipfs-unixfs-importer' -import { exporter } from '../src/index.js' +import { importer } from 'ipfs-unixfs-importer' +import { exporter, UnixFSDirectory, UnixFSEntry } from '../src/index.js' import { expect } from 'aegir/chai' import all from 'it-all' import last from 'it-last' -import blockApi from './helpers/block.js' 
+import { MemoryBlockstore } from 'blockstore-core' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import asAsyncIterable from './helpers/as-async-iterable.js' - -/** - * @typedef {import('../src').UnixFSEntry} UnixFSEntry - * @typedef {import('../src').UnixFSDirectory} UnixFSDirectory - */ +import type { CID } from 'multiformats/cid' describe('builder: directory sharding', () => { - const block = blockApi() + const block = new MemoryBlockstore() describe('basic dirbuilder', () => { it('yields a non-sharded dir', async () => { @@ -108,7 +104,7 @@ describe('builder: directory sharding', () => { expect(dir.path).to.be.eql(expectedHash) expect(dir.cid.toString()).to.be.eql(expectedHash) expect(files[0].path).to.be.eql(expectedHash + '/b') - expect(files[0].unixfs.fileSize()).to.be.eql(content.length) + expect(files[0].unixfs.fileSize()).to.be.eql(BigInt(content.length)) const fileContent = uint8ArrayConcat(await all(files[0].content())) @@ -142,10 +138,10 @@ describe('builder: directory sharding', () => { const expectedHash = shardedHash.toString() - expect(dir.path).to.be.eql(expectedHash) - expect(dir.cid.toString()).to.be.eql(expectedHash) + expect(dir.path).to.equal(expectedHash) + expect(dir.cid.toString()).to.equal(expectedHash) expect(files[0].path).to.be.eql(expectedHash + '/b') - expect(files[0].unixfs.fileSize()).to.be.eql(content.length) + expect(files[0].unixfs.fileSize()).to.equal(BigInt(content.length)) const fileContent = uint8ArrayConcat(await all(files[0].content())) @@ -215,8 +211,7 @@ describe('builder: directory sharding', () => { const maxDirs = 2000 const maxDepth = 3 - /** @type {import('multiformats/cid').CID} */ - let rootHash + let rootHash: CID before(async () => { const source = { @@ -226,7 +221,7 @@ describe('builder: directory sharding', () => { let i = 0 let depth = 1 - while (pendingDepth && pending) { + while (pendingDepth > 0 && pending > 0) { i++ const dir = [] @@ -240,7 +235,7 @@ describe('builder: directory sharding', () => { } pending-- - if (!pending) { + if (pending === 0) { pendingDepth-- pending = maxDirs i = 0 @@ -252,7 +247,7 @@ describe('builder: directory sharding', () => { const node = await last(importer(source, block)) - if (!node) { + if (node == null) { throw new Error('Nothing imported') } @@ -264,14 +259,11 @@ describe('builder: directory sharding', () => { it('imports a big dir', async () => { const dir = await exporter(rootHash, block) - /** - * @param {UnixFSEntry} node - */ - const verifyContent = async (node) => { + const verifyContent = async (node: UnixFSEntry): Promise => { if (node.type === 'file') { const bufs = await all(node.content()) const content = uint8ArrayConcat(bufs) - expect(uint8ArrayToString(content)).to.equal(parseInt(node.name || '', 10).toString()) + expect(uint8ArrayToString(content)).to.equal(parseInt(node.name ?? 
'', 10).toString()) } else if (node.type === 'directory') { for await (const entry of node.content()) { await verifyContent(entry) @@ -283,11 +275,7 @@ describe('builder: directory sharding', () => { }) it('exports a big dir', async () => { - /** - * @param {UnixFSEntry} node - * @param {{ [key: string]: { type: 'file', content: string } | UnixFSDirectory }} entries - */ - const collectContent = async (node, entries = {}) => { + const collectContent = async (node: UnixFSEntry, entries: Record = {}): Promise> => { if (node.type === 'file') { entries[node.path] = { type: 'file', @@ -304,11 +292,8 @@ describe('builder: directory sharding', () => { return entries } - /** - * @param {string} path - */ - const eachPath = (path) => { - if (!index) { + const eachPath = (path: string): void => { + if (index === 0) { // first dir if (depth === 1) { expect(path).to.equal(dir.cid.toString()) diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts similarity index 84% rename from packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js rename to packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts index f465a3d2..350e02ed 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts @@ -1,17 +1,19 @@ /* eslint-env mocha */ + import { expect } from 'aegir/chai' import all from 'it-all' import { importer } from 'ipfs-unixfs-importer' -import { exporter } from '../src/index.js' -import blockApi from './helpers/block.js' +import { exporter, UnixFSEntry } from '../src/index.js' +import { MemoryBlockstore } from 'blockstore-core' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { toString as uint8ArrayToString } from 'uint8arrays/to-string' import asAsyncIterable from './helpers/as-async-iterable.js' +import type { CID } from 'multiformats/cid' describe('import and export: directory', () => { const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK' - const block = blockApi() + const block = new MemoryBlockstore() it('imports', async function () { this.timeout(20 * 1000) @@ -81,13 +83,7 @@ describe('import and export: directory', () => { }) }) -/** - * - * @param {import('../src').UnixFSEntry} node - * @param {string} path - * @param {{ path: string, content: string }[]} entries - */ -async function recursiveExport (node, path, entries = []) { +async function recursiveExport (node: UnixFSEntry, path: string, entries: Array<{ path: string, content: string }> = []): Promise> { if (node.type !== 'directory') { throw new Error('Can only recursively export directories') } @@ -106,21 +102,14 @@ async function recursiveExport (node, path, entries = []) { return entries } -/** - * @param {{ path?: string, cid: import('multiformats/cid').CID }} node - */ -function normalizeNode (node) { +function normalizeNode (node: { path?: string, cid: CID }): { path: string, multihash: string } { return { - path: node.path || '', + path: node.path ?? 
'', multihash: node.cid.toString() } } -/** - * @param {{ path: string }} a - * @param {{ path: string }} b - */ -function byPath (a, b) { +function byPath (a: { path: string }, b: { path: string }): number { if (a.path > b.path) return -1 if (a.path < b.path) return 1 return 0 diff --git a/packages/ipfs-unixfs-exporter/test/import-export.spec.js b/packages/ipfs-unixfs-exporter/test/import-export.spec.ts similarity index 72% rename from packages/ipfs-unixfs-exporter/test/import-export.spec.js rename to packages/ipfs-unixfs-exporter/test/import-export.spec.ts index 7e03ce18..8e400375 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export.spec.js +++ b/packages/ipfs-unixfs-exporter/test/import-export.spec.ts @@ -1,14 +1,13 @@ /* eslint-env mocha */ /* eslint max-nested-callbacks: ["error", 5] */ + import { expect } from 'aegir/chai' import loadFixture from 'aegir/fixtures' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import asAsyncIterable from './helpers/as-async-iterable.js' - import { importer } from 'ipfs-unixfs-importer' import { exporter } from '../src/index.js' -/** @type {Uint8Array} */ const bigFile = loadFixture(('test') + '/fixtures/1.2MiB.txt') const strategies = [ @@ -21,18 +20,18 @@ describe('import and export', function () { this.timeout(30 * 1000) strategies.forEach((strategy) => { - const importerOptions = { strategy: strategy } + const importerOptions = { strategy } describe('using builder: ' + strategy, () => { - const block = blockApi() + const block = new MemoryBlockstore() it('imports and exports', async () => { const path = `${strategy}-big.dat` - const values = [{ path: path, content: asAsyncIterable(bigFile) }] + const values = [{ path, content: asAsyncIterable(bigFile) }] - // @ts-ignore + // @ts-expect-error for await (const file of importer(values, block, importerOptions)) { - expect(file.path).to.eql(path) + expect(file.path).to.equal(path) const result = await exporter(file.cid, block) @@ -40,7 +39,7 @@ describe('import and export', function () { throw new Error('Unexpected type') } - expect(result.unixfs.fileSize()).to.eql(bigFile.length) + expect(result.unixfs.fileSize()).to.equal(BigInt(bigFile.length)) } }) }) diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.js b/packages/ipfs-unixfs-exporter/test/importer.spec.ts similarity index 88% rename from packages/ipfs-unixfs-exporter/test/importer.spec.js rename to packages/ipfs-unixfs-exporter/test/importer.spec.ts index 84abcdad..017cd6bd 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.js +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.ts @@ -1,16 +1,16 @@ /* eslint-env mocha */ -import { importer } from 'ipfs-unixfs-importer' +import { importer, UserImporterOptions } from 'ipfs-unixfs-importer' import { exporter, recursive } from '../src/index.js' import extend from 'merge-options' import { expect } from 'aegir/chai' import sinon from 'sinon' -import { UnixFS, parseMtime } from 'ipfs-unixfs' +import { Mtime, UnixFS } from 'ipfs-unixfs' import collectLeafCids from './helpers/collect-leaf-cids.js' import loadFixture from 'aegir/fixtures' import all from 'it-all' import first from 'it-first' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import asAsyncIterable from './helpers/as-async-iterable.js' @@ -18,21 +18,12 @@ import last from 
'it-last' import { CID } from 'multiformats/cid' import { base58btc } from 'multiformats/bases/base58' import { decode } from '@ipld/dag-pb' +import type { Blockstore } from 'interface-blockstore' -/** @type {Uint8Array} */ const bigFile = loadFixture('test/fixtures/1.2MiB.txt') -/** @type {Uint8Array} */ const smallFile = loadFixture('test/fixtures/200Bytes.txt') -/** - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('@ipld/dag-pb').PBNode} PBNode - */ - -/** - * @param {{ path?: string, cid: CID, unixfs?: UnixFS }[]} files - */ -function stringifyMh (files) { +function stringifyMh (files: Array<{ path?: string, cid: CID, unixfs?: UnixFS }>): Array<{ cid: string, path?: string, unixfs?: UnixFS }> { return files.map((file) => { return { ...file, @@ -41,15 +32,12 @@ function stringifyMh (files) { }) } -/** - * @param {Date} date - */ -function dateToTimespec (date) { +function dateToTimespec (date: Date): Mtime { const ms = date.getTime() const secs = Math.floor(ms / 1000) return { - secs, + secs: BigInt(secs), nsecs: (ms - (secs * 1000)) * 1000 } } @@ -57,19 +45,19 @@ function dateToTimespec (date) { const baseFiles = { '200Bytes.txt': { cid: 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8', - size: 200, + size: 200n, type: 'file', path: '200Bytes.txt' }, '1.2MiB.txt': { cid: 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q', - size: 1258000, + size: 1258000n, type: 'file', path: '1.2MiB.txt' }, 'small.txt': { cid: 'QmZMb7HWpbevpcdhbUV1ZZgdji8vh5uQ13KxczChGrK9Rd', - size: 15, + size: 15n, type: 'file', path: 'small.txt' } @@ -86,7 +74,7 @@ const strategyBaseFiles = { trickle: extend({}, baseFiles, { '200Bytes.txt': { cid: 'QmY8bwnoKAKvJ8qtyPhWNxSS6sxiGVTJ9VpdQffs2KB5pE', - size: 200, + size: 200n, type: 'file', path: '200Bytes.txt' }, @@ -97,7 +85,7 @@ const strategyBaseFiles = { }) } -const strategies = [ +const strategies: Array<'flat' | 'balanced' | 'trickle'> = [ 'flat', 'balanced', 'trickle' @@ -108,25 +96,25 @@ const strategyOverrides = { 'foo-big': { cid: 'QmaFgyFJUP4fxFySJCddg2Pj6rpwSywopWk87VEVv52RSj', path: 'foo-big', - size: 1335478, + size: 1335478n, type: 'directory' }, pim: { cid: 'QmY8a78tx6Tk6naDgWCgTsd9EqGrUJRrH7dDyQhjyrmH2i', path: 'pim', - size: 1335744, + size: 1335744n, type: 'directory' }, 'pam/pum': { cid: 'QmY8a78tx6Tk6naDgWCgTsd9EqGrUJRrH7dDyQhjyrmH2i', path: 'pam/pum', - size: 1335744, + size: 1335744n, type: 'directory' }, pam: { cid: 'QmRgdtzNx1H1BPJqShdhvWZ2D4DA2HUgZJ3XLtoXei27Av', path: 'pam', - size: 2671269, + size: 2671269n, type: 'directory' } }, @@ -134,84 +122,77 @@ const strategyOverrides = { 'foo-big': { cid: 'QmaKbhFRy9kcCbcwrLsqYHWMiY44BDYkqTCMpAxDdd2du2', path: 'foo-big', - size: 1334657, + size: 1334657n, type: 'directory' }, pim: { cid: 'QmbWGdnua4YuYpWJb7fE25PRbW9GbKKLqq9Ucmnsg2gxnt', path: 'pim', - size: 1334923, + size: 1334923n, type: 'directory' }, 'pam/pum': { cid: 'QmbWGdnua4YuYpWJb7fE25PRbW9GbKKLqq9Ucmnsg2gxnt', path: 'pam/pum', - size: 1334923, + size: 1334923n, type: 'directory' }, pam: { cid: 'QmSuh47G9Qm3PFv1zziojtHxqCjuurSdtWAzxLxoKJPq2U', path: 'pam', - size: 2669627, + size: 2669627n, type: 'directory' }, '200Bytes.txt with raw leaves': extend({}, baseFiles['200Bytes.txt'], { cid: 'QmagyRwMfYhczYNv5SvcJc8xxXjZQBTTHS2jEqNMva2mYT', - size: 200 + size: 200n }), '200Bytes.txt with raw leaves and mode': extend({}, baseFiles['200Bytes.txt'], { cid: 'QmRYYSoRkL9bh5gzbgHndWjt81TYnM4W7MjzTp8WWioLGB', - size: 200 + size: 200n }), '200Bytes.txt with raw leaves and mtime': extend({}, 
baseFiles['200Bytes.txt'], { cid: 'QmQ1QHqXqgxJ4qjJZouRdYG7pdS6yzdhSAq7dYAu9bN6h4', - size: 200 + size: 200n }), '200Bytes.txt with raw leaves and metadata': extend({}, baseFiles['200Bytes.txt'], { cid: 'QmWUpftnvHN1Ey5iGoaWwMUZPnViXeJctDSUkcvunkahFo', - size: 200 + size: 200n }), 'foo/bar': { cid: 'QmTGMxKPzSGNBDp6jhTwnZxGW6w1S9ciyycRJ4b2qcQaHK', - size: 0, + size: 0n, path: 'foo/bar', type: 'directory' }, foo: { cid: 'Qme4A8fZmwfZESappfPcxSMTZVACiEzhHKtYRMuM1hbkDp', - size: 0, + size: 0n, path: 'foo', type: 'directory' }, 'small.txt': { cid: 'QmXmZ3qT328JxWtQXqrmvma2FmPp7tMdNiSuYvVJ5QRhKs', - size: 15, + size: 15n, type: 'file', path: 'small.txt' } } } -/** - * @param {Blockstore} blockstore - * @param {import('ipfs-unixfs-importer').UserImporterOptions} options - * @param {*} expected - */ -const checkLeafNodeTypes = async (blockstore, options, expected) => { +const checkLeafNodeTypes = async (blockstore: Blockstore, options: UserImporterOptions, expected: any): Promise => { const file = await first(importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) }], blockstore, options)) - if (!file) { + if (file == null) { throw new Error('Nothing imported') } - // @type {Block} const fileBlock = await blockstore.get(file.cid) - /** @type {PBNode} */ const node = decode(fileBlock) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const meta = UnixFS.unmarshal(node.Data) @@ -220,12 +201,12 @@ const checkLeafNodeTypes = async (blockstore, options, expected) => { expect(node.Links.length).to.equal(2) const linkedBlocks = await Promise.all( - node.Links.map(link => blockstore.get(link.Hash)) + node.Links.map(async link => await blockstore.get(link.Hash)) ) linkedBlocks.forEach(bytes => { const node = decode(bytes) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const meta = UnixFS.unmarshal(node.Data) @@ -233,19 +214,14 @@ const checkLeafNodeTypes = async (blockstore, options, expected) => { }) } -/** - * @param {Blockstore} blockstore - * @param {import('ipfs-unixfs-importer').UserImporterOptions} options - * @param {*} expected - */ -const checkNodeLinks = async (blockstore, options, expected) => { +const checkNodeLinks = async (blockstore: Blockstore, options: UserImporterOptions, expected: any): Promise => { for await (const file of importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(100).fill(1)) }], blockstore, options)) { const fileBlock = await blockstore.get(file.cid) const node = decode(fileBlock) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const meta = UnixFS.unmarshal(node.Data) @@ -256,7 +232,6 @@ const checkNodeLinks = async (blockstore, options, expected) => { } strategies.forEach((strategy) => { - // @ts-ignore const baseFiles = strategyBaseFiles[strategy] const defaultResults = extend({}, baseFiles, { 'foo/bar/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { @@ -265,13 +240,13 @@ strategies.forEach((strategy) => { foo: { path: 'foo', cid: 'QmQrb6KKWGo8w7zKfx2JksptY6wN7B2ysSBdKZr4xMU36d', - size: 320, + size: 320n, type: 'directory' }, 'foo/bar': { path: 'foo/bar', cid: 'Qmf5BQbTUyUAvd6Ewct83GYGnE1F6btiC3acLhR8MDxgkD', - size: 270, + size: 270n, type: 'directory' }, 'foo-big/1.2MiB.txt': extend({}, baseFiles['1.2MiB.txt'], { @@ -280,7 +255,7 @@ strategies.forEach((strategy) => { 'foo-big': { path: 'foo-big', cid: 'QmaFgyFJUP4fxFySJCddg2Pj6rpwSywopWk87VEVv52RSj', - size: 1328120, + size: 1328120n, type: 
'directory' }, 'pim/200Bytes.txt': extend({}, baseFiles['200Bytes.txt'], { @@ -292,25 +267,25 @@ strategies.forEach((strategy) => { pim: { path: 'pim', cid: 'QmY8a78tx6Tk6naDgWCgTsd9EqGrUJRrH7dDyQhjyrmH2i', - size: 1328386, + size: 1328386n, type: 'directory' }, 'empty-dir': { path: 'empty-dir', cid: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - size: 4, + size: 4n, type: 'directory' }, 'pam/pum': { cid: 'QmY8a78tx6Tk6naDgWCgTsd9EqGrUJRrH7dDyQhjyrmH2i', path: 'pam/pum', - size: 1328386, + size: 1328386n, type: 'directory' }, pam: { cid: 'QmRgdtzNx1H1BPJqShdhvWZ2D4DA2HUgZJ3XLtoXei27Av', path: 'pam', - size: 2656553, + size: 2656553n, type: 'directory' }, '200Bytes.txt with raw leaves': extend({}, baseFiles['200Bytes.txt'], { @@ -318,26 +293,22 @@ strategies.forEach((strategy) => { }), '200Bytes.txt with raw leaves and mode': extend({}, baseFiles['200Bytes.txt'], { cid: 'QmWXbKV9BKJqd8x1NUw1myH987bURrn9Rna3rszYJgQwtX', - size: 200 + size: 200n }), '200Bytes.txt with raw leaves and mtime': extend({}, baseFiles['200Bytes.txt'], { cid: 'QmYfLToWgeJwrFFKideGNaS1zkmrow1a9o862sUL43NapC', - size: 200 + size: 200n }), '200Bytes.txt with raw leaves and metadata': extend({}, baseFiles['200Bytes.txt'], { cid: 'QmVfHowk2oKuWFyVwSRt8H1dQ3v272jyWSwhfQnTtWNmfw', - size: 200 + size: 200n }) - // @ts-ignore + // @ts-expect-error }, strategyOverrides[strategy]) const expected = extend({}, defaultResults) - /** - * @param {*} actualFiles - * @param {*} expectedFiles - */ - const expectFiles = (actualFiles, expectedFiles) => { + const expectFiles = (actualFiles: any, expectedFiles: any): void => { expect(actualFiles.length).to.equal(expectedFiles.length) for (let i = 0; i < expectedFiles.length; i++) { @@ -347,7 +318,7 @@ strategies.forEach((strategy) => { expect(actualFile.path).to.equal(expectedFile.path) expect(actualFile.cid.toString(base58btc)).to.equal(expectedFile.cid.toString()) - if (actualFile.unixfs) { + if (actualFile.unixfs != null) { expect(actualFile.unixfs.type).to.equal(expectedFile.type) if (actualFile.unixfs.type === 'file') { @@ -360,11 +331,9 @@ strategies.forEach((strategy) => { describe('importer: ' + strategy, function () { this.timeout(30 * 1000) - const block = blockApi() - /** @type {import('ipfs-unixfs-importer').UserImporterOptions} */ - const options = { - // @ts-ignore - strategy: strategy + const block = new MemoryBlockstore() + const options: UserImporterOptions = { + strategy } if (strategy === 'trickle') { @@ -381,7 +350,7 @@ strategies.forEach((strategy) => { content: 7 }], block, options)) throw new Error('No error was thrown') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_INVALID_CONTENT') } }) @@ -398,7 +367,7 @@ strategies.forEach((strategy) => { } }], block, options)) throw new Error('No error was thrown') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_INVALID_CONTENT') } }) @@ -511,8 +480,7 @@ strategies.forEach((strategy) => { path: '200Bytes.txt', content: asAsyncIterable(smallFile), mtime: { - secs: 10, - nsecs: 0 + secs: 10n } }], block, { ...options, @@ -530,8 +498,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(smallFile), mode: 0o123, mtime: { - secs: 10, - nsecs: 0 + secs: 10n } }], block, { ...options, @@ -627,11 +594,8 @@ strategies.forEach((strategy) => { result.forEach(eachFile) - /** - * @param {{ path?: string, cid: string, unixfs?: UnixFS }} file - */ - function eachFile (file) { - if (!file.unixfs) { + function eachFile (file: { path?: string, 
cid: string, unixfs?: UnixFS }): void { + if (file.unixfs == null) { throw new Error('file was not UnixFS') } @@ -670,7 +634,7 @@ strategies.forEach((strategy) => { await block.get(file.cid) throw new Error('No error was thrown') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) @@ -694,11 +658,7 @@ strategies.forEach((strategy) => { }) it('will import files with CID version 1', async () => { - /** - * @param {string} path - * @param {number} size - */ - const createInputFile = (path, size) => { + const createInputFile = (path: string, size: number): { path: string, content: Uint8Array } => { const name = String(Math.random() + Date.now()) path = path[path.length - 1] === '/' ? path : path + '/' return { @@ -720,8 +680,7 @@ strategies.forEach((strategy) => { createInputFile('foo/bar', 262144 + 21) ] - /** @type {import('ipfs-unixfs-importer').UserImporterOptions} */ - const options = { + const options: UserImporterOptions = { cidVersion: 1, // Ensures we use DirSharded for the data below shardSplitThresholdBytes: 3 @@ -738,7 +697,7 @@ strategies.forEach((strategy) => { for (let i = 0; i < files.length; i++) { const file = files[i] - if (file.unixfs?.isDirectory()) { + if (file.unixfs?.isDirectory() === true) { // ignore directories continue } @@ -746,13 +705,8 @@ strategies.forEach((strategy) => { const cid = file.cid.toV1() const inputFile = inputFiles.find(f => f.path === file.path) - if (!inputFile) { - throw new Error('Could not find input file with path ' + file.path) - } - - // Just check the intermediate directory can be retrieved - if (!inputFile) { - await block.get(cid) + if (inputFile == null) { + throw new Error(`Could not find input file with path ${file.path}`) } // Check the imported content is correct @@ -766,26 +720,26 @@ strategies.forEach((strategy) => { } }) - it('imports file with raw leaf nodes when specified', () => { - return checkLeafNodeTypes(block, { + it('imports file with raw leaf nodes when specified', async () => { + await checkLeafNodeTypes(block, { leafType: 'raw' }, 'raw') }) - it('imports file with file leaf nodes when specified', () => { - return checkLeafNodeTypes(block, { + it('imports file with file leaf nodes when specified', async () => { + await checkLeafNodeTypes(block, { leafType: 'file' }, 'file') }) - it('reduces file to single node when specified', () => { - return checkNodeLinks(block, { + it('reduces file to single node when specified', async () => { + await checkNodeLinks(block, { reduceSingleLeafToSelf: true }, 0) }) - it('does not reduce file to single node when overidden by options', () => { - return checkNodeLinks(block, { + it('does not reduce file to single node when overidden by options', async () => { + await checkNodeLinks(block, { reduceSingleLeafToSelf: false }, 1) }) @@ -819,7 +773,7 @@ strategies.forEach((strategy) => { for await (const file of importer([{ path: '1.2MiB.txt', content: asAsyncIterable(bigFile), - mtime: parseMtime(now) + mtime: dateToTimespec(now) }], block, options)) { const node = await exporter(file.cid, block) @@ -834,7 +788,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', - mtime: parseMtime(now) + mtime: dateToTimespec(now) }], block)) const node = await exporter(entries[0].cid, block) @@ -849,7 +803,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', - mtime: parseMtime(now), + mtime: dateToTimespec(now), mode: perms }, { path: '/foo/bar.txt', @@ -859,7 +813,7 @@ 
strategies.forEach((strategy) => { const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory').pop() - if (!node) { + if (node == null) { expect.fail('no directory found') } @@ -875,7 +829,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/bar', - mtime: parseMtime(now), + mtime: dateToTimespec(now), mode: perms }, { path: '/foo/bar/baz.txt', @@ -885,7 +839,7 @@ strategies.forEach((strategy) => { const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory').pop() - if (!node) { + if (node == null) { expect.fail('no directory found') } @@ -904,7 +858,7 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }, { path: '/foo/bar', - mtime: parseMtime(now), + mtime: dateToTimespec(now), mode: perms }, { path: '/foo/quux' @@ -916,7 +870,7 @@ strategies.forEach((strategy) => { const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory' && node.name === 'bar').pop() - if (!node) { + if (node == null) { expect.fail('no directory found') } @@ -931,7 +885,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo', - mtime: parseMtime(now) + mtime: dateToTimespec(now) }, { path: '/foo/bar.txt', content: asAsyncIterable(bigFile) @@ -947,7 +901,7 @@ strategies.forEach((strategy) => { const nodes = await all(recursive(entries[entries.length - 1].cid, block)) const node = nodes.filter(node => node.type === 'directory' && node.unixfs.type === 'hamt-sharded-directory').pop() - if (!node) { + if (node == null) { expect.fail('no hamt-sharded-directory found') } @@ -1018,7 +972,7 @@ strategies.forEach((strategy) => { const entries = await all(importer([{ path: '/foo/file1.txt', content: asAsyncIterable(bigFile), - mode: mode + mode }, { path: '/foo/bar/baz/file2.txt', content: asAsyncIterable(bigFile) @@ -1049,7 +1003,7 @@ strategies.forEach((strategy) => { it('should only add metadata to the root node of a file', async () => { this.timeout(60 * 1000) - const mtime = { secs: 5000, nsecs: 0 } + const mtime = { secs: 5000n, nsecs: 0 } const entries = await all(importer([{ path: '/foo/file1.txt', @@ -1071,7 +1025,7 @@ strategies.forEach((strategy) => { } expect(child).to.have.property('unixfs') - expect(child).to.not.have.nested.property('unixfs.mtime') + expect(child).to.have.nested.property('unixfs.mtime', undefined) }) it('should add metadata to the root node of a small file without raw leaves', async () => { @@ -1095,7 +1049,7 @@ strategies.forEach((strategy) => { }) describe('configuration', () => { - const block = blockApi() + const block = new MemoryBlockstore() it('alllows configuring with custom dag and tree builder', async () => { let builtTree = false @@ -1109,8 +1063,8 @@ describe('configuration', () => { }], block, { /** @type {import('ipfs-unixfs-importer').DAGBuilder} */ dagBuilder: async function * (source, block, opts) { // eslint-disable-line require-await - yield function () { - return Promise.resolve({ + yield async function () { + return await Promise.resolve({ cid, path: 'path', unixfs, @@ -1175,7 +1129,7 @@ describe('configuration', () => { rawLeaves: false })) - if (!result) { + if (result == null) { throw new Error('Nothing imported') } @@ -1188,7 +1142,7 @@ describe('configuration', () => { rawLeaves: false })) - if (!result2) { + if (result2 == null) { throw new Error('Nothing 
imported') } diff --git a/packages/ipfs-unixfs-exporter/tsconfig.json b/packages/ipfs-unixfs-exporter/tsconfig.json index 7665b90f..0f207e1c 100644 --- a/packages/ipfs-unixfs-exporter/tsconfig.json +++ b/packages/ipfs-unixfs-exporter/tsconfig.json @@ -1,8 +1,7 @@ { "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist", - "emitDeclarationOnly": true + "outDir": "dist" }, "include": [ "src", diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index e5c23bbd..b94f9fee 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -20,22 +20,6 @@ }, "type": "module", "types": "./dist/src/index.d.ts", - "typesVersions": { - "*": { - "*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ], - "src/*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ] - } - }, "files": [ "src", "dist", @@ -45,7 +29,7 @@ "exports": { ".": { "types": "./dist/src/index.d.ts", - "import": "./src/index.js" + "import": "./dist/src/index.js" } }, "eslintConfig": { @@ -171,6 +155,7 @@ "aegir": "^38.1.2", "blockstore-core": "^3.0.0", "it-buffer-stream": "^3.0.0", + "it-drain": "^2.0.0", "it-last": "^2.0.0", "wherearewe": "^2.0.1" }, diff --git a/packages/ipfs-unixfs-importer/src/chunker/fixed-size.js b/packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts similarity index 77% rename from packages/ipfs-unixfs-importer/src/chunker/fixed-size.js rename to packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts index 874e896f..a1782301 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/fixed-size.js +++ b/packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts @@ -1,9 +1,7 @@ import { Uint8ArrayList } from 'uint8arraylist' +import type { Chunker } from '../index.js' -/** - * @type {import('../types').Chunker} - */ -async function * fixedSizeChunker (source, options) { +export const fixedSize: Chunker = async function * (source, options) { let list = new Uint8ArrayList() let currentLength = 0 let emitted = false @@ -33,10 +31,8 @@ async function * fixedSizeChunker (source, options) { } } - if (!emitted || currentLength) { - // return any remaining bytes or an empty buffer + if (!emitted || currentLength > 0) { + // return any remaining bytes yield list.subarray(0, currentLength) } } - -export default fixedSizeChunker diff --git a/packages/ipfs-unixfs-importer/src/chunker/rabin.js b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts similarity index 65% rename from packages/ipfs-unixfs-importer/src/chunker/rabin.js rename to packages/ipfs-unixfs-importer/src/chunker/rabin.ts index 9395466c..fae963e2 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/rabin.js +++ b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts @@ -1,28 +1,25 @@ import { Uint8ArrayList } from 'uint8arraylist' -// @ts-ignore +// @ts-expect-error import { create } from 'rabin-wasm' import errcode from 'err-code' +import type { Chunker } from '../index.js' -/** - * @typedef {object} RabinOptions - * @property {number} min - * @property {number} max - * @property {number} bits - * @property {number} window - * @property {number} polynomial - */ +export interface RabinOptions { + min: number + max: number + bits: number + window: number + polynomial: number +} -/** - * @type {import('../types').Chunker} - */ -async function * rabinChunker (source, options) { +export const rabin: Chunker = async function * (source, options) { let min, max, avg - if (options.minChunkSize && options.maxChunkSize && 
options.avgChunkSize) { + if (options.minChunkSize > 0 && options.maxChunkSize > 0 && options.avgChunkSize > 0) { avg = options.avgChunkSize min = options.minChunkSize max = options.maxChunkSize - } else if (!options.avgChunkSize) { + } else if (options.avgChunkSize == null) { throw errcode(new Error('please specify an average chunk size'), 'ERR_INVALID_AVG_CHUNK_SIZE') } else { avg = options.avgChunkSize @@ -45,9 +42,9 @@ async function * rabinChunker (source, options) { const sizepow = Math.floor(Math.log2(avg)) - for await (const chunk of rabin(source, { - min: min, - max: max, + for await (const chunk of rabinChunker(source, { + min, + max, bits: sizepow, window: options.window, polynomial: options.polynomial @@ -56,13 +53,7 @@ async function * rabinChunker (source, options) { } } -export default rabinChunker - -/** - * @param {AsyncIterable} source - * @param {RabinOptions} options - */ -async function * rabin (source, options) { +async function * rabinChunker (source: AsyncIterable, options: RabinOptions): AsyncGenerator { const r = await create(options.bits, options.min, options.max, options.window) const buffers = new Uint8ArrayList() @@ -80,7 +71,7 @@ async function * rabin (source, options) { } } - if (buffers.length) { + if (buffers.length > 0) { yield buffers.subarray(0) } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts similarity index 57% rename from packages/ipfs-unixfs-importer/src/dag-builder/dir.js rename to packages/ipfs-unixfs-importer/src/dag-builder/dir.ts index 6ca92097..b63c8755 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts @@ -1,15 +1,9 @@ import { UnixFS } from 'ipfs-unixfs' -import persist from '../utils/persist.js' +import { persist } from '../utils/persist.js' import { encode, prepare } from '@ipld/dag-pb' +import type { Directory, UnixFSV1DagBuilder } from '../index.js' -/** - * @typedef {import('../types').Directory} Directory - */ - -/** - * @type {import('../types').UnixFSV1DagBuilder} - */ -const dirBuilder = async (item, blockstore, options) => { +export const dirBuilder: UnixFSV1DagBuilder = async (item, blockstore, options) => { const unixfs = new UnixFS({ type: 'directory', mtime: item.mtime, @@ -24,8 +18,6 @@ const dirBuilder = async (item, blockstore, options) => { cid, path, unixfs, - size: buffer.length + size: BigInt(buffer.length) } } - -export default dirBuilder diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.js deleted file mode 100644 index 5f646715..00000000 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.js +++ /dev/null @@ -1,31 +0,0 @@ -import batch from 'it-batch' - -/** - * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder - */ - -/** - * @type {FileDAGBuilder} - */ -function balanced (source, reduce, options) { - return reduceToParents(source, reduce, options) -} - -/** - * @type {FileDAGBuilder} - */ -async function reduceToParents (source, reduce, options) { - const roots = [] - - for await (const chunked of batch(source, options.maxChildrenPerNode)) { - roots.push(await reduce(chunked)) - } - - if (roots.length > 1) { - return reduceToParents(roots, reduce, options) - } - - return roots[0] -} - -export default balanced diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts new 
file mode 100644 index 00000000..7a92cf48 --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts @@ -0,0 +1,20 @@ +import batch from 'it-batch' +import type { FileDAGBuilder } from '../../index.js' + +export const balanced: FileDAGBuilder = async (source, reduce, options) => { + return await reduceToParents(source, reduce, options) +} + +const reduceToParents: FileDAGBuilder = async (source, reduce, options) => { + const roots = [] + + for await (const chunked of batch(source, options.maxChildrenPerNode)) { + roots.push(await reduce(chunked)) + } + + if (roots.length > 1) { + return await reduceToParents(roots, reduce, options) + } + + return roots[0] +} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts similarity index 72% rename from packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js rename to packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts index 44f64a47..87472306 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts @@ -1,23 +1,16 @@ import { UnixFS } from 'ipfs-unixfs' -import persist from '../../utils/persist.js' +import { persist, PersistOptions } from '../../utils/persist.js' import * as dagPb from '@ipld/dag-pb' import * as raw from 'multiformats/codecs/raw' +import type { BufferImporter } from '../../index.js' -/** - * @typedef {import('../../types').BufferImporter} BufferImporter - */ - -/** - * @type {BufferImporter} - */ -async function * bufferImporter (file, block, options) { +export const bufferImporter: BufferImporter = async function * (file, block, options) { for await (let buffer of file.content) { yield async () => { options.progress(buffer.length, file.path) let unixfs - /** @type {import('../../types').PersistOptions} */ - const opts = { + const opts: PersistOptions = { codec: dagPb, cidVersion: options.cidVersion, hasher: options.hasher, @@ -42,7 +35,7 @@ async function * bufferImporter (file, block, options) { return { cid: await persist(buffer, block, opts), unixfs, - size: buffer.length + size: BigInt(buffer.length) } } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.js deleted file mode 100644 index b7072218..00000000 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.js +++ /dev/null @@ -1,10 +0,0 @@ -import all from 'it-all' - -/** - * @type {import('../../types').FileDAGBuilder} - */ -async function flat (source, reduce) { - return reduce(await all(source)) -} - -export default flat diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts new file mode 100644 index 00000000..b64c681e --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts @@ -0,0 +1,6 @@ +import all from 'it-all' +import type { FileDAGBuilder } from '../../index.js' + +export const flat: FileDAGBuilder = async function (source, reduce) { + return await reduce(await all(source)) +} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.ts similarity index 59% rename from packages/ipfs-unixfs-importer/src/dag-builder/file/index.js rename to packages/ipfs-unixfs-importer/src/dag-builder/file/index.ts index 100a9500..8a52bda0 100644 --- 
a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.ts @@ -1,57 +1,42 @@ import errCode from 'err-code' import { UnixFS } from 'ipfs-unixfs' -import persist from '../../utils/persist.js' +import { persist } from '../../utils/persist.js' import { encode, prepare } from '@ipld/dag-pb' import parallelBatch from 'it-parallel-batch' import * as rawCodec from 'multiformats/codecs/raw' import * as dagPb from '@ipld/dag-pb' -import dagFlat from './flat.js' -import dagBalanced from './balanced.js' -import dagTrickle from './trickle.js' -import bufferImporterFn from './buffer-importer.js' - -/** - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('../../types').File} File - * @typedef {import('../../types').ImporterOptions} ImporterOptions - * @typedef {import('../../types').Reducer} Reducer - * @typedef {import('../../types').DAGBuilder} DAGBuilder - * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder - */ - -/** - * @type {{ [key: string]: FileDAGBuilder}} - */ -const dagBuilders = { - flat: dagFlat, - balanced: dagBalanced, - trickle: dagTrickle +import { flat } from './flat.js' +import { balanced } from './balanced.js' +import { trickle } from './trickle.js' +import { bufferImporter } from './buffer-importer.js' +import type { File, FileDAGBuilder, ImporterOptions, InProgressImportResult, Reducer, UnixFSV1DagBuilder } from '../../index.js' +import type { Blockstore } from 'interface-blockstore' + +const dagBuilders: Record = { + flat, + balanced, + trickle } -/** - * @param {File} file - * @param {Blockstore} blockstore - * @param {ImporterOptions} options - */ -async function * buildFileBatch (file, blockstore, options) { +async function * buildFileBatch (file: File, blockstore: Blockstore, options: ImporterOptions): AsyncGenerator { let count = -1 let previous - let bufferImporter + let importer if (typeof options.bufferImporter === 'function') { - bufferImporter = options.bufferImporter + importer = options.bufferImporter } else { - bufferImporter = bufferImporterFn + importer = bufferImporter } - for await (const entry of parallelBatch(bufferImporter(file, blockstore, options), options.blockWriteConcurrency)) { + for await (const entry of parallelBatch(importer(file, blockstore, options), options.blockWriteConcurrency)) { count++ if (count === 0) { previous = entry continue - } else if (count === 1 && previous) { + } else if (count === 1 && (previous != null)) { yield previous previous = null } @@ -59,23 +44,15 @@ async function * buildFileBatch (file, blockstore, options) { yield entry } - if (previous) { + if (previous != null) { previous.single = true yield previous } } -/** - * @param {File} file - * @param {Blockstore} blockstore - * @param {ImporterOptions} options - */ -const reduce = (file, blockstore, options) => { - /** - * @type {Reducer} - */ - async function reducer (leaves) { - if (leaves.length === 1 && leaves[0].single && options.reduceSingleLeafToSelf) { +const reduce = (file: File, blockstore: Blockstore, options: ImporterOptions): Reducer => { + const reducer: Reducer = async function (leaves) { + if (leaves.length === 1 && leaves[0]?.single === true && options.reduceSingleLeafToSelf) { const leaf = leaves[0] if (file.mtime !== undefined || file.mode !== undefined) { @@ -117,7 +94,7 @@ const reduce = (file, blockstore, options) => { hasher: options.hasher, cidVersion: options.cidVersion }) - leaf.size = buffer.length + leaf.size = BigInt(buffer.length) 
} return { @@ -135,17 +112,17 @@ const reduce = (file, blockstore, options) => { mode: file.mode }) - const links = leaves + const links: dagPb.PBLink[] = leaves .filter(leaf => { - if (leaf.cid.code === rawCodec.code && leaf.size) { + if (leaf.cid.code === rawCodec.code && leaf.size > 0) { return true } - if (leaf.unixfs && !leaf.unixfs.data && leaf.unixfs.fileSize()) { + if ((leaf.unixfs != null) && (leaf.unixfs.data == null) && leaf.unixfs.fileSize() > 0n) { return true } - return Boolean(leaf.unixfs && leaf.unixfs.data && leaf.unixfs.data.length) + return Boolean(leaf.unixfs?.data?.length) }) .map((leaf) => { if (leaf.cid.code === rawCodec.code) { @@ -154,22 +131,22 @@ const reduce = (file, blockstore, options) => { return { Name: '', - Tsize: leaf.size, + Tsize: Number(leaf.size), Hash: leaf.cid } } - if (!leaf.unixfs || !leaf.unixfs.data) { + if ((leaf.unixfs == null) || (leaf.unixfs.data == null)) { // node is an intermediate node - f.addBlockSize((leaf.unixfs && leaf.unixfs.fileSize()) || 0) + f.addBlockSize(leaf.unixfs?.fileSize() ?? 0n) } else { // node is a unixfs 'file' leaf node - f.addBlockSize(leaf.unixfs.data.length) + f.addBlockSize(BigInt(leaf.unixfs.data.length)) } return { Name: '', - Tsize: leaf.size, + Tsize: Number(leaf.size), Hash: leaf.cid } }) @@ -185,24 +162,19 @@ const reduce = (file, blockstore, options) => { cid, path: file.path, unixfs: f, - size: buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0) + size: BigInt(buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)) } } return reducer } -/** - * @type {import('../../types').UnixFSV1DagBuilder} - */ -function fileBuilder (file, block, options) { +export const fileBuilder: UnixFSV1DagBuilder = async (file, block, options) => { const dagBuilder = dagBuilders[options.strategy] - if (!dagBuilder) { + if (dagBuilder == null) { throw errCode(new Error(`Unknown importer build strategy name: ${options.strategy}`), 'ERR_BAD_STRATEGY') } - return dagBuilder(buildFileBatch(file, block, options), reduce(file, block, options), options) + return await dagBuilder(buildFileBatch(file, block, options), reduce(file, block, options), options) } - -export default fileBuilder diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js deleted file mode 100644 index 677cd6d1..00000000 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.js +++ /dev/null @@ -1,199 +0,0 @@ -import batch from 'it-batch' - -/** - * @typedef {import('ipfs-unixfs').UnixFS} UnixFS - * @typedef {import('../../types').ImporterOptions} ImporterOptions - * @typedef {import('../../types').InProgressImportResult} InProgressImportResult - * @typedef {import('../../types').TrickleDagNode} TrickleDagNode - * @typedef {import('../../types').Reducer} Reducer - * @typedef {import('../../types').FileDAGBuilder} FileDAGBuilder - */ - -/** - * @type {FileDAGBuilder} - */ -async function trickleStream (source, reduce, options) { - const root = new Root(options.layerRepeat) - let iteration = 0 - let maxDepth = 1 - - /** @type {SubTree} */ - let subTree = root - - for await (const layer of batch(source, options.maxChildrenPerNode)) { - if (subTree.isFull()) { - if (subTree !== root) { - root.addChild(await subTree.reduce(reduce)) - } - - if (iteration && iteration % options.layerRepeat === 0) { - maxDepth++ - } - - subTree = new SubTree(maxDepth, options.layerRepeat, iteration) - - iteration++ - } - - subTree.append(layer) - } - - if (subTree 
&& subTree !== root) { - root.addChild(await subTree.reduce(reduce)) - } - - return root.reduce(reduce) -} - -export default trickleStream - -class SubTree { - /** - * @param {number} maxDepth - * @param {number} layerRepeat - * @param {number} [iteration=0] - */ - constructor (maxDepth, layerRepeat, iteration = 0) { - this.maxDepth = maxDepth - this.layerRepeat = layerRepeat - this.currentDepth = 1 - this.iteration = iteration - - /** @type {TrickleDagNode} */ - this.root = this.node = this.parent = { - children: [], - depth: this.currentDepth, - maxDepth, - maxChildren: (this.maxDepth - this.currentDepth) * this.layerRepeat - } - } - - isFull () { - if (!this.root.data) { - return false - } - - if (this.currentDepth < this.maxDepth && this.node.maxChildren) { - // can descend - this._addNextNodeToParent(this.node) - - return false - } - - // try to find new node from node.parent - const distantRelative = this._findParent(this.node, this.currentDepth) - - if (distantRelative) { - this._addNextNodeToParent(distantRelative) - - return false - } - - return true - } - - /** - * @param {TrickleDagNode} parent - */ - _addNextNodeToParent (parent) { - this.parent = parent - - // find site for new node - const nextNode = { - children: [], - depth: parent.depth + 1, - parent, - maxDepth: this.maxDepth, - maxChildren: Math.floor(parent.children.length / this.layerRepeat) * this.layerRepeat - } - - // @ts-ignore - parent.children.push(nextNode) - - this.currentDepth = nextNode.depth - this.node = nextNode - } - - /** - * - * @param {InProgressImportResult[]} layer - */ - append (layer) { - this.node.data = layer - } - - /** - * @param {Reducer} reduce - */ - reduce (reduce) { - return this._reduce(this.root, reduce) - } - - /** - * @param {TrickleDagNode} node - * @param {Reducer} reduce - * @returns {Promise} - */ - async _reduce (node, reduce) { - /** @type {InProgressImportResult[]} */ - let children = [] - - if (node.children.length) { - children = await Promise.all( - node.children - // @ts-ignore - .filter(child => child.data) - // @ts-ignore - .map(child => this._reduce(child, reduce)) - ) - } - - return reduce((node.data || []).concat(children)) - } - - /** - * @param {TrickleDagNode} node - * @param {number} depth - * @returns {TrickleDagNode | undefined} - */ - _findParent (node, depth) { - const parent = node.parent - - if (!parent || parent.depth === 0) { - return - } - - if (parent.children.length === parent.maxChildren || !parent.maxChildren) { - // this layer is full, may be able to traverse to a different branch - return this._findParent(parent, depth) - } - - return parent - } -} - -class Root extends SubTree { - /** - * @param {number} layerRepeat - */ - constructor (layerRepeat) { - super(0, layerRepeat) - - this.root.depth = 0 - this.currentDepth = 1 - } - - /** - * @param {InProgressImportResult} child - */ - addChild (child) { - this.root.children.push(child) - } - - /** - * @param {Reducer} reduce - */ - reduce (reduce) { - return reduce((this.root.data || []).concat(this.root.children)) - } -} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts new file mode 100644 index 00000000..215c37d8 --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts @@ -0,0 +1,170 @@ +import type { UnixFS } from 'ipfs-unixfs' +import batch from 'it-batch' +import type { CID } from 'multiformats/cid' +import type { FileDAGBuilder, InProgressImportResult, Reducer } from '../../index.js' + 
+export interface TrickleDagNode { + children: InProgressImportResult[] + depth: number + maxDepth: number + maxChildren: number + data?: InProgressImportResult[] + parent?: TrickleDagNode + cid?: CID + size?: number + unixfs?: UnixFS +} + +export const trickle: FileDAGBuilder = async function (source, reduce, options) { + const root = new Root(options.layerRepeat) + let iteration = 0 + let maxDepth = 1 + let subTree: SubTree = root + + for await (const layer of batch(source, options.maxChildrenPerNode)) { + if (subTree.isFull()) { + if (subTree !== root) { + root.addChild(await subTree.reduce(reduce)) + } + + if (iteration > 0 && iteration % options.layerRepeat === 0) { + maxDepth++ + } + + subTree = new SubTree(maxDepth, options.layerRepeat, iteration) + + iteration++ + } + + subTree.append(layer) + } + + if (subTree != null && subTree !== root) { + root.addChild(await subTree.reduce(reduce)) + } + + return await root.reduce(reduce) +} + +class SubTree { + public root: TrickleDagNode + public node: TrickleDagNode + public parent: TrickleDagNode + public maxDepth: number + public layerRepeat: number + public currentDepth: number + public iteration: number + + constructor (maxDepth: number, layerRepeat: number, iteration: number = 0) { + this.maxDepth = maxDepth + this.layerRepeat = layerRepeat + this.currentDepth = 1 + this.iteration = iteration + + this.root = this.node = this.parent = { + children: [], + depth: this.currentDepth, + maxDepth, + maxChildren: (this.maxDepth - this.currentDepth) * this.layerRepeat + } + } + + isFull (): boolean { + if (this.root.data == null) { + return false + } + + if (this.currentDepth < this.maxDepth && this.node.maxChildren > 0) { + // can descend + this._addNextNodeToParent(this.node) + + return false + } + + // try to find new node from node.parent + const distantRelative = this._findParent(this.node, this.currentDepth) + + if (distantRelative != null) { + this._addNextNodeToParent(distantRelative) + + return false + } + + return true + } + + _addNextNodeToParent (parent: TrickleDagNode): void { + this.parent = parent + + // find site for new node + const nextNode = { + children: [], + depth: parent.depth + 1, + parent, + maxDepth: this.maxDepth, + maxChildren: Math.floor(parent.children.length / this.layerRepeat) * this.layerRepeat + } + + // @ts-expect-error + parent.children.push(nextNode) + + this.currentDepth = nextNode.depth + this.node = nextNode + } + + append (layer: InProgressImportResult[]): void { + this.node.data = layer + } + + async reduce (reduce: Reducer): Promise { + return await this._reduce(this.root, reduce) + } + + async _reduce (node: TrickleDagNode, reduce: Reducer): Promise { + let children: InProgressImportResult[] = [] + + if (node.children.length > 0) { + children = await Promise.all( + node.children + // @ts-expect-error + .filter(child => child.data) + // @ts-expect-error + .map(async child => await this._reduce(child, reduce)) + ) + } + + return await reduce((node.data ?? 
[]).concat(children)) + } + + _findParent (node: TrickleDagNode, depth: number): TrickleDagNode | undefined { + const parent = node.parent + + if (parent == null || parent.depth === 0) { + return + } + + if (parent.children.length === parent.maxChildren || parent.maxChildren === 0) { + // this layer is full, may be able to traverse to a different branch + return this._findParent(parent, depth) + } + + return parent + } +} + +class Root extends SubTree { + constructor (layerRepeat: number) { + super(0, layerRepeat) + + this.root.depth = 0 + this.currentDepth = 1 + } + + addChild (child: InProgressImportResult): void { + this.root.children.push(child) + } + + async reduce (reduce: Reducer): Promise { + return await reduce((this.root.data ?? []).concat(this.root.children)) + } +} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts similarity index 51% rename from packages/ipfs-unixfs-importer/src/dag-builder/index.js rename to packages/ipfs-unixfs-importer/src/dag-builder/index.ts index 145c35d5..389c5a28 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/index.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts @@ -1,39 +1,20 @@ -import dirBuilder from './dir.js' -import fileBuilder from './file/index.js' +import { dirBuilder } from './dir.js' +import { fileBuilder } from './file/index.js' import errCode from 'err-code' -import rabin from '../chunker/rabin.js' -import fixedSize from '../chunker/fixed-size.js' -import validateChunks from './validate-chunks.js' +import { rabin } from '../chunker/rabin.js' +import { fixedSize } from '../chunker/fixed-size.js' +import { validateChunks } from './validate-chunks.js' +import type { Chunker, ChunkValidator, DAGBuilder, Directory, File } from '../index.js' -/** - * @typedef {import('../types').File} File - * @typedef {import('../types').Directory} Directory - * @typedef {import('../types').DAGBuilder} DAGBuilder - * @typedef {import('../types').Chunker} Chunker - * @typedef {import('../types').ChunkValidator} ChunkValidator - */ - -/** - * @param {any} thing - * @returns {thing is Iterable} - */ -function isIterable (thing) { +function isIterable (thing: any): thing is Iterable { return Symbol.iterator in thing } -/** - * @param {any} thing - * @returns {thing is AsyncIterable} - */ -function isAsyncIterable (thing) { +function isAsyncIterable (thing: any): thing is AsyncIterable { return Symbol.asyncIterator in thing } -/** - * @param {Uint8Array | AsyncIterable | Iterable} content - * @returns {AsyncIterable} - */ -function contentAsAsyncIterable (content) { +function contentAsAsyncIterable (content: Uint8Array | AsyncIterable | Iterable): AsyncIterable { try { if (content instanceof Uint8Array) { return (async function * () { @@ -53,27 +34,21 @@ function contentAsAsyncIterable (content) { throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') } -/** - * @type {DAGBuilder} - */ -async function * dagBuilder (source, blockstore, options) { +export const dagBuilder: DAGBuilder = async function * (source, blockstore, options) { for await (const entry of source) { - if (entry.path) { + if (entry.path != null) { if (entry.path.substring(0, 2) === './') { options.wrapWithDirectory = true } entry.path = entry.path .split('/') - .filter(path => path && path !== '.') + .filter(path => path != null && path !== '.') .join('/') } - if (entry.content) { - /** - * @type {Chunker} - */ - let chunker + if (entry.content != null) { + let chunker: Chunker if 
(typeof options.chunker === 'function') { chunker = options.chunker @@ -83,10 +58,7 @@ async function * dagBuilder (source, blockstore, options) { chunker = fixedSize } - /** - * @type {ChunkValidator} - */ - let chunkValidator + let chunkValidator: ChunkValidator if (typeof options.chunkValidator === 'function') { chunkValidator = options.chunkValidator @@ -94,28 +66,24 @@ async function * dagBuilder (source, blockstore, options) { chunkValidator = validateChunks } - /** @type {File} */ - const file = { + const file: File = { path: entry.path, mtime: entry.mtime, mode: entry.mode, content: chunker(chunkValidator(contentAsAsyncIterable(entry.content), options), options) } - yield () => fileBuilder(file, blockstore, options) - } else if (entry.path) { - /** @type {Directory} */ - const dir = { + yield async () => await fileBuilder(file, blockstore, options) + } else if (entry.path != null) { + const dir: Directory = { path: entry.path, mtime: entry.mtime, mode: entry.mode } - yield () => dirBuilder(dir, blockstore, options) + yield async () => await dirBuilder(dir, blockstore, options) } else { throw new Error('Import candidate must have content or path or both') } } } - -export default dagBuilder diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.js b/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts similarity index 78% rename from packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.js rename to packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts index ec5a6f13..bb0b6f8e 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.js +++ b/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts @@ -1,14 +1,8 @@ import errCode from 'err-code' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import type { ChunkValidator } from '../index.js' -/** - * @typedef {import('../types').ChunkValidator} ChunkValidator - */ - -/** - * @type {ChunkValidator} - */ -async function * validateChunks (source) { +export const validateChunks: ChunkValidator = async function * (source) { for await (const content of source) { if (content.length === undefined) { throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') @@ -25,5 +19,3 @@ async function * validateChunks (source) { } } } - -export default validateChunks diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.js b/packages/ipfs-unixfs-importer/src/dir-flat.ts similarity index 56% rename from packages/ipfs-unixfs-importer/src/dir-flat.js rename to packages/ipfs-unixfs-importer/src/dir-flat.ts index 4aeb069f..35aa03b5 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.js +++ b/packages/ipfs-unixfs-importer/src/dir-flat.ts @@ -1,35 +1,20 @@ -import { encode, prepare } from '@ipld/dag-pb' +import { encode, PBNode, prepare } from '@ipld/dag-pb' +import type { Blockstore } from 'interface-blockstore' import { UnixFS } from 'ipfs-unixfs' -import { Dir, CID_V0, CID_V1 } from './dir.js' -import persist from './utils/persist.js' - -/** - * @typedef {import('./types').ImporterOptions} ImporterOptions - * @typedef {import('./types').ImportResult} ImportResult - * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('./dir').DirProps} DirProps - * @typedef {import('@ipld/dag-pb').PBNode} PBNode - * @typedef {import('@ipld/dag-pb').PBLink} PBLink - */ - -class DirFlat extends Dir { - /** - * @param {DirProps} props - * 
@param {ImporterOptions} options - */ - constructor (props, options) { +import { Dir, CID_V0, CID_V1, DirProps } from './dir.js' +import type { ImporterOptions, ImportResult, InProgressImportResult } from './index.js' +import { persist } from './utils/persist.js' + +export class DirFlat extends Dir { + private readonly _children: Map + + constructor (props: DirProps, options: ImporterOptions) { super(props, options) - /** @type {Map} */ this._children = new Map() } - /** - * @param {string} name - * @param {InProgressImportResult | Dir} value - */ - async put (name, value) { + async put (name: string, value: InProgressImportResult | Dir): Promise { this.cid = undefined this.size = undefined this.nodeSize = undefined @@ -37,26 +22,23 @@ class DirFlat extends Dir { this._children.set(name, value) } - /** - * @param {string} name - */ - get (name) { - return Promise.resolve(this._children.get(name)) + async get (name: string): Promise { + return await Promise.resolve(this._children.get(name)) } - childCount () { + childCount (): number { return this._children.size } - directChildrenCount () { + directChildrenCount (): number { return this.childCount() } - onlyChild () { + onlyChild (): InProgressImportResult | Dir { return this._children.values().next().value } - async * eachChildSeries () { + async * eachChildSeries (): AsyncGenerator<{ key: string, child: InProgressImportResult | Dir }, void, undefined> { for (const [key, child] of this._children.entries()) { yield { key, @@ -65,7 +47,7 @@ class DirFlat extends Dir { } } - estimateNodeSize () { + estimateNodeSize (): number { if (this.nodeSize !== undefined) { return this.nodeSize } @@ -75,7 +57,7 @@ class DirFlat extends Dir { // estimate size only based on DAGLink name and CID byte lengths // https://github.com/ipfs/go-unixfsnode/blob/37b47f1f917f1b2f54c207682f38886e49896ef9/data/builder/directory.go#L81-L96 for (const [name, child] of this._children.entries()) { - if (child.size != null && child.cid) { + if (child.size != null && (child.cid != null)) { this.nodeSize += name.length + (this.options.cidVersion === 1 ? 
CID_V1.bytes.byteLength : CID_V0.bytes.byteLength) } } @@ -83,11 +65,7 @@ class DirFlat extends Dir { return this.nodeSize } - /** - * @param {Blockstore} block - * @returns {AsyncIterable} - */ - async * flush (block) { + async * flush (block: Blockstore): AsyncGenerator { const links = [] for (let [name, child] of this._children.entries()) { @@ -99,10 +77,10 @@ class DirFlat extends Dir { } } - if (child.size != null && child.cid) { + if (child.size != null && (child.cid != null)) { links.push({ Name: name, - Tsize: child.size, + Tsize: Number(child.size), Hash: child.cid }) } @@ -114,8 +92,7 @@ class DirFlat extends Dir { mode: this.mode }) - /** @type {PBNode} */ - const node = { Data: unixfs.marshal(), Links: links } + const node: PBNode = { Data: unixfs.marshal(), Links: links } const buffer = encode(prepare(node)) const cid = await persist(buffer, block, this.options) const size = buffer.length + node.Links.reduce( @@ -133,9 +110,7 @@ class DirFlat extends Dir { cid, unixfs, path: this.path, - size + size: BigInt(size) } } } - -export default DirFlat diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.js b/packages/ipfs-unixfs-importer/src/dir-sharded.ts similarity index 62% rename from packages/ipfs-unixfs-importer/src/dir-sharded.js rename to packages/ipfs-unixfs-importer/src/dir-sharded.ts index 2cde017b..23794972 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.js +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.ts @@ -1,40 +1,24 @@ -import { encode, prepare } from '@ipld/dag-pb' +import { encode, PBLink, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' -import { Dir, CID_V0, CID_V1 } from './dir.js' -import persist from './utils/persist.js' -import { createHAMT, Bucket } from 'hamt-sharding' - -/** - * @typedef {import('./types').ImporterOptions} ImporterOptions - * @typedef {import('./types').ImportResult} ImportResult - * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('interface-blockstore').Blockstore} Blockstore - */ - -/** - * @typedef {import('./dir').DirProps} DirProps - */ +import { Dir, CID_V0, CID_V1, DirProps } from './dir.js' +import { persist } from './utils/persist.js' +import { createHAMT, Bucket, BucketChild } from 'hamt-sharding' +import type { ImporterOptions, ImportResult, InProgressImportResult } from './index.js' +import type { Blockstore } from 'interface-blockstore' class DirSharded extends Dir { - /** - * @param {DirProps} props - * @param {ImporterOptions} options - */ - constructor (props, options) { + private readonly _bucket: Bucket + + constructor (props: DirProps, options: ImporterOptions) { super(props, options) - /** @type {Bucket} */ this._bucket = createHAMT({ hashFn: options.hamtHashFn, bits: options.hamtBucketBits }) } - /** - * @param {string} name - * @param {InProgressImportResult | Dir} value - */ - async put (name, value) { + async put (name: string, value: InProgressImportResult | Dir): Promise { this.cid = undefined this.size = undefined this.nodeSize = undefined @@ -42,26 +26,23 @@ class DirSharded extends Dir { await this._bucket.put(name, value) } - /** - * @param {string} name - */ - get (name) { - return this._bucket.get(name) + async get (name: string): Promise { + return await this._bucket.get(name) } - childCount () { + childCount (): number { return this._bucket.leafCount() } - directChildrenCount () { + directChildrenCount (): number { return this._bucket.childrenCount() } - onlyChild () { + onlyChild (): Bucket | BucketChild { return 
this._bucket.onlyChild() } - async * eachChildSeries () { + async * eachChildSeries (): AsyncGenerator<{ key: string, child: InProgressImportResult | Dir }> { for await (const { key, value } of this._bucket.eachLeafSeries()) { yield { key, @@ -70,7 +51,7 @@ class DirSharded extends Dir { } } - estimateNodeSize () { + estimateNodeSize (): number { if (this.nodeSize !== undefined) { return this.nodeSize } @@ -80,11 +61,7 @@ class DirSharded extends Dir { return this.nodeSize } - /** - * @param {Blockstore} blockstore - * @returns {AsyncIterable} - */ - async * flush (blockstore) { + async * flush (blockstore: Blockstore): AsyncGenerator { for await (const entry of flush(this._bucket, blockstore, this, this.options)) { yield { ...entry, @@ -96,22 +73,15 @@ class DirSharded extends Dir { export default DirSharded -/** - * @param {Bucket} bucket - * @param {Blockstore} blockstore - * @param {DirSharded | null} shardRoot - * @param {ImporterOptions} options - * @returns {AsyncIterable} - */ -async function * flush (bucket, blockstore, shardRoot, options) { +async function * flush (bucket: Bucket, blockstore: Blockstore, shardRoot: DirSharded | null, options: ImporterOptions): AsyncIterable { const children = bucket._children - const links = [] - let childrenSize = 0 + const links: PBLink[] = [] + let childrenSize = 0n for (let i = 0; i < children.length; i++) { const child = children.get(i) - if (!child) { + if (child == null) { continue } @@ -120,23 +90,23 @@ async function * flush (bucket, blockstore, shardRoot, options) { if (child instanceof Bucket) { let shard - for await (const subShard of await flush(child, blockstore, null, options)) { + for await (const subShard of flush(child, blockstore, null, options)) { shard = subShard } - if (!shard) { + if (shard == null) { throw new Error('Could not flush sharded directory, no subshard found') } links.push({ Name: labelPrefix, - Tsize: shard.size, + Tsize: Number(shard.size), Hash: shard.cid }) childrenSize += shard.size - } else if (typeof child.value.flush === 'function') { + } else if (isDir(child.value)) { const dir = child.value - let flushedDir + let flushedDir: ImportResult | undefined for await (const entry of dir.flush(blockstore)) { flushedDir = entry @@ -144,10 +114,14 @@ async function * flush (bucket, blockstore, shardRoot, options) { yield flushedDir } + if (flushedDir == null) { + throw new Error('Did not flush dir') + } + const label = labelPrefix + child.key links.push({ Name: label, - Tsize: flushedDir.size, + Tsize: Number(flushedDir.size), Hash: flushedDir.cid }) @@ -155,7 +129,7 @@ async function * flush (bucket, blockstore, shardRoot, options) { } else { const value = child.value - if (!value.cid) { + if (value.cid == null) { continue } @@ -164,10 +138,10 @@ async function * flush (bucket, blockstore, shardRoot, options) { links.push({ Name: label, - Tsize: size, + Tsize: Number(size), Hash: value.cid }) - childrenSize += size + childrenSize += BigInt(size ?? 
0) } } @@ -177,10 +151,10 @@ async function * flush (bucket, blockstore, shardRoot, options) { const dir = new UnixFS({ type: 'hamt-sharded-directory', data, - fanout: bucket.tableSize(), + fanout: BigInt(bucket.tableSize()), hashType: options.hamtHashCode, - mtime: shardRoot && shardRoot.mtime, - mode: shardRoot && shardRoot.mode + mtime: shardRoot?.mtime, + mode: shardRoot?.mode }) const node = { @@ -189,7 +163,7 @@ async function * flush (bucket, blockstore, shardRoot, options) { } const buffer = encode(prepare(node)) const cid = await persist(buffer, blockstore, options) - const size = buffer.length + childrenSize + const size = BigInt(buffer.byteLength) + childrenSize yield { cid, @@ -198,19 +172,18 @@ async function * flush (bucket, blockstore, shardRoot, options) { } } -/** - * @param {Bucket} bucket - * @param {DirSharded | null} shardRoot - * @param {ImporterOptions} options - */ -function calculateSize (bucket, shardRoot, options) { +function isDir (obj: any): obj is Dir { + return typeof obj.flush === 'function' +} + +function calculateSize (bucket: Bucket, shardRoot: DirSharded | null, options: ImporterOptions): number { const children = bucket._children - const links = [] + const links: PBLink[] = [] for (let i = 0; i < children.length; i++) { const child = children.get(i) - if (!child) { + if (child == null) { continue } @@ -221,7 +194,7 @@ function calculateSize (bucket, shardRoot, options) { links.push({ Name: labelPrefix, - Tsize: size, + Tsize: Number(size), Hash: options.cidVersion === 0 ? CID_V0 : CID_V1 }) } else if (typeof child.value.flush === 'function') { @@ -230,13 +203,13 @@ function calculateSize (bucket, shardRoot, options) { links.push({ Name: labelPrefix + child.key, - Tsize: size, + Tsize: Number(size), Hash: options.cidVersion === 0 ? 
CID_V0 : CID_V1 }) } else { const value = child.value - if (!value.cid) { + if (value.cid == null) { continue } @@ -245,7 +218,7 @@ function calculateSize (bucket, shardRoot, options) { links.push({ Name: label, - Tsize: size, + Tsize: Number(size), Hash: value.cid }) } @@ -257,10 +230,10 @@ function calculateSize (bucket, shardRoot, options) { const dir = new UnixFS({ type: 'hamt-sharded-directory', data, - fanout: bucket.tableSize(), + fanout: BigInt(bucket.tableSize()), hashType: options.hamtHashCode, - mtime: shardRoot && shardRoot.mtime, - mode: shardRoot && shardRoot.mode + mtime: shardRoot?.mtime, + mode: shardRoot?.mode }) const buffer = encode(prepare({ diff --git a/packages/ipfs-unixfs-importer/src/dir.js b/packages/ipfs-unixfs-importer/src/dir.js deleted file mode 100644 index 97a107b5..00000000 --- a/packages/ipfs-unixfs-importer/src/dir.js +++ /dev/null @@ -1,86 +0,0 @@ -import { CID } from 'multiformats/cid' - -/** - * @typedef {import('./types').ImporterOptions} ImporterOptions - * @typedef {import('./types').ImportResult} ImportResult - * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * - * @typedef {object} DirProps - * @property {boolean} root - * @property {boolean} dir - * @property {string} path - * @property {boolean} dirty - * @property {boolean} flat - * @property {Dir} [parent] - * @property {string} [parentKey] - * @property {import('ipfs-unixfs').UnixFS} [unixfs] - * @property {number} [mode] - * @property {import('ipfs-unixfs').Mtime} [mtime] - */ -export class Dir { - /** - * @param {DirProps} props - * @param {ImporterOptions} options - */ - constructor (props, options) { - this.options = options || {} - - this.root = props.root - this.dir = props.dir - this.path = props.path - this.dirty = props.dirty - this.flat = props.flat - this.parent = props.parent - this.parentKey = props.parentKey - this.unixfs = props.unixfs - this.mode = props.mode - this.mtime = props.mtime - - /** @type {CID | undefined} */ - this.cid = undefined - /** @type {number | undefined} */ - this.size = undefined - /** @type {number | undefined} */ - this.nodeSize = undefined - } - - /** - * @param {string} name - * @param {InProgressImportResult | Dir} value - */ - async put (name, value) { } - - /** - * @param {string} name - * @returns {Promise} - */ - get (name) { - return Promise.resolve(this) - } - - /** - * @returns {AsyncIterable<{ key: string, child: InProgressImportResult | Dir}>} - */ - async * eachChildSeries () { } - - /** - * @param {Blockstore} blockstore - * @returns {AsyncIterable} - */ - async * flush (blockstore) { } - - /** - * @returns {number} - */ - estimateNodeSize () { - return 0 - } -} - -// we use these to calculate the node size to use as a check for whether a directory -// should be sharded or not. Since CIDs have a constant length and We're only -// interested in the data length and not the actual content identifier we can use -// any old CID instead of having to hash the data which is expensive. 
-export const CID_V0 = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') -export const CID_V1 = CID.parse('zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi') diff --git a/packages/ipfs-unixfs-importer/src/dir.ts b/packages/ipfs-unixfs-importer/src/dir.ts new file mode 100644 index 00000000..600860ce --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/dir.ts @@ -0,0 +1,70 @@ +import type { Blockstore } from 'interface-blockstore' +import type { Mtime, UnixFS } from 'ipfs-unixfs' +import { CID } from 'multiformats/cid' +import type { ImporterOptions, ImportResult, InProgressImportResult } from './index.js' + +export interface DirProps { + root: boolean + dir: boolean + path: string + dirty: boolean + flat: boolean + parent?: Dir + parentKey?: string + unixfs?: UnixFS + mode?: number + mtime?: Mtime +} + +export class Dir { + public options: ImporterOptions + public root: boolean + public dir: boolean + public path: string + public dirty: boolean + public flat: boolean + public parent?: Dir + public parentKey?: string + public unixfs?: UnixFS + public mode?: number + public mtime?: Mtime + public cid?: CID + public size?: number + public nodeSize?: number + + constructor (props: DirProps, options: ImporterOptions) { + this.options = options ?? {} + + this.root = props.root + this.dir = props.dir + this.path = props.path + this.dirty = props.dirty + this.flat = props.flat + this.parent = props.parent + this.parentKey = props.parentKey + this.unixfs = props.unixfs + this.mode = props.mode + this.mtime = props.mtime + } + + async put (name: string, value: InProgressImportResult | Dir): Promise { } + + async get (name: string): Promise { + return await Promise.resolve(this) + } + + async * eachChildSeries (): AsyncIterable<{ key: string, child: InProgressImportResult | Dir }> { } + + async * flush (blockstore: Blockstore): AsyncGenerator { } + + estimateNodeSize (): number { + return 0 + } +} + +// we use these to calculate the node size to use as a check for whether a directory +// should be sharded or not. Since CIDs have a constant length and We're only +// interested in the data length and not the actual content identifier we can use +// any old CID instead of having to hash the data which is expensive. 
+export const CID_V0 = CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') +export const CID_V1 = CID.parse('zdj7WbTaiJT1fgatdet9Ei9iDB5hdCxkbVyhyh8YTUnXMiwYi') diff --git a/packages/ipfs-unixfs-importer/src/flat-to-shard.js b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts similarity index 55% rename from packages/ipfs-unixfs-importer/src/flat-to-shard.js rename to packages/ipfs-unixfs-importer/src/flat-to-shard.ts index af083e49..48134ae2 100644 --- a/packages/ipfs-unixfs-importer/src/flat-to-shard.js +++ b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts @@ -1,19 +1,9 @@ import DirSharded from './dir-sharded.js' -import DirFlat from './dir-flat.js' +import { DirFlat } from './dir-flat.js' +import type { Dir } from './dir.js' +import type { ImporterOptions } from './index.js' -/** - * @typedef {import('./dir').Dir} Dir - * @typedef {import('./types').ImporterOptions} ImporterOptions - */ - -/** - * @param {Dir | null} child - * @param {Dir} dir - * @param {number} threshold - * @param {ImporterOptions} options - * @returns {Promise} - */ -async function flatToShard (child, dir, threshold, options) { +export async function flatToShard (child: Dir | null, dir: Dir, threshold: number, options: ImporterOptions): Promise { let newDir = dir if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) { @@ -22,31 +12,27 @@ async function flatToShard (child, dir, threshold, options) { const parent = newDir.parent - if (parent) { + if (parent != null) { if (newDir !== dir) { - if (child) { + if (child != null) { child.parent = newDir } - if (!newDir.parentKey) { + if (newDir.parentKey == null) { throw new Error('No parent key found') } await parent.put(newDir.parentKey, newDir) } - return flatToShard(newDir, parent, threshold, options) + return await flatToShard(newDir, parent, threshold, options) } - // @ts-ignore + // @ts-expect-error return newDir } -/** - * @param {DirFlat} oldDir - * @param {ImporterOptions} options - */ -async function convertToShard (oldDir, options) { +async function convertToShard (oldDir: DirFlat, options: ImporterOptions): Promise { const newDir = new DirSharded({ root: oldDir.root, dir: true, @@ -65,5 +51,3 @@ async function convertToShard (oldDir, options) { return newDir } - -export default flatToShard diff --git a/packages/ipfs-unixfs-importer/src/index.js b/packages/ipfs-unixfs-importer/src/index.js deleted file mode 100644 index 80044289..00000000 --- a/packages/ipfs-unixfs-importer/src/index.js +++ /dev/null @@ -1,68 +0,0 @@ -import parallelBatch from 'it-parallel-batch' -import defaultOptions from './options.js' -import dagBuilderFn from './dag-builder/index.js' -import treeBuilderFn from './tree-builder.js' - -/** - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {import('./types').ImportCandidate} ImportCandidate - * @typedef {import('./types').UserImporterOptions} UserImporterOptions - * @typedef {import('./types').ImporterOptions} ImporterOptions - * @typedef {import('./types').Directory} Directory - * @typedef {import('./types').File} File - * @typedef {import('./types').ImportResult} ImportResult - * - * @typedef {import('./types').Chunker} Chunker - * @typedef {import('./types').DAGBuilder} DAGBuilder - * @typedef {import('./types').TreeBuilder} TreeBuilder - * @typedef {import('./types').BufferImporter} BufferImporter - * @typedef {import('./types').ChunkValidator} ChunkValidator - * @typedef {import('./types').Reducer} Reducer - * @typedef {import('./types').ProgressHandler} ProgressHandler - */ - -/** - 
* @param {AsyncIterable | Iterable | ImportCandidate} source - * @param {Blockstore} blockstore - * @param {UserImporterOptions} options - * @returns {AsyncGenerator} - */ -export async function * importer (source, blockstore, options = {}) { - const opts = defaultOptions(options) - - let dagBuilder - - if (typeof options.dagBuilder === 'function') { - dagBuilder = options.dagBuilder - } else { - dagBuilder = dagBuilderFn - } - - let treeBuilder - - if (typeof options.treeBuilder === 'function') { - treeBuilder = options.treeBuilder - } else { - treeBuilder = treeBuilderFn - } - - /** @type {AsyncIterable | Iterable} */ - let candidates - - if (Symbol.asyncIterator in source || Symbol.iterator in source) { - // @ts-ignore - candidates = source - } else { - // @ts-ignore - candidates = [source] - } - - for await (const entry of treeBuilder(parallelBatch(dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) { - yield { - cid: entry.cid, - path: entry.path, - unixfs: entry.unixfs, - size: entry.size - } - } -} diff --git a/packages/ipfs-unixfs-importer/src/types.ts b/packages/ipfs-unixfs-importer/src/index.ts similarity index 74% rename from packages/ipfs-unixfs-importer/src/types.ts rename to packages/ipfs-unixfs-importer/src/index.ts index e0f41133..a631fc60 100644 --- a/packages/ipfs-unixfs-importer/src/types.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -1,7 +1,10 @@ +import parallelBatch from 'it-parallel-batch' +import defaultOptions from './options.js' +import { dagBuilder } from './dag-builder/index.js' +import { treeBuilder } from './tree-builder.js' import type { UnixFS, Mtime } from 'ipfs-unixfs' import type { CID, Version as CIDVersion } from 'multiformats/cid' import type { MultihashHasher } from 'multiformats/hashes/interface' -import type { BlockCodec } from 'multiformats/codecs/interface' import type { Blockstore } from 'interface-blockstore' export interface ImportCandidate { @@ -26,7 +29,7 @@ export interface Directory { export interface ImportResult { cid: CID - size: number + size: bigint path?: string unixfs?: UnixFS } @@ -73,7 +76,7 @@ export interface UserImporterOptions { timeout?: number hamtHashFn?: HamtHashFn hamtBucketBits?: number - hamtHashCode?: number + hamtHashCode?: bigint chunker?: ChunkerType | Chunker dagBuilder?: DAGBuilder treeBuilder?: TreeBuilder @@ -106,7 +109,7 @@ export interface ImporterOptions { timeout?: number hamtHashFn: HamtHashFn hamtBucketBits: number - hamtHashCode: number + hamtHashCode: bigint chunker: ChunkerType | Chunker dagBuilder?: DAGBuilder treeBuilder?: TreeBuilder @@ -114,22 +117,39 @@ export interface ImporterOptions { chunkValidator?: ChunkValidator } -export interface TrickleDagNode { - children: InProgressImportResult[] - depth: number - maxDepth: number - maxChildren: number - data?: InProgressImportResult[] - parent?: TrickleDagNode - cid?: CID - size?: number - unixfs?: UnixFS -} +export async function * importer (source: AsyncIterable | Iterable | ImportCandidate, blockstore: Blockstore, options: UserImporterOptions = {}): AsyncGenerator { + const opts = defaultOptions(options) -export interface PersistOptions { - codec?: BlockCodec - hasher: MultihashHasher - cidVersion: CIDVersion - onlyHash: boolean - signal?: AbortSignal + let buildDag + + if (typeof options.dagBuilder === 'function') { + buildDag = options.dagBuilder + } else { + buildDag = dagBuilder + } + + let buildTree + + if (typeof options.treeBuilder === 'function') { + buildTree = options.treeBuilder + } else { + 
buildTree = treeBuilder + } + + let candidates: AsyncIterable | Iterable + + if (Symbol.asyncIterator in source || Symbol.iterator in source) { + candidates = source + } else { + candidates = [source] + } + + for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) { + yield { + cid: entry.cid, + path: entry.path, + unixfs: entry.unixfs, + size: entry.size + } + } } diff --git a/packages/ipfs-unixfs-importer/src/options.js b/packages/ipfs-unixfs-importer/src/options.ts similarity index 77% rename from packages/ipfs-unixfs-importer/src/options.js rename to packages/ipfs-unixfs-importer/src/options.ts index 9359368f..eaf0f713 100644 --- a/packages/ipfs-unixfs-importer/src/options.js +++ b/packages/ipfs-unixfs-importer/src/options.ts @@ -1,11 +1,9 @@ import mergeOptions from 'merge-options' import { sha256 } from 'multiformats/hashes/sha2' import { murmur3128 } from '@multiformats/murmur3' +import type { ImporterOptions, UserImporterOptions } from './index.js' -/** - * @param {Uint8Array} buf - */ -async function hamtHashFn (buf) { +async function hamtHashFn (buf: Uint8Array): Promise { return (await murmur3128.encode(buf)) // Murmur3 outputs 128 bit but, accidentally, IPFS Go's // implementation only uses the first 64, so we must do the same @@ -15,15 +13,7 @@ async function hamtHashFn (buf) { .reverse() } -/** - * @typedef {import('./types').UserImporterOptions} UserImporterOptions - * @typedef {import('./types').ImporterOptions} ImporterOptions - */ - -/** - * @type {ImporterOptions} - */ -const defaultOptions = { +const defaultOptions: ImporterOptions = { chunker: 'fixed', strategy: 'balanced', // 'flat', 'trickle' rawLeaves: false, @@ -43,7 +33,7 @@ const defaultOptions = { window: 16, // FIXME: This number is too big for JavaScript // https://github.com/ipfs/go-ipfs-chunker/blob/d0125832512163708c0804a3cda060e21acddae4/rabin.go#L11 - polynomial: 17437180132763653, // eslint-disable-line no-loss-of-precision + polynomial: 17437180132763653, // eslint-disable-line no-loss-of-precision,@typescript-eslint/no-loss-of-precision maxChildrenPerNode: 174, layerRepeat: 4, wrapWithDirectory: false, @@ -51,15 +41,11 @@ const defaultOptions = { hidden: false, timeout: undefined, hamtHashFn, - hamtHashCode: 0x22, + hamtHashCode: BigInt(0x22), hamtBucketBits: 8 } -/** - * @param {UserImporterOptions} options - * @returns {ImporterOptions} - */ -export default (options = {}) => { +export default (options: UserImporterOptions = {}): ImporterOptions => { const defaults = mergeOptions.bind({ ignoreUndefined: true }) return defaults(defaultOptions, options) } diff --git a/packages/ipfs-unixfs-importer/src/tree-builder.js b/packages/ipfs-unixfs-importer/src/tree-builder.ts similarity index 62% rename from packages/ipfs-unixfs-importer/src/tree-builder.js rename to packages/ipfs-unixfs-importer/src/tree-builder.ts index 7a2df8cd..5300db0f 100644 --- a/packages/ipfs-unixfs-importer/src/tree-builder.js +++ b/packages/ipfs-unixfs-importer/src/tree-builder.ts @@ -1,7 +1,9 @@ -import DirFlat from './dir-flat.js' -import flatToShard from './flat-to-shard.js' +import { DirFlat } from './dir-flat.js' +import { flatToShard } from './flat-to-shard.js' import { Dir } from './dir.js' -import toPathComponents from './utils/to-path-components.js' +import { toPathComponents } from './utils/to-path-components.js' +import type { ImporterOptions, ImportResult, InProgressImportResult, TreeBuilder } from './index.js' +import type { Blockstore } 
from 'interface-blockstore' /** * @typedef {import('./types').ImportResult} ImportResult @@ -11,13 +13,8 @@ import toPathComponents from './utils/to-path-components.js' * @typedef {(source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions) => AsyncIterable} TreeBuilder */ -/** - * @param {InProgressImportResult} elem - * @param {Dir} tree - * @param {ImporterOptions} options - */ -async function addToTree (elem, tree, options) { - const pathElems = toPathComponents(elem.path || '') +async function addToTree (elem: InProgressImportResult, tree: Dir, options: ImporterOptions): Promise { + const pathElems = toPathComponents(elem.path ?? '') const lastIndex = pathElems.length - 1 let parent = tree let currentPath = '' @@ -25,7 +22,7 @@ async function addToTree (elem, tree, options) { for (let i = 0; i < pathElems.length; i++) { const pathElem = pathElems[i] - currentPath += `${currentPath ? '/' : ''}${pathElem}` + currentPath += `${currentPath !== '' ? '/' : ''}${pathElem}` const last = (i === lastIndex) parent.dirty = true @@ -38,17 +35,17 @@ async function addToTree (elem, tree, options) { } else { let dir = await parent.get(pathElem) - if (!dir || !(dir instanceof Dir)) { + if ((dir == null) || !(dir instanceof Dir)) { dir = new DirFlat({ root: false, dir: true, - parent: parent, + parent, parentKey: pathElem, path: currentPath, dirty: true, flat: true, - mtime: dir && dir.unixfs && dir.unixfs.mtime, - mode: dir && dir.unixfs && dir.unixfs.mode + mtime: dir?.unixfs?.mtime, + mode: dir?.unixfs?.mode }, options) } @@ -61,13 +58,9 @@ async function addToTree (elem, tree, options) { return tree } -/** - * @param {Dir | InProgressImportResult} tree - * @param {Blockstore} blockstore - */ -async function * flushAndYield (tree, blockstore) { +async function * flushAndYield (tree: Dir | InProgressImportResult, blockstore: Blockstore): AsyncGenerator { if (!(tree instanceof Dir)) { - if (tree && tree.unixfs && tree.unixfs.isDirectory()) { + if (tree.unixfs?.isDirectory() === true) { yield tree } @@ -77,12 +70,8 @@ async function * flushAndYield (tree, blockstore) { yield * tree.flush(blockstore) } -/** - * @type {TreeBuilder} - */ -async function * treeBuilder (source, block, options) { - /** @type {Dir} */ - let tree = new DirFlat({ +export const treeBuilder: TreeBuilder = async function * treeBuilder (source, block, options) { + let tree: Dir = new DirFlat({ root: true, dir: true, path: '', @@ -91,13 +80,13 @@ async function * treeBuilder (source, block, options) { }, options) for await (const entry of source) { - if (!entry) { + if (entry == null) { continue } tree = await addToTree(entry, tree, options) - if (!entry.unixfs || !entry.unixfs.isDirectory()) { + if (entry.unixfs == null || !entry.unixfs.isDirectory()) { yield entry } } @@ -106,7 +95,7 @@ async function * treeBuilder (source, block, options) { yield * flushAndYield(tree, block) } else { for await (const unwrapped of tree.eachChildSeries()) { - if (!unwrapped) { + if (unwrapped == null) { continue } @@ -114,5 +103,3 @@ async function * treeBuilder (source, block, options) { } } } - -export default treeBuilder diff --git a/packages/ipfs-unixfs-importer/src/utils/persist.js b/packages/ipfs-unixfs-importer/src/utils/persist.ts similarity index 51% rename from packages/ipfs-unixfs-importer/src/utils/persist.js rename to packages/ipfs-unixfs-importer/src/utils/persist.ts index fd57316c..9091df79 100644 --- a/packages/ipfs-unixfs-importer/src/utils/persist.js +++ b/packages/ipfs-unixfs-importer/src/utils/persist.ts @@ -1,18 
+1,25 @@ import { CID } from 'multiformats/cid' import * as dagPb from '@ipld/dag-pb' import { sha256 } from 'multiformats/hashes/sha2' +import type { Blockstore } from 'interface-blockstore' +import type { BlockCodec } from 'multiformats/codecs/interface' +import type { MultihashHasher } from 'multiformats/hashes/interface' +import type { Version as CIDVersion } from 'multiformats/cid' + +export interface PersistOptions { + codec?: BlockCodec + hasher: MultihashHasher + cidVersion: CIDVersion + onlyHash: boolean + signal?: AbortSignal +} -/** - * @param {Uint8Array} buffer - * @param {import('interface-blockstore').Blockstore} blockstore - * @param {import('../types').PersistOptions} options - */ -const persist = async (buffer, blockstore, options) => { - if (!options.codec) { +export const persist = async (buffer: Uint8Array, blockstore: Blockstore, options: PersistOptions): Promise => { + if (options.codec == null) { options.codec = dagPb } - if (!options.hasher) { + if (options.hasher == null) { options.hasher = sha256 } @@ -35,5 +42,3 @@ const persist = async (buffer, blockstore, options) => { return cid } - -export default persist diff --git a/packages/ipfs-unixfs-importer/src/utils/to-path-components.js b/packages/ipfs-unixfs-importer/src/utils/to-path-components.js deleted file mode 100644 index 26835e7a..00000000 --- a/packages/ipfs-unixfs-importer/src/utils/to-path-components.js +++ /dev/null @@ -1,9 +0,0 @@ -const toPathComponents = (path = '') => { - // split on / unless escaped with \ - return (path - .trim() - .match(/([^\\/]|\\\/)+/g) || []) - .filter(Boolean) -} - -export default toPathComponents diff --git a/packages/ipfs-unixfs-importer/src/utils/to-path-components.ts b/packages/ipfs-unixfs-importer/src/utils/to-path-components.ts new file mode 100644 index 00000000..80ec3c58 --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/utils/to-path-components.ts @@ -0,0 +1,7 @@ +export const toPathComponents = (path: string = ''): string[] => { + // split on / unless escaped with \ + return (path + .trim() + .match(/([^\\/]|\\\/)+/g) ?? 
[]) + .filter(Boolean) +} diff --git a/packages/ipfs-unixfs-importer/test/benchmark.spec.js b/packages/ipfs-unixfs-importer/test/benchmark.spec.ts similarity index 79% rename from packages/ipfs-unixfs-importer/test/benchmark.spec.js rename to packages/ipfs-unixfs-importer/test/benchmark.spec.ts index 5591d1ab..cde008b8 100644 --- a/packages/ipfs-unixfs-importer/test/benchmark.spec.js +++ b/packages/ipfs-unixfs-importer/test/benchmark.spec.ts @@ -1,8 +1,9 @@ /* eslint-env mocha */ -import { importer } from '../src/index.js' +import { importer } from '../src/index.js' import bufferStream from 'it-buffer-stream' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' +import drain from 'it-drain' const REPEATS = 10 const FILE_SIZE = Math.pow(2, 20) * 500 // 500MB @@ -11,10 +12,9 @@ const CHUNK_SIZE = 65536 describe.skip('benchmark', function () { this.timeout(30 * 1000) - const block = blockApi() + const block = new MemoryBlockstore() - /** @type {number[]} */ - const times = [] + const times: number[] = [] after(() => { console.info('Percent\tms') // eslint-disable-line no-console @@ -33,16 +33,13 @@ describe.skip('benchmark', function () { let lastPercent = 0 const options = { - /** - * @param {number} prog - */ - progress: (prog) => { + progress: (prog: number) => { read += prog const percent = Math.round((read / size) * 100) if (percent > lastPercent) { - times[percent] = (times[percent] || 0) + (Date.now() - lastDate) + times[percent] = (times[percent] ?? 0) + (Date.now() - lastDate) lastDate = Date.now() lastPercent = percent @@ -52,7 +49,7 @@ describe.skip('benchmark', function () { const buf = new Uint8Array(CHUNK_SIZE).fill(0) - await importer([{ + await drain(importer([{ path: '200Bytes.txt', content: bufferStream(size, { chunkSize: CHUNK_SIZE, @@ -60,7 +57,7 @@ describe.skip('benchmark', function () { return buf } }) - }], block, options) + }], block, options)) }) } }) diff --git a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts similarity index 72% rename from packages/ipfs-unixfs-importer/test/builder-balanced.spec.js rename to packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts index dc9d5ba6..80ce60ff 100644 --- a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts @@ -1,19 +1,15 @@ /* eslint-env mocha */ + import { expect } from 'aegir/chai' -import builder from '../src/dag-builder/file/balanced.js' +import { balanced } from '../src/dag-builder/file/balanced.js' import { CID } from 'multiformats/cid' import defaultOptions from '../src/options.js' +import type { InProgressImportResult } from '../src/index.js' -/** - * @typedef {import('../src/types').InProgressImportResult} InProgressImportResult - * - * @param {InProgressImportResult[]} leaves - * @returns {Promise} - */ -async function reduce (leaves) { +async function reduce (leaves: InProgressImportResult[]): Promise { if (leaves.length > 1) { return { - // @ts-ignore + // @ts-expect-error children: leaves } } else { @@ -30,10 +26,10 @@ describe('builder: balanced', () => { it('reduces one value into itself', async () => { const source = [{ cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), - size: 0 + size: 0n }] - const result = await builder((async function * () { + const result = await balanced((async function * () { yield * source }()), reduce, options) @@ -43,16 +39,16 @@ describe('builder: balanced', () => { 
it('reduces 3 values into parent', async () => { const source = [{ cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), - size: 0 + size: 0n }, { cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), - size: 0 + size: 0n }, { cid: CID.parse('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'), - size: 0 + size: 0n }] - const result = await builder((async function * () { + const result = await balanced((async function * () { yield * source }()), reduce, options) @@ -64,8 +60,8 @@ describe('builder: balanced', () => { it('obeys max children per node', async () => { const source = [1, 2, 3, 4] - // @ts-ignore - const result = await builder((async function * () { + // @ts-expect-error + const result = await balanced((async function * () { yield * source }()), reduce, options) @@ -81,8 +77,8 @@ describe('builder: balanced', () => { it('refolds 2 parent nodes', async () => { const source = [1, 2, 3, 4, 5, 6, 7] - // @ts-ignore - const result = await builder((async function * () { + // @ts-expect-error + const result = await balanced((async function * () { yield * source }()), reduce, options) diff --git a/packages/ipfs-unixfs-importer/test/builder-flat.spec.js b/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts similarity index 64% rename from packages/ipfs-unixfs-importer/test/builder-flat.spec.js rename to packages/ipfs-unixfs-importer/test/builder-flat.spec.ts index 0a95bc48..9ca4104b 100644 --- a/packages/ipfs-unixfs-importer/test/builder-flat.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts @@ -1,12 +1,9 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import builder from '../src/dag-builder/file/flat.js' +import { flat } from '../src/dag-builder/file/flat.js' -/** - * @param {*} leaves - */ -function reduce (leaves) { +function reduce (leaves: any[]): any { if (leaves.length > 1) { return { children: leaves } } else { @@ -17,16 +14,16 @@ function reduce (leaves) { describe('builder: flat', () => { it('reduces one value into itself', async () => { const source = [1] - // @ts-ignore - const result = await builder(source, reduce) + // @ts-expect-error + const result = await flat(source, reduce) expect(result).to.be.eql(1) }) it('reduces 2 values into parent', async () => { const source = [1, 2] - // @ts-ignore - const result = await builder(source, reduce) + // @ts-expect-error + const result = await flat(source, reduce) expect(result).to.be.eql({ children: [1, 2] }) }) diff --git a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts similarity index 76% rename from packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js rename to packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts index f10a78b6..f06cbe68 100644 --- a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts @@ -1,17 +1,17 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import builder from '../src/dag-builder/index.js' +import { dagBuilder } from '../src/dag-builder/index.js' import all from 'it-all' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import defaultOptions from '../src/options.js' import asAsyncIterable from './helpers/as-async-iterable.js' describe('builder: onlyHash', () => { - const block = blockApi() + const block = new MemoryBlockstore() it('will only chunk and hash if passed an "onlyHash" option', async () => { - const nodes = 
await all(builder([{ + const nodes = await all(dagBuilder([{ path: 'foo.txt', content: asAsyncIterable(Uint8Array.from([0, 1, 2, 3, 4])) }], block, { @@ -25,7 +25,7 @@ describe('builder: onlyHash', () => { await block.get((await nodes[0]()).cid) throw new Error('Should have errored') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_NOT_FOUND') } }) diff --git a/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.js b/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts similarity index 92% rename from packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.js rename to packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts index 1b68db12..a1beda1e 100644 --- a/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts @@ -1,13 +1,10 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import builder from '../src/dag-builder/file/trickle.js' +import { trickle } from '../src/dag-builder/file/trickle.js' import asAsyncIterable from './helpers/as-async-iterable.js' -/** - * @param {number} max - */ -const createValues = (max) => { +const createValues = (max: number): number[] => { const output = [] for (let i = 0; i < max; i++) { @@ -17,10 +14,7 @@ const createValues = (max) => { return output } -/** - * @param {*} leaves - */ -function reduce (leaves) { +function reduce (leaves: any): any { if (leaves.length > 1) { return { children: leaves } } else { @@ -35,15 +29,15 @@ const options = { describe('builder: trickle', () => { it('reduces one value into itself', async () => { - // @ts-ignore - const result = await builder(asAsyncIterable([1]), reduce, options) + // @ts-expect-error + const result = await trickle(asAsyncIterable([1]), reduce, options) expect(result).to.deep.equal(1) }) it('reduces 3 values into parent', async () => { - // @ts-ignore - const result = await builder(createValues(3), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(3), reduce, options) expect(result).to.deep.equal({ children: [ @@ -55,8 +49,8 @@ describe('builder: trickle', () => { }) it('reduces 6 values correctly', async () => { - // @ts-ignore - const result = await builder(createValues(6), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(6), reduce, options) expect(result).to.deep.equal({ children: [ @@ -75,8 +69,8 @@ describe('builder: trickle', () => { }) it('reduces 9 values correctly', async () => { - // @ts-ignore - const result = await builder(createValues(9), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(9), reduce, options) expect(result).to.deep.equal({ children: [ @@ -102,8 +96,8 @@ describe('builder: trickle', () => { }) it('reduces 12 values correctly', async () => { - // @ts-ignore - const result = await builder(createValues(12), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(12), reduce, options) expect(result).to.deep.equal({ children: [ @@ -136,8 +130,8 @@ describe('builder: trickle', () => { }) it('reduces 21 values correctly', async () => { - // @ts-ignore - const result = await builder(createValues(21), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(21), reduce, options) expect(result).to.deep.equal({ children: [ @@ -191,8 +185,8 @@ describe('builder: trickle', () => { }) it('reduces 68 values correctly', async () => { - // @ts-ignore - const result = await 
builder(createValues(68), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(68), reduce, options) expect(result).to.deep.equal( { @@ -359,8 +353,8 @@ describe('builder: trickle', () => { }) it('reduces 93 values correctly', async () => { - // @ts-ignore - const result = await builder(createValues(93), reduce, options) + // @ts-expect-error + const result = await trickle(createValues(93), reduce, options) expect(result).to.deep.equal( { diff --git a/packages/ipfs-unixfs-importer/test/builder.spec.js b/packages/ipfs-unixfs-importer/test/builder.spec.ts similarity index 85% rename from packages/ipfs-unixfs-importer/test/builder.spec.js rename to packages/ipfs-unixfs-importer/test/builder.spec.ts index f39fb794..261f911d 100644 --- a/packages/ipfs-unixfs-importer/test/builder.spec.js +++ b/packages/ipfs-unixfs-importer/test/builder.spec.ts @@ -1,18 +1,19 @@ /* eslint-env mocha */ + import { expect } from 'aegir/chai' import * as mh from 'multiformats/hashes/digest' import { sha256, sha512 } from 'multiformats/hashes/sha2' import { decode } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' -import builder from '../src/dag-builder/index.js' +import { dagBuilder } from '../src/dag-builder/index.js' import first from 'it-first' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import defaultOptions from '../src/options.js' import asAsyncIterable from './helpers/as-async-iterable.js' describe('builder', () => { - const block = blockApi() + const block = new MemoryBlockstore() const testMultihashes = [sha256, sha512] @@ -21,16 +22,16 @@ describe('builder', () => { const hasher = testMultihashes[i] const content = uint8ArrayFromString(String(Math.random() + Date.now())) const inputFile = { - path: content + '.txt', + path: `${content}.txt`, content: asAsyncIterable(content) } - const result = await first(builder([inputFile], block, { + const result = await first(dagBuilder([inputFile], block, { ...defaultOptions(), hasher })) - if (!result) { + if (result == null) { throw new Error('Nothing built') } @@ -43,7 +44,7 @@ describe('builder', () => { // Fetch using hasher encoded multihash const importedBlock = await block.get(imported.cid) const node = decode(importedBlock) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const fetchedContent = UnixFS.unmarshal(node.Data).data @@ -63,12 +64,12 @@ describe('builder', () => { content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) } - const result = await first(builder([inputFile], block, { + const result = await first(dagBuilder([inputFile], block, { ...defaultOptions(), hasher })) - if (!result) { + if (result == null) { throw new Error('Nothing built') } @@ -86,12 +87,12 @@ describe('builder', () => { path: `${String(Math.random() + Date.now())}-dir` } - const result = await first(builder([{ ...inputFile }], block, { + const result = await first(dagBuilder([{ ...inputFile }], block, { ...defaultOptions(), hasher })) - if (!result) { + if (result == null) { return new Error('Nothing built') } @@ -103,7 +104,7 @@ describe('builder', () => { const importedBlock = await block.get(imported.cid) const node = decode(importedBlock) - if (!node.Data) { + if (node.Data == null) { throw new Error('PBNode Data undefined') } const meta = UnixFS.unmarshal(node.Data) diff --git a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js 
b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts similarity index 62% rename from packages/ipfs-unixfs-importer/test/chunker-custom.spec.js rename to packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts index 96666dfd..4e590597 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.js +++ b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts @@ -1,31 +1,25 @@ /* eslint-env mocha */ + import { importer } from '../src/index.js' import { expect } from 'aegir/chai' - import * as rawCodec from 'multiformats/codecs/raw' import { sha256 } from 'multiformats/hashes/sha2' import * as Block from 'multiformats/block' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { UnixFS } from 'ipfs-unixfs' +import type { CID } from 'multiformats' -const iter = async function * () { +const iter = async function * (): AsyncGenerator { yield uint8ArrayFromString('one') yield uint8ArrayFromString('two') } describe('custom chunker', function () { - const block = blockApi() + const block = new MemoryBlockstore() - /** - * @param {AsyncIterable} content - * @param {number} size - */ - const fromPartsTest = (content, size) => async () => { - /** - * @param {Uint8Array} buf - */ - const put = async (buf) => { + const fromPartsTest = (content: AsyncIterable, size: bigint) => async () => { + const put = async (buf: Uint8Array): Promise<{ cid: CID, size: bigint, unixfs: UnixFS }> => { const encodedBlock = await Block.encode({ value: buf, codec: rawCodec, @@ -34,7 +28,7 @@ describe('custom chunker', function () { return { cid: encodedBlock.cid, - size: buf.length, + size: BigInt(buf.length), unixfs: new UnixFS() } } @@ -45,7 +39,7 @@ describe('custom chunker', function () { chunker: source => source, bufferImporter: async function * (file, block, options) { for await (const item of file.content) { - yield async () => put(item) + yield async () => await put(item) } } })) { @@ -58,19 +52,19 @@ describe('custom chunker', function () { for await (const part of importer([{ path: 'test', content }], block, { chunker: source => source })) { - expect(part.size).to.equal(116) + expect(part.size).to.equal(116n) } }) - const multi = async function * () { + const multi = async function * (): AsyncGenerator { yield uint8ArrayFromString('hello world') yield uint8ArrayFromString('hello world') } - it('works with multiple parts', fromPartsTest(multi(), 120)) + it('works with multiple parts', fromPartsTest(multi(), 120n)) - const single = async function * () { + const single = async function * (): AsyncGenerator { yield uint8ArrayFromString('hello world') } - it('works with single part', fromPartsTest(single(), 11)) + it('works with single part', fromPartsTest(single(), 11n)) }) diff --git a/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.js b/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts similarity index 85% rename from packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.js rename to packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts index cab1bc0c..e9cfd831 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.js +++ b/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts @@ -1,11 +1,13 @@ /* eslint-env mocha */ -import chunker from '../src/chunker/fixed-size.js' + +import { fixedSize } from '../src/chunker/fixed-size.js' import { expect } from 'aegir/chai' import all from 'it-all' import { fromString as 
uint8ArrayFromString } from 'uint8arrays/from-string' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import defaultOptions from '../src/options.js' import asAsyncIterable from './helpers/as-async-iterable.js' + const rawFile = new Uint8Array(Math.pow(2, 20)) describe('chunker: fixed size', function () { @@ -20,7 +22,7 @@ describe('chunker: fixed size', function () { b2.fill('b'.charCodeAt(0)) b3.fill('c'.charCodeAt(0)) - const chunks = await all(chunker(asAsyncIterable([b1, b2, b3]), { + const chunks = await all(fixedSize(asAsyncIterable([b1, b2, b3]), { ...defaultOptions(), maxChunkSize: 256 })) @@ -38,7 +40,7 @@ describe('chunker: fixed size', function () { for (let i = 0; i < (256 * 12); i++) { input.push(buf) } - const chunks = await all(chunker(asAsyncIterable(input), { + const chunks = await all(fixedSize(asAsyncIterable(input), { ...defaultOptions(), maxChunkSize: 256 })) @@ -51,7 +53,7 @@ describe('chunker: fixed size', function () { it('256 KiB chunks', async () => { const KiB256 = 262144 - const chunks = await all(chunker(asAsyncIterable([rawFile]), { + const chunks = await all(fixedSize(asAsyncIterable([rawFile]), { ...defaultOptions(), maxChunkSize: KiB256 })) @@ -66,7 +68,7 @@ describe('chunker: fixed size', function () { const KiB256 = 262144 const file = uint8ArrayConcat([rawFile, uint8ArrayFromString('hello')]) - const chunks = await all(chunker(asAsyncIterable([file]), { + const chunks = await all(fixedSize(asAsyncIterable([file]), { ...defaultOptions(), maxChunkSize: KiB256 })) diff --git a/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.js b/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts similarity index 87% rename from packages/ipfs-unixfs-importer/test/chunker-rabin.spec.js rename to packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts index d6f779f5..2ddcf5aa 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.js +++ b/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts @@ -1,6 +1,6 @@ /* eslint-env mocha */ -import chunker from '../src/chunker/rabin.js' +import { rabin } from '../src/chunker/rabin.js' import { expect } from 'aegir/chai' import all from 'it-all' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' @@ -31,7 +31,7 @@ describe('chunker: rabin', function () { b2.fill('b'.charCodeAt(0)) b3.fill('c'.charCodeAt(0)) - const chunks = await all(chunker(asAsyncIterable([b1, b2, b3]), { + const chunks = await all(rabin(asAsyncIterable([b1, b2, b3]), { ...defaultOptions(), minChunkSize: 48, avgChunkSize: 96, @@ -55,7 +55,7 @@ describe('chunker: rabin', function () { const b1 = new Uint8Array(10 * 256) b1.fill('a'.charCodeAt(0)) - const chunks = await all(chunker(asAsyncIterable([b1]), { + const chunks = await all(rabin(asAsyncIterable([b1]), { ...defaultOptions(), maxChunkSize: 262144, minChunkSize: 18, @@ -78,7 +78,7 @@ describe('chunker: rabin', function () { maxChunkSize: Math.round(KiB256 + (KiB256 / 2)) } - const chunks = await all(chunker(asAsyncIterable([file]), opts)) + const chunks = await all(rabin(asAsyncIterable([file]), opts)) chunks.forEach((chunk) => { expect(chunk).to.have.length.gte(opts.minChunkSize) @@ -94,9 +94,9 @@ describe('chunker: rabin', function () { } try { - await all(chunker(asAsyncIterable([]), opts)) + await all(rabin(asAsyncIterable([]), opts)) throw new Error('Should have thrown') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_INVALID_MIN_CHUNK_SIZE') } }) @@ -109,9 +109,9 @@ describe('chunker: rabin', 
function () { try { // @ts-expect-error invalid opts - await all(chunker(asAsyncIterable([]), opts)) + await all(rabin(asAsyncIterable([]), opts)) throw new Error('Should have thrown') - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err.code).to.equal('ERR_INVALID_AVG_CHUNK_SIZE') } }) @@ -125,7 +125,7 @@ describe('chunker: rabin', function () { avgChunkSize: 5 } - const chunks = await all(chunker(asAsyncIterable([file]), opts)) + const chunks = await all(rabin(asAsyncIterable([file]), opts)) chunks.forEach((chunk, index) => { if (index === chunks.length - 1) { diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts similarity index 85% rename from packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js rename to packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts index 268073d1..f8f74a10 100644 --- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.js +++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts @@ -1,16 +1,15 @@ /* eslint-env mocha */ -import { importer } from '../src/index.js' +import { importer } from '../src/index.js' import { expect } from 'aegir/chai' import randomByteStream from './helpers/finite-pseudorandom-byte-stream.js' import first from 'it-first' import last from 'it-last' -import blockApi from './helpers/block.js' +import { MemoryBlockstore } from 'blockstore-core' import defaultOptions from '../src/options.js' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -/** @type {('flat' | 'trickle' | 'balanced')[]} */ -const strategies = [ +const strategies: Array<'flat' | 'trickle' | 'balanced'> = [ 'flat', 'trickle', 'balanced' @@ -25,7 +24,7 @@ const expectedHashes = { strategies.forEach(strategy => { const options = { ...defaultOptions(), - strategy: strategy + strategy } if (strategy === 'trickle') { @@ -35,7 +34,7 @@ strategies.forEach(strategy => { } describe('go-ipfs interop using importer:' + strategy, () => { - const block = blockApi() + const block = new MemoryBlockstore() it('yields the same tree as go-ipfs', async function () { this.timeout(100 * 1000) @@ -47,7 +46,7 @@ strategies.forEach(strategy => { const file = await first(importer(source, block, options)) - if (!file) { + if (file == null) { throw new Error('Nothing imported') } @@ -59,10 +58,7 @@ strategies.forEach(strategy => { describe('go-ipfs auto-sharding interop', function () { this.timeout(100 * 1000) - /** - * @param {number} count - */ - function buildSource (count) { + function buildSource (count: number): Array<{ path: string, content: Uint8Array }> { return new Array(count).fill(0).map((_, index) => { const string = `long name to fill out bytes to make the sharded directory test flip over the sharded directory limit because link names are included in the directory entry ${index}` @@ -73,7 +69,7 @@ describe('go-ipfs auto-sharding interop', function () { }) } - const block = blockApi() + const block = new MemoryBlockstore() const threshold = 1343 it('uses the same shard threshold as go-unixfsnode (under threshold)', async function () { @@ -82,11 +78,11 @@ describe('go-ipfs auto-sharding interop', function () { rawLeaves: true })) - if (!result) { + if (result == null) { throw new Error('Nothing imported') } - expect(result).to.have.property('size', 490665) + expect(result).to.have.property('size', 490665n) expect(result).to.have.nested.property('unixfs.type', 'directory') 
expect(result.cid.toString()).to.be.equal('bafybeihecq4rpl4nw3cgfb2uiwltgsmw5sutouvuldv5fxn4gfbihvnalq') }) @@ -97,11 +93,11 @@ describe('go-ipfs auto-sharding interop', function () { rawLeaves: true })) - if (!result) { + if (result == null) { throw new Error('Nothing imported') } - expect(result).to.have.property('size', 515735) + expect(result).to.have.property('size', 515735n) expect(result).to.have.nested.property('unixfs.type', 'hamt-sharded-directory') expect(result.cid.toString()).to.be.equal('bafybeigyvxs6og5jbmpaa43qbhhd5swklqcfzqdrtjgfh53qjon6hpjaye') }) diff --git a/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js b/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js deleted file mode 100644 index 23e1cdea..00000000 --- a/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.js +++ /dev/null @@ -1,12 +0,0 @@ -/** - * @param {Uint8Array | Uint8Array[]} arr - */ -async function * asAsyncIterable (arr) { - if (!Array.isArray(arr)) { - arr = [arr] - } - - yield * arr -} - -export default asAsyncIterable diff --git a/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.ts b/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.ts new file mode 100644 index 00000000..ef9b811b --- /dev/null +++ b/packages/ipfs-unixfs-importer/test/helpers/as-async-iterable.ts @@ -0,0 +1,10 @@ + +async function * asAsyncIterable (arr: Uint8Array | Uint8Array[]): AsyncGenerator { + if (!Array.isArray(arr)) { + arr = [arr] + } + + yield * arr +} + +export default asAsyncIterable diff --git a/packages/ipfs-unixfs-importer/test/helpers/block.js b/packages/ipfs-unixfs-importer/test/helpers/block.js deleted file mode 100644 index 5072a6e5..00000000 --- a/packages/ipfs-unixfs-importer/test/helpers/block.js +++ /dev/null @@ -1,48 +0,0 @@ -import errCode from 'err-code' -import { BaseBlockstore } from 'blockstore-core' -import { base58btc } from 'multiformats/bases/base58' - -/** - * @typedef {import('multiformats/cid').CID} CID - */ - -function createBlockApi () { - class MockBlockstore extends BaseBlockstore { - constructor () { - super() - - /** @type {{[key: string]: Uint8Array}} */ - this._blocks = {} - } - - /** - * @param {CID} cid - * @param {Uint8Array} block - * @param {any} [options] - */ - async put (cid, block, options = {}) { - this._blocks[base58btc.encode(cid.multihash.bytes)] = block - } - - /** - * @param {CID} cid - * @param {any} [options] - */ - async get (cid, options = {}) { - const bytes = this._blocks[base58btc.encode(cid.multihash.bytes)] - - if (bytes === undefined) { - throw errCode(new Error(`Could not find data for CID '${cid}'`), 'ERR_NOT_FOUND') - } - - return bytes - } - } - - /** @type {import('interface-blockstore').Blockstore} */ - const bs = new MockBlockstore() - - return bs -} - -export default createBlockApi diff --git a/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js b/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.ts similarity index 72% rename from packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js rename to packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.ts index b3fde114..e5e219ac 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.js +++ b/packages/ipfs-unixfs-importer/test/helpers/finite-pseudorandom-byte-stream.ts @@ -1,10 +1,6 @@ const REPEATABLE_CHUNK_SIZE = 300000 -/** - * @param {number} maxSize - * @param {number} seed - */ -async function * stream (maxSize, seed) { 
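+// yields seeded, repeatable pseudo-random chunks of REPEATABLE_CHUNK_SIZE bytes until roughly maxSize bytes have been emitted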
+async function * stream (maxSize: number, seed: number): AsyncGenerator { const chunks = Math.ceil(maxSize / REPEATABLE_CHUNK_SIZE) let emitted = 0 const buf = new Uint8Array(REPEATABLE_CHUNK_SIZE) @@ -22,10 +18,7 @@ async function * stream (maxSize, seed) { export default stream -/** - * @param {number} seed - */ -function random (seed) { +function random (seed: number): number { const x = Math.sin(seed) * 10000 return x - Math.floor(x) } diff --git a/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js b/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.ts similarity index 61% rename from packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js rename to packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.ts index dc2963c3..dfaf0720 100644 --- a/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.js +++ b/packages/ipfs-unixfs-importer/test/helpers/random-byte-stream.ts @@ -1,7 +1,5 @@ -/** - * @param {number} seed - */ -async function * randomByteStream (seed) { + +async function * randomByteStream (seed: number): AsyncGenerator { while (true) { const r = Math.floor(random(seed) * 256) seed = r @@ -10,10 +8,7 @@ async function * randomByteStream (seed) { } } -/** - * @param {number} seed - */ -function random (seed) { +function random (seed: number): number { const x = Math.sin(seed) * 10000 return x - Math.floor(x) } diff --git a/packages/ipfs-unixfs-importer/test/utils.spec.js b/packages/ipfs-unixfs-importer/test/utils.spec.ts similarity index 90% rename from packages/ipfs-unixfs-importer/test/utils.spec.js rename to packages/ipfs-unixfs-importer/test/utils.spec.ts index 58a9b9d3..f46c1cb8 100644 --- a/packages/ipfs-unixfs-importer/test/utils.spec.js +++ b/packages/ipfs-unixfs-importer/test/utils.spec.ts @@ -1,7 +1,7 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import toPathComponents from '../src/utils/to-path-components.js' +import { toPathComponents } from '../src/utils/to-path-components.js' describe('toPathComponents', () => { it('splits on unescaped "/" characters', () => { diff --git a/packages/ipfs-unixfs-importer/tsconfig.json b/packages/ipfs-unixfs-importer/tsconfig.json index 6655a440..3ec310c6 100644 --- a/packages/ipfs-unixfs-importer/tsconfig.json +++ b/packages/ipfs-unixfs-importer/tsconfig.json @@ -1,8 +1,7 @@ { "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist", - "emitDeclarationOnly": true + "outDir": "dist" }, "include": [ "src", diff --git a/packages/ipfs-unixfs/.aegir.js b/packages/ipfs-unixfs/.aegir.js index e2cfcdb4..d991da45 100644 --- a/packages/ipfs-unixfs/.aegir.js +++ b/packages/ipfs-unixfs/.aegir.js @@ -2,6 +2,6 @@ /** @type {import('aegir').PartialOptions} */ export default { build: { - bundlesizeMax: '11KB' + bundlesizeMax: '9KB' } } diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index 85f7f233..6231dd69 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -20,22 +20,6 @@ }, "type": "module", "types": "./dist/src/index.d.ts", - "typesVersions": { - "*": { - "*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ], - "src/*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ] - } - }, "files": [ "src", "dist", @@ -45,7 +29,7 @@ "exports": { ".": { "types": "./dist/src/index.d.ts", - "import": "./src/index.js" + "import": "./dist/src/index.js" } }, "eslintConfig": { @@ -143,9 +127,7 @@ ] }, "scripts": { - "generate": "npm run generate:proto && 
generate:proto-types", - "generate:proto": "pbjs -t static-module -w es6 -r ipfs-unixfs --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o src/unixfs.js ./src/unixfs.proto", - "generate:proto-types": "pbts -o src/unixfs.d.ts src/unixfs.js", + "generate": "protons src/unixfs.proto", "test": "aegir test", "test:node": "aegir test -t node --cov", "test:chrome": "aegir test -t browser --cov", @@ -153,16 +135,17 @@ "build": "aegir build", "clean": "aegir clean", "lint": "aegir lint", - "dep-check": "aegir dep-check", + "dep-check": "aegir dep-check -i protons", "release": "aegir release" }, "dependencies": { "err-code": "^3.0.1", - "protobufjs": "^7.0.0" + "protons-runtime": "^5.0.0", + "uint8arraylist": "^2.4.3" }, "devDependencies": { "aegir": "^38.1.2", - "protobufjs-cli": "^1.0.0", + "protons": "^7.0.2", "uint8arrays": "^4.0.2" }, "browser": { diff --git a/packages/ipfs-unixfs/src/index.js b/packages/ipfs-unixfs/src/index.js deleted file mode 100644 index 3774cf19..00000000 --- a/packages/ipfs-unixfs/src/index.js +++ /dev/null @@ -1,330 +0,0 @@ -import errcode from 'err-code' -import * as Pb from './unixfs.js' -const PBData = Pb.Data - -/** - * @typedef {import('./types').Mtime} Mtime - * @typedef {import('./types').MtimeLike} MtimeLike - */ - -const types = [ - 'raw', - 'directory', - 'file', - 'metadata', - 'symlink', - 'hamt-sharded-directory' -] - -const dirTypes = [ - 'directory', - 'hamt-sharded-directory' -] - -const DEFAULT_FILE_MODE = parseInt('0644', 8) -const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8) - -/** - * @param {string | number | null | undefined} [mode] - */ -export function parseMode (mode) { - if (mode == null) { - return undefined - } - - if (typeof mode === 'number') { - return mode & 0xFFF - } - - mode = mode.toString() - - if (mode.substring(0, 1) === '0') { - // octal string - return parseInt(mode, 8) & 0xFFF - } - - // decimal string - return parseInt(mode, 10) & 0xFFF -} - -/** - * @param {any} input - */ -export function parseMtime (input) { - if (input == null) { - return undefined - } - - /** @type {Mtime | undefined} */ - let mtime - - // { secs, nsecs } - if (input.secs != null) { - mtime = { - secs: input.secs, - nsecs: input.nsecs - } - } - - // UnixFS TimeSpec - if (input.Seconds != null) { - mtime = { - secs: input.Seconds, - nsecs: input.FractionalNanoseconds - } - } - - // process.hrtime() - if (Array.isArray(input)) { - mtime = { - secs: input[0], - nsecs: input[1] - } - } - - // Javascript Date - if (input instanceof Date) { - const ms = input.getTime() - const secs = Math.floor(ms / 1000) - - mtime = { - secs: secs, - nsecs: (ms - (secs * 1000)) * 1000 - } - } - - /* - TODO: https://github.com/ipfs/aegir/issues/487 - - // process.hrtime.bigint() - if (input instanceof BigInt) { - const secs = input / BigInt(1e9) - const nsecs = input - (secs * BigInt(1e9)) - - mtime = { - secs: parseInt(secs.toString()), - nsecs: parseInt(nsecs.toString()) - } - } - */ - - if (!Object.prototype.hasOwnProperty.call(mtime, 'secs')) { - return undefined - } - - if (mtime != null && mtime.nsecs != null && (mtime.nsecs < 0 || mtime.nsecs > 999999999)) { - throw errcode(new Error('mtime-nsecs must be within the range [0,999999999]'), 'ERR_INVALID_MTIME_NSECS') - } - - return mtime -} - -class UnixFS { - /** - * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md - * - * @param {Uint8Array} marshaled - */ - static unmarshal (marshaled) { - const message = PBData.decode(marshaled) - const 
decoded = PBData.toObject(message, { - defaults: false, - arrays: true, - longs: Number, - objects: false - }) - - const data = new UnixFS({ - type: types[decoded.Type], - data: decoded.Data, - blockSizes: decoded.blocksizes, - mode: decoded.mode, - mtime: decoded.mtime - ? { - secs: decoded.mtime.Seconds, - nsecs: decoded.mtime.FractionalNanoseconds - } - : undefined - }) - - // make sure we honour the original mode - data._originalMode = decoded.mode || 0 - - return data - } - - /** - * @param {object} [options] - * @param {string} [options.type='file'] - * @param {Uint8Array} [options.data] - * @param {number[]} [options.blockSizes] - * @param {number} [options.hashType] - * @param {number} [options.fanout] - * @param {MtimeLike | null} [options.mtime] - * @param {number | string | null} [options.mode] - */ - constructor (options = { - type: 'file' - }) { - const { - type, - data, - blockSizes, - hashType, - fanout, - mtime, - mode - } = options - - if (type && !types.includes(type)) { - throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE') - } - - this.type = type || 'file' - this.data = data - this.hashType = hashType - this.fanout = fanout - - /** @type {number[]} */ - this.blockSizes = blockSizes || [] - this._originalMode = 0 - this.mode = parseMode(mode) - - if (mtime) { - this.mtime = parseMtime(mtime) - - if (this.mtime && !this.mtime.nsecs) { - this.mtime.nsecs = 0 - } - } - } - - /** - * @param {number | undefined} mode - */ - set mode (mode) { - this._mode = this.isDirectory() ? DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE - - const parsedMode = parseMode(mode) - - if (parsedMode !== undefined) { - this._mode = parsedMode - } - } - - /** - * @returns {number | undefined} - */ - get mode () { - return this._mode - } - - isDirectory () { - return Boolean(this.type && dirTypes.includes(this.type)) - } - - /** - * @param {number} size - */ - addBlockSize (size) { - this.blockSizes.push(size) - } - - /** - * @param {number} index - */ - removeBlockSize (index) { - this.blockSizes.splice(index, 1) - } - - /** - * Returns `0` for directories or `data.length + sum(blockSizes)` for everything else - */ - fileSize () { - if (this.isDirectory()) { - // dirs don't have file size - return 0 - } - - let sum = 0 - this.blockSizes.forEach((size) => { - sum += size - }) - - if (this.data) { - sum += this.data.length - } - - return sum - } - - /** - * encode to protobuf Uint8Array - */ - marshal () { - let type - - switch (this.type) { - case 'raw': type = PBData.DataType.Raw; break - case 'directory': type = PBData.DataType.Directory; break - case 'file': type = PBData.DataType.File; break - case 'metadata': type = PBData.DataType.Metadata; break - case 'symlink': type = PBData.DataType.Symlink; break - case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break - default: - throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE') - } - - let data = this.data - - if (!this.data || !this.data.length) { - data = undefined - } - - let mode - - if (this.mode != null) { - mode = (this._originalMode & 0xFFFFF000) | (parseMode(this.mode) || 0) - - if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) { - mode = undefined - } - - if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) { - mode = undefined - } - } - - let mtime - - if (this.mtime != null) { - const parsed = parseMtime(this.mtime) - - if (parsed) { - mtime = { - Seconds: parsed.secs, - FractionalNanoseconds: parsed.nsecs - } - - if (mtime.FractionalNanoseconds === 0) { - 
delete mtime.FractionalNanoseconds - } - } - } - - const pbData = { - Type: type, - Data: data, - filesize: this.isDirectory() ? undefined : this.fileSize(), - blocksizes: this.blockSizes, - hashType: this.hashType, - fanout: this.fanout, - mode, - mtime - } - - return PBData.encode(pbData).finish() - } -} - -export { UnixFS } diff --git a/packages/ipfs-unixfs/src/index.ts b/packages/ipfs-unixfs/src/index.ts new file mode 100644 index 00000000..4df0f6e2 --- /dev/null +++ b/packages/ipfs-unixfs/src/index.ts @@ -0,0 +1,205 @@ +import errcode from 'err-code' +import { Data as PBData } from './unixfs.js' + +export interface Mtime { + secs: bigint + nsecs?: number +} + +export type MtimeLike = Mtime | { Seconds: number, FractionalNanoseconds?: number } | [number, number] | Date + +const types: Record = { + Raw: 'raw', + Directory: 'directory', + File: 'file', + Metadata: 'metadata', + Symlink: 'symlink', + HAMTShard: 'hamt-sharded-directory' +} + +const dirTypes = [ + 'directory', + 'hamt-sharded-directory' +] + +const DEFAULT_FILE_MODE = parseInt('0644', 8) +const DEFAULT_DIRECTORY_MODE = parseInt('0755', 8) + +export interface UnixFSOptions { + type?: string + data?: Uint8Array + blockSizes?: bigint[] + hashType?: bigint + fanout?: bigint + mtime?: Mtime + mode?: number +} + +class UnixFS { + /** + * Decode from protobuf https://github.com/ipfs/specs/blob/master/UNIXFS.md + */ + static unmarshal (marshaled: Uint8Array): UnixFS { + const message = PBData.decode(marshaled) + + const data = new UnixFS({ + type: types[message.Type != null ? message.Type.toString() : 'File'], + data: message.Data, + blockSizes: message.blocksizes, + mode: message.mode, + mtime: message.mtime != null + ? { + secs: message.mtime.Seconds ?? 0n, + nsecs: message.mtime.FractionalNanoseconds + } + : undefined + }) + + // make sure we honour the original mode + data._originalMode = message.mode ?? 0 + + return data + } + + public type: string + public data?: Uint8Array + public blockSizes: bigint[] + public hashType?: bigint + public fanout?: bigint + public mtime?: Mtime + + private _mode?: number + private _originalMode: number + + constructor (options: UnixFSOptions = { + type: 'file' + }) { + const { + type, + data, + blockSizes, + hashType, + fanout, + mtime, + mode + } = options + + if (type != null && !Object.values(types).includes(type)) { + throw errcode(new Error('Type: ' + type + ' is not valid'), 'ERR_INVALID_TYPE') + } + + this.type = type ?? 'file' + this.data = data + this.hashType = hashType + this.fanout = fanout + this.blockSizes = blockSizes ?? [] + this._originalMode = 0 + this.mode = mode + this.mtime = mtime + } + + set mode (mode: number | undefined) { + if (mode == null) { + this._mode = this.isDirectory() ? 
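+        // no mode supplied: default to 0755 for directories and 0644 for files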
DEFAULT_DIRECTORY_MODE : DEFAULT_FILE_MODE + } else { + this._mode = (mode & 0xFFF) + } + } + + get mode (): number | undefined { + return this._mode + } + + isDirectory (): boolean { + return dirTypes.includes(this.type) + } + + addBlockSize (size: bigint): void { + this.blockSizes.push(size) + } + + removeBlockSize (index: number): void { + this.blockSizes.splice(index, 1) + } + + /** + * Returns `0n` for directories or `data.length + sum(blockSizes)` for everything else + */ + fileSize (): bigint { + if (this.isDirectory()) { + // dirs don't have file size + return 0n + } + + let sum = 0n + this.blockSizes.forEach((size) => { + sum += size + }) + + if (this.data != null) { + sum += BigInt(this.data.length) + } + + return sum + } + + /** + * encode to protobuf Uint8Array + */ + marshal (): Uint8Array { + let type + + switch (this.type) { + case 'raw': type = PBData.DataType.Raw; break + case 'directory': type = PBData.DataType.Directory; break + case 'file': type = PBData.DataType.File; break + case 'metadata': type = PBData.DataType.Metadata; break + case 'symlink': type = PBData.DataType.Symlink; break + case 'hamt-sharded-directory': type = PBData.DataType.HAMTShard; break + default: + throw errcode(new Error(`Type: ${type} is not valid`), 'ERR_INVALID_TYPE') + } + + let data = this.data + + if (this.data == null || this.data.length === 0) { + data = undefined + } + + let mode + + if (this.mode != null) { + mode = (this._originalMode & 0xFFFFF000) | (this.mode ?? 0) + + if (mode === DEFAULT_FILE_MODE && !this.isDirectory()) { + mode = undefined + } + + if (mode === DEFAULT_DIRECTORY_MODE && this.isDirectory()) { + mode = undefined + } + } + + let mtime + + if (this.mtime != null) { + mtime = { + Seconds: this.mtime.secs, + FractionalNanoseconds: this.mtime.nsecs + } + } + + return PBData.encode({ + Type: type, + Data: data, + filesize: this.isDirectory() ? undefined : this.fileSize(), + blocksizes: this.blockSizes, + hashType: this.hashType, + fanout: this.fanout, + mode, + mtime + }) + } +} + +export { UnixFS } diff --git a/packages/ipfs-unixfs/src/types.ts b/packages/ipfs-unixfs/src/types.ts deleted file mode 100644 index cedc5057..00000000 --- a/packages/ipfs-unixfs/src/types.ts +++ /dev/null @@ -1,7 +0,0 @@ - -export interface Mtime { - secs: number - nsecs?: number -} - -export type MtimeLike = Mtime | { Seconds: number, FractionalNanoseconds?: number } | [number, number] | Date diff --git a/packages/ipfs-unixfs/src/unixfs.d.ts b/packages/ipfs-unixfs/src/unixfs.d.ts deleted file mode 100644 index 6a64ac75..00000000 --- a/packages/ipfs-unixfs/src/unixfs.d.ts +++ /dev/null @@ -1,238 +0,0 @@ -import $protobuf from "protobufjs/minimal.js"; -/** Properties of a Data. */ -export interface IData { - - /** Data Type */ - Type: Data.DataType; - - /** Data Data */ - Data?: (Uint8Array|null); - - /** Data filesize */ - filesize?: (number|null); - - /** Data blocksizes */ - blocksizes?: (number[]|null); - - /** Data hashType */ - hashType?: (number|null); - - /** Data fanout */ - fanout?: (number|null); - - /** Data mode */ - mode?: (number|null); - - /** Data mtime */ - mtime?: (IUnixTime|null); -} - -/** Represents a Data. */ -export class Data implements IData { - - /** - * Constructs a new Data. - * @param [p] Properties to set - */ - constructor(p?: IData); - - /** Data Type. */ - public Type: Data.DataType; - - /** Data Data. */ - public Data: Uint8Array; - - /** Data filesize. */ - public filesize: number; - - /** Data blocksizes. 
*/ - public blocksizes: number[]; - - /** Data hashType. */ - public hashType: number; - - /** Data fanout. */ - public fanout: number; - - /** Data mode. */ - public mode: number; - - /** Data mtime. */ - public mtime?: (IUnixTime|null); - - /** - * Encodes the specified Data message. Does not implicitly {@link Data.verify|verify} messages. - * @param m Data message or plain object to encode - * @param [w] Writer to encode to - * @returns Writer - */ - public static encode(m: IData, w?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Data message from the specified reader or buffer. - * @param r Reader or buffer to decode from - * @param [l] Message length if known beforehand - * @returns Data - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Data; - - /** - * Creates a Data message from a plain object. Also converts values to their respective internal types. - * @param d Plain object - * @returns Data - */ - public static fromObject(d: { [k: string]: any }): Data; - - /** - * Creates a plain object from a Data message. Also converts values to other types if specified. - * @param m Data - * @param [o] Conversion options - * @returns Plain object - */ - public static toObject(m: Data, o?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Data to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; -} - -export namespace Data { - - /** DataType enum. */ - enum DataType { - Raw = 0, - Directory = 1, - File = 2, - Metadata = 3, - Symlink = 4, - HAMTShard = 5 - } -} - -/** Properties of an UnixTime. */ -export interface IUnixTime { - - /** UnixTime Seconds */ - Seconds: number; - - /** UnixTime FractionalNanoseconds */ - FractionalNanoseconds?: (number|null); -} - -/** Represents an UnixTime. */ -export class UnixTime implements IUnixTime { - - /** - * Constructs a new UnixTime. - * @param [p] Properties to set - */ - constructor(p?: IUnixTime); - - /** UnixTime Seconds. */ - public Seconds: number; - - /** UnixTime FractionalNanoseconds. */ - public FractionalNanoseconds: number; - - /** - * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages. - * @param m UnixTime message or plain object to encode - * @param [w] Writer to encode to - * @returns Writer - */ - public static encode(m: IUnixTime, w?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes an UnixTime message from the specified reader or buffer. - * @param r Reader or buffer to decode from - * @param [l] Message length if known beforehand - * @returns UnixTime - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): UnixTime; - - /** - * Creates an UnixTime message from a plain object. Also converts values to their respective internal types. - * @param d Plain object - * @returns UnixTime - */ - public static fromObject(d: { [k: string]: any }): UnixTime; - - /** - * Creates a plain object from an UnixTime message. Also converts values to other types if specified. - * @param m UnixTime - * @param [o] Conversion options - * @returns Plain object - */ - public static toObject(m: UnixTime, o?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this UnixTime to JSON. 
- * @returns JSON object - */ - public toJSON(): { [k: string]: any }; -} - -/** Properties of a Metadata. */ -export interface IMetadata { - - /** Metadata MimeType */ - MimeType?: (string|null); -} - -/** Represents a Metadata. */ -export class Metadata implements IMetadata { - - /** - * Constructs a new Metadata. - * @param [p] Properties to set - */ - constructor(p?: IMetadata); - - /** Metadata MimeType. */ - public MimeType: string; - - /** - * Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages. - * @param m Metadata message or plain object to encode - * @param [w] Writer to encode to - * @returns Writer - */ - public static encode(m: IMetadata, w?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Metadata message from the specified reader or buffer. - * @param r Reader or buffer to decode from - * @param [l] Message length if known beforehand - * @returns Metadata - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): Metadata; - - /** - * Creates a Metadata message from a plain object. Also converts values to their respective internal types. - * @param d Plain object - * @returns Metadata - */ - public static fromObject(d: { [k: string]: any }): Metadata; - - /** - * Creates a plain object from a Metadata message. Also converts values to other types if specified. - * @param m Metadata - * @param [o] Conversion options - * @returns Plain object - */ - public static toObject(m: Metadata, o?: $protobuf.IConversionOptions): { [k: string]: any }; - - /** - * Converts this Metadata to JSON. - * @returns JSON object - */ - public toJSON(): { [k: string]: any }; -} diff --git a/packages/ipfs-unixfs/src/unixfs.js b/packages/ipfs-unixfs/src/unixfs.js deleted file mode 100644 index a5e8ab9e..00000000 --- a/packages/ipfs-unixfs/src/unixfs.js +++ /dev/null @@ -1,718 +0,0 @@ -/*eslint-disable*/ -import $protobuf from "protobufjs/minimal.js"; - -// Common aliases -const $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; - -// Exported root namespace -const $root = $protobuf.roots["ipfs-unixfs"] || ($protobuf.roots["ipfs-unixfs"] = {}); - -export const Data = $root.Data = (() => { - - /** - * Properties of a Data. - * @exports IData - * @interface IData - * @property {Data.DataType} Type Data Type - * @property {Uint8Array|null} [Data] Data Data - * @property {number|null} [filesize] Data filesize - * @property {Array.|null} [blocksizes] Data blocksizes - * @property {number|null} [hashType] Data hashType - * @property {number|null} [fanout] Data fanout - * @property {number|null} [mode] Data mode - * @property {IUnixTime|null} [mtime] Data mtime - */ - - /** - * Constructs a new Data. - * @exports Data - * @classdesc Represents a Data. - * @implements IData - * @constructor - * @param {IData=} [p] Properties to set - */ - function Data(p) { - this.blocksizes = []; - if (p) - for (var ks = Object.keys(p), i = 0; i < ks.length; ++i) - if (p[ks[i]] != null) - this[ks[i]] = p[ks[i]]; - } - - /** - * Data Type. - * @member {Data.DataType} Type - * @memberof Data - * @instance - */ - Data.prototype.Type = 0; - - /** - * Data Data. - * @member {Uint8Array} Data - * @memberof Data - * @instance - */ - Data.prototype.Data = $util.newBuffer([]); - - /** - * Data filesize. 
- * @member {number} filesize - * @memberof Data - * @instance - */ - Data.prototype.filesize = $util.Long ? $util.Long.fromBits(0,0,true) : 0; - - /** - * Data blocksizes. - * @member {Array.} blocksizes - * @memberof Data - * @instance - */ - Data.prototype.blocksizes = $util.emptyArray; - - /** - * Data hashType. - * @member {number} hashType - * @memberof Data - * @instance - */ - Data.prototype.hashType = $util.Long ? $util.Long.fromBits(0,0,true) : 0; - - /** - * Data fanout. - * @member {number} fanout - * @memberof Data - * @instance - */ - Data.prototype.fanout = $util.Long ? $util.Long.fromBits(0,0,true) : 0; - - /** - * Data mode. - * @member {number} mode - * @memberof Data - * @instance - */ - Data.prototype.mode = 0; - - /** - * Data mtime. - * @member {IUnixTime|null|undefined} mtime - * @memberof Data - * @instance - */ - Data.prototype.mtime = null; - - /** - * Encodes the specified Data message. Does not implicitly {@link Data.verify|verify} messages. - * @function encode - * @memberof Data - * @static - * @param {IData} m Data message or plain object to encode - * @param {$protobuf.Writer} [w] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Data.encode = function encode(m, w) { - if (!w) - w = $Writer.create(); - w.uint32(8).int32(m.Type); - if (m.Data != null && Object.hasOwnProperty.call(m, "Data")) - w.uint32(18).bytes(m.Data); - if (m.filesize != null && Object.hasOwnProperty.call(m, "filesize")) - w.uint32(24).uint64(m.filesize); - if (m.blocksizes != null && m.blocksizes.length) { - for (var i = 0; i < m.blocksizes.length; ++i) - w.uint32(32).uint64(m.blocksizes[i]); - } - if (m.hashType != null && Object.hasOwnProperty.call(m, "hashType")) - w.uint32(40).uint64(m.hashType); - if (m.fanout != null && Object.hasOwnProperty.call(m, "fanout")) - w.uint32(48).uint64(m.fanout); - if (m.mode != null && Object.hasOwnProperty.call(m, "mode")) - w.uint32(56).uint32(m.mode); - if (m.mtime != null && Object.hasOwnProperty.call(m, "mtime")) - $root.UnixTime.encode(m.mtime, w.uint32(66).fork()).ldelim(); - return w; - }; - - /** - * Decodes a Data message from the specified reader or buffer. - * @function decode - * @memberof Data - * @static - * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from - * @param {number} [l] Message length if known beforehand - * @returns {Data} Data - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Data.decode = function decode(r, l) { - if (!(r instanceof $Reader)) - r = $Reader.create(r); - var c = l === undefined ? r.len : r.pos + l, m = new $root.Data(); - while (r.pos < c) { - var t = r.uint32(); - switch (t >>> 3) { - case 1: - m.Type = r.int32(); - break; - case 2: - m.Data = r.bytes(); - break; - case 3: - m.filesize = r.uint64(); - break; - case 4: - if (!(m.blocksizes && m.blocksizes.length)) - m.blocksizes = []; - if ((t & 7) === 2) { - var c2 = r.uint32() + r.pos; - while (r.pos < c2) - m.blocksizes.push(r.uint64()); - } else - m.blocksizes.push(r.uint64()); - break; - case 5: - m.hashType = r.uint64(); - break; - case 6: - m.fanout = r.uint64(); - break; - case 7: - m.mode = r.uint32(); - break; - case 8: - m.mtime = $root.UnixTime.decode(r, r.uint32()); - break; - default: - r.skipType(t & 7); - break; - } - } - if (!m.hasOwnProperty("Type")) - throw $util.ProtocolError("missing required 'Type'", { instance: m }); - return m; - }; - - /** - * Creates a Data message from a plain object. 
Also converts values to their respective internal types. - * @function fromObject - * @memberof Data - * @static - * @param {Object.} d Plain object - * @returns {Data} Data - */ - Data.fromObject = function fromObject(d) { - if (d instanceof $root.Data) - return d; - var m = new $root.Data(); - switch (d.Type) { - case "Raw": - case 0: - m.Type = 0; - break; - case "Directory": - case 1: - m.Type = 1; - break; - case "File": - case 2: - m.Type = 2; - break; - case "Metadata": - case 3: - m.Type = 3; - break; - case "Symlink": - case 4: - m.Type = 4; - break; - case "HAMTShard": - case 5: - m.Type = 5; - break; - } - if (d.Data != null) { - if (typeof d.Data === "string") - $util.base64.decode(d.Data, m.Data = $util.newBuffer($util.base64.length(d.Data)), 0); - else if (d.Data.length) - m.Data = d.Data; - } - if (d.filesize != null) { - if ($util.Long) - (m.filesize = $util.Long.fromValue(d.filesize)).unsigned = true; - else if (typeof d.filesize === "string") - m.filesize = parseInt(d.filesize, 10); - else if (typeof d.filesize === "number") - m.filesize = d.filesize; - else if (typeof d.filesize === "object") - m.filesize = new $util.LongBits(d.filesize.low >>> 0, d.filesize.high >>> 0).toNumber(true); - } - if (d.blocksizes) { - if (!Array.isArray(d.blocksizes)) - throw TypeError(".Data.blocksizes: array expected"); - m.blocksizes = []; - for (var i = 0; i < d.blocksizes.length; ++i) { - if ($util.Long) - (m.blocksizes[i] = $util.Long.fromValue(d.blocksizes[i])).unsigned = true; - else if (typeof d.blocksizes[i] === "string") - m.blocksizes[i] = parseInt(d.blocksizes[i], 10); - else if (typeof d.blocksizes[i] === "number") - m.blocksizes[i] = d.blocksizes[i]; - else if (typeof d.blocksizes[i] === "object") - m.blocksizes[i] = new $util.LongBits(d.blocksizes[i].low >>> 0, d.blocksizes[i].high >>> 0).toNumber(true); - } - } - if (d.hashType != null) { - if ($util.Long) - (m.hashType = $util.Long.fromValue(d.hashType)).unsigned = true; - else if (typeof d.hashType === "string") - m.hashType = parseInt(d.hashType, 10); - else if (typeof d.hashType === "number") - m.hashType = d.hashType; - else if (typeof d.hashType === "object") - m.hashType = new $util.LongBits(d.hashType.low >>> 0, d.hashType.high >>> 0).toNumber(true); - } - if (d.fanout != null) { - if ($util.Long) - (m.fanout = $util.Long.fromValue(d.fanout)).unsigned = true; - else if (typeof d.fanout === "string") - m.fanout = parseInt(d.fanout, 10); - else if (typeof d.fanout === "number") - m.fanout = d.fanout; - else if (typeof d.fanout === "object") - m.fanout = new $util.LongBits(d.fanout.low >>> 0, d.fanout.high >>> 0).toNumber(true); - } - if (d.mode != null) { - m.mode = d.mode >>> 0; - } - if (d.mtime != null) { - if (typeof d.mtime !== "object") - throw TypeError(".Data.mtime: object expected"); - m.mtime = $root.UnixTime.fromObject(d.mtime); - } - return m; - }; - - /** - * Creates a plain object from a Data message. Also converts values to other types if specified. - * @function toObject - * @memberof Data - * @static - * @param {Data} m Data - * @param {$protobuf.IConversionOptions} [o] Conversion options - * @returns {Object.} Plain object - */ - Data.toObject = function toObject(m, o) { - if (!o) - o = {}; - var d = {}; - if (o.arrays || o.defaults) { - d.blocksizes = []; - } - if (o.defaults) { - d.Type = o.enums === String ? 
"Raw" : 0; - if (o.bytes === String) - d.Data = ""; - else { - d.Data = []; - if (o.bytes !== Array) - d.Data = $util.newBuffer(d.Data); - } - if ($util.Long) { - var n = new $util.Long(0, 0, true); - d.filesize = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n; - } else - d.filesize = o.longs === String ? "0" : 0; - if ($util.Long) { - var n = new $util.Long(0, 0, true); - d.hashType = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n; - } else - d.hashType = o.longs === String ? "0" : 0; - if ($util.Long) { - var n = new $util.Long(0, 0, true); - d.fanout = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n; - } else - d.fanout = o.longs === String ? "0" : 0; - d.mode = 0; - d.mtime = null; - } - if (m.Type != null && m.hasOwnProperty("Type")) { - d.Type = o.enums === String ? $root.Data.DataType[m.Type] : m.Type; - } - if (m.Data != null && m.hasOwnProperty("Data")) { - d.Data = o.bytes === String ? $util.base64.encode(m.Data, 0, m.Data.length) : o.bytes === Array ? Array.prototype.slice.call(m.Data) : m.Data; - } - if (m.filesize != null && m.hasOwnProperty("filesize")) { - if (typeof m.filesize === "number") - d.filesize = o.longs === String ? String(m.filesize) : m.filesize; - else - d.filesize = o.longs === String ? $util.Long.prototype.toString.call(m.filesize) : o.longs === Number ? new $util.LongBits(m.filesize.low >>> 0, m.filesize.high >>> 0).toNumber(true) : m.filesize; - } - if (m.blocksizes && m.blocksizes.length) { - d.blocksizes = []; - for (var j = 0; j < m.blocksizes.length; ++j) { - if (typeof m.blocksizes[j] === "number") - d.blocksizes[j] = o.longs === String ? String(m.blocksizes[j]) : m.blocksizes[j]; - else - d.blocksizes[j] = o.longs === String ? $util.Long.prototype.toString.call(m.blocksizes[j]) : o.longs === Number ? new $util.LongBits(m.blocksizes[j].low >>> 0, m.blocksizes[j].high >>> 0).toNumber(true) : m.blocksizes[j]; - } - } - if (m.hashType != null && m.hasOwnProperty("hashType")) { - if (typeof m.hashType === "number") - d.hashType = o.longs === String ? String(m.hashType) : m.hashType; - else - d.hashType = o.longs === String ? $util.Long.prototype.toString.call(m.hashType) : o.longs === Number ? new $util.LongBits(m.hashType.low >>> 0, m.hashType.high >>> 0).toNumber(true) : m.hashType; - } - if (m.fanout != null && m.hasOwnProperty("fanout")) { - if (typeof m.fanout === "number") - d.fanout = o.longs === String ? String(m.fanout) : m.fanout; - else - d.fanout = o.longs === String ? $util.Long.prototype.toString.call(m.fanout) : o.longs === Number ? new $util.LongBits(m.fanout.low >>> 0, m.fanout.high >>> 0).toNumber(true) : m.fanout; - } - if (m.mode != null && m.hasOwnProperty("mode")) { - d.mode = m.mode; - } - if (m.mtime != null && m.hasOwnProperty("mtime")) { - d.mtime = $root.UnixTime.toObject(m.mtime, o); - } - return d; - }; - - /** - * Converts this Data to JSON. - * @function toJSON - * @memberof Data - * @instance - * @returns {Object.} JSON object - */ - Data.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - /** - * DataType enum. 
- * @name Data.DataType - * @enum {number} - * @property {number} Raw=0 Raw value - * @property {number} Directory=1 Directory value - * @property {number} File=2 File value - * @property {number} Metadata=3 Metadata value - * @property {number} Symlink=4 Symlink value - * @property {number} HAMTShard=5 HAMTShard value - */ - Data.DataType = (function() { - const valuesById = {}, values = Object.create(valuesById); - values[valuesById[0] = "Raw"] = 0; - values[valuesById[1] = "Directory"] = 1; - values[valuesById[2] = "File"] = 2; - values[valuesById[3] = "Metadata"] = 3; - values[valuesById[4] = "Symlink"] = 4; - values[valuesById[5] = "HAMTShard"] = 5; - return values; - })(); - - return Data; -})(); - -export const UnixTime = $root.UnixTime = (() => { - - /** - * Properties of an UnixTime. - * @exports IUnixTime - * @interface IUnixTime - * @property {number} Seconds UnixTime Seconds - * @property {number|null} [FractionalNanoseconds] UnixTime FractionalNanoseconds - */ - - /** - * Constructs a new UnixTime. - * @exports UnixTime - * @classdesc Represents an UnixTime. - * @implements IUnixTime - * @constructor - * @param {IUnixTime=} [p] Properties to set - */ - function UnixTime(p) { - if (p) - for (var ks = Object.keys(p), i = 0; i < ks.length; ++i) - if (p[ks[i]] != null) - this[ks[i]] = p[ks[i]]; - } - - /** - * UnixTime Seconds. - * @member {number} Seconds - * @memberof UnixTime - * @instance - */ - UnixTime.prototype.Seconds = $util.Long ? $util.Long.fromBits(0,0,false) : 0; - - /** - * UnixTime FractionalNanoseconds. - * @member {number} FractionalNanoseconds - * @memberof UnixTime - * @instance - */ - UnixTime.prototype.FractionalNanoseconds = 0; - - /** - * Encodes the specified UnixTime message. Does not implicitly {@link UnixTime.verify|verify} messages. - * @function encode - * @memberof UnixTime - * @static - * @param {IUnixTime} m UnixTime message or plain object to encode - * @param {$protobuf.Writer} [w] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - UnixTime.encode = function encode(m, w) { - if (!w) - w = $Writer.create(); - w.uint32(8).int64(m.Seconds); - if (m.FractionalNanoseconds != null && Object.hasOwnProperty.call(m, "FractionalNanoseconds")) - w.uint32(21).fixed32(m.FractionalNanoseconds); - return w; - }; - - /** - * Decodes an UnixTime message from the specified reader or buffer. - * @function decode - * @memberof UnixTime - * @static - * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from - * @param {number} [l] Message length if known beforehand - * @returns {UnixTime} UnixTime - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - UnixTime.decode = function decode(r, l) { - if (!(r instanceof $Reader)) - r = $Reader.create(r); - var c = l === undefined ? r.len : r.pos + l, m = new $root.UnixTime(); - while (r.pos < c) { - var t = r.uint32(); - switch (t >>> 3) { - case 1: - m.Seconds = r.int64(); - break; - case 2: - m.FractionalNanoseconds = r.fixed32(); - break; - default: - r.skipType(t & 7); - break; - } - } - if (!m.hasOwnProperty("Seconds")) - throw $util.ProtocolError("missing required 'Seconds'", { instance: m }); - return m; - }; - - /** - * Creates an UnixTime message from a plain object. Also converts values to their respective internal types. 
- * @function fromObject - * @memberof UnixTime - * @static - * @param {Object.} d Plain object - * @returns {UnixTime} UnixTime - */ - UnixTime.fromObject = function fromObject(d) { - if (d instanceof $root.UnixTime) - return d; - var m = new $root.UnixTime(); - if (d.Seconds != null) { - if ($util.Long) - (m.Seconds = $util.Long.fromValue(d.Seconds)).unsigned = false; - else if (typeof d.Seconds === "string") - m.Seconds = parseInt(d.Seconds, 10); - else if (typeof d.Seconds === "number") - m.Seconds = d.Seconds; - else if (typeof d.Seconds === "object") - m.Seconds = new $util.LongBits(d.Seconds.low >>> 0, d.Seconds.high >>> 0).toNumber(); - } - if (d.FractionalNanoseconds != null) { - m.FractionalNanoseconds = d.FractionalNanoseconds >>> 0; - } - return m; - }; - - /** - * Creates a plain object from an UnixTime message. Also converts values to other types if specified. - * @function toObject - * @memberof UnixTime - * @static - * @param {UnixTime} m UnixTime - * @param {$protobuf.IConversionOptions} [o] Conversion options - * @returns {Object.} Plain object - */ - UnixTime.toObject = function toObject(m, o) { - if (!o) - o = {}; - var d = {}; - if (o.defaults) { - if ($util.Long) { - var n = new $util.Long(0, 0, false); - d.Seconds = o.longs === String ? n.toString() : o.longs === Number ? n.toNumber() : n; - } else - d.Seconds = o.longs === String ? "0" : 0; - d.FractionalNanoseconds = 0; - } - if (m.Seconds != null && m.hasOwnProperty("Seconds")) { - if (typeof m.Seconds === "number") - d.Seconds = o.longs === String ? String(m.Seconds) : m.Seconds; - else - d.Seconds = o.longs === String ? $util.Long.prototype.toString.call(m.Seconds) : o.longs === Number ? new $util.LongBits(m.Seconds.low >>> 0, m.Seconds.high >>> 0).toNumber() : m.Seconds; - } - if (m.FractionalNanoseconds != null && m.hasOwnProperty("FractionalNanoseconds")) { - d.FractionalNanoseconds = m.FractionalNanoseconds; - } - return d; - }; - - /** - * Converts this UnixTime to JSON. - * @function toJSON - * @memberof UnixTime - * @instance - * @returns {Object.} JSON object - */ - UnixTime.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - return UnixTime; -})(); - -export const Metadata = $root.Metadata = (() => { - - /** - * Properties of a Metadata. - * @exports IMetadata - * @interface IMetadata - * @property {string|null} [MimeType] Metadata MimeType - */ - - /** - * Constructs a new Metadata. - * @exports Metadata - * @classdesc Represents a Metadata. - * @implements IMetadata - * @constructor - * @param {IMetadata=} [p] Properties to set - */ - function Metadata(p) { - if (p) - for (var ks = Object.keys(p), i = 0; i < ks.length; ++i) - if (p[ks[i]] != null) - this[ks[i]] = p[ks[i]]; - } - - /** - * Metadata MimeType. - * @member {string} MimeType - * @memberof Metadata - * @instance - */ - Metadata.prototype.MimeType = ""; - - /** - * Encodes the specified Metadata message. Does not implicitly {@link Metadata.verify|verify} messages. - * @function encode - * @memberof Metadata - * @static - * @param {IMetadata} m Metadata message or plain object to encode - * @param {$protobuf.Writer} [w] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Metadata.encode = function encode(m, w) { - if (!w) - w = $Writer.create(); - if (m.MimeType != null && Object.hasOwnProperty.call(m, "MimeType")) - w.uint32(10).string(m.MimeType); - return w; - }; - - /** - * Decodes a Metadata message from the specified reader or buffer. 
- * @function decode - * @memberof Metadata - * @static - * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from - * @param {number} [l] Message length if known beforehand - * @returns {Metadata} Metadata - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Metadata.decode = function decode(r, l) { - if (!(r instanceof $Reader)) - r = $Reader.create(r); - var c = l === undefined ? r.len : r.pos + l, m = new $root.Metadata(); - while (r.pos < c) { - var t = r.uint32(); - switch (t >>> 3) { - case 1: - m.MimeType = r.string(); - break; - default: - r.skipType(t & 7); - break; - } - } - return m; - }; - - /** - * Creates a Metadata message from a plain object. Also converts values to their respective internal types. - * @function fromObject - * @memberof Metadata - * @static - * @param {Object.} d Plain object - * @returns {Metadata} Metadata - */ - Metadata.fromObject = function fromObject(d) { - if (d instanceof $root.Metadata) - return d; - var m = new $root.Metadata(); - if (d.MimeType != null) { - m.MimeType = String(d.MimeType); - } - return m; - }; - - /** - * Creates a plain object from a Metadata message. Also converts values to other types if specified. - * @function toObject - * @memberof Metadata - * @static - * @param {Metadata} m Metadata - * @param {$protobuf.IConversionOptions} [o] Conversion options - * @returns {Object.} Plain object - */ - Metadata.toObject = function toObject(m, o) { - if (!o) - o = {}; - var d = {}; - if (o.defaults) { - d.MimeType = ""; - } - if (m.MimeType != null && m.hasOwnProperty("MimeType")) { - d.MimeType = m.MimeType; - } - return d; - }; - - /** - * Converts this Metadata to JSON. - * @function toJSON - * @memberof Metadata - * @instance - * @returns {Object.} JSON object - */ - Metadata.prototype.toJSON = function toJSON() { - return this.constructor.toObject(this, $protobuf.util.toJSONOptions); - }; - - return Metadata; -})(); - -export { $root as default }; diff --git a/packages/ipfs-unixfs/src/unixfs.proto b/packages/ipfs-unixfs/src/unixfs.proto index d351f402..d52746d7 100644 --- a/packages/ipfs-unixfs/src/unixfs.proto +++ b/packages/ipfs-unixfs/src/unixfs.proto @@ -1,4 +1,4 @@ -syntax = "proto2"; +syntax = "proto3"; message Data { enum DataType { @@ -10,7 +10,7 @@ message Data { HAMTShard = 5; } - required DataType Type = 1; + optional DataType Type = 1; optional bytes Data = 2; optional uint64 filesize = 3; repeated uint64 blocksizes = 4; @@ -21,7 +21,7 @@ message Data { } message UnixTime { - required int64 Seconds = 1; + optional int64 Seconds = 1; optional fixed32 FractionalNanoseconds = 2; } diff --git a/packages/ipfs-unixfs/src/unixfs.ts b/packages/ipfs-unixfs/src/unixfs.ts new file mode 100644 index 00000000..ceed9332 --- /dev/null +++ b/packages/ipfs-unixfs/src/unixfs.ts @@ -0,0 +1,277 @@ +/* eslint-disable import/export */ +/* eslint-disable complexity */ +/* eslint-disable @typescript-eslint/no-namespace */ +/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ +/* eslint-disable @typescript-eslint/no-empty-interface */ + +import { enumeration, encodeMessage, decodeMessage, message } from 'protons-runtime' +import type { Codec } from 'protons-runtime' +import type { Uint8ArrayList } from 'uint8arraylist' + +export interface Data { + Type?: Data.DataType + Data?: Uint8Array + filesize?: bigint + blocksizes: bigint[] + hashType?: bigint + fanout?: bigint + mode?: number + mtime?: UnixTime +} + 
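+// The `Data` namespace below mirrors the protobuf `Data` message: `DataType`
+// exposes the enum values, `codec()` lazily constructs a protons-runtime
+// message codec, and `encode()`/`decode()` convert plain objects to and from
+// their serialized `Uint8Array` form.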
+export namespace Data { + export enum DataType { + Raw = 'Raw', + Directory = 'Directory', + File = 'File', + Metadata = 'Metadata', + Symlink = 'Symlink', + HAMTShard = 'HAMTShard' + } + + enum __DataTypeValues { + Raw = 0, + Directory = 1, + File = 2, + Metadata = 3, + Symlink = 4, + HAMTShard = 5 + } + + export namespace DataType { + export const codec = (): Codec => { + return enumeration(__DataTypeValues) + } + } + + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.Type != null) { + w.uint32(8) + Data.DataType.codec().encode(obj.Type, w) + } + + if (obj.Data != null) { + w.uint32(18) + w.bytes(obj.Data) + } + + if (obj.filesize != null) { + w.uint32(24) + w.uint64(obj.filesize) + } + + if (obj.blocksizes != null) { + for (const value of obj.blocksizes) { + w.uint32(32) + w.uint64(value) + } + } + + if (obj.hashType != null) { + w.uint32(40) + w.uint64(obj.hashType) + } + + if (obj.fanout != null) { + w.uint32(48) + w.uint64(obj.fanout) + } + + if (obj.mode != null) { + w.uint32(56) + w.uint32(obj.mode) + } + + if (obj.mtime != null) { + w.uint32(66) + UnixTime.codec().encode(obj.mtime, w) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + blocksizes: [] + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.Type = Data.DataType.codec().decode(reader) + break + case 2: + obj.Data = reader.bytes() + break + case 3: + obj.filesize = reader.uint64() + break + case 4: + obj.blocksizes.push(reader.uint64()) + break + case 5: + obj.hashType = reader.uint64() + break + case 6: + obj.fanout = reader.uint64() + break + case 7: + obj.mode = reader.uint32() + break + case 8: + obj.mtime = UnixTime.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Data.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Data => { + return decodeMessage(buf, Data.codec()) + } +} + +export interface UnixTime { + Seconds?: bigint + FractionalNanoseconds?: number +} + +export namespace UnixTime { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.Seconds != null) { + w.uint32(8) + w.int64(obj.Seconds) + } + + if (obj.FractionalNanoseconds != null) { + w.uint32(21) + w.fixed32(obj.FractionalNanoseconds) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.Seconds = reader.int64() + break + case 2: + obj.FractionalNanoseconds = reader.fixed32() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, UnixTime.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): UnixTime => { + return decodeMessage(buf, UnixTime.codec()) + } +} + +export interface Metadata { + MimeType?: string +} + +export namespace Metadata { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if (obj.MimeType != null) { + w.uint32(10) + w.string(obj.MimeType) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = {} + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.MimeType = reader.string() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Metadata.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Metadata => { + return decodeMessage(buf, Metadata.codec()) + } +} diff --git a/packages/ipfs-unixfs/test/unixfs-format.spec.js b/packages/ipfs-unixfs/test/unixfs-format.spec.ts similarity index 90% rename from packages/ipfs-unixfs/test/unixfs-format.spec.js rename to packages/ipfs-unixfs/test/unixfs-format.spec.ts index 19e2c06b..7f483bcf 100644 --- a/packages/ipfs-unixfs/test/unixfs-format.spec.js +++ b/packages/ipfs-unixfs/test/unixfs-format.spec.ts @@ -1,12 +1,10 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' - -/** @type {(path: string) => Uint8Array} */ import loadFixture from 'aegir/fixtures' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { UnixFS } from '../src/index.js' +import { Mtime, UnixFS } from '../src/index.js' import * as Pb from '../src/unixfs.js' const PBData = Pb.Data @@ -81,7 +79,7 @@ describe('unixfs-format', () => { const data = new UnixFS({ type: 'file' }) - data.addBlockSize(256) + data.addBlockSize(256n) const marshaled = data.marshal() const unmarshaled = UnixFS.unmarshal(marshaled) expect(data.type).to.equal(unmarshaled.type) @@ -94,7 +92,7 @@ describe('unixfs-format', () => { const data = new UnixFS({ type: 'file' }) - data.addBlockSize(256) + data.addBlockSize(256n) const marshaled = data.marshal() const unmarshaled = UnixFS.unmarshal(marshaled) expect(data.type).to.equal(unmarshaled.type) @@ -152,7 +150,7 @@ describe('unixfs-format', () => { it('mode as string', () => { const data = new UnixFS({ type: 'file', - mode: '0555' + mode: 0o555 }) expect(UnixFS.unmarshal(data.marshal())).to.have.property('mode', parseInt('0555', 8)) @@ -162,16 +160,15 @@ describe('unixfs-format', () => { const data = new UnixFS({ type: 'file' }) - // @ts-ignore it's ok, really - data.mode = '0555' + data.mode = 0o555 expect(UnixFS.unmarshal(data.marshal())).to.have.property('mode', parseInt('0555', 8)) }) it('mtime', () => { const mtime = { - secs: 5, - nsecs: 0 + secs: 5n, + nsecs: undefined } const data = new UnixFS({ type: 'file', @@ -190,12 +187,13 @@ 
describe('unixfs-format', () => { const marshaled = data.marshal() const unmarshaled = UnixFS.unmarshal(marshaled) - expect(unmarshaled).to.not.have.property('mtime') + + expect(unmarshaled).to.have.property('mtime').that.is.undefined() }) it('sets mtime to 0', () => { const mtime = { - secs: 0, + secs: 0n, nsecs: 0 } const data = new UnixFS({ @@ -241,28 +239,15 @@ describe('unixfs-format', () => { }) it('sets mtime to 0 as Date', () => { - const mtime = { - secs: 0, - nsecs: 0 - } - const data = new UnixFS({ - type: 'file', - mtime: new Date(0) - }) - - const marshaled = data.marshal() - const unmarshaled = UnixFS.unmarshal(marshaled) - expect(unmarshaled).to.have.deep.property('mtime', mtime) - }) - - it('sets mtime to 0 as hrtime', () => { - const mtime = { - secs: 0, - nsecs: 0 + const mtime: Mtime = { + secs: 0n, + nsecs: undefined } const data = new UnixFS({ type: 'file', - mtime: [0, 0] + mtime: { + secs: 0n + } }) const marshaled = data.marshal() @@ -278,15 +263,15 @@ describe('unixfs-format', () => { const marshaled = entry.marshal() const unmarshaled = UnixFS.unmarshal(marshaled) - expect(unmarshaled).to.not.have.property('mtime') + expect(unmarshaled).to.have.property('mtime').that.is.undefined() }) it('does not overwrite unknown mode bits', () => { const mode = 0xFFFFFFF // larger than currently defined mode bits const buf = PBData.encode({ - Type: 0, + Type: PBData.DataType.File, mode - }).finish() + }) const unmarshaled = UnixFS.unmarshal(buf) const marshaled = unmarshaled.marshal() @@ -302,10 +287,8 @@ describe('unixfs-format', () => { }) const marshaled = entry.marshal() - const protobuf = PBData.decode(marshaled) - const object = PBData.toObject(protobuf, { - defaults: false - }) + const object = PBData.decode(marshaled) + expect(object).not.to.have.property('mode') }) @@ -316,10 +299,8 @@ describe('unixfs-format', () => { }) const marshaled = entry.marshal() - const protobuf = PBData.decode(marshaled) - const object = PBData.toObject(protobuf, { - defaults: false - }) + const object = PBData.decode(marshaled) + expect(object).not.to.have.property('mode') }) @@ -328,7 +309,7 @@ describe('unixfs-format', () => { const buf = PBData.encode({ Type: PBData.DataType.File, mode - }).finish() + }) const entry = UnixFS.unmarshal(buf) @@ -356,10 +337,8 @@ describe('unixfs-format', () => { expect(unmarshaled).to.have.property('mode', 0o644) - const protobuf = PBData.decode(marshaled) - const object = PBData.toObject(protobuf, { - defaults: false - }) + const object = PBData.decode(marshaled) + expect(object).not.to.have.property('mode') }) @@ -393,7 +372,7 @@ describe('unixfs-format', () => { new UnixFS({ type: 'bananas' }) - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err).to.have.property('code', 'ERR_INVALID_TYPE') done() } @@ -405,7 +384,7 @@ describe('unixfs-format', () => { try { entry.marshal() - } catch (/** @type {any} */ err) { + } catch (err: any) { expect(err).to.have.property('code', 'ERR_INVALID_TYPE') done() } diff --git a/packages/ipfs-unixfs/tsconfig.json b/packages/ipfs-unixfs/tsconfig.json index e429c4a3..13a35996 100644 --- a/packages/ipfs-unixfs/tsconfig.json +++ b/packages/ipfs-unixfs/tsconfig.json @@ -1,14 +1,10 @@ { "extends": "aegir/src/config/tsconfig.aegir.json", "compilerOptions": { - "outDir": "dist", - "emitDeclarationOnly": true + "outDir": "dist" }, "include": [ "src", "test" - ], - "exclude": [ - "src/unixfs.js" ] } From a063d90c535d692f35af9d32842f8906f69200d9 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 10 Feb 2023 
08:38:29 +0100 Subject: [PATCH 2/5] chore: update config --- packages/ipfs-unixfs-exporter/package.json | 16 ---------------- packages/ipfs-unixfs-importer/package.json | 2 +- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 1b1e6d5f..12455f54 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -20,22 +20,6 @@ }, "type": "module", "types": "./dist/src/index.d.ts", - "typesVersions": { - "*": { - "*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ], - "src/*": [ - "*", - "dist/*", - "dist/src/*", - "dist/src/*/index" - ] - } - }, "files": [ "src", "dist", diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index b94f9fee..b51d12cc 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -148,7 +148,7 @@ "merge-options": "^3.0.4", "multiformats": "^11.0.0", "rabin-wasm": "^0.1.4", - "uint8arraylist": "^2.3.3", + "uint8arraylist": "^2.4.3", "uint8arrays": "^4.0.2" }, "devDependencies": { From 73f0410a9dedb03236bab99cd9559a55364996fb Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 13 Feb 2023 17:11:07 +0100 Subject: [PATCH 3/5] feat: remove excess options --- packages/ipfs-unixfs-exporter/package.json | 2 +- .../test/exporter-sharded.spec.ts | 8 +- .../test/exporter-subtree.spec.ts | 5 +- .../test/exporter.spec.ts | 45 ++- .../test/importer.spec.ts | 66 ++-- packages/ipfs-unixfs-importer/README.md | 66 +--- packages/ipfs-unixfs-importer/package.json | 27 +- .../src/chunker/fixed-size.ts | 73 ++-- .../ipfs-unixfs-importer/src/chunker/index.ts | 5 + .../ipfs-unixfs-importer/src/chunker/rabin.ts | 92 ++--- .../src/dag-builder/buffer-importer.ts | 50 +++ .../src/dag-builder/dir.ts | 20 +- .../dag-builder/{file/index.ts => file.ts} | 67 ++-- .../src/dag-builder/file/balanced.ts | 20 - .../src/dag-builder/file/buffer-importer.ts | 44 --- .../src/dag-builder/file/flat.ts | 6 - .../src/dag-builder/index.ts | 93 +++-- .../src/dag-builder/validate-chunks.ts | 31 +- packages/ipfs-unixfs-importer/src/dir-flat.ts | 6 +- .../ipfs-unixfs-importer/src/dir-sharded.ts | 31 +- packages/ipfs-unixfs-importer/src/dir.ts | 28 +- .../ipfs-unixfs-importer/src/flat-to-shard.ts | 6 +- packages/ipfs-unixfs-importer/src/index.ts | 359 +++++++++++++----- .../src/layout/balanced.ts | 27 ++ .../ipfs-unixfs-importer/src/layout/flat.ts | 9 + .../ipfs-unixfs-importer/src/layout/index.ts | 8 + .../{dag-builder/file => layout}/trickle.ts | 63 +-- packages/ipfs-unixfs-importer/src/options.ts | 51 --- .../ipfs-unixfs-importer/src/tree-builder.ts | 87 +++-- .../ipfs-unixfs-importer/src/utils/persist.ts | 25 +- .../test/builder-balanced.spec.ts | 20 +- .../test/builder-flat.spec.ts | 8 +- .../test/builder-only-hash.spec.ts | 32 -- .../test/builder-trickle-dag.spec.ts | 18 +- .../ipfs-unixfs-importer/test/builder.spec.ts | 114 ------ .../test/chunker-custom.spec.ts | 6 +- .../test/chunker-fixed-size.spec.ts | 29 +- .../test/chunker-rabin.spec.ts | 21 +- .../test/hash-parity-with-go-ipfs.spec.ts | 25 +- packages/ipfs-unixfs/package.json | 2 +- 40 files changed, 869 insertions(+), 826 deletions(-) create mode 100644 packages/ipfs-unixfs-importer/src/chunker/index.ts create mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts rename packages/ipfs-unixfs-importer/src/dag-builder/{file/index.ts => file.ts} (70%) delete mode 100644 
packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts delete mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts delete mode 100644 packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts create mode 100644 packages/ipfs-unixfs-importer/src/layout/balanced.ts create mode 100644 packages/ipfs-unixfs-importer/src/layout/flat.ts create mode 100644 packages/ipfs-unixfs-importer/src/layout/index.ts rename packages/ipfs-unixfs-importer/src/{dag-builder/file => layout}/trickle.ts (71%) delete mode 100644 packages/ipfs-unixfs-importer/src/options.ts delete mode 100644 packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts delete mode 100644 packages/ipfs-unixfs-importer/test/builder.spec.ts diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index 12455f54..bf42e9be 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -167,6 +167,6 @@ "fs": false }, "typedoc": { - "entryPoint": "./src/index.js" + "entryPoint": "./src/index.ts" } } diff --git a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts index 800f852d..f473849f 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.ts @@ -37,7 +37,8 @@ describe('exporter sharded', function () { const createShardWithFiles = async (files: Array<{ path: string, content: AsyncIterable }>): Promise => { const result = await last(importer(files, block, { shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD, - wrapWithDirectory: true + wrapWithDirectory: true, + rawLeaves: false })) if (result == null) { @@ -62,7 +63,8 @@ describe('exporter sharded', function () { content: asAsyncIterable(files[path].content) })), block, { wrapWithDirectory: true, - shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD + shardSplitThresholdBytes: SHARD_SPLIT_THRESHOLD, + rawLeaves: false })) const dirCid = imported.pop()?.cid @@ -108,7 +110,7 @@ describe('exporter sharded', function () { const dirFile = dirFiles[i] if (dirFile.type !== 'file') { - throw new Error('Expected file') + throw new Error('Expected file, was ' + dirFile.type) } const data = uint8ArrayConcat(await all(dirFile.content())) diff --git a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts index 284211b3..5958d2e6 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/exporter-subtree.spec.ts @@ -24,7 +24,10 @@ describe('exporter subtree', () => { }, { path: './level-1/200Bytes.txt', content: asAsyncIterable(content) - }], block)) + }], block, { + rawLeaves: false, + cidVersion: 0 + })) if (imported == null) { throw new Error('Nothing imported') diff --git a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts index 3cbf1c9e..12cb9373 100644 --- a/packages/ipfs-unixfs-exporter/test/exporter.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/exporter.spec.ts @@ -22,6 +22,9 @@ import asAsyncIterable from './helpers/as-async-iterable.js' import delay from 'delay' import type { PBNode } from '@ipld/dag-pb' import type { Blockstore } from 'interface-blockstore' +import { balanced, FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout' +import type { Chunker } from 'ipfs-unixfs-importer/chunker' +import { fixedSize } from 
'ipfs-unixfs-importer/chunker' const ONE_MEG = Math.pow(1024, 2) @@ -56,24 +59,24 @@ describe('exporter', () => { return { file, node, cid } } - async function addTestFile (options: { file: Uint8Array, strategy?: 'balanced' | 'flat' | 'trickle', path?: string, maxChunkSize?: number, rawLeaves?: boolean }): Promise { - const { file, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves } = options + async function addTestFile (options: { file: Uint8Array, layout?: FileLayout, chunker?: Chunker, path?: string, rawLeaves?: boolean }): Promise { + const { file, path = '/foo', layout, chunker, rawLeaves } = options const result = await all(importer([{ path, content: asAsyncIterable(file) }], block, { - strategy, - rawLeaves, - maxChunkSize + layout, + chunker, + rawLeaves })) return result[0].cid } - async function addAndReadTestFile (options: { file: Uint8Array, offset?: number, length?: number, strategy?: 'balanced' | 'flat' | 'trickle', path?: string, maxChunkSize?: number, rawLeaves?: boolean }): Promise { - const { file, offset, length, strategy = 'balanced', path = '/foo', maxChunkSize, rawLeaves } = options - const cid = await addTestFile({ file, strategy, path, maxChunkSize, rawLeaves }) + async function addAndReadTestFile (options: { file: Uint8Array, offset?: number, length?: number, layout?: FileLayout, chunker?: Chunker, path?: string, rawLeaves?: boolean }): Promise { + const { file, offset, length, layout, path = '/foo', chunker, rawLeaves } = options + const cid = await addTestFile({ file, layout, path, chunker, rawLeaves }) const entry = await exporter(cid, block) if (entry.type !== 'file' && entry.type !== 'raw') { @@ -85,7 +88,7 @@ describe('exporter', () => { }))) } - async function checkBytesThatSpanBlocks (strategy: 'balanced' | 'flat' | 'trickle'): Promise { + async function checkBytesThatSpanBlocks (layout: FileLayout): Promise { const bytesInABlock = 262144 const bytes = new Uint8Array(bytesInABlock + 100) @@ -97,7 +100,7 @@ describe('exporter', () => { file: bytes, offset: bytesInABlock - 1, length: 3, - strategy + layout }) expect(data).to.deep.equal(Uint8Array.from([1, 2, 3])) @@ -319,7 +322,9 @@ describe('exporter', () => { const cid = await addTestFile({ file: data, - maxChunkSize: 2 + chunker: fixedSize({ + chunkSize: 2 + }) }) // @ts-expect-error incomplete implementation @@ -661,7 +666,7 @@ describe('exporter', () => { }) it('returns an empty stream for dir', async () => { - const imported = await first(importer([{ + const imported = await all(importer([{ path: 'empty' }], block)) @@ -669,7 +674,7 @@ describe('exporter', () => { throw new Error('Nothing imported') } - const dir = await exporter(imported.cid, block) + const dir = await exporter(imported[0].cid, block) if (dir.type !== 'directory') { throw new Error('Unexpected type') @@ -771,7 +776,9 @@ describe('exporter', () => { file: bigFile, offset: 0, length: bigFile.length, - maxChunkSize: 1024 + chunker: fixedSize({ + chunkSize: 1024 + }) }) expect(data).to.deep.equal(bigFile) @@ -786,7 +793,9 @@ describe('exporter', () => { const cid = await addTestFile({ file: bigFile, - maxChunkSize: 1024 + chunker: fixedSize({ + chunkSize: 1024 + }) }) const file = await exporter(cid, block) @@ -810,15 +819,15 @@ describe('exporter', () => { }) it('reads bytes with an offset and a length that span blocks using balanced layout', async () => { - await checkBytesThatSpanBlocks('balanced') + await checkBytesThatSpanBlocks(balanced()) }) it('reads bytes with an offset and a length that span blocks using flat 
layout', async () => { - await checkBytesThatSpanBlocks('flat') + await checkBytesThatSpanBlocks(flat()) }) it('reads bytes with an offset and a length that span blocks using trickle layout', async () => { - await checkBytesThatSpanBlocks('trickle') + await checkBytesThatSpanBlocks(trickle()) }) it('fails on non existent hash', async () => { diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.ts b/packages/ipfs-unixfs-exporter/test/importer.spec.ts index 017cd6bd..bf3b1929 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.ts @@ -1,6 +1,6 @@ /* eslint-env mocha */ -import { importer, UserImporterOptions } from 'ipfs-unixfs-importer' +import { importer, ImporterOptions } from 'ipfs-unixfs-importer' import { exporter, recursive } from '../src/index.js' import extend from 'merge-options' import { expect } from 'aegir/chai' @@ -19,6 +19,8 @@ import { CID } from 'multiformats/cid' import { base58btc } from 'multiformats/bases/base58' import { decode } from '@ipld/dag-pb' import type { Blockstore } from 'interface-blockstore' +import { balanced, FileLayout, flat, trickle } from 'ipfs-unixfs-importer/layout' +import { fixedSize } from 'ipfs-unixfs-importer/chunker' const bigFile = loadFixture('test/fixtures/1.2MiB.txt') const smallFile = loadFixture('test/fixtures/200Bytes.txt') @@ -180,7 +182,7 @@ const strategyOverrides = { } } -const checkLeafNodeTypes = async (blockstore: Blockstore, options: UserImporterOptions, expected: any): Promise => { +const checkLeafNodeTypes = async (blockstore: Blockstore, options: Partial, expected: any): Promise => { const file = await first(importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) @@ -214,7 +216,7 @@ const checkLeafNodeTypes = async (blockstore: Blockstore, options: UserImporterO }) } -const checkNodeLinks = async (blockstore: Blockstore, options: UserImporterOptions, expected: any): Promise => { +const checkNodeLinks = async (blockstore: Blockstore, options: Partial>, expected: any): Promise => { for await (const file of importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(100).fill(1)) @@ -331,9 +333,21 @@ strategies.forEach((strategy) => { describe('importer: ' + strategy, function () { this.timeout(30 * 1000) + let layout: FileLayout + + if (strategy === 'balanced') { + layout = balanced() + } else if (strategy === 'flat') { + layout = flat() + } else if (strategy === 'trickle') { + layout = trickle() + } else { + throw new Error('Unknown strategy') + } + const block = new MemoryBlockstore() - const options: UserImporterOptions = { - strategy + const options: Partial> = { + layout } if (strategy === 'trickle') { @@ -618,34 +632,16 @@ strategies.forEach((strategy) => { } }) - it('will not write to disk if passed "onlyHash" option', async () => { - const content = String(Math.random() + Date.now()) - const files = await all(importer([{ - path: content + '.txt', - content: asAsyncIterable(uint8ArrayFromString(content)) - }], block, { - onlyHash: true - })) - - const file = files[0] - expect(file).to.exist() - - try { - await block.get(file.cid) - - throw new Error('No error was thrown') - } catch (err: any) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - it('will call an optional progress function', async () => { - const maxChunkSize = 2048 + const chunkSize = 2048 const path = '1.2MiB.txt' + const progress = sinon.stub() - const options = { - progress: sinon.spy(), - maxChunkSize + const options: Partial = { + 
progress, + chunker: fixedSize({ + chunkSize + }) } await all(importer([{ @@ -653,8 +649,8 @@ strategies.forEach((strategy) => { content: asAsyncIterable(bigFile) }], block, options)) - expect(options.progress.called).to.equal(true) - expect(options.progress.args[0]).to.deep.equal([maxChunkSize, path]) + expect(progress.called).to.equal(true) + expect(progress.args[0]).to.deep.equal([chunkSize, path]) }) it('will import files with CID version 1', async () => { @@ -680,7 +676,7 @@ strategies.forEach((strategy) => { createInputFile('foo/bar', 262144 + 21) ] - const options: UserImporterOptions = { + const options: Partial> = { cidVersion: 1, // Ensures we use DirSharded for the data below shardSplitThresholdBytes: 3 @@ -1062,7 +1058,7 @@ describe('configuration', () => { content: 'content' }], block, { /** @type {import('ipfs-unixfs-importer').DAGBuilder} */ - dagBuilder: async function * (source, block, opts) { // eslint-disable-line require-await + dagBuilder: async function * (source, block) { // eslint-disable-line require-await yield async function () { return await Promise.resolve({ cid, @@ -1073,7 +1069,7 @@ describe('configuration', () => { } }, /** @type {import('ipfs-unixfs-importer').TreeBuilder} */ - treeBuilder: async function * (source, block, opts) { // eslint-disable-line require-await + treeBuilder: async function * (source, block) { // eslint-disable-line require-await builtTree = true yield * source } diff --git a/packages/ipfs-unixfs-importer/README.md b/packages/ipfs-unixfs-importer/README.md index e64e3aa3..00abe7c0 100644 --- a/packages/ipfs-unixfs-importer/README.md +++ b/packages/ipfs-unixfs-importer/README.md @@ -14,7 +14,9 @@ - [Example](#example) - [API](#api) - [const stream = importer(source, blockstore \[, options\])](#const-stream--importersource-blockstore--options) -- [Overriding internals](#overriding-internals) + - [const result = await importContent(content, blockstore \[, options\])](#const-result--await-importcontentcontent-blockstore--options) + - [const result = await importBytes(buf, blockstore \[, options\])](#const-result--await-importbytesbuf-blockstore--options) + - [const result = await importByteStream(source, blockstore \[, options\])](#const-result--await-importbytestreamsource-blockstore--options) - [API Docs](#api-docs) - [License](#license) - [Contribute](#contribute) @@ -95,7 +97,7 @@ When run, metadata about DAGNodes in the created tree is printed until the root: ## API ```js -import { importer } from 'ipfs-unixfs-importer' +import { importer, importContent, importBytes } from 'ipfs-unixfs-importer' ``` ### const stream = importer(source, blockstore \[, options]) @@ -117,55 +119,17 @@ The `importer` function returns an async iterator takes a source async iterator The input's file paths and directory structure will be preserved in the [`dag-pb`](https://github.com/ipld/js-dag-pb) created nodes. -`options` is an JavaScript option that might include the following keys: - -- `wrapWithDirectory` (boolean, defaults to false): if true, a wrapping node will be created -- `shardSplitThresholdBytes` (positive integer, defaults to 256KiB): if the serialized node is larger than this it might be converted to a HAMT sharded directory -- `chunker` (string, defaults to `"fixed"`): the chunking strategy. 
Supports: - - `fixed` - - `rabin` -- `avgChunkSize` (positive integer, defaults to `262144`): the average chunk size (rabin chunker only) -- `minChunkSize` (positive integer): the minimum chunk size (rabin chunker only) -- `maxChunkSize` (positive integer, defaults to `262144`): the maximum chunk size -- `strategy` (string, defaults to `"balanced"`): the DAG builder strategy name. Supports: - - `flat`: flat list of chunks - - `balanced`: builds a balanced tree - - `trickle`: builds [a trickle tree](https://github.com/ipfs/specs/pull/57#issuecomment-265205384) -- `maxChildrenPerNode` (positive integer, defaults to `174`): the maximum children per node for the `balanced` and `trickle` DAG builder strategies -- `layerRepeat` (positive integer, defaults to 4): (only applicable to the `trickle` DAG builder strategy). The maximum repetition of parent nodes for each layer of the tree. -- `reduceSingleLeafToSelf` (boolean, defaults to `true`): optimization for, when reducing a set of nodes with one node, reduce it to that node. -- `hamtHashFn` (async function(string) Buffer): a function that hashes file names to create HAMT shards -- `hamtBucketBits` (positive integer, defaults to `8`): the number of bits at each bucket of the HAMT -- `progress` (function): a function that will be called with the byte length of chunks as a file is added to ipfs. -- `onlyHash` (boolean, defaults to false): Only chunk and hash - do not write to disk -- `hashAlg` (string): multihash hashing algorithm to use -- `cidVersion` (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, *including* it's version) -- `rawLeaves` (boolean, defaults to false): When a file would span multiple DAGNodes, if this is true the leaf nodes will not be wrapped in `UnixFS` protobufs and will instead contain the raw file bytes -- `leafType` (string, defaults to `'file'`) what type of UnixFS node leaves should be - can be `'file'` or `'raw'` (ignored when `rawLeaves` is `true`) -- `blockWriteConcurrency` (positive integer, defaults to 10) How many blocks to hash and write to the block store concurrently. For small numbers of large files this should be high (e.g. 50). -- `fileImportConcurrency` (number, defaults to 50) How many files to import concurrently. For large numbers of small files this should be high (e.g. 50). - -## Overriding internals - -Several aspects of the importer are overridable by specifying functions as part of the options object with these keys: - -- `chunkValidator` (function): Optional function that supports the signature `async function * (source, options)` - - This function takes input from the `content` field of imported entries. It should transform them into `Buffer`s, throwing an error if it cannot. - - It should yield `Buffer` objects constructed from the `source` or throw an `Error` -- `chunker` (function): Optional function that supports the signature `async function * (source, options)` where `source` is an async generator and `options` is an options object - - It should yield `Buffer` objects. 
-- `bufferImporter` (function): Optional function that supports the signature `async function * (entry, blockstore, options)` - - This function should read `Buffer`s from `source` and persist them using `blockstore.put` or similar - - `entry` is the `{ path, content }` entry, where `entry.content` is an async generator that yields Buffers - - It should yield functions that return a Promise that resolves to an object with the properties `{ cid, unixfs, size }` where `cid` is a [CID], `unixfs` is a [UnixFS] entry and `size` is a `Number` that represents the serialized size of the [IPLD] node that holds the buffer data. - - Values will be pulled from this generator in parallel - the amount of parallelisation is controlled by the `blockWriteConcurrency` option (default: 10) -- `dagBuilder` (function): Optional function that supports the signature `async function * (source, blockstore, options)` - - This function should read `{ path, content }` entries from `source` and turn them into DAGs - - It should yield a `function` that returns a `Promise` that resolves to `{ cid, path, unixfs, node }` where `cid` is a `CID`, `path` is a string, `unixfs` is a UnixFS entry and `node` is a `DAGNode`. - - Values will be pulled from this generator in parallel - the amount of parallelisation is controlled by the `fileImportConcurrency` option (default: 50) -- `treeBuilder` (function): Optional function that supports the signature `async function * (source, blockstore, options)` - - This function should read `{ cid, path, unixfs, node }` entries from `source` and place them in a directory structure - - It should yield an object with the properties `{ cid, path, unixfs, size }` where `cid` is a `CID`, `path` is a string, `unixfs` is a UnixFS entry and `size` is a `Number`. +### const result = await importContent(content, blockstore \[, options]) + +A convenience function for importing a single file or directory. + +### const result = await importBytes(buf, blockstore \[, options]) + +A convenience function for importing a single Uint8Array. + +### const result = await importByteStream(source, blockstore \[, options]) + +A convenience function for importing a single stream of Uint8Arrays. 
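+
+As a rough sketch of how these helpers might be used (assuming a `MemoryBlockstore` from the `blockstore-core` module, as used in the tests, and that each result exposes a `cid` in the same way as the entries yielded by `importer`):
+
+```js
+import { importBytes, importContent } from 'ipfs-unixfs-importer'
+import { MemoryBlockstore } from 'blockstore-core'
+
+const blockstore = new MemoryBlockstore()
+
+// import a single Uint8Array and read the CID of the root node
+const bytes = await importBytes(new TextEncoder().encode('hello world'), blockstore)
+console.info(bytes.cid)
+
+// import a single file, optionally with a path and metadata
+const file = await importContent({
+  path: 'hello.txt',
+  content: new TextEncoder().encode('hello world')
+}, blockstore)
+console.info(file.cid)
+```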
## API Docs diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json index b51d12cc..77379bd0 100644 --- a/packages/ipfs-unixfs-importer/package.json +++ b/packages/ipfs-unixfs-importer/package.json @@ -20,6 +20,22 @@ }, "type": "module", "types": "./dist/src/index.d.ts", + "typesVersions": { + "*": { + "*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ], + "src/*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ] + } + }, "files": [ "src", "dist", @@ -30,6 +46,14 @@ ".": { "types": "./dist/src/index.d.ts", "import": "./dist/src/index.js" + }, + "./chunker": { + "types": "./dist/src/chunker/index.d.ts", + "import": "./dist/src/chunker/index.js" + }, + "./layout": { + "types": "./dist/src/layout/index.d.ts", + "import": "./dist/src/layout/index.js" } }, "eslintConfig": { @@ -145,7 +169,6 @@ "it-batch": "^2.0.0", "it-first": "^2.0.0", "it-parallel-batch": "^2.0.0", - "merge-options": "^3.0.4", "multiformats": "^11.0.0", "rabin-wasm": "^0.1.4", "uint8arraylist": "^2.4.3", @@ -163,6 +186,6 @@ "fs": false }, "typedoc": { - "entryPoint": "./src/index.js" + "entryPoint": "./src/index.ts" } } diff --git a/packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts b/packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts index a1782301..a435e393 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts +++ b/packages/ipfs-unixfs-importer/src/chunker/fixed-size.ts @@ -1,38 +1,47 @@ import { Uint8ArrayList } from 'uint8arraylist' -import type { Chunker } from '../index.js' - -export const fixedSize: Chunker = async function * (source, options) { - let list = new Uint8ArrayList() - let currentLength = 0 - let emitted = false - const maxChunkSize = options.maxChunkSize - - for await (const buffer of source) { - list.append(buffer) - - currentLength += buffer.length - - while (currentLength >= maxChunkSize) { - yield list.slice(0, maxChunkSize) - emitted = true - - // throw away consumed bytes - if (maxChunkSize === list.length) { - list = new Uint8ArrayList() - currentLength = 0 - } else { - const newBl = new Uint8ArrayList() - newBl.append(list.sublist(maxChunkSize)) - list = newBl - - // update our offset - currentLength -= maxChunkSize +import type { Chunker } from './index.js' + +export interface FixedSizeOptions { + chunkSize?: number +} + +const DEFAULT_CHUNK_SIZE = 262144 + +export const fixedSize = (options: FixedSizeOptions = {}): Chunker => { + const chunkSize = options.chunkSize ?? 
DEFAULT_CHUNK_SIZE + + return async function * fixedSizeChunker (source) { + let list = new Uint8ArrayList() + let currentLength = 0 + let emitted = false + + for await (const buffer of source) { + list.append(buffer) + + currentLength += buffer.length + + while (currentLength >= chunkSize) { + yield list.slice(0, chunkSize) + emitted = true + + // throw away consumed bytes + if (chunkSize === list.length) { + list = new Uint8ArrayList() + currentLength = 0 + } else { + const newBl = new Uint8ArrayList() + newBl.append(list.sublist(chunkSize)) + list = newBl + + // update our offset + currentLength -= chunkSize + } } } - } - if (!emitted || currentLength > 0) { - // return any remaining bytes - yield list.subarray(0, currentLength) + if (!emitted || currentLength > 0) { + // return any remaining bytes + yield list.subarray(0, currentLength) + } } } diff --git a/packages/ipfs-unixfs-importer/src/chunker/index.ts b/packages/ipfs-unixfs-importer/src/chunker/index.ts new file mode 100644 index 00000000..1ba0231a --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/chunker/index.ts @@ -0,0 +1,5 @@ + +export interface Chunker { (source: AsyncIterable): AsyncIterable } + +export { rabin } from './rabin.js' +export { fixedSize } from './fixed-size.js' diff --git a/packages/ipfs-unixfs-importer/src/chunker/rabin.ts b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts index fae963e2..110185e7 100644 --- a/packages/ipfs-unixfs-importer/src/chunker/rabin.ts +++ b/packages/ipfs-unixfs-importer/src/chunker/rabin.ts @@ -2,31 +2,58 @@ import { Uint8ArrayList } from 'uint8arraylist' // @ts-expect-error import { create } from 'rabin-wasm' import errcode from 'err-code' -import type { Chunker } from '../index.js' +import type { Chunker } from './index.js' + +const DEFAULT_MIN_CHUNK_SIZE = 262144 +const DEFAULT_MAX_CHUNK_SIZE = 262144 +const DEFAULT_AVG_CHUNK_SIZE = 262144 +const DEFAULT_WINDOW = 16 + +async function * chunker (source: AsyncIterable, r: any): AsyncGenerator { + const buffers = new Uint8ArrayList() + + for await (const chunk of source) { + buffers.append(chunk) + + const sizes = r.fingerprint(chunk) + + for (let i = 0; i < sizes.length; i++) { + const size = sizes[i] + const buf = buffers.slice(0, size) + buffers.consume(size) + + yield buf + } + } + + if (buffers.length > 0) { + yield buffers.subarray(0) + } +} export interface RabinOptions { - min: number - max: number - bits: number - window: number - polynomial: number + minChunkSize?: number + maxChunkSize?: number + avgChunkSize?: number + window?: number } -export const rabin: Chunker = async function * (source, options) { - let min, max, avg +export const rabin = (options: RabinOptions = {}): Chunker => { + let min = options.minChunkSize ?? DEFAULT_MIN_CHUNK_SIZE + let max = options.maxChunkSize ?? DEFAULT_MAX_CHUNK_SIZE + let avg = options.avgChunkSize ?? DEFAULT_AVG_CHUNK_SIZE + const window = options.window ?? 
DEFAULT_WINDOW - if (options.minChunkSize > 0 && options.maxChunkSize > 0 && options.avgChunkSize > 0) { - avg = options.avgChunkSize - min = options.minChunkSize - max = options.maxChunkSize - } else if (options.avgChunkSize == null) { - throw errcode(new Error('please specify an average chunk size'), 'ERR_INVALID_AVG_CHUNK_SIZE') - } else { - avg = options.avgChunkSize + // if only avg was passed, calculate min/max from that + if (options.avgChunkSize != null && options.minChunkSize == null && options.maxChunkSize == null) { min = avg / 3 max = avg + (avg / 2) } + if (options.avgChunkSize == null && options.minChunkSize == null && options.maxChunkSize == null) { + throw errcode(new Error('please specify an average chunk size'), 'ERR_INVALID_AVG_CHUNK_SIZE') + } + // validate min/max/avg in the same way as go if (min < 16) { throw errcode(new Error('rabin min must be greater than 16'), 'ERR_INVALID_MIN_CHUNK_SIZE') @@ -42,36 +69,11 @@ export const rabin: Chunker = async function * (source, options) { const sizepow = Math.floor(Math.log2(avg)) - for await (const chunk of rabinChunker(source, { - min, - max, - bits: sizepow, - window: options.window, - polynomial: options.polynomial - })) { - yield chunk - } -} - -async function * rabinChunker (source: AsyncIterable, options: RabinOptions): AsyncGenerator { - const r = await create(options.bits, options.min, options.max, options.window) - const buffers = new Uint8ArrayList() - - for await (const chunk of source) { - buffers.append(chunk) - - const sizes = r.fingerprint(chunk) - - for (let i = 0; i < sizes.length; i++) { - const size = sizes[i] - const buf = buffers.slice(0, size) - buffers.consume(size) + return async function * rabinChunker (source) { + const r = await create(sizepow, min, max, window) - yield buf + for await (const chunk of chunker(source, r)) { + yield chunk } } - - if (buffers.length > 0) { - yield buffers.subarray(0) - } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts b/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts new file mode 100644 index 00000000..f9b77eab --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/dag-builder/buffer-importer.ts @@ -0,0 +1,50 @@ +import { UnixFS } from 'ipfs-unixfs' +import { persist, PersistOptions } from '../utils/persist.js' +import * as dagPb from '@ipld/dag-pb' +import * as raw from 'multiformats/codecs/raw' +import type { BufferImporter, ProgressHandler } from '../index.js' +import type { Version } from 'multiformats/cid' + +export interface BufferImporterOptions { + cidVersion: Version + rawLeaves: boolean + leafType: 'file' | 'raw' + progress?: ProgressHandler +} + +export function defaultBufferImporter (options: BufferImporterOptions): BufferImporter { + return async function * bufferImporter (file, block) { + for await (let buffer of file.content) { + yield async () => { + options.progress?.(buffer.length, file.path) + let unixfs + + const opts: PersistOptions = { + codec: dagPb, + cidVersion: options.cidVersion + } + + if (options.rawLeaves) { + opts.codec = raw + opts.cidVersion = 1 + } else { + unixfs = new UnixFS({ + type: options.leafType, + data: buffer + }) + + buffer = dagPb.encode({ + Data: unixfs.marshal(), + Links: [] + }) + } + + return { + cid: await persist(buffer, block, opts), + unixfs, + size: BigInt(buffer.length) + } + } + } + } +} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts index b63c8755..21c4ccae 100644 --- 
a/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.ts @@ -1,23 +1,31 @@ import { UnixFS } from 'ipfs-unixfs' import { persist } from '../utils/persist.js' import { encode, prepare } from '@ipld/dag-pb' -import type { Directory, UnixFSV1DagBuilder } from '../index.js' +import type { Directory, InProgressImportResult } from '../index.js' +import type { Blockstore } from 'interface-blockstore' +import type { Version } from 'multiformats/cid' -export const dirBuilder: UnixFSV1DagBuilder = async (item, blockstore, options) => { +export interface DirBuilderOptions { + cidVersion: Version + signal?: AbortSignal +} + +export const dirBuilder = async (dir: Directory, blockstore: Blockstore, options: DirBuilderOptions): Promise => { const unixfs = new UnixFS({ type: 'directory', - mtime: item.mtime, - mode: item.mode + mtime: dir.mtime, + mode: dir.mode }) const buffer = encode(prepare({ Data: unixfs.marshal() })) const cid = await persist(buffer, blockstore, options) - const path = item.path + const path = dir.path return { cid, path, unixfs, - size: BigInt(buffer.length) + size: BigInt(buffer.length), + originalPath: dir.originalPath } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file.ts similarity index 70% rename from packages/ipfs-unixfs-importer/src/dag-builder/file/index.ts rename to packages/ipfs-unixfs-importer/src/dag-builder/file.ts index 8a52bda0..268c1667 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/file.ts @@ -1,36 +1,23 @@ -import errCode from 'err-code' import { UnixFS } from 'ipfs-unixfs' -import { persist } from '../../utils/persist.js' -import { encode, prepare } from '@ipld/dag-pb' +import { persist } from '../utils/persist.js' +import { encode, PBLink, prepare } from '@ipld/dag-pb' import parallelBatch from 'it-parallel-batch' import * as rawCodec from 'multiformats/codecs/raw' -import * as dagPb from '@ipld/dag-pb' - -import { flat } from './flat.js' -import { balanced } from './balanced.js' -import { trickle } from './trickle.js' -import { bufferImporter } from './buffer-importer.js' -import type { File, FileDAGBuilder, ImporterOptions, InProgressImportResult, Reducer, UnixFSV1DagBuilder } from '../../index.js' +import type { BufferImporter, File, InProgressImportResult } from '../index.js' import type { Blockstore } from 'interface-blockstore' +import type { FileLayout, Reducer } from '../layout/index.js' +import type { Version } from 'multiformats/cid' -const dagBuilders: Record = { - flat, - balanced, - trickle +interface BuildFileBatchOptions { + bufferImporter: BufferImporter + blockWriteConcurrency: number } -async function * buildFileBatch (file: File, blockstore: Blockstore, options: ImporterOptions): AsyncGenerator { +async function * buildFileBatch (file: File, blockstore: Blockstore, options: BuildFileBatchOptions): AsyncGenerator { let count = -1 - let previous - let importer - - if (typeof options.bufferImporter === 'function') { - importer = options.bufferImporter - } else { - importer = bufferImporter - } + let previous: InProgressImportResult | undefined - for await (const entry of parallelBatch(importer(file, blockstore, options), options.blockWriteConcurrency)) { + for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) { count++ if (count === 0) { @@ -38,7 +25,7 @@ async function * 
buildFileBatch (file: File, blockstore: Blockstore, options: Im continue } else if (count === 1 && (previous != null)) { yield previous - previous = null + previous = undefined } yield entry @@ -50,7 +37,13 @@ async function * buildFileBatch (file: File, blockstore: Blockstore, options: Im } } -const reduce = (file: File, blockstore: Blockstore, options: ImporterOptions): Reducer => { +interface ReduceOptions { + reduceSingleLeafToSelf: boolean + cidVersion: Version + signal?: AbortSignal +} + +const reduce = (file: File, blockstore: Blockstore, options: ReduceOptions): Reducer => { const reducer: Reducer = async function (leaves) { if (leaves.length === 1 && leaves[0]?.single === true && options.reduceSingleLeafToSelf) { const leaf = leaves[0] @@ -90,8 +83,6 @@ const reduce = (file: File, blockstore: Blockstore, options: ImporterOptions): R // } leaf.cid = await persist(buffer, blockstore, { ...options, - codec: dagPb, - hasher: options.hasher, cidVersion: options.cidVersion }) leaf.size = BigInt(buffer.length) @@ -101,7 +92,8 @@ const reduce = (file: File, blockstore: Blockstore, options: ImporterOptions): R cid: leaf.cid, path: file.path, unixfs: leaf.unixfs, - size: leaf.size + size: leaf.size, + originalPath: leaf.originalPath } } @@ -112,7 +104,7 @@ const reduce = (file: File, blockstore: Blockstore, options: ImporterOptions): R mode: file.mode }) - const links: dagPb.PBLink[] = leaves + const links: PBLink[] = leaves .filter(leaf => { if (leaf.cid.code === rawCodec.code && leaf.size > 0) { return true @@ -162,19 +154,18 @@ const reduce = (file: File, blockstore: Blockstore, options: ImporterOptions): R cid, path: file.path, unixfs: f, - size: BigInt(buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)) + size: BigInt(buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 
0), 0)), + originalPath: file.originalPath } } return reducer } -export const fileBuilder: UnixFSV1DagBuilder = async (file, block, options) => { - const dagBuilder = dagBuilders[options.strategy] - - if (dagBuilder == null) { - throw errCode(new Error(`Unknown importer build strategy name: ${options.strategy}`), 'ERR_BAD_STRATEGY') - } +export interface FileBuilderOptions extends BuildFileBatchOptions, ReduceOptions { + layout: FileLayout +} - return await dagBuilder(buildFileBatch(file, block, options), reduce(file, block, options), options) +export const fileBuilder = async (file: File, block: Blockstore, options: FileBuilderOptions): Promise => { + return await options.layout(buildFileBatch(file, block, options), reduce(file, block, options)) } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts deleted file mode 100644 index 7a92cf48..00000000 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/balanced.ts +++ /dev/null @@ -1,20 +0,0 @@ -import batch from 'it-batch' -import type { FileDAGBuilder } from '../../index.js' - -export const balanced: FileDAGBuilder = async (source, reduce, options) => { - return await reduceToParents(source, reduce, options) -} - -const reduceToParents: FileDAGBuilder = async (source, reduce, options) => { - const roots = [] - - for await (const chunked of batch(source, options.maxChildrenPerNode)) { - roots.push(await reduce(chunked)) - } - - if (roots.length > 1) { - return await reduceToParents(roots, reduce, options) - } - - return roots[0] -} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts deleted file mode 100644 index 87472306..00000000 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { UnixFS } from 'ipfs-unixfs' -import { persist, PersistOptions } from '../../utils/persist.js' -import * as dagPb from '@ipld/dag-pb' -import * as raw from 'multiformats/codecs/raw' -import type { BufferImporter } from '../../index.js' - -export const bufferImporter: BufferImporter = async function * (file, block, options) { - for await (let buffer of file.content) { - yield async () => { - options.progress(buffer.length, file.path) - let unixfs - - const opts: PersistOptions = { - codec: dagPb, - cidVersion: options.cidVersion, - hasher: options.hasher, - onlyHash: options.onlyHash - } - - if (options.rawLeaves) { - opts.codec = raw - opts.cidVersion = 1 - } else { - unixfs = new UnixFS({ - type: options.leafType, - data: buffer - }) - - buffer = dagPb.encode({ - Data: unixfs.marshal(), - Links: [] - }) - } - - return { - cid: await persist(buffer, block, opts), - unixfs, - size: BigInt(buffer.length) - } - } - } -} - -export default bufferImporter diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts b/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts deleted file mode 100644 index b64c681e..00000000 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/flat.ts +++ /dev/null @@ -1,6 +0,0 @@ -import all from 'it-all' -import type { FileDAGBuilder } from '../../index.js' - -export const flat: FileDAGBuilder = async function (source, reduce) { - return await reduce(await all(source)) -} diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/index.ts b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts index 389c5a28..d5cba6e6 100644 --- 
a/packages/ipfs-unixfs-importer/src/dag-builder/index.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/index.ts @@ -1,10 +1,10 @@ -import { dirBuilder } from './dir.js' -import { fileBuilder } from './file/index.js' +import { dirBuilder, DirBuilderOptions } from './dir.js' +import { fileBuilder, FileBuilderOptions } from './file.js' import errCode from 'err-code' -import { rabin } from '../chunker/rabin.js' -import { fixedSize } from '../chunker/fixed-size.js' -import { validateChunks } from './validate-chunks.js' -import type { Chunker, ChunkValidator, DAGBuilder, Directory, File } from '../index.js' +import type { Directory, File, ImportCandidate, InProgressImportResult } from '../index.js' +import type { Blockstore } from 'interface-blockstore' +import type { ChunkValidator } from './validate-chunks.js' +import type { Chunker } from '../chunker/index.js' function isIterable (thing: any): thing is Iterable { return Symbol.iterator in thing @@ -34,56 +34,53 @@ function contentAsAsyncIterable (content: Uint8Array | AsyncIterable throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') } -export const dagBuilder: DAGBuilder = async function * (source, blockstore, options) { - for await (const entry of source) { - if (entry.path != null) { - if (entry.path.substring(0, 2) === './') { - options.wrapWithDirectory = true - } - - entry.path = entry.path - .split('/') - .filter(path => path != null && path !== '.') - .join('/') - } +export interface DagBuilderOptions extends FileBuilderOptions, DirBuilderOptions { + chunker: Chunker + chunkValidator: ChunkValidator + wrapWithDirectory: boolean +} - if (entry.content != null) { - let chunker: Chunker +export type ImporterSourceStream = AsyncIterable | Iterable - if (typeof options.chunker === 'function') { - chunker = options.chunker - } else if (options.chunker === 'rabin') { - chunker = rabin - } else { - chunker = fixedSize - } +export interface DAGBuilder { + (source: ImporterSourceStream, blockstore: Blockstore): AsyncIterable<() => Promise> +} - let chunkValidator: ChunkValidator +export function defaultDagBuilder (options: DagBuilderOptions): DAGBuilder { + return async function * dagBuilder (source, blockstore) { + for await (const entry of source) { + let originalPath: string | undefined - if (typeof options.chunkValidator === 'function') { - chunkValidator = options.chunkValidator - } else { - chunkValidator = validateChunks + if (entry.path != null) { + originalPath = entry.path + entry.path = entry.path + .split('/') + .filter(path => path != null && path !== '.') + .join('/') } - const file: File = { - path: entry.path, - mtime: entry.mtime, - mode: entry.mode, - content: chunker(chunkValidator(contentAsAsyncIterable(entry.content), options), options) - } + if (entry.content != null) { + const file: File = { + path: entry.path, + mtime: entry.mtime, + mode: entry.mode, + content: options.chunker(options.chunkValidator(contentAsAsyncIterable(entry.content))), + originalPath + } - yield async () => await fileBuilder(file, blockstore, options) - } else if (entry.path != null) { - const dir: Directory = { - path: entry.path, - mtime: entry.mtime, - mode: entry.mode - } + yield async () => await fileBuilder(file, blockstore, options) + } else if (entry.path != null) { + const dir: Directory = { + path: entry.path, + mtime: entry.mtime, + mode: entry.mode, + originalPath + } - yield async () => await dirBuilder(dir, blockstore, options) - } else { - throw new Error('Import candidate must have content or path or both') + 
yield async () => await dirBuilder(dir, blockstore, options) + } else { + throw new Error('Import candidate must have content or path or both') + } } } } diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts b/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts index bb0b6f8e..d01e0628 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts +++ b/packages/ipfs-unixfs-importer/src/dag-builder/validate-chunks.ts @@ -1,21 +1,24 @@ import errCode from 'err-code' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import type { ChunkValidator } from '../index.js' -export const validateChunks: ChunkValidator = async function * (source) { - for await (const content of source) { - if (content.length === undefined) { - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') - } +export interface ChunkValidator { (source: AsyncIterable): AsyncIterable } + +export const defaultChunkValidator = (): ChunkValidator => { + return async function * validateChunks (source) { + for await (const content of source) { + if (content.length === undefined) { + throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + } - if (typeof content === 'string' || content instanceof String) { - yield uint8ArrayFromString(content.toString()) - } else if (Array.isArray(content)) { - yield Uint8Array.from(content) - } else if (content instanceof Uint8Array) { - yield content - } else { - throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + if (typeof content === 'string' || content instanceof String) { + yield uint8ArrayFromString(content.toString()) + } else if (Array.isArray(content)) { + yield Uint8Array.from(content) + } else if (content instanceof Uint8Array) { + yield content + } else { + throw errCode(new Error('Content was invalid'), 'ERR_INVALID_CONTENT') + } } } } diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.ts b/packages/ipfs-unixfs-importer/src/dir-flat.ts index 35aa03b5..2597a5f7 100644 --- a/packages/ipfs-unixfs-importer/src/dir-flat.ts +++ b/packages/ipfs-unixfs-importer/src/dir-flat.ts @@ -2,13 +2,13 @@ import { encode, PBNode, prepare } from '@ipld/dag-pb' import type { Blockstore } from 'interface-blockstore' import { UnixFS } from 'ipfs-unixfs' import { Dir, CID_V0, CID_V1, DirProps } from './dir.js' -import type { ImporterOptions, ImportResult, InProgressImportResult } from './index.js' -import { persist } from './utils/persist.js' +import type { ImportResult, InProgressImportResult } from './index.js' +import { persist, PersistOptions } from './utils/persist.js' export class DirFlat extends Dir { private readonly _children: Map - constructor (props: DirProps, options: ImporterOptions) { + constructor (props: DirProps, options: PersistOptions) { super(props, options) this._children = new Map() diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.ts b/packages/ipfs-unixfs-importer/src/dir-sharded.ts index 23794972..24468629 100644 --- a/packages/ipfs-unixfs-importer/src/dir-sharded.ts +++ b/packages/ipfs-unixfs-importer/src/dir-sharded.ts @@ -1,20 +1,33 @@ import { encode, PBLink, prepare } from '@ipld/dag-pb' import { UnixFS } from 'ipfs-unixfs' import { Dir, CID_V0, CID_V1, DirProps } from './dir.js' -import { persist } from './utils/persist.js' +import { persist, PersistOptions } from './utils/persist.js' import { createHAMT, Bucket, BucketChild } from 'hamt-sharding' -import type { ImporterOptions, ImportResult, InProgressImportResult } from './index.js' 
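For orientation, here is a sketch (not part of the patch) of how the new factory-style internals compose once the monolithic `ImporterOptions` object is gone. All names come from the diffs in this commit; the relative import paths and the exact shape of `defaultBufferImporter`'s options are assumptions based on how `importer()` wires these pieces together later in the patch.

```typescript
// Hypothetical wiring of the refactored internals, mirroring the defaults
// importer() applies further down in this patch. Import paths assume the
// package source tree; published entry points may differ.
import { MemoryBlockstore } from 'blockstore-core'
import { defaultDagBuilder } from '../src/dag-builder/index.js'
import { defaultChunkValidator } from '../src/dag-builder/validate-chunks.js'
import { defaultBufferImporter } from '../src/dag-builder/buffer-importer.js'
import { fixedSize } from '../src/chunker/fixed-size.js'
import { balanced } from '../src/layout/index.js'

const blockstore = new MemoryBlockstore()

// every option is now an explicit value or a factory - there is no merged
// ImporterOptions object any more
const buildDag = defaultDagBuilder({
  chunker: fixedSize({ chunkSize: 262144 }),
  chunkValidator: defaultChunkValidator(),
  bufferImporter: defaultBufferImporter({
    cidVersion: 0,
    rawLeaves: false,
    leafType: 'file',
    progress: () => {} // assumed optional progress callback
  }),
  layout: balanced({ maxChildrenPerNode: 174 }),
  wrapWithDirectory: false,
  blockWriteConcurrency: 10,
  reduceSingleLeafToSelf: true,
  cidVersion: 0
})

// the DAGBuilder yields one thunk per import candidate
for await (const build of buildDag([{
  path: 'foo.txt',
  content: Uint8Array.from([0, 1, 2, 3, 4])
}], blockstore)) {
  console.info(await build()) // { cid, path, unixfs, size, originalPath }
}
```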
+import { murmur3128 } from '@multiformats/murmur3' +import type { ImportResult, InProgressImportResult } from './index.js' import type { Blockstore } from 'interface-blockstore' +async function hamtHashFn (buf: Uint8Array): Promise { + return (await murmur3128.encode(buf)) + // Murmur3 outputs 128 bit but, accidentally, IPFS Go's + // implementation only uses the first 64, so we must do the same + // for parity.. + .slice(0, 8) + // Invert buffer because that's how Go impl does it + .reverse() +} + +const HAMT_HASH_CODE = BigInt(0x22) + class DirSharded extends Dir { private readonly _bucket: Bucket - constructor (props: DirProps, options: ImporterOptions) { + constructor (props: DirProps, options: PersistOptions) { super(props, options) this._bucket = createHAMT({ - hashFn: options.hamtHashFn, - bits: options.hamtBucketBits + hashFn: hamtHashFn, + bits: 8 }) } @@ -73,7 +86,7 @@ class DirSharded extends Dir { export default DirSharded -async function * flush (bucket: Bucket, blockstore: Blockstore, shardRoot: DirSharded | null, options: ImporterOptions): AsyncIterable { +async function * flush (bucket: Bucket, blockstore: Blockstore, shardRoot: DirSharded | null, options: PersistOptions): AsyncIterable { const children = bucket._children const links: PBLink[] = [] let childrenSize = 0n @@ -152,7 +165,7 @@ async function * flush (bucket: Bucket, blockstore type: 'hamt-sharded-directory', data, fanout: BigInt(bucket.tableSize()), - hashType: options.hamtHashCode, + hashType: HAMT_HASH_CODE, mtime: shardRoot?.mtime, mode: shardRoot?.mode }) @@ -176,7 +189,7 @@ function isDir (obj: any): obj is Dir { return typeof obj.flush === 'function' } -function calculateSize (bucket: Bucket, shardRoot: DirSharded | null, options: ImporterOptions): number { +function calculateSize (bucket: Bucket, shardRoot: DirSharded | null, options: PersistOptions): number { const children = bucket._children const links: PBLink[] = [] @@ -231,7 +244,7 @@ function calculateSize (bucket: Bucket, shardRoot: DirSharded | null, optio type: 'hamt-sharded-directory', data, fanout: BigInt(bucket.tableSize()), - hashType: options.hamtHashCode, + hashType: HAMT_HASH_CODE, mtime: shardRoot?.mtime, mode: shardRoot?.mode }) diff --git a/packages/ipfs-unixfs-importer/src/dir.ts b/packages/ipfs-unixfs-importer/src/dir.ts index 600860ce..1e0303d7 100644 --- a/packages/ipfs-unixfs-importer/src/dir.ts +++ b/packages/ipfs-unixfs-importer/src/dir.ts @@ -1,7 +1,8 @@ import type { Blockstore } from 'interface-blockstore' import type { Mtime, UnixFS } from 'ipfs-unixfs' import { CID } from 'multiformats/cid' -import type { ImporterOptions, ImportResult, InProgressImportResult } from './index.js' +import type { ImportResult, InProgressImportResult } from './index.js' +import type { PersistOptions } from './utils/persist.js' export interface DirProps { root: boolean @@ -16,8 +17,8 @@ export interface DirProps { mtime?: Mtime } -export class Dir { - public options: ImporterOptions +export abstract class Dir { + public options: PersistOptions public root: boolean public dir: boolean public path: string @@ -32,7 +33,7 @@ export class Dir { public size?: number public nodeSize?: number - constructor (props: DirProps, options: ImporterOptions) { + constructor (props: DirProps, options: PersistOptions) { this.options = options ?? 
{} this.root = props.root @@ -47,19 +48,12 @@ export class Dir { this.mtime = props.mtime } - async put (name: string, value: InProgressImportResult | Dir): Promise { } - - async get (name: string): Promise { - return await Promise.resolve(this) - } - - async * eachChildSeries (): AsyncIterable<{ key: string, child: InProgressImportResult | Dir }> { } - - async * flush (blockstore: Blockstore): AsyncGenerator { } - - estimateNodeSize (): number { - return 0 - } + abstract put (name: string, value: InProgressImportResult | Dir): Promise + abstract get (name: string): Promise + abstract eachChildSeries (): AsyncIterable<{ key: string, child: InProgressImportResult | Dir }> + abstract flush (blockstore: Blockstore): AsyncGenerator + abstract estimateNodeSize (): number + abstract childCount (): number } // we use these to calculate the node size to use as a check for whether a directory diff --git a/packages/ipfs-unixfs-importer/src/flat-to-shard.ts b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts index 48134ae2..5059d014 100644 --- a/packages/ipfs-unixfs-importer/src/flat-to-shard.ts +++ b/packages/ipfs-unixfs-importer/src/flat-to-shard.ts @@ -1,9 +1,9 @@ import DirSharded from './dir-sharded.js' import { DirFlat } from './dir-flat.js' import type { Dir } from './dir.js' -import type { ImporterOptions } from './index.js' +import type { PersistOptions } from './utils/persist.js' -export async function flatToShard (child: Dir | null, dir: Dir, threshold: number, options: ImporterOptions): Promise { +export async function flatToShard (child: Dir | null, dir: Dir, threshold: number, options: PersistOptions): Promise { let newDir = dir if (dir instanceof DirFlat && dir.estimateNodeSize() > threshold) { @@ -32,7 +32,7 @@ export async function flatToShard (child: Dir | null, dir: Dir, threshold: numbe return newDir } -async function convertToShard (oldDir: DirFlat, options: ImporterOptions): Promise { +async function convertToShard (oldDir: DirFlat, options: PersistOptions): Promise { const newDir = new DirSharded({ root: oldDir.root, dir: true, diff --git a/packages/ipfs-unixfs-importer/src/index.ts b/packages/ipfs-unixfs-importer/src/index.ts index a631fc60..3c9fd350 100644 --- a/packages/ipfs-unixfs-importer/src/index.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -1,15 +1,24 @@ import parallelBatch from 'it-parallel-batch' -import defaultOptions from './options.js' -import { dagBuilder } from './dag-builder/index.js' -import { treeBuilder } from './tree-builder.js' +import { DAGBuilder, defaultDagBuilder } from './dag-builder/index.js' +import { defaultTreeBuilder } from './tree-builder.js' import type { UnixFS, Mtime } from 'ipfs-unixfs' import type { CID, Version as CIDVersion } from 'multiformats/cid' -import type { MultihashHasher } from 'multiformats/hashes/interface' import type { Blockstore } from 'interface-blockstore' +import { ChunkValidator, defaultChunkValidator } from './dag-builder/validate-chunks.js' +import { fixedSize } from './chunker/fixed-size.js' +import type { Chunker } from './chunker/index.js' +import { balanced, FileLayout } from './layout/index.js' +import { defaultBufferImporter } from './dag-builder/buffer-importer.js' +import first from 'it-first' +import errcode from 'err-code' +import type { AwaitIterable } from 'blockstore-core/base' + +export type ByteStream = AwaitIterable +export type ImportContent = ByteStream | Uint8Array export interface ImportCandidate { path?: string - content?: AsyncIterable | Iterable | Uint8Array + content?: ImportContent 
mtime?: Mtime mode?: number } @@ -19,12 +28,14 @@ export interface File { path?: string mtime?: Mtime mode?: number + originalPath?: string } export interface Directory { path?: string mtime?: Mtime mode?: number + originalPath?: string } export interface ImportResult { @@ -36,106 +47,169 @@ export interface ImportResult { export interface InProgressImportResult extends ImportResult { single?: boolean + originalPath?: string } -export type ChunkerType = 'fixed' | 'rabin' export interface ProgressHandler { (chunkSize: number, path?: string): void } export interface HamtHashFn { (value: Uint8Array): Promise } -export interface Chunker { (source: AsyncIterable, options: ImporterOptions): AsyncIterable } -export interface DAGBuilder { (source: AsyncIterable | Iterable, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise> } -export interface TreeBuilder { (source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions): AsyncIterable } -export interface BufferImporter { (file: File, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise> } -export interface ChunkValidator { (source: AsyncIterable, options: ImporterOptions): AsyncIterable } -export interface UnixFSV1DagBuilder { (item: T, blockstore: Blockstore, options: ImporterOptions): Promise } -export interface Reducer { (leaves: InProgressImportResult[]): Promise } - -export interface FileDAGBuilder { (source: AsyncIterable | Iterable, reducer: Reducer, options: ImporterOptions): Promise } - -export interface UserImporterOptions { - strategy?: 'balanced' | 'flat' | 'trickle' +export interface TreeBuilder { (source: AsyncIterable, blockstore: Blockstore): AsyncIterable } +export interface BufferImporter { (file: File, blockstore: Blockstore): AsyncIterable<() => Promise> } + +/** + * Options to control the importer's behaviour + */ +export interface ImporterOptions { + /** + * When a file would span multiple DAGNodes, if this is true the leaf nodes + * will not be wrapped in `UnixFS` protobufs and will instead contain the + * raw file bytes. Default: false + */ rawLeaves?: boolean - onlyHash?: boolean + + /** + * If the file being imported is small enough to fit into one DAGNode, store + * the file data in the root node along with the UnixFS metadata instead of + * in a leaf node which would then require additional I/O to load. Default: true + */ reduceSingleLeafToSelf?: boolean - hasher?: MultihashHasher + + /** + * What type of UnixFS node leaves should be - can be `'file'` or `'raw'` + * (ignored when `rawLeaves` is `true`). + * + * This option exists to simulate kubo's trickle dag which uses a combination + * of `'raw'` UnixFS leaves and `reduceSingleLeafToSelf: false`. + * + * For modern code the `rawLeaves: true` option should be used instead so leaves + * are plain Uint8Arrays without a UnixFS/Protobuf wrapper. + */ leafType?: 'file' | 'raw' + + /** + * The CID version to use when storing the data. Default: 0 + */ cidVersion?: CIDVersion + + /** + * A function that will be called with the byte length of chunks as a file + * is added to ipfs. + */ progress?: ProgressHandler + + /** + * If the serialized node is larger than this it might be converted to a HAMT + * sharded directory. Default: 256KiB + */ shardSplitThresholdBytes?: number + + /** + * How many files to import concurrently. For large numbers of small files this + * should be high (e.g. 50). 
Default: 50 + */ fileImportConcurrency?: number + + /** + * How many blocks to hash and write to the block store concurrently. For small + * numbers of large files this should be high (e.g. 50). Default: 10 + */ blockWriteConcurrency?: number - minChunkSize?: number - maxChunkSize?: number - avgChunkSize?: number - window?: number - polynomial?: number - maxChildrenPerNode?: number - layerRepeat?: number + + /** + * If true, all imported files and folders will be contained in a directory that + * will correspond to the CID of the final entry yielded. Default: false + */ wrapWithDirectory?: boolean - recursive?: boolean - hidden?: boolean - timeout?: number - hamtHashFn?: HamtHashFn - hamtBucketBits?: number - hamtHashCode?: bigint - chunker?: ChunkerType | Chunker - dagBuilder?: DAGBuilder - treeBuilder?: TreeBuilder - bufferImporter?: BufferImporter - chunkValidator?: ChunkValidator -} -export interface ImporterOptions { - strategy: 'balanced' | 'flat' | 'trickle' - rawLeaves: boolean - onlyHash: boolean - reduceSingleLeafToSelf: boolean - hasher: MultihashHasher - leafType: 'file' | 'raw' - cidVersion: CIDVersion - progress: ProgressHandler - shardSplitThresholdBytes: number - fileImportConcurrency: number - blockWriteConcurrency: number - minChunkSize: number - maxChunkSize: number - avgChunkSize: number - window: number - polynomial: number - maxChildrenPerNode: number - layerRepeat: number - wrapWithDirectory: boolean - recursive: boolean - hidden: boolean - timeout?: number - hamtHashFn: HamtHashFn - hamtBucketBits: number - hamtHashCode: bigint - chunker: ChunkerType | Chunker + /** + * The chunking strategy. See [./src/chunker/index.ts](./src/chunker/index.ts) + * for available chunkers. Default: fixedSize + */ + chunker?: Chunker + + /** + * How the DAGs that represent files are created. See + * [./src/layout/index.ts](./src/layout/index.ts) for available layouts. Default: balanced + */ + layout?: FileLayout + + /** + * This option can be used to override the importer internals. + * + * This function should read `{ path, content }` entries from `source` and turn them + * into DAGs. + * It should yield a `function` that returns a `Promise` that resolves to + * `{ cid, path, unixfs, node }` where `cid` is a `CID`, `path` is a string, `unixfs` + * is a UnixFS entry and `node` is a `DAGNode`. + * Values will be pulled from this generator in parallel - the amount of parallelisation + * is controlled by the `fileImportConcurrency` option (default: 50) + */ dagBuilder?: DAGBuilder + + /** + * This option can be used to override the importer internals. + * + * This function should read `{ cid, path, unixfs, node }` entries from `source` and + * place them in a directory structure. + * It should yield an object with the properties `{ cid, path, unixfs, size }` where + * `cid` is a `CID`, `path` is a string, `unixfs` is a UnixFS entry and `size` is a `Number`. + */ treeBuilder?: TreeBuilder + + /** + * This option can be used to override the importer internals. + * + * This function should read `Buffer`s from `source` and persist them using `blockstore.put` + * or similar. + * `entry` is the `{ path, content }` entry, where `entry.content` is an async + * generator that yields Buffers. + * It should yield functions that return a Promise that resolves to an object with + * the properties `{ cid, unixfs, size }` where `cid` is a [CID], `unixfs` is a [UnixFS] entry and `size` is a `Number` that represents the serialized size of the [IPLD] node that holds the buffer data. 
+ * Values will be pulled from this generator in parallel - the amount of + * parallelisation is controlled by the `blockWriteConcurrency` option (default: 10) + */ bufferImporter?: BufferImporter + + /** + * This option can be used to override the importer internals. + * + * This function takes input from the `content` field of imported entries. + * It should transform them into `Buffer`s, throwing an error if it cannot. + * It should yield `Buffer` objects constructed from the `source` or throw an + * `Error` + */ chunkValidator?: ChunkValidator } -export async function * importer (source: AsyncIterable | Iterable | ImportCandidate, blockstore: Blockstore, options: UserImporterOptions = {}): AsyncGenerator { - const opts = defaultOptions(options) - - let buildDag - - if (typeof options.dagBuilder === 'function') { - buildDag = options.dagBuilder - } else { - buildDag = dagBuilder - } - - let buildTree - - if (typeof options.treeBuilder === 'function') { - buildTree = options.treeBuilder - } else { - buildTree = treeBuilder - } +export type ImportCandidateStream = AsyncIterable | Iterable +/** + * The importer creates UnixFS DAGs and stores the blocks that make + * them up in the passed blockstore. + * + * @example + * + * ```typescript + * import { importer } from 'ipfs-unixfs-importer' + * import { MemoryBlockstore } from 'blockstore-core' + * + * // store blocks in memory, other blockstores are available + * const blockstore = new MemoryBlockstore() + * + * const input = [{ + * path: './foo.txt', + * content: Uint8Array.from([0, 1, 2, 3, 4]) + * }, { + * path: './bar.txt', + * content: Uint8Array.from([0, 1, 2, 3, 4]) + * }] + * + * for await (const entry of importer(input, blockstore)) { + * console.info(entry) + * // { cid: CID(), ... } + * } + * ``` + */ +export async function * importer (source: ImportCandidateStream, blockstore: Blockstore, options: ImporterOptions = {}): AsyncGenerator { let candidates: AsyncIterable | Iterable if (Symbol.asyncIterator in source || Symbol.iterator in source) { @@ -144,7 +218,39 @@ export async function * importer (source: AsyncIterable | Itera candidates = [source] } - for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) { + const wrapWithDirectory = options.wrapWithDirectory ?? false + const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144 + const cidVersion = options.cidVersion ?? 0 + const rawLeaves = options.rawLeaves ?? false + const leafType = options.leafType ?? 'file' + const fileImportConcurrency = options.fileImportConcurrency ?? 50 + const blockWriteConcurrency = options.blockWriteConcurrency ?? 10 + const reduceSingleLeafToSelf = options.reduceSingleLeafToSelf ?? true + + const chunker = options.chunker ?? fixedSize() + const chunkValidator = options.chunkValidator ?? defaultChunkValidator() + const buildDag: DAGBuilder = options.dagBuilder ?? defaultDagBuilder({ + chunker, + chunkValidator, + wrapWithDirectory, + layout: options.layout ?? balanced(), + bufferImporter: options.bufferImporter ?? defaultBufferImporter({ + cidVersion, + rawLeaves, + leafType, + progress: options.progress + }), + blockWriteConcurrency, + reduceSingleLeafToSelf, + cidVersion + }) + const buildTree: TreeBuilder = options.treeBuilder ?? 
defaultTreeBuilder({ + wrapWithDirectory, + shardSplitThresholdBytes, + cidVersion + }) + + for await (const entry of buildTree(parallelBatch(buildDag(candidates, blockstore), fileImportConcurrency), blockstore)) { yield { cid: entry.cid, path: entry.path, @@ -153,3 +259,86 @@ export async function * importer (source: AsyncIterable | Itera } } } + +/** + * `importContent` is similar to `importer` except it accepts a single + * `ImportCandidate` and returns a promise of a single `ImportResult` + * instead of a stream of results. + * + * @example + * + * ```typescript + * import { importContent } from 'ipfs-unixfs-importer' + * import { MemoryBlockstore } from 'blockstore-core' + * + * // store blocks in memory, other blockstores are available + * const blockstore = new MemoryBlockstore() + * + * const input = { + * path: './foo.txt', + * content: Uint8Array.from([0, 1, 2, 3, 4]) + * } + * + * const entry = await importContent(input, blockstore) + * ``` + */ +export async function importContent (content: ImportCandidate, blockstore: Blockstore, options: ImporterOptions = {}): Promise { + const result = await first(importer([content], blockstore, options)) + + if (result == null) { + throw errcode(new Error('Nothing imported'), 'ERR_INVALID_PARAMS') + } + + return result +} + +/** + * `importBytes` accepts a single Uint8Array and returns a promise + * of a single `ImportResult`. + * + * @example + * + * ```typescript + * import { importBytes } from 'ipfs-unixfs-importer' + * import { MemoryBlockstore } from 'blockstore-core' + * + * // store blocks in memory, other blockstores are available + * const blockstore = new MemoryBlockstore() + * + * const input = Uint8Array.from([0, 1, 2, 3, 4]) + * + * const entry = await importBytes(input, blockstore) + * ``` + */ +export async function importBytes (buf: ImportContent, blockstore: Blockstore, options: ImporterOptions = {}): Promise { + return importContent({ + content: buf + }, blockstore, options) +} + +/** + * `importByteStream` accepts a single stream of Uint8Arrays and + * returns a promise of a single `ImportResult`. + * + * @example + * + * ```typescript + * import { importByteStream } from 'ipfs-unixfs-importer' + * import { MemoryBlockstore } from 'blockstore-core' + * + * // store blocks in memory, other blockstores are available + * const blockstore = new MemoryBlockstore() + * + * const input = [ + * Uint8Array.from([0, 1, 2, 3, 4]), + * Uint8Array.from([5, 6, 7, 8, 9]) + * ] + * + * const entry = await importByteStream(input, blockstore) + * ``` + */ +export async function importByteStream (bufs: ByteStream, blockstore: Blockstore, options: ImporterOptions = {}): Promise { + return importContent({ + content: bufs + }, blockstore, options) +} diff --git a/packages/ipfs-unixfs-importer/src/layout/balanced.ts b/packages/ipfs-unixfs-importer/src/layout/balanced.ts new file mode 100644 index 00000000..30194f7c --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/layout/balanced.ts @@ -0,0 +1,27 @@ +import batch from 'it-batch' +import type { InProgressImportResult } from '../index.js' +import type { FileLayout } from './index.js' + +const DEFAULT_MAX_CHILDREN_PER_NODE = 174 + +export interface BalancedOptions { + maxChildrenPerNode?: number +} + +export function balanced (options?: BalancedOptions): FileLayout { + const maxChildrenPerNode = options?.maxChildrenPerNode ?? 
DEFAULT_MAX_CHILDREN_PER_NODE + + return async function balancedLayout (source, reduce): Promise { + const roots = [] + + for await (const chunked of batch(source, maxChildrenPerNode)) { + roots.push(await reduce(chunked)) + } + + if (roots.length > 1) { + return await balancedLayout(roots, reduce) + } + + return roots[0] + } +} diff --git a/packages/ipfs-unixfs-importer/src/layout/flat.ts b/packages/ipfs-unixfs-importer/src/layout/flat.ts new file mode 100644 index 00000000..f75b44f7 --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/layout/flat.ts @@ -0,0 +1,9 @@ +import all from 'it-all' +import type { FileLayout } from './index.js' +import type { InProgressImportResult } from '../index.js' + +export function flat (): FileLayout { + return async function flatLayout (source, reduce): Promise { + return await reduce(await all(source)) + } +} diff --git a/packages/ipfs-unixfs-importer/src/layout/index.ts b/packages/ipfs-unixfs-importer/src/layout/index.ts new file mode 100644 index 00000000..99b5be91 --- /dev/null +++ b/packages/ipfs-unixfs-importer/src/layout/index.ts @@ -0,0 +1,8 @@ +import type { InProgressImportResult } from '../index.js' + +export interface Reducer { (leaves: InProgressImportResult[]): Promise } +export interface FileLayout { (source: AsyncIterable | Iterable, reducer: Reducer): Promise } + +export { balanced } from './balanced.js' +export { flat } from './flat.js' +export { trickle } from './trickle.js' diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts b/packages/ipfs-unixfs-importer/src/layout/trickle.ts similarity index 71% rename from packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts rename to packages/ipfs-unixfs-importer/src/layout/trickle.ts index 215c37d8..c0cc194a 100644 --- a/packages/ipfs-unixfs-importer/src/dag-builder/file/trickle.ts +++ b/packages/ipfs-unixfs-importer/src/layout/trickle.ts @@ -1,9 +1,13 @@ import type { UnixFS } from 'ipfs-unixfs' import batch from 'it-batch' import type { CID } from 'multiformats/cid' -import type { FileDAGBuilder, InProgressImportResult, Reducer } from '../../index.js' +import type { InProgressImportResult } from '../index.js' +import type { FileLayout, Reducer } from '../layout/index.js' -export interface TrickleDagNode { +const DEFAULT_LAYER_REPEAT = 4 +const DEFAULT_MAX_CHILDREN_PER_NODE = 174 + +interface TrickleDagNode { children: InProgressImportResult[] depth: number maxDepth: number @@ -15,35 +19,48 @@ export interface TrickleDagNode { unixfs?: UnixFS } -export const trickle: FileDAGBuilder = async function (source, reduce, options) { - const root = new Root(options.layerRepeat) - let iteration = 0 - let maxDepth = 1 - let subTree: SubTree = root +export interface TrickleOptions { + layerRepeat?: number + maxChildrenPerNode?: number +} - for await (const layer of batch(source, options.maxChildrenPerNode)) { - if (subTree.isFull()) { - if (subTree !== root) { - root.addChild(await subTree.reduce(reduce)) - } +/** + * @see https://github.com/ipfs/specs/pull/57#issuecomment-265205384 + */ +export function trickle (options?: TrickleOptions): FileLayout { + const layerRepeat = options?.layerRepeat ?? DEFAULT_LAYER_REPEAT + const maxChildrenPerNode = options?.maxChildrenPerNode ?? 
DEFAULT_MAX_CHILDREN_PER_NODE - if (iteration > 0 && iteration % options.layerRepeat === 0) { - maxDepth++ - } + return async function trickleLayout (source, reduce): Promise { + const root = new Root(layerRepeat) + let iteration = 0 + let maxDepth = 1 + let subTree: SubTree = root + + for await (const layer of batch(source, maxChildrenPerNode)) { + if (subTree.isFull()) { + if (subTree !== root) { + root.addChild(await subTree.reduce(reduce)) + } + + if (iteration > 0 && iteration % layerRepeat === 0) { + maxDepth++ + } - subTree = new SubTree(maxDepth, options.layerRepeat, iteration) + subTree = new SubTree(maxDepth, layerRepeat, iteration) - iteration++ + iteration++ + } + + subTree.append(layer) } - subTree.append(layer) - } + if (subTree != null && subTree !== root) { + root.addChild(await subTree.reduce(reduce)) + } - if (subTree != null && subTree !== root) { - root.addChild(await subTree.reduce(reduce)) + return await root.reduce(reduce) } - - return await root.reduce(reduce) } class SubTree { diff --git a/packages/ipfs-unixfs-importer/src/options.ts b/packages/ipfs-unixfs-importer/src/options.ts deleted file mode 100644 index eaf0f713..00000000 --- a/packages/ipfs-unixfs-importer/src/options.ts +++ /dev/null @@ -1,51 +0,0 @@ -import mergeOptions from 'merge-options' -import { sha256 } from 'multiformats/hashes/sha2' -import { murmur3128 } from '@multiformats/murmur3' -import type { ImporterOptions, UserImporterOptions } from './index.js' - -async function hamtHashFn (buf: Uint8Array): Promise { - return (await murmur3128.encode(buf)) - // Murmur3 outputs 128 bit but, accidentally, IPFS Go's - // implementation only uses the first 64, so we must do the same - // for parity.. - .slice(0, 8) - // Invert buffer because that's how Go impl does it - .reverse() -} - -const defaultOptions: ImporterOptions = { - chunker: 'fixed', - strategy: 'balanced', // 'flat', 'trickle' - rawLeaves: false, - onlyHash: false, - reduceSingleLeafToSelf: true, - hasher: sha256, - leafType: 'file', // 'raw' - cidVersion: 0, - progress: () => () => {}, - // https://github.com/ipfs/go-ipfs/pull/8114/files#diff-eec963b47a6e1080d9d8023b4e438e6e3591b4154f7379a7e728401d2055374aR319 - shardSplitThresholdBytes: 262144, - fileImportConcurrency: 50, - blockWriteConcurrency: 10, - minChunkSize: 262144, - maxChunkSize: 262144, - avgChunkSize: 262144, - window: 16, - // FIXME: This number is too big for JavaScript - // https://github.com/ipfs/go-ipfs-chunker/blob/d0125832512163708c0804a3cda060e21acddae4/rabin.go#L11 - polynomial: 17437180132763653, // eslint-disable-line no-loss-of-precision,@typescript-eslint/no-loss-of-precision - maxChildrenPerNode: 174, - layerRepeat: 4, - wrapWithDirectory: false, - recursive: false, - hidden: false, - timeout: undefined, - hamtHashFn, - hamtHashCode: BigInt(0x22), - hamtBucketBits: 8 -} - -export default (options: UserImporterOptions = {}): ImporterOptions => { - const defaults = mergeOptions.bind({ ignoreUndefined: true }) - return defaults(defaultOptions, options) -} diff --git a/packages/ipfs-unixfs-importer/src/tree-builder.ts b/packages/ipfs-unixfs-importer/src/tree-builder.ts index 5300db0f..b40e0f29 100644 --- a/packages/ipfs-unixfs-importer/src/tree-builder.ts +++ b/packages/ipfs-unixfs-importer/src/tree-builder.ts @@ -2,18 +2,15 @@ import { DirFlat } from './dir-flat.js' import { flatToShard } from './flat-to-shard.js' import { Dir } from './dir.js' import { toPathComponents } from './utils/to-path-components.js' -import type { ImporterOptions, ImportResult, 
InProgressImportResult, TreeBuilder } from './index.js' +import type { ImportResult, InProgressImportResult, TreeBuilder } from './index.js' import type { Blockstore } from 'interface-blockstore' +import type { PersistOptions } from './utils/persist.js' -/** - * @typedef {import('./types').ImportResult} ImportResult - * @typedef {import('./types').InProgressImportResult} InProgressImportResult - * @typedef {import('./types').ImporterOptions} ImporterOptions - * @typedef {import('interface-blockstore').Blockstore} Blockstore - * @typedef {(source: AsyncIterable, blockstore: Blockstore, options: ImporterOptions) => AsyncIterable} TreeBuilder - */ +export interface AddToTreeOptions extends PersistOptions { + shardSplitThresholdBytes: number +} -async function addToTree (elem: InProgressImportResult, tree: Dir, options: ImporterOptions): Promise { +async function addToTree (elem: InProgressImportResult, tree: Dir, options: AddToTreeOptions): Promise { const pathElems = toPathComponents(elem.path ?? '') const lastIndex = pathElems.length - 1 let parent = tree @@ -70,36 +67,58 @@ async function * flushAndYield (tree: Dir | InProgressImportResult, blockstore: yield * tree.flush(blockstore) } -export const treeBuilder: TreeBuilder = async function * treeBuilder (source, block, options) { - let tree: Dir = new DirFlat({ - root: true, - dir: true, - path: '', - dirty: true, - flat: true - }, options) - - for await (const entry of source) { - if (entry == null) { - continue - } +export interface TreeBuilderOptions extends AddToTreeOptions { + wrapWithDirectory: boolean +} - tree = await addToTree(entry, tree, options) +export function defaultTreeBuilder (options: TreeBuilderOptions): TreeBuilder { + return async function * treeBuilder (source, block) { + let tree: Dir = new DirFlat({ + root: true, + dir: true, + path: '', + dirty: true, + flat: true + }, options) + + let rootDir: string | undefined + let singleRoot = false + + for await (const entry of source) { + if (entry == null) { + continue + } - if (entry.unixfs == null || !entry.unixfs.isDirectory()) { - yield entry - } - } + // if all paths are from the same root directory, we should + // wrap them all in that root directory + const dir = `${entry.originalPath ?? 
''}`.split('/')[0] + + if (dir != null && dir !== '') { + if (rootDir == null) { + rootDir = dir + singleRoot = true + } else if (rootDir !== dir) { + singleRoot = false + } + } - if (options.wrapWithDirectory) { - yield * flushAndYield(tree, block) - } else { - for await (const unwrapped of tree.eachChildSeries()) { - if (unwrapped == null) { - continue + tree = await addToTree(entry, tree, options) + + if (entry.unixfs == null || !entry.unixfs.isDirectory()) { + yield entry } + } - yield * flushAndYield(unwrapped.child, block) + if (options.wrapWithDirectory || (singleRoot && tree.childCount() > 1)) { + yield * flushAndYield(tree, block) + } else { + for await (const unwrapped of tree.eachChildSeries()) { + if (unwrapped == null) { + continue + } + + yield * flushAndYield(unwrapped.child, block) + } } } } diff --git a/packages/ipfs-unixfs-importer/src/utils/persist.ts b/packages/ipfs-unixfs-importer/src/utils/persist.ts index 9091df79..15f12752 100644 --- a/packages/ipfs-unixfs-importer/src/utils/persist.ts +++ b/packages/ipfs-unixfs-importer/src/utils/persist.ts @@ -3,14 +3,11 @@ import * as dagPb from '@ipld/dag-pb' import { sha256 } from 'multiformats/hashes/sha2' import type { Blockstore } from 'interface-blockstore' import type { BlockCodec } from 'multiformats/codecs/interface' -import type { MultihashHasher } from 'multiformats/hashes/interface' import type { Version as CIDVersion } from 'multiformats/cid' export interface PersistOptions { codec?: BlockCodec - hasher: MultihashHasher cidVersion: CIDVersion - onlyHash: boolean signal?: AbortSignal } @@ -19,26 +16,12 @@ export const persist = async (buffer: Uint8Array, blockstore: Blockstore, option options.codec = dagPb } - if (options.hasher == null) { - options.hasher = sha256 - } - - if (options.cidVersion === undefined) { - options.cidVersion = 1 - } - - if (options.codec === dagPb && options.hasher !== sha256) { - options.cidVersion = 1 - } - - const multihash = await options.hasher.digest(buffer) + const multihash = await sha256.digest(buffer) const cid = CID.create(options.cidVersion, options.codec.code, multihash) - if (!options.onlyHash) { - await blockstore.put(cid, buffer, { - signal: options.signal - }) - } + await blockstore.put(cid, buffer, { + signal: options.signal + }) return cid } diff --git a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts index 80ce60ff..f40f4272 100644 --- a/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts +++ b/packages/ipfs-unixfs-importer/test/builder-balanced.spec.ts @@ -1,9 +1,8 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import { balanced } from '../src/dag-builder/file/balanced.js' +import { balanced } from '../src/layout/balanced.js' import { CID } from 'multiformats/cid' -import defaultOptions from '../src/options.js' import type { InProgressImportResult } from '../src/index.js' async function reduce (leaves: InProgressImportResult[]): Promise { @@ -18,7 +17,6 @@ async function reduce (leaves: InProgressImportResult[]): Promise { size: 0n }] - const result = await balanced((async function * () { + const result = await balanced(options)((async function * () { yield * source - }()), reduce, options) + }()), reduce) expect(result).to.deep.equal(source[0]) }) @@ -48,9 +46,9 @@ describe('builder: balanced', () => { size: 0n }] - const result = await balanced((async function * () { + const result = await balanced(options)((async function * () { yield * source - }()), reduce, options) + 
}()), reduce) expect(result).to.deep.equal({ children: source @@ -61,9 +59,9 @@ describe('builder: balanced', () => { const source = [1, 2, 3, 4] // @ts-expect-error - const result = await balanced((async function * () { + const result = await balanced(options)((async function * () { yield * source - }()), reduce, options) + }()), reduce) expect(result).to.deep.equal({ children: [{ @@ -78,9 +76,9 @@ describe('builder: balanced', () => { const source = [1, 2, 3, 4, 5, 6, 7] // @ts-expect-error - const result = await balanced((async function * () { + const result = await balanced(options)((async function * () { yield * source - }()), reduce, options) + }()), reduce) expect(result).to.deep.equal({ children: [{ diff --git a/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts b/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts index 9ca4104b..6a8c6a52 100644 --- a/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts +++ b/packages/ipfs-unixfs-importer/test/builder-flat.spec.ts @@ -1,7 +1,7 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import { flat } from '../src/dag-builder/file/flat.js' +import { flat } from '../src/layout/flat.js' function reduce (leaves: any[]): any { if (leaves.length > 1) { @@ -15,15 +15,15 @@ describe('builder: flat', () => { it('reduces one value into itself', async () => { const source = [1] // @ts-expect-error - const result = await flat(source, reduce) + const result = await flat()(source, reduce) - expect(result).to.be.eql(1) + expect(result).to.be.equal(1) }) it('reduces 2 values into parent', async () => { const source = [1, 2] // @ts-expect-error - const result = await flat(source, reduce) + const result = await flat()(source, reduce) expect(result).to.be.eql({ children: [1, 2] }) }) diff --git a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts b/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts deleted file mode 100644 index f06cbe68..00000000 --- a/packages/ipfs-unixfs-importer/test/builder-only-hash.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { dagBuilder } from '../src/dag-builder/index.js' -import all from 'it-all' -import { MemoryBlockstore } from 'blockstore-core' -import defaultOptions from '../src/options.js' -import asAsyncIterable from './helpers/as-async-iterable.js' - -describe('builder: onlyHash', () => { - const block = new MemoryBlockstore() - - it('will only chunk and hash if passed an "onlyHash" option', async () => { - const nodes = await all(dagBuilder([{ - path: 'foo.txt', - content: asAsyncIterable(Uint8Array.from([0, 1, 2, 3, 4])) - }], block, { - ...defaultOptions({}), - onlyHash: true - })) - - expect(nodes.length).to.equal(1) - - try { - await block.get((await nodes[0]()).cid) - - throw new Error('Should have errored') - } catch (err: any) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) -}) diff --git a/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts b/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts index a1beda1e..dc5f0843 100644 --- a/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts +++ b/packages/ipfs-unixfs-importer/test/builder-trickle-dag.spec.ts @@ -1,7 +1,7 @@ /* eslint-env mocha */ import { expect } from 'aegir/chai' -import { trickle } from '../src/dag-builder/file/trickle.js' +import { trickle } from '../src/layout/trickle.js' import asAsyncIterable from './helpers/as-async-iterable.js' const createValues = (max: number): number[] => { @@ -30,14 +30,14 @@ 
const options = { describe('builder: trickle', () => { it('reduces one value into itself', async () => { // @ts-expect-error - const result = await trickle(asAsyncIterable([1]), reduce, options) + const result = await trickle(options)(asAsyncIterable([1]), reduce) expect(result).to.deep.equal(1) }) it('reduces 3 values into parent', async () => { // @ts-expect-error - const result = await trickle(createValues(3), reduce, options) + const result = await trickle(options)(createValues(3), reduce) expect(result).to.deep.equal({ children: [ @@ -50,7 +50,7 @@ describe('builder: trickle', () => { it('reduces 6 values correctly', async () => { // @ts-expect-error - const result = await trickle(createValues(6), reduce, options) + const result = await trickle(options)(createValues(6), reduce) expect(result).to.deep.equal({ children: [ @@ -70,7 +70,7 @@ describe('builder: trickle', () => { it('reduces 9 values correctly', async () => { // @ts-expect-error - const result = await trickle(createValues(9), reduce, options) + const result = await trickle(options)(createValues(9), reduce) expect(result).to.deep.equal({ children: [ @@ -97,7 +97,7 @@ describe('builder: trickle', () => { it('reduces 12 values correctly', async () => { // @ts-expect-error - const result = await trickle(createValues(12), reduce, options) + const result = await trickle(options)(createValues(12), reduce) expect(result).to.deep.equal({ children: [ @@ -131,7 +131,7 @@ describe('builder: trickle', () => { it('reduces 21 values correctly', async () => { // @ts-expect-error - const result = await trickle(createValues(21), reduce, options) + const result = await trickle(options)(createValues(21), reduce) expect(result).to.deep.equal({ children: [ @@ -186,7 +186,7 @@ describe('builder: trickle', () => { it('reduces 68 values correctly', async () => { // @ts-expect-error - const result = await trickle(createValues(68), reduce, options) + const result = await trickle(options)(createValues(68), reduce) expect(result).to.deep.equal( { @@ -354,7 +354,7 @@ describe('builder: trickle', () => { it('reduces 93 values correctly', async () => { // @ts-expect-error - const result = await trickle(createValues(93), reduce, options) + const result = await trickle(options)(createValues(93), reduce) expect(result).to.deep.equal( { diff --git a/packages/ipfs-unixfs-importer/test/builder.spec.ts b/packages/ipfs-unixfs-importer/test/builder.spec.ts deleted file mode 100644 index 261f911d..00000000 --- a/packages/ipfs-unixfs-importer/test/builder.spec.ts +++ /dev/null @@ -1,114 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import * as mh from 'multiformats/hashes/digest' -import { sha256, sha512 } from 'multiformats/hashes/sha2' -import { decode } from '@ipld/dag-pb' -import { UnixFS } from 'ipfs-unixfs' -import { dagBuilder } from '../src/dag-builder/index.js' -import first from 'it-first' -import { MemoryBlockstore } from 'blockstore-core' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import defaultOptions from '../src/options.js' -import asAsyncIterable from './helpers/as-async-iterable.js' - -describe('builder', () => { - const block = new MemoryBlockstore() - - const testMultihashes = [sha256, sha512] - - it('allows multihash hash algorithm to be specified', async () => { - for (let i = 0; i < testMultihashes.length; i++) { - const hasher = testMultihashes[i] - const content = uint8ArrayFromString(String(Math.random() + Date.now())) - const inputFile = { - path: `${content}.txt`, - content: 
asAsyncIterable(content) - } - - const result = await first(dagBuilder([inputFile], block, { - ...defaultOptions(), - hasher - })) - - if (result == null) { - throw new Error('Nothing built') - } - - const imported = await result() - expect(imported).to.exist() - - // Verify multihash has been encoded using hasher - expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) - - // Fetch using hasher encoded multihash - const importedBlock = await block.get(imported.cid) - const node = decode(importedBlock) - if (node.Data == null) { - throw new Error('PBNode Data undefined') - } - const fetchedContent = UnixFS.unmarshal(node.Data).data - expect(fetchedContent).to.deep.equal(content) - } - }) - - it('allows multihash hash algorithm to be specified for big file', async function () { - this.timeout(30000) - - for (let i = 0; i < testMultihashes.length; i++) { - const hasher = testMultihashes[i] - const content = String(Math.random() + Date.now()) - const inputFile = { - path: content + '.txt', - // Bigger than maxChunkSize - content: asAsyncIterable(new Uint8Array(262144 + 5).fill(1)) - } - - const result = await first(dagBuilder([inputFile], block, { - ...defaultOptions(), - hasher - })) - - if (result == null) { - throw new Error('Nothing built') - } - - const imported = await result() - - expect(imported).to.exist() - expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) - } - }) - - it('allows multihash hash algorithm to be specified for a directory', async () => { - for (let i = 0; i < testMultihashes.length; i++) { - const hasher = testMultihashes[i] - const inputFile = { - path: `${String(Math.random() + Date.now())}-dir` - } - - const result = await first(dagBuilder([{ ...inputFile }], block, { - ...defaultOptions(), - hasher - })) - - if (result == null) { - return new Error('Nothing built') - } - - const imported = await result() - - expect(mh.decode(imported.cid.multihash.bytes).code).to.equal(hasher.code) - - // Fetch using hasher encoded multihash - const importedBlock = await block.get(imported.cid) - const node = decode(importedBlock) - - if (node.Data == null) { - throw new Error('PBNode Data undefined') - } - const meta = UnixFS.unmarshal(node.Data) - expect(meta.type).to.equal('directory') - } - }) -}) diff --git a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts index 4e590597..9d5cae8f 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts +++ b/packages/ipfs-unixfs-importer/test/chunker-custom.spec.ts @@ -37,7 +37,7 @@ describe('custom chunker', function () { content }], block, { chunker: source => source, - bufferImporter: async function * (file, block, options) { + bufferImporter: async function * (file, block) { for await (const item of file.content) { yield async () => await put(item) } @@ -50,7 +50,9 @@ describe('custom chunker', function () { it('keeps custom chunking', async () => { const content = iter() for await (const part of importer([{ path: 'test', content }], block, { - chunker: source => source + chunker: source => source, + rawLeaves: false, + cidVersion: 0 })) { expect(part.size).to.equal(116n) } diff --git a/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts b/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts index e9cfd831..5597a83d 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts +++ b/packages/ipfs-unixfs-importer/test/chunker-fixed-size.spec.ts @@ -5,7 +5,6 @@ import { expect } 
from 'aegir/chai' import all from 'it-all' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' -import defaultOptions from '../src/options.js' import asAsyncIterable from './helpers/as-async-iterable.js' const rawFile = new Uint8Array(Math.pow(2, 20)) @@ -22,10 +21,9 @@ describe('chunker: fixed size', function () { b2.fill('b'.charCodeAt(0)) b3.fill('c'.charCodeAt(0)) - const chunks = await all(fixedSize(asAsyncIterable([b1, b2, b3]), { - ...defaultOptions(), - maxChunkSize: 256 - })) + const chunks = await all(fixedSize({ + chunkSize: 256 + })(asAsyncIterable([b1, b2, b3]))) expect(chunks).to.have.length(8) chunks.forEach((chunk) => { @@ -40,10 +38,9 @@ describe('chunker: fixed size', function () { for (let i = 0; i < (256 * 12); i++) { input.push(buf) } - const chunks = await all(fixedSize(asAsyncIterable(input), { - ...defaultOptions(), - maxChunkSize: 256 - })) + const chunks = await all(fixedSize({ + chunkSize: 256 + })(asAsyncIterable(input))) expect(chunks).to.have.length(12) chunks.forEach((chunk) => { @@ -53,10 +50,9 @@ describe('chunker: fixed size', function () { it('256 KiB chunks', async () => { const KiB256 = 262144 - const chunks = await all(fixedSize(asAsyncIterable([rawFile]), { - ...defaultOptions(), - maxChunkSize: KiB256 - })) + const chunks = await all(fixedSize({ + chunkSize: KiB256 + })(asAsyncIterable([rawFile]))) expect(chunks).to.have.length(4) chunks.forEach((chunk) => { @@ -68,10 +64,9 @@ describe('chunker: fixed size', function () { const KiB256 = 262144 const file = uint8ArrayConcat([rawFile, uint8ArrayFromString('hello')]) - const chunks = await all(fixedSize(asAsyncIterable([file]), { - ...defaultOptions(), - maxChunkSize: KiB256 - })) + const chunks = await all(fixedSize({ + chunkSize: KiB256 + })(asAsyncIterable([file]))) expect(chunks).to.have.length(5) let counter = 0 diff --git a/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts b/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts index 2ddcf5aa..f6788e2c 100644 --- a/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts +++ b/packages/ipfs-unixfs-importer/test/chunker-rabin.spec.ts @@ -6,7 +6,6 @@ import all from 'it-all' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' import { concat as uint8ArrayConcat } from 'uint8arrays/concat' import asAsyncIterable from './helpers/as-async-iterable.js' -import defaultOptions from '../src/options.js' import { isElectronRenderer } from 'wherearewe' const rawFile = new Uint8Array(Math.pow(2, 20)).fill(1) @@ -31,12 +30,11 @@ describe('chunker: rabin', function () { b2.fill('b'.charCodeAt(0)) b3.fill('c'.charCodeAt(0)) - const chunks = await all(rabin(asAsyncIterable([b1, b2, b3]), { - ...defaultOptions(), + const chunks = await all(rabin({ minChunkSize: 48, avgChunkSize: 96, maxChunkSize: 192 - })) + })(asAsyncIterable([b1, b2, b3]))) const size = chunks.reduce((acc, curr) => acc + curr.length, 0) @@ -55,12 +53,11 @@ describe('chunker: rabin', function () { const b1 = new Uint8Array(10 * 256) b1.fill('a'.charCodeAt(0)) - const chunks = await all(rabin(asAsyncIterable([b1]), { - ...defaultOptions(), + const chunks = await all(rabin({ maxChunkSize: 262144, minChunkSize: 18, avgChunkSize: 256 - })) + })(asAsyncIterable([b1]))) chunks.forEach((chunk) => { expect(chunk).to.have.length.gte(256 / 3) @@ -72,13 +69,12 @@ describe('chunker: rabin', function () { const KiB256 = 262144 const file = uint8ArrayConcat([rawFile, 
uint8ArrayFromString('hello')]) const opts = { - ...defaultOptions(), minChunkSize: Math.round(KiB256 / 3), avgChunkSize: KiB256, maxChunkSize: Math.round(KiB256 + (KiB256 / 2)) } - const chunks = await all(rabin(asAsyncIterable([file]), opts)) + const chunks = await all(rabin(opts)(asAsyncIterable([file]))) chunks.forEach((chunk) => { expect(chunk).to.have.length.gte(opts.minChunkSize) @@ -88,13 +84,12 @@ describe('chunker: rabin', function () { it('throws when min chunk size is too small', async () => { const opts = { - ...defaultOptions(), minChunkSize: 1, maxChunkSize: 100 } try { - await all(rabin(asAsyncIterable([]), opts)) + await all(rabin(opts)(asAsyncIterable([]))) throw new Error('Should have thrown') } catch (err: any) { expect(err.code).to.equal('ERR_INVALID_MIN_CHUNK_SIZE') @@ -103,7 +98,6 @@ describe('chunker: rabin', function () { it('throws when avg chunk size is not specified', async () => { const opts = { - ...defaultOptions(), avgChunkSize: undefined } @@ -119,13 +113,12 @@ describe('chunker: rabin', function () { it('uses the min chunk size when max and avg are too small', async () => { const file = uint8ArrayConcat([rawFile, uint8ArrayFromString('hello')]) const opts = { - ...defaultOptions(), minChunkSize: 100, maxChunkSize: 5, avgChunkSize: 5 } - const chunks = await all(rabin(asAsyncIterable([file]), opts)) + const chunks = await all(rabin(opts)(asAsyncIterable([file]))) chunks.forEach((chunk, index) => { if (index === chunks.length - 1) { diff --git a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts index f8f74a10..1be6b2c5 100644 --- a/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts +++ b/packages/ipfs-unixfs-importer/test/hash-parity-with-go-ipfs.spec.ts @@ -1,30 +1,31 @@ /* eslint-env mocha */ -import { importer } from '../src/index.js' +import { importer, ImporterOptions } from '../src/index.js' import { expect } from 'aegir/chai' import randomByteStream from './helpers/finite-pseudorandom-byte-stream.js' import first from 'it-first' import last from 'it-last' import { MemoryBlockstore } from 'blockstore-core' -import defaultOptions from '../src/options.js' import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' +import { balanced, FileLayout, flat, trickle } from '../src/layout/index.js' -const strategies: Array<'flat' | 'trickle' | 'balanced'> = [ - 'flat', - 'trickle', - 'balanced' -] +const strategies: Record<'flat' | 'trickle' | 'balanced', FileLayout> = { + flat: flat(), + trickle: trickle(), + balanced: balanced() +} -const expectedHashes = { +const expectedHashes: Record = { flat: 'QmeJ9FRWKnXZQiX5CM1E8j4gpGbg6otpgajThqsbnBpoyD', balanced: 'QmRdPboiJQoZ5cdazR9a8vGqdJvWg6M5bfdtUSKNHpuscj', trickle: 'QmdZcefqMZ3tzdS4CRBN5s1c67eS3nQzN8TNXFBYfgofoy' } -strategies.forEach(strategy => { - const options = { - ...defaultOptions(), - strategy +Object.entries(strategies).forEach(([strategy, layout]) => { + const options: Partial = { + layout, + cidVersion: 0, + rawLeaves: false } if (strategy === 'trickle') { diff --git a/packages/ipfs-unixfs/package.json b/packages/ipfs-unixfs/package.json index 6231dd69..0e99bf22 100644 --- a/packages/ipfs-unixfs/package.json +++ b/packages/ipfs-unixfs/package.json @@ -152,6 +152,6 @@ "fs": false }, "typedoc": { - "entryPoint": "./src/index.js" + "entryPoint": "./src/index.ts" } } From 17e400ac53972c898e6b574a91c624a8c2ecbb9a Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 13 Feb 2023 17:14:49 
+0100 Subject: [PATCH 4/5] chore: linting --- packages/ipfs-unixfs-importer/src/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/ipfs-unixfs-importer/src/index.ts b/packages/ipfs-unixfs-importer/src/index.ts index 3c9fd350..0bf23c93 100644 --- a/packages/ipfs-unixfs-importer/src/index.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -311,7 +311,7 @@ export async function importContent (content: ImportCandidate, blockstore: Block * ``` */ export async function importBytes (buf: ImportContent, blockstore: Blockstore, options: ImporterOptions = {}): Promise { - return importContent({ + return await importContent({ content: buf }, blockstore, options) } @@ -338,7 +338,7 @@ export async function importBytes (buf: ImportContent, blockstore: Blockstore, o * ``` */ export async function importByteStream (bufs: ByteStream, blockstore: Blockstore, options: ImporterOptions = {}): Promise { - return importContent({ + return await importContent({ content: bufs }, blockstore, options) } From a78ec2b589563ca3b64f0549cfa5bd3b313fbf4a Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 13 Feb 2023 18:55:04 +0100 Subject: [PATCH 5/5] chore: fix raw leaves options --- packages/ipfs-unixfs-exporter/package.json | 2 +- .../test/import-export-dir-sharding.spec.ts | 22 ++++++++++---- .../test/import-export-nested-dir.spec.ts | 5 +++- .../test/importer.spec.ts | 29 ++++++++++++------- packages/ipfs-unixfs-importer/src/index.ts | 4 +-- 5 files changed, 43 insertions(+), 19 deletions(-) diff --git a/packages/ipfs-unixfs-exporter/package.json b/packages/ipfs-unixfs-exporter/package.json index bf42e9be..f503be00 100644 --- a/packages/ipfs-unixfs-exporter/package.json +++ b/packages/ipfs-unixfs-exporter/package.json @@ -29,7 +29,7 @@ "exports": { ".": { "types": "./dist/src/index.d.ts", - "import": "./src/index.js" + "import": "./dist/src/index.js" } }, "eslintConfig": { diff --git a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts index e5d9bd34..8ea7ec40 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/import-export-dir-sharding.spec.ts @@ -22,7 +22,9 @@ describe('builder: directory sharding', () => { path: 'a/b', content: asAsyncIterable(content) }], block, { - shardSplitThresholdBytes: Infinity // never shard + shardSplitThresholdBytes: Infinity, // never shard + rawLeaves: false, + cidVersion: 0 })) expect(nodes.length).to.equal(2) @@ -80,7 +82,9 @@ describe('builder: directory sharding', () => { path: 'a/b', content: asAsyncIterable(uint8ArrayFromString(content)) }], block, { - shardSplitThresholdBytes: Infinity // never shard + shardSplitThresholdBytes: Infinity, // never shard + rawLeaves: false, + cidVersion: 0 })) const nonShardedHash = nodes[1].cid @@ -117,7 +121,9 @@ describe('builder: directory sharding', () => { path: 'a/b', content: asAsyncIterable(uint8ArrayFromString(content)) }], block, { - shardSplitThresholdBytes: 0 // always shard + shardSplitThresholdBytes: 0, // always shard + rawLeaves: false, + cidVersion: 0 })) const shardedHash = nodes[1].cid @@ -185,7 +191,10 @@ describe('builder: directory sharding', () => { } } - const nodes = await all(importer(source, block)) + const nodes = await all(importer(source, block, { + rawLeaves: false, + cidVersion: 0 + })) expect(nodes.length).to.equal(maxDirs + 1) // files plus the containing directory @@ -245,7 +254,10 @@ describe('builder: 
directory sharding', () => { } } - const node = await last(importer(source, block)) + const node = await last(importer(source, block, { + rawLeaves: false, + cidVersion: 0 + })) if (node == null) { throw new Error('Nothing imported') diff --git a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts index 350e02ed..919fd04d 100644 --- a/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/import-export-nested-dir.spec.ts @@ -32,7 +32,10 @@ describe('import and export: directory', () => { content: asAsyncIterable(uint8ArrayFromString('cream')) }] - const files = await all(importer(source, block)) + const files = await all(importer(source, block, { + rawLeaves: false, + cidVersion: 0 + })) expect(files.map(normalizeNode).sort(byPath)).to.be.eql([{ path: 'a/b/h', diff --git a/packages/ipfs-unixfs-exporter/test/importer.spec.ts b/packages/ipfs-unixfs-exporter/test/importer.spec.ts index bf3b1929..7d5cea99 100644 --- a/packages/ipfs-unixfs-exporter/test/importer.spec.ts +++ b/packages/ipfs-unixfs-exporter/test/importer.spec.ts @@ -216,7 +216,7 @@ const checkLeafNodeTypes = async (blockstore: Blockstore, options: Partial>, expected: any): Promise => { +const checkNodeLinks = async (blockstore: Blockstore, options: Partial, expected: any): Promise => { for await (const file of importer([{ path: 'foo', content: asAsyncIterable(new Uint8Array(100).fill(1)) @@ -346,8 +346,10 @@ strategies.forEach((strategy) => { } const block = new MemoryBlockstore() - const options: Partial> = { - layout + const options: Partial = { + layout, + rawLeaves: false, + cidVersion: 0 } if (strategy === 'trickle') { @@ -676,10 +678,11 @@ strategies.forEach((strategy) => { createInputFile('foo/bar', 262144 + 21) ] - const options: Partial> = { + const options: Partial = { cidVersion: 1, // Ensures we use DirSharded for the data below - shardSplitThresholdBytes: 3 + shardSplitThresholdBytes: 3, + rawLeaves: false } const files = await all(importer(inputFiles.map(file => ({ @@ -718,25 +721,29 @@ strategies.forEach((strategy) => { it('imports file with raw leaf nodes when specified', async () => { await checkLeafNodeTypes(block, { - leafType: 'raw' + leafType: 'raw', + rawLeaves: false }, 'raw') }) it('imports file with file leaf nodes when specified', async () => { await checkLeafNodeTypes(block, { - leafType: 'file' + leafType: 'file', + rawLeaves: false }, 'file') }) it('reduces file to single node when specified', async () => { await checkNodeLinks(block, { - reduceSingleLeafToSelf: true + reduceSingleLeafToSelf: true, + rawLeaves: false }, 0) }) it('does not reduce file to single node when overidden by options', async () => { await checkNodeLinks(block, { - reduceSingleLeafToSelf: false + reduceSingleLeafToSelf: false, + rawLeaves: false }, 1) }) @@ -1005,7 +1012,9 @@ strategies.forEach((strategy) => { path: '/foo/file1.txt', content: asAsyncIterable(bigFile), mtime - }], block)) + }], block, { + rawLeaves: false + })) const root = await exporter(entries[0].cid, block) expect(root).to.have.deep.nested.property('unixfs.mtime', mtime) diff --git a/packages/ipfs-unixfs-importer/src/index.ts b/packages/ipfs-unixfs-importer/src/index.ts index 0bf23c93..e14950ba 100644 --- a/packages/ipfs-unixfs-importer/src/index.ts +++ b/packages/ipfs-unixfs-importer/src/index.ts @@ -220,8 +220,8 @@ export async function * importer (source: ImportCandidateStream, blockstore: Blo const wrapWithDirectory = 
options.wrapWithDirectory ?? false const shardSplitThresholdBytes = options.shardSplitThresholdBytes ?? 262144 - const cidVersion = options.cidVersion ?? 0 - const rawLeaves = options.rawLeaves ?? false + const cidVersion = options.cidVersion ?? 1 + const rawLeaves = options.rawLeaves ?? true const leafType = options.leafType ?? 'file' const fileImportConcurrency = options.fileImportConcurrency ?? 50 const blockWriteConcurrency = options.blockWriteConcurrency ?? 10
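
Note on the chunker changes exercised by the test updates above: fixedSize and rabin are now factory functions that take their options up front and return the chunking function, instead of receiving (source, options). The following is a minimal sketch of wiring a pre-configured chunker into the importer; the 'ipfs-unixfs-importer/chunker' subpath import is an assumption and may differ from the package's final exports.

import { importer } from 'ipfs-unixfs-importer'
// Assumed subpath export for the chunker factories used in the tests above
import { fixedSize } from 'ipfs-unixfs-importer/chunker'
import { MemoryBlockstore } from 'blockstore-core'

const blockstore = new MemoryBlockstore()

// fixedSize({ chunkSize }) returns the chunker itself, so it is passed to
// the importer already configured rather than via a maxChunkSize option
for await (const entry of importer([{
  path: 'hello.txt',
  content: (async function * () {
    yield new Uint8Array(1024 * 1024).fill(1)
  })()
}], blockstore, {
  chunker: fixedSize({ chunkSize: 262144 })
})) {
  console.info(entry.path, entry.cid.toString(), entry.size)
}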
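
Note on the final hunk: the importer defaults flip to rawLeaves: true and cidVersion: 1, which is why the test changes in this patch now pass the old values explicitly. Below is a rough sketch of how a caller could pin the previous behaviour after upgrading, using only option names that appear in the hunks above; the 'ipfs-unixfs-importer/layout' subpath import is an assumption.

import { importer, ImporterOptions } from 'ipfs-unixfs-importer'
import { balanced } from 'ipfs-unixfs-importer/layout' // assumed export path
import { MemoryBlockstore } from 'blockstore-core'

// Reproduce the pre-patch defaults: CIDv0 roots, dag-pb leaves and a
// balanced layout, keeping CIDs identical to those produced before this change
const legacyOptions: Partial<ImporterOptions> = {
  cidVersion: 0,
  rawLeaves: false,
  layout: balanced()
}

const blockstore = new MemoryBlockstore()

for await (const entry of importer([{
  path: 'file.txt',
  content: (async function * () {
    yield new Uint8Array(262144 + 5).fill(1)
  })()
}], blockstore, legacyOptions)) {
  // With these options the root should still be a Qm... (CIDv0) hash
  console.info(entry.path, entry.cid.toString())
}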