From dcc5ce7d0dc2b1a160f3a2b686d15faf7092b57b Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 18 Nov 2019 09:54:12 -0600 Subject: [PATCH 01/15] chore: dep updated --- package.json | 6 +++--- src/core/write.js | 2 +- test/mv.spec.js | 2 +- test/read.spec.js | 2 +- test/write.spec.js | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/package.json b/package.json index 1195eff..0e648ef 100644 --- a/package.json +++ b/package.json @@ -45,9 +45,9 @@ "detect-webworker": "^1.0.0", "dirty-chai": "^2.0.1", "ipfs-block-service": "~0.16.0", - "ipfs-repo": "~0.27.0", + "ipfs-repo": "^0.29.1", "ipld": "~0.25.0", - "memdown": "^4.0.0", + "memdown": "^5.1.0", "temp-write": "^4.0.0" }, "dependencies": { @@ -58,7 +58,7 @@ "debug": "^4.1.0", "err-code": "^2.0.0", "hamt-sharding": "~0.0.2", - "interface-datastore": "~0.7.0", + "interface-datastore": "^0.8.0", "ipfs-multipart": "~0.2.0", "ipfs-unixfs": "~0.1.16", "ipfs-unixfs-exporter": "~0.38.0", diff --git a/src/core/write.js b/src/core/write.js index 8691d97..18c9a2d 100644 --- a/src/core/write.js +++ b/src/core/write.js @@ -165,7 +165,7 @@ const write = async (context, source, destination, options) => { offset: bytesWritten }) } else { - log(`Not writing last bytes from original file`) + log('Not writing last bytes from original file') } } diff --git a/test/mv.spec.js b/test/mv.spec.js index b77fb60..655b3b9 100644 --- a/test/mv.spec.js +++ b/test/mv.spec.js @@ -237,7 +237,7 @@ describe('mv', () => { it('moves a file from a sub-shard of a sharded directory to a sharded directory', async () => { const shardedDirPath = await createShardedDirectory(mfs, 10, 75) const otherShardedDirPath = await createShardedDirectory(mfs) - const file = `file-1a.txt` + const file = 'file-1a.txt' const filePath = `${shardedDirPath}/${file}` const finalFilePath = `${otherShardedDirPath}/${file}` diff --git a/test/read.spec.js b/test/read.spec.js index 35a044b..5e00d27 100644 --- a/test/read.spec.js +++ b/test/read.spec.js @@ 
-17,7 +17,7 @@ describe('read', () => { mfs = await createMfs() }) - describe(`read`, () => { + describe('read', () => { it('reads a small file', async () => { const filePath = '/small-file.txt' diff --git a/test/write.spec.js b/test/write.spec.js index 468810f..9571e7b 100644 --- a/test/write.spec.js +++ b/test/write.spec.js @@ -591,7 +591,7 @@ describe('write', () => { it('overwrites a file in a subshard of a sharded directory', async () => { const shardedDirPath = await createShardedDirectory(mfs, 10, 75) - const newFile = `file-1a.txt` + const newFile = 'file-1a.txt' const newFilePath = `${shardedDirPath}/${newFile}` const newContent = Buffer.from([3, 2, 1, 0]) From 95e1d9cdcde3083580f8db2c9a98e4a81419ad89 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 21 Nov 2019 10:41:41 -0600 Subject: [PATCH 02/15] feat: adds touch and chmod commands and metadata Adds UnixFSv1.5 metadata support to mfs, including displaying it when listing dirs and statting files. Also adds `touch` and `chmod` commands to manipulate metadata in a similar way to the unix shell. 
--- package.json | 7 ++- src/cli/chmod.js | 63 +++++++++++++++++++++ src/cli/cp.js | 2 +- src/cli/ls.js | 8 ++- src/cli/mkdir.js | 25 ++++++++- src/cli/stat.js | 8 ++- src/cli/touch.js | 59 ++++++++++++++++++++ src/cli/utils.js | 7 ++- src/core/chmod.js | 74 +++++++++++++++++++++++++ src/core/index.js | 4 +- src/core/ls.js | 8 ++- src/core/mkdir.js | 8 ++- src/core/stat.js | 4 +- src/core/touch.js | 101 ++++++++++++++++++++++++++++++++++ src/core/utils/add-link.js | 24 +++++++- src/core/utils/create-node.js | 11 +++- src/core/utils/hamt-utils.js | 13 ++++- src/core/write.js | 11 +++- src/http/chmod.js | 39 +++++++++++++ src/http/index.js | 4 ++ src/http/touch.js | 48 ++++++++++++++++ src/http/write.js | 3 +- test/chmod.spec.js | 65 ++++++++++++++++++++++ test/helpers/chai.js | 7 +++ test/touch.spec.js | 80 +++++++++++++++++++++++++++ 25 files changed, 658 insertions(+), 25 deletions(-) create mode 100644 src/cli/chmod.js create mode 100644 src/cli/touch.js create mode 100644 src/core/chmod.js create mode 100644 src/core/touch.js create mode 100644 src/http/chmod.js create mode 100644 src/http/touch.js create mode 100644 test/chmod.spec.js create mode 100644 test/helpers/chai.js create mode 100644 test/touch.spec.js diff --git a/package.json b/package.json index 0e648ef..0aadd6f 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,8 @@ "aegir": "^20.0.0", "async-iterator-all": "^1.0.0", "chai": "^4.2.0", + "chai-as-promised": "^7.1.1", + "delay": "^4.3.0", "detect-node": "^2.0.4", "detect-webworker": "^1.0.0", "dirty-chai": "^2.0.1", @@ -60,9 +62,10 @@ "hamt-sharding": "~0.0.2", "interface-datastore": "^0.8.0", "ipfs-multipart": "~0.2.0", - "ipfs-unixfs": "~0.1.16", + "ipfs-unixfs": "^0.2.0", "ipfs-unixfs-exporter": "~0.38.0", - "ipfs-unixfs-importer": "~0.40.0", + "ipfs-unixfs-importer": "^0.40.0", + "ipfs-utils": "^0.4.0", "ipld-dag-pb": "~0.18.0", "joi-browser": "^13.4.0", "mortice": "^2.0.0", diff --git a/src/cli/chmod.js b/src/cli/chmod.js new file mode 
100644 index 0000000..f0083ca --- /dev/null +++ b/src/cli/chmod.js @@ -0,0 +1,63 @@ +'use strict' + +const { + asBoolean, + asOctal +} = require('./utils') + +module.exports = { + command: 'chmod [mode] [path]', + + describe: 'Change file modes', + + builder: { + path: { + type: 'string', + describe: 'The MFS path to change the mode of' + }, + mode: { + type: 'int', + coerce: asOctal, + describe: 'The mode to use' + }, + recursive: { + type: 'boolean', + default: false, + coerce: asBoolean, + describe: 'Whether to change modes recursively' + }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, + 'shard-split-threshold': { + type: 'number', + default: 1000, + describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + } + }, + + handler (argv) { + const { + path, + mode, + getIpfs, + recursive, + flush, + shardSplitThreshold + } = argv + + argv.resolve((async () => { + const ipfs = await getIpfs() + + return ipfs.files.chmod(path, mode, { + recursive, + flush, + shardSplitThreshold + }) + })()) + } +} diff --git a/src/cli/cp.js b/src/cli/cp.js index 473fb33..7c3a085 100644 --- a/src/cli/cp.js +++ b/src/cli/cp.js @@ -18,7 +18,7 @@ module.exports = { describe: 'Create any non-existent intermediate directories' }, format: { - alias: 'h', + alias: 'f', type: 'string', default: 'dag-pb', describe: 'If intermediate directories are created, use this format to create them (experimental)' diff --git a/src/cli/ls.js b/src/cli/ls.js index 236fd89..ffe2c1f 100644 --- a/src/cli/ls.js +++ b/src/cli/ls.js @@ -10,6 +10,8 @@ const { const { FILE_SEPARATOR } = require('../core/utils/constants') +const formatMode = require('ipfs-utils/src/files/format-mode') +const formatMtime = require('ipfs-utils/src/files/format-mtime') module.exports = { command: 'ls [path]', @@ -64,8 +66,8 @@ module.exports = { } if (long) { - files.forEach(link => { - 
print(`${link.name}\t${link.hash}\t${link.size}`) + files.forEach(file => { + print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.hash}\t${file.size}`) }) } else { files.forEach(link => print(link.name)) @@ -85,7 +87,7 @@ module.exports = { }), through(file => { if (long) { - print(`${file.name}\t${file.hash}\t${file.size}`) + print(`${formatMode(file.mode, file.type === 1)}\t${formatMtime(file.mtime)}\t${file.name}\t${file.hash}\t${file.size}`) } else { print(file.name) } diff --git a/src/cli/mkdir.js b/src/cli/mkdir.js index 4e70fe9..cf413ab 100644 --- a/src/cli/mkdir.js +++ b/src/cli/mkdir.js @@ -1,7 +1,8 @@ 'use strict' const { - asBoolean + asBoolean, + asOctal } = require('./utils') module.exports = { @@ -38,6 +39,20 @@ module.exports = { type: 'number', default: 1000, describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + }, + mode: { + alias: 'm', + type: 'number', + default: true, + coerce: asOctal, + describe: 'Mode to apply to the new directory' + }, + mtime: { + alias: 'm', + type: 'number', + default: true, + coerce: asOctal, + describe: 'Mtime to apply to the new directory' } }, @@ -49,7 +64,9 @@ module.exports = { cidVersion, hashAlg, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime } = argv argv.resolve((async () => { @@ -60,7 +77,9 @@ module.exports = { cidVersion, hashAlg, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime }) })()) } diff --git a/src/cli/stat.js b/src/cli/stat.js index 5807f0a..94ef972 100644 --- a/src/cli/stat.js +++ b/src/cli/stat.js @@ -18,8 +18,10 @@ module.exports = { Size: CumulativeSize: ChildBlocks: -Type: `, - describe: 'Print statistics in given format. Allowed tokens: . Conflicts with other format options.' +Type: +Mode: +Mtime: `, + describe: 'Print statistics in given format. Allowed tokens: . Conflicts with other format options.' 
}, hash: { alias: 'h', @@ -79,6 +81,8 @@ Type: `, .replace('', stats.cumulativeSize) .replace('', stats.blocks) .replace('', stats.type) + .replace('', stats.mode) + .replace('', stats.mtime) ) }) })()) diff --git a/src/cli/touch.js b/src/cli/touch.js new file mode 100644 index 0000000..9687353 --- /dev/null +++ b/src/cli/touch.js @@ -0,0 +1,59 @@ +'use strict' + +const { + asBoolean +} = require('./utils') + +module.exports = { + command: 'touch [path]', + + describe: 'change file modification times', + + builder: { + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, + 'shard-split-threshold': { + type: 'number', + default: 1000, + describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + }, + 'cid-version': { + alias: ['cid-ver'], + type: 'number', + default: 0, + describe: 'Cid version to use' + }, + mtime: { + alias: 'm', + type: 'number', + default: parseInt(Date.now() / 1000), + describe: 'Time to use as the new modification time' + } + }, + + handler (argv) { + const { + path, + getIpfs, + flush, + shardSplitThreshold, + cidVersion, + mtime + } = argv + + argv.resolve((async () => { + const ipfs = await getIpfs() + + return ipfs.files.touch(path, mtime, { + flush, + shardSplitThreshold, + cidVersion + }) + })()) + } +} diff --git a/src/cli/utils.js b/src/cli/utils.js index c8169ce..9ebfcda 100644 --- a/src/cli/utils.js +++ b/src/cli/utils.js @@ -31,8 +31,13 @@ const asBoolean = (value) => { return false } +const asOctal = (value) => { + return parseInt(value, 8) +} + module.exports = { disablePrinting, print, - asBoolean + asBoolean, + asOctal } diff --git a/src/core/chmod.js b/src/core/chmod.js new file mode 100644 index 0000000..9265ab4 --- /dev/null +++ b/src/core/chmod.js @@ -0,0 +1,74 @@ +'use strict' + +const applyDefaultOptions = require('./utils/apply-default-options') +const toMfsPath = 
require('./utils/to-mfs-path') +const log = require('debug')('ipfs:mfs:touch') +const errCode = require('err-code') +const UnixFS = require('ipfs-unixfs') +const toTrail = require('./utils/to-trail') +const addLink = require('./utils/add-link') +const updateTree = require('./utils/update-tree') +const updateMfsRoot = require('./utils/update-mfs-root') +const { DAGNode } = require('ipld-dag-pb') +const mc = require('multicodec') +const mh = require('multihashes') + +const defaultOptions = { + flush: true, + shardSplitThreshold: 1000, + cidVersion: 1, + format: 'dag-pb', + hashAlg: 'sha2-256' +} + +module.exports = (context) => { + return async function mfsChmod (path, mode, options) { + options = applyDefaultOptions(options, defaultOptions) + + log(`Fetching stats for ${path}`) + + const { + cid, + mfsDirectory, + name + } = await toMfsPath(context, path) + + if (cid.codec !== 'dag-pb') { + throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') + } + + let node = await context.ipld.get(cid) + const metadata = UnixFS.unmarshal(node.Data) + metadata.mode = mode + node = new DAGNode(metadata.marshal(), node.Links) + + const updatedCid = await context.ipld.put(node, mc.DAG_PB, { + cidVersion: cid.version, + hashAlg: mh.names['sha2-256'], + hashOnly: !options.flush + }) + + const trail = await toTrail(context, mfsDirectory, options) + const parent = trail[trail.length - 1] + const parentNode = await context.ipld.get(parent.cid) + + const result = await addLink(context, { + parent: parentNode, + name: name, + cid: updatedCid, + size: node.serialize().length, + flush: options.flush, + format: 'dag-pb', + hashAlg: 'sha2-256', + cidVersion: cid.version + }) + + parent.cid = result.cid + + // update the tree with the new child + const newRootCid = await updateTree(context, trail, options) + + // Update the MFS record with the new CID for the root of the tree + await updateMfsRoot(context, newRootCid) + } +} diff --git a/src/core/index.js 
b/src/core/index.js index 59068b8..a2335f7 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -10,11 +10,13 @@ const readOperations = { // These operations are locked at the function level and will execute in series const writeOperations = { + chmod: require('./chmod'), cp: require('./cp'), flush: require('./flush'), mkdir: require('./mkdir'), mv: require('./mv'), - rm: require('./rm') + rm: require('./rm'), + touch: require('./touch') } // These operations are asynchronous and manage their own locking diff --git a/src/core/ls.js b/src/core/ls.js index 0fe8648..c458988 100644 --- a/src/core/ls.js +++ b/src/core/ls.js @@ -15,17 +15,23 @@ const defaultOptions = { const toOutput = (fsEntry) => { let type = 0 let size = fsEntry.node.size || fsEntry.node.length + let mode + let mtime if (fsEntry.unixfs) { size = fsEntry.unixfs.fileSize() type = FILE_TYPES[fsEntry.unixfs.type] + mode = fsEntry.unixfs.mode + mtime = fsEntry.unixfs.mtime } return { cid: fsEntry.cid, name: fsEntry.name, type, - size + size, + mode, + mtime } } diff --git a/src/core/mkdir.js b/src/core/mkdir.js index c868fc2..0e39926 100644 --- a/src/core/mkdir.js +++ b/src/core/mkdir.js @@ -20,7 +20,9 @@ const defaultOptions = { cidVersion: 0, shardSplitThreshold: 1000, format: 'dag-pb', - flush: true + flush: true, + mode: null, + mtime: null } module.exports = (context) => { @@ -116,7 +118,9 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) name: childName, format: options.format, hashAlg: options.hashAlg, - cidVersion: options.cidVersion + cidVersion: options.cidVersion, + mode: options.mode, + mtime: options.mtime }) trail[trail.length - 1].cid = result.cid diff --git a/src/core/stat.js b/src/core/stat.js index 0ec7498..6bee887 100644 --- a/src/core/stat.js +++ b/src/core/stat.js @@ -84,7 +84,9 @@ const statters = { type: nodeType, local: undefined, sizeLocal: undefined, - withLocality: false + withLocality: false, + mtime: file.unixfs.mtime, + mode: 
file.unixfs.mode } }, 'dag-cbor': (file) => { diff --git a/src/core/touch.js b/src/core/touch.js new file mode 100644 index 0000000..830355a --- /dev/null +++ b/src/core/touch.js @@ -0,0 +1,101 @@ +'use strict' + +const applyDefaultOptions = require('./utils/apply-default-options') +const toMfsPath = require('./utils/to-mfs-path') +const log = require('debug')('ipfs:mfs:touch') +const errCode = require('err-code') +const UnixFS = require('ipfs-unixfs') +const toTrail = require('./utils/to-trail') +const addLink = require('./utils/add-link') +const updateTree = require('./utils/update-tree') +const updateMfsRoot = require('./utils/update-mfs-root') +const { DAGNode } = require('ipld-dag-pb') +const mc = require('multicodec') +const mh = require('multihashes') + +const defaultOptions = { + flush: true, + shardSplitThreshold: 1000, + cidVersion: 1, + format: 'dag-pb', + hashAlg: 'sha2-256' +} + +module.exports = (context) => { + return async function mfsTouch (path, mtime, options) { + if (!options && isNaN(mtime)) { + options = mtime + mtime = parseInt(Date.now() / 1000) + } + + options = applyDefaultOptions(options, defaultOptions) + + log(`Touching ${path}`) + + const { + cid, + mfsDirectory, + name, + exists + } = await toMfsPath(context, path) + + let node + let updatedCid + + let cidVersion = options.cidVersion + + if (!exists) { + const metadata = new UnixFS('file') + metadata.mtime = mtime + node = new DAGNode(metadata.marshal()) + updatedCid = await context.ipld.put(node, mc.DAG_PB, { + cidVersion: options.cidVersion, + hashAlg: mh.names['sha2-256'], + hashOnly: !options.flush + }) + } else { + if (cid.codec !== 'dag-pb') { + throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') + } + + cidVersion = cid.version + + node = await context.ipld.get(cid) + + const metadata = UnixFS.unmarshal(node.Data) + metadata.mtime = mtime + + node = new DAGNode(metadata.marshal(), node.Links) + + updatedCid = await context.ipld.put(node, mc.DAG_PB, { + 
cidVersion: cid.version, + hashAlg: mh.names['sha2-256'], + hashOnly: !options.flush + }) + } + + const trail = await toTrail(context, mfsDirectory, options) + const parent = trail[trail.length - 1] + const parentNode = await context.ipld.get(parent.cid) + + const result = await addLink(context, { + parent: parentNode, + name: name, + cid: updatedCid, + size: node.serialize().length, + flush: options.flush, + shardSplitThreshold: options.shardSplitThreshold, + format: 'dag-pb', + hashAlg: 'sha2-256', + cidVersion + }) + + parent.cid = result.cid + + // update the tree with the new child + const newRootCid = await updateTree(context, trail, options) + + // Update the MFS record with the new CID for the root of the tree + await updateMfsRoot(context, newRootCid) + } +} diff --git a/src/core/utils/add-link.js b/src/core/utils/add-link.js index 7116556..4585a49 100644 --- a/src/core/utils/add-link.js +++ b/src/core/utils/add-link.js @@ -61,7 +61,11 @@ const addLink = async (context, options) => { if (options.parent.Links.length >= options.shardSplitThreshold) { log('Converting directory to sharded directory') - return convertToShardedDirectory(context, options) + return convertToShardedDirectory(context, { + ...options, + mtime: meta.mtime, + mode: meta.mode + }) } log(`Adding ${options.name} (${options.cid}) to regular directory`) @@ -89,6 +93,15 @@ const addToDirectory = async (context, options) => { options.parent.rmLink(options.name) options.parent.addLink(new DAGLink(options.name, options.size, options.cid)) + const node = UnixFS.unmarshal(options.parent.Data) + + // Update mtime if set previously + if (node.mtime) { + node.mtime = parseInt(Date.now() / 1000) + + options.parent.Data = UnixFS.unmarshal(node) + } + const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] @@ -137,6 +150,7 @@ const addFileToShardedDirectory = async (context, options) => { // start at the root bucket and descend, loading nodes as we 
go const rootBucket = await recreateHamtLevel(options.parent.Links) + const node = UnixFS.unmarshal(options.parent.Data) const shard = new DirSharded({ root: true, @@ -145,10 +159,16 @@ const addFileToShardedDirectory = async (context, options) => { parentKey: null, path: '', dirty: true, - flat: false + flat: false, + mode: node.mode }, options) shard._bucket = rootBucket + // Update mtime if set previously + if (node.mtime) { + shard.mtime = parseInt(Date.now() / 1000) + } + // load subshards until the bucket & position no longer changes const position = await rootBucket._findNewBucketAndPos(file.name) const path = toBucketPath(position) diff --git a/src/core/utils/create-node.js b/src/core/utils/create-node.js index f1cfb28..ac01979 100644 --- a/src/core/utils/create-node.js +++ b/src/core/utils/create-node.js @@ -10,8 +10,17 @@ const mh = require('multihashes') const createNode = async (context, type, options) => { const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] + const metadata = new UnixFS(type) - const node = new DAGNode(new UnixFS(type).marshal()) + if (options.mode !== undefined) { + metadata.mode = options.mode + } + + if (options.mtime !== undefined) { + metadata.mtime = options.mtime + } + + const node = new DAGNode(metadata.marshal()) const cid = await context.ipld.put(node, format, { cidVersion: options.cidVersion, hashAlg diff --git a/src/core/utils/hamt-utils.js b/src/core/utils/hamt-utils.js index 9289640..182fa66 100644 --- a/src/core/utils/hamt-utils.js +++ b/src/core/utils/hamt-utils.js @@ -18,6 +18,15 @@ const updateHamtDirectory = async (context, links, bucket, options) => { dir.fanout = bucket.tableSize() dir.hashType = DirSharded.hashFn.code + const node = UnixFS.unmarshal(options.parent.Data) + + // Update mtime if set previously + if (node.mtime) { + node.mtime = parseInt(Date.now() / 1000) + + dir.Data = UnixFS.unmarshal(node) + } + const format = 
mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] @@ -175,7 +184,9 @@ const createShard = async (context, contents, options) => { parentKey: null, path: '', dirty: true, - flat: false + flat: false, + mtime: options.mtime, + mode: options.mode }, options) for (let i = 0; i < contents.length; i++) { diff --git a/src/core/write.js b/src/core/write.js index 18c9a2d..9f9ffdf 100644 --- a/src/core/write.js +++ b/src/core/write.js @@ -34,12 +34,13 @@ const defaultOptions = { strategy: 'trickle', flush: true, leafType: 'raw', - shardSplitThreshold: 1000 + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined } module.exports = (context) => { return async function mfsWrite (path, content, options) { - log('Hello world, writing', path, content, options) options = applyDefaultOptions(options, defaultOptions) let source, destination, parent @@ -175,7 +176,11 @@ const write = async (context, source, destination, options) => { }) const result = await last(importer([{ - content: content + content: content, + + // persist mode & mtime if set previously + mode: (destination.unixfs && destination.unixfs.mode) || options.mode, + mtime: (destination.unixfs && destination.unixfs.mtime) ? 
parseInt(new Date() / 1000) : options.mtime }], context.ipld, { progress: options.progress, hashAlg: options.hashAlg, diff --git a/src/http/chmod.js b/src/http/chmod.js new file mode 100644 index 0000000..0b89366 --- /dev/null +++ b/src/http/chmod.js @@ -0,0 +1,39 @@ +'use strict' + +const Joi = require('@hapi/joi') + +const mfsChmod = { + method: 'POST', + path: '/api/v0/files/chmod', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + flush, + mode + } = request.query + + await ipfs.files.chmod(arg, mode, { + flush + }) + + return h.response() + }, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true + }, + query: Joi.object().keys({ + arg: Joi.string(), + mode: Joi.number().integer().min(0), + flush: Joi.boolean().default(true) + }) + } + } +} + +module.exports = mfsChmod diff --git a/src/http/index.js b/src/http/index.js index 2ccaa9e..96cb1ad 100644 --- a/src/http/index.js +++ b/src/http/index.js @@ -1,5 +1,6 @@ 'use strict' +const chmod = require('./chmod') const cp = require('./cp') const flush = require('./flush') const ls = require('./ls') @@ -8,9 +9,11 @@ const mv = require('./mv') const read = require('./read') const rm = require('./rm') const stat = require('./stat') +const touch = require('./touch') const write = require('./write') module.exports = [ + chmod, cp, flush, ls, @@ -19,5 +22,6 @@ module.exports = [ read, rm, stat, + touch, write ] diff --git a/src/http/touch.js b/src/http/touch.js new file mode 100644 index 0000000..db26610 --- /dev/null +++ b/src/http/touch.js @@ -0,0 +1,48 @@ +'use strict' + +const Joi = require('@hapi/joi') + +const mfsTouch = { + method: 'POST', + path: '/api/v0/files/touch', + async handler (request, h) { + const { + ipfs + } = request.server.app + const { + arg, + flush, + shardSplitThreshold, + cidVersion, + mtime + } = request.query + + await ipfs.files.touch(arg, mtime, { + flush, + shardSplitThreshold, + cidVersion + }) + + return h.response() + 
}, + options: { + validate: { + options: { + allowUnknown: true, + stripUnknown: true + }, + query: Joi.object().keys({ + arg: Joi.array().items(Joi.string()).min(2), + mtime: Joi.number().integer().min(0), + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0) + }) + } + } +} + +module.exports = mfsTouch diff --git a/src/http/write.js b/src/http/write.js index f647c05..5962017 100644 --- a/src/http/write.js +++ b/src/http/write.js @@ -93,7 +93,8 @@ const mfsWrite = { 'balanced', 'trickle' ]).default('trickle'), - flush: Joi.boolean().default(true) + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) .rename('o', 'offset', { override: true, diff --git a/test/chmod.spec.js b/test/chmod.spec.js new file mode 100644 index 0000000..5f10cab --- /dev/null +++ b/test/chmod.spec.js @@ -0,0 +1,65 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('./helpers/chai') +const createMfs = require('./helpers/create-mfs') + +describe('chmod', () => { + let mfs + + before(async () => { + mfs = await createMfs() + }) + + it('should update the mode for a file', async () => { + const path = `/foo-${Date.now()}` + + await mfs.write(path, Buffer.from('Hello world'), { + create: true, + mtime: parseInt(new Date() / 1000) + }) + const targetMode = parseInt('0777', 8) + const originalMode = (await mfs.stat(path)).mode + await mfs.chmod(path, targetMode, { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(targetMode) + }) + + it('should update the mode for a directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path) + const targetMode = parseInt('0777', 8) + const originalMode = (await mfs.stat(path)).mode + await mfs.chmod(path, targetMode, { + flush: true + }) + + const 
updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(targetMode) + }) + + it('should update the mode for a hamt-sharded-directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path) + await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { + create: true, + shardSplitThreshold: 0 + }) + const targetMode = parseInt('0777', 8) + const originalMode = (await mfs.stat(path)).mode + await mfs.chmod(path, targetMode, { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.not.equal(originalMode) + expect(updatedMode).to.equal(targetMode) + }) +}) diff --git a/test/helpers/chai.js b/test/helpers/chai.js new file mode 100644 index 0000000..c00c40d --- /dev/null +++ b/test/helpers/chai.js @@ -0,0 +1,7 @@ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +chai.use(require('chai-as-promised')) + +module.exports = chai.expect diff --git a/test/touch.spec.js b/test/touch.spec.js new file mode 100644 index 0000000..3091c2d --- /dev/null +++ b/test/touch.spec.js @@ -0,0 +1,80 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('./helpers/chai') +const createMfs = require('./helpers/create-mfs') +const streamToBuffer = require('./helpers/stream-to-buffer') +const delay = require('delay') + +describe('touch', () => { + let mfs + + before(async () => { + mfs = await createMfs() + }) + + it('should update the mtime for a file', async () => { + const path = `/foo-${Date.now()}` + + await mfs.write(path, Buffer.from('Hello world'), { + create: true, + mtime: parseInt(new Date() / 1000) + }) + const originalMtime = (await mfs.stat(path)).mtime + await delay(1000) + await mfs.touch(path, { + flush: true + }) + + const updatedMtime = (await mfs.stat(path)).mtime + expect(updatedMtime).to.be.greaterThan(originalMtime) + }) + + it('should update the mtime for a directory', async () => { + const path = 
`/foo-${Date.now()}` + + await mfs.mkdir(path, { + mtime: parseInt(Date.now() / 1000) + }) + const originalMtime = (await mfs.stat(path)).mtime + await delay(1000) + await mfs.touch(path, { + flush: true + }) + + const updatedMtime = (await mfs.stat(path)).mtime + expect(updatedMtime).to.be.greaterThan(originalMtime) + }) + + it('should update the mtime for a hamt-sharded-directory', async () => { + const path = `/foo-${Date.now()}` + + await mfs.mkdir(path, { + mtime: parseInt(Date.now() / 1000) + }) + await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { + create: true, + shardSplitThreshold: 0 + }) + const originalMtime = (await mfs.stat(path)).mtime + await delay(1000) + await mfs.touch(path, { + flush: true + }) + + const updatedMtime = (await mfs.stat(path)).mtime + expect(updatedMtime).to.be.greaterThan(originalMtime) + }) + + it('should create an empty file', async () => { + const path = `/foo-${Date.now()}` + + await mfs.touch(path, { + flush: true + }) + + const buffer = await streamToBuffer(mfs.read(path)) + + expect(buffer).to.deep.equal(Buffer.from([])) + }) +}) From b3976c4797cf101606660e39163c6d2895fb5587 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 22 Nov 2019 15:19:57 -0600 Subject: [PATCH 03/15] chore: update deps --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 0aadd6f..9484c83 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "ipfs-multipart": "~0.2.0", "ipfs-unixfs": "^0.2.0", "ipfs-unixfs-exporter": "~0.38.0", - "ipfs-unixfs-importer": "^0.40.0", + "ipfs-unixfs-importer": "^0.41.0", "ipfs-utils": "^0.4.0", "ipld-dag-pb": "~0.18.0", "joi-browser": "^13.4.0", From d4fbd67632361acc335121e1cea339d614240f45 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Wed, 27 Nov 2019 14:14:10 +0000 Subject: [PATCH 04/15] chore: update deps --- package.json | 10 +++++----- src/core/utils/add-link.js | 2 +- src/core/utils/hamt-utils.js | 2 +- src/core/write.js | 
2 +- test/helpers/create-shard.js | 2 +- test/mkdir.spec.js | 2 +- test/write.spec.js | 2 +- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index 9484c83..325f050 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,6 @@ "homepage": "https://github.com/ipfs/js-ipfs-mfs#readme", "devDependencies": { "aegir": "^20.0.0", - "async-iterator-all": "^1.0.0", "chai": "^4.2.0", "chai-as-promised": "^7.1.1", "delay": "^4.3.0", @@ -49,13 +48,13 @@ "ipfs-block-service": "~0.16.0", "ipfs-repo": "^0.29.1", "ipld": "~0.25.0", + "it-all": "^1.0.1", "memdown": "^5.1.0", "temp-write": "^4.0.0" }, "dependencies": { "@hapi/boom": "^7.4.2", "@hapi/joi": "^15.1.0", - "async-iterator-last": "^1.0.0", "cids": "~0.7.1", "debug": "^4.1.0", "err-code": "^2.0.0", @@ -63,10 +62,11 @@ "interface-datastore": "^0.8.0", "ipfs-multipart": "~0.2.0", "ipfs-unixfs": "^0.2.0", - "ipfs-unixfs-exporter": "~0.38.0", - "ipfs-unixfs-importer": "^0.41.0", - "ipfs-utils": "^0.4.0", + "ipfs-unixfs-exporter": "~0.39.0", + "ipfs-unixfs-importer": "^0.42.0", + "ipfs-utils": "ipfs/js-ipfs-utils#support-unixfs-metadata", "ipld-dag-pb": "~0.18.0", + "it-last": "^1.0.1", "joi-browser": "^13.4.0", "mortice": "^2.0.0", "multicodec": "~0.5.3", diff --git a/src/core/utils/add-link.js b/src/core/utils/add-link.js index 4585a49..3bed208 100644 --- a/src/core/utils/add-link.js +++ b/src/core/utils/add-link.js @@ -17,7 +17,7 @@ const { const errCode = require('err-code') const mc = require('multicodec') const mh = require('multihashes') -const last = require('async-iterator-last') +const last = require('it-last') const addLink = async (context, options) => { if (!options.parentCid && !options.parent) { diff --git a/src/core/utils/hamt-utils.js b/src/core/utils/hamt-utils.js index 182fa66..f3bfe87 100644 --- a/src/core/utils/hamt-utils.js +++ b/src/core/utils/hamt-utils.js @@ -9,7 +9,7 @@ const log = require('debug')('ipfs:mfs:core:utils:hamt-utils') const UnixFS = 
require('ipfs-unixfs') const mc = require('multicodec') const mh = require('multihashes') -const last = require('async-iterator-last') +const last = require('it-last') const updateHamtDirectory = async (context, links, bucket, options) => { // update parent with new bit field diff --git a/src/core/write.js b/src/core/write.js index 9f9ffdf..e68b3bf 100644 --- a/src/core/write.js +++ b/src/core/write.js @@ -17,7 +17,7 @@ const errCode = require('err-code') const { MAX_CHUNK_SIZE } = require('./utils/constants') -const last = require('async-iterator-last') +const last = require('it-last') const defaultOptions = { offset: 0, // the offset in the file to begin writing diff --git a/test/helpers/create-shard.js b/test/helpers/create-shard.js index f56f827..91114de 100644 --- a/test/helpers/create-shard.js +++ b/test/helpers/create-shard.js @@ -1,7 +1,7 @@ 'use strict' const importer = require('ipfs-unixfs-importer') -const last = require('async-iterator-last') +const last = require('it-last') const createShard = async (ipld, files, shardSplitThreshold = 10) => { const result = await last(importer(files, ipld, { diff --git a/test/mkdir.spec.js b/test/mkdir.spec.js index 628f2dd..5904ce2 100644 --- a/test/mkdir.spec.js +++ b/test/mkdir.spec.js @@ -8,7 +8,7 @@ const multihash = require('multihashes') const createMfs = require('./helpers/create-mfs') const cidAtPath = require('./helpers/cid-at-path') const createShardedDirectory = require('./helpers/create-sharded-directory') -const all = require('async-iterator-all') +const all = require('it-all') describe('mkdir', () => { let mfs diff --git a/test/write.spec.js b/test/write.spec.js index 9571e7b..8a5091f 100644 --- a/test/write.spec.js +++ b/test/write.spec.js @@ -14,7 +14,7 @@ const createShard = require('./helpers/create-shard') const createShardedDirectory = require('./helpers/create-sharded-directory') const createTwoShards = require('./helpers/create-two-shards') const crypto = require('crypto') -const all = 
require('async-iterator-all') +const all = require('it-all') let fs, tempWrite From 43eadd467605aa0b0496b9ac6aaec36adf74c16d Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 3 Dec 2019 18:44:03 +0000 Subject: [PATCH 05/15] test: add cli tests --- package.json | 13 +- src/cli/chmod.js | 17 ++ src/cli/cp.js | 19 +- src/cli/index.js | 4 + src/cli/ls.js | 10 +- src/cli/mkdir.js | 16 +- src/cli/mv.js | 33 +++ src/cli/read.js | 4 +- src/cli/stat.js | 4 +- src/cli/touch.js | 42 +++- src/cli/write.js | 32 ++- src/core/chmod.js | 3 +- src/core/mv.js | 1 + src/core/touch.js | 4 +- src/core/utils/add-link.js | 2 +- src/core/utils/hamt-utils.js | 2 +- src/http/chmod.js | 17 +- test/cli/chmod.spec.js | 178 ++++++++++++++ test/cli/cp.spec.js | 170 +++++++++++++ test/cli/flush.spec.js | 40 +++ test/cli/ls.spec.js | 193 +++++++++++++++ test/cli/mkdir.spec.js | 270 ++++++++++++++++++++ test/cli/mv.spec.js | 312 +++++++++++++++++++++++ test/cli/read.spec.js | 104 ++++++++ test/cli/rm.spec.js | 57 +++++ test/cli/stat.spec.js | 144 +++++++++++ test/cli/touch.spec.js | 170 +++++++++++++ test/cli/write.spec.js | 450 ++++++++++++++++++++++++++++++++++ test/{ => core}/chmod.spec.js | 4 +- test/{ => core}/cp.spec.js | 8 +- test/{ => core}/flush.spec.js | 2 +- test/{ => core}/ls.spec.js | 8 +- test/{ => core}/mkdir.spec.js | 6 +- test/{ => core}/mv.spec.js | 6 +- test/{ => core}/read.spec.js | 6 +- test/{ => core}/rm.spec.js | 8 +- test/{ => core}/stat.spec.js | 4 +- test/{ => core}/touch.spec.js | 6 +- test/{ => core}/write.spec.js | 12 +- test/helpers/cli.js | 16 ++ 40 files changed, 2308 insertions(+), 89 deletions(-) create mode 100644 test/cli/chmod.spec.js create mode 100644 test/cli/cp.spec.js create mode 100644 test/cli/flush.spec.js create mode 100644 test/cli/ls.spec.js create mode 100644 test/cli/mkdir.spec.js create mode 100644 test/cli/mv.spec.js create mode 100644 test/cli/read.spec.js create mode 100644 test/cli/rm.spec.js create mode 100644 test/cli/stat.spec.js create 
mode 100644 test/cli/touch.spec.js create mode 100644 test/cli/write.spec.js rename test/{ => core}/chmod.spec.js (94%) rename test/{ => core}/cp.spec.js (97%) rename test/{ => core}/flush.spec.js (91%) rename test/{ => core}/ls.spec.js (96%) rename test/{ => core}/mkdir.spec.js (96%) rename test/{ => core}/mv.spec.js (97%) rename test/{ => core}/read.spec.js (95%) rename test/{ => core}/rm.spec.js (97%) rename test/{ => core}/stat.spec.js (97%) rename test/{ => core}/touch.spec.js (92%) rename test/{ => core}/write.spec.js (98%) create mode 100644 test/helpers/cli.js diff --git a/package.json b/package.json index 325f050..81aee37 100644 --- a/package.json +++ b/package.json @@ -11,8 +11,11 @@ "scripts": { "test": "aegir test", "test:node": "aegir test -t node", - "test:browser": "aegir test -t browser", - "test:webworker": "aegir test -t webworker", + "test:cli": "aegir test -t node -f test/cli/**/*.js", + "test:core": "aegir test -t node -f test/core/**/*.js", + "test:http": "aegir test -t node -f test/http/**/*.js", + "test:browser": "aegir test -t browser -f test/core/**/*.js", + "test:webworker": "aegir test -t webworker -f test/core/**/*.js", "build": "aegir build", "lint": "aegir lint", "release": "aegir release", @@ -46,11 +49,13 @@ "detect-webworker": "^1.0.0", "dirty-chai": "^2.0.1", "ipfs-block-service": "~0.16.0", - "ipfs-repo": "^0.29.1", + "ipfs-repo": "^0.30.1", "ipld": "~0.25.0", "it-all": "^1.0.1", "memdown": "^5.1.0", - "temp-write": "^4.0.0" + "temp-write": "^4.0.0", + "yargs": "^15.0.2", + "yargs-promise": "^1.1.0" }, "dependencies": { "@hapi/boom": "^7.4.2", diff --git a/src/cli/chmod.js b/src/cli/chmod.js index f0083ca..d8bd0b4 100644 --- a/src/cli/chmod.js +++ b/src/cli/chmod.js @@ -21,11 +21,24 @@ module.exports = { describe: 'The mode to use' }, recursive: { + alias: 'r', type: 'boolean', default: false, coerce: asBoolean, describe: 'Whether to change modes recursively' }, + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + 
describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, + 'hash-alg': { + alias: 'h', + type: 'string', + default: 'sha2-256', + describe: 'Hash function to use. Will set CID version to 1 if used' + }, flush: { alias: 'f', type: 'boolean', @@ -46,6 +59,8 @@ module.exports = { mode, getIpfs, recursive, + codec, + hashAlg, flush, shardSplitThreshold } = argv @@ -55,6 +70,8 @@ module.exports = { return ipfs.files.chmod(path, mode, { recursive, + format: codec, + hashAlg, flush, shardSplitThreshold }) diff --git a/src/cli/cp.js b/src/cli/cp.js index 7c3a085..3be124b 100644 --- a/src/cli/cp.js +++ b/src/cli/cp.js @@ -17,11 +17,11 @@ module.exports = { coerce: asBoolean, describe: 'Create any non-existent intermediate directories' }, - format: { - alias: 'f', + codec: { + alias: 'c', type: 'string', default: 'dag-pb', - describe: 'If intermediate directories are created, use this format to create them (experimental)' + describe: 'If intermediate directories are created, use this codec to create them (experimental)' }, 'hash-alg': { alias: 'h', @@ -29,6 +29,13 @@ module.exports = { default: 'sha2-256', describe: 'Hash function to use. 
Will set CID version to 1 if used' }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, 'shard-split-threshold': { type: 'number', default: 1000, @@ -42,7 +49,8 @@ module.exports = { dest, getIpfs, parents, - format, + codec, + flush, hashAlg, shardSplitThreshold } = argv @@ -51,7 +59,8 @@ module.exports = { const ipfs = await getIpfs() return ipfs.files.cp(source, dest, { parents, - format, + format: codec, + flush, hashAlg, shardSplitThreshold }) diff --git a/src/cli/index.js b/src/cli/index.js index 1c83aba..7186cf6 100644 --- a/src/cli/index.js +++ b/src/cli/index.js @@ -20,5 +20,9 @@ const command = { module.exports = (yargs) => { return yargs + .config({ + print, + getStdin: () => process.stdin + }) .command(command) } diff --git a/src/cli/ls.js b/src/cli/ls.js index ffe2c1f..c66cc77 100644 --- a/src/cli/ls.js +++ b/src/cli/ls.js @@ -4,7 +4,6 @@ const pull = require('pull-stream/pull') const onEnd = require('pull-stream/sinks/on-end') const through = require('pull-stream/throughs/through') const { - print, asBoolean } = require('./utils') const { @@ -45,18 +44,15 @@ module.exports = { getIpfs, long, sort, - cidBase + cidBase, + print } = argv argv.resolve((async () => { const ipfs = await getIpfs() return new Promise((resolve, reject) => { if (sort) { - ipfs.files.ls(path || FILE_SEPARATOR, { - long, - sort, - cidBase - }) + ipfs.files.ls(path || FILE_SEPARATOR) .then(files => { // https://github.com/ipfs/go-ipfs/issues/5181 if (sort) { diff --git a/src/cli/mkdir.js b/src/cli/mkdir.js index cf413ab..00e3874 100644 --- a/src/cli/mkdir.js +++ b/src/cli/mkdir.js @@ -24,9 +24,17 @@ module.exports = { default: 0, describe: 'Cid version to use. (experimental).' 
}, + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, 'hash-alg': { + alias: 'h', type: 'string', - describe: 'Hash function to use. Will set Cid version to 1 if used. (experimental).' + default: 'sha2-256', + describe: 'Hash function to use. Will set CID version to 1 if used' }, flush: { alias: 'f', @@ -41,16 +49,12 @@ module.exports = { describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' }, mode: { - alias: 'm', type: 'number', - default: true, coerce: asOctal, describe: 'Mode to apply to the new directory' }, mtime: { - alias: 'm', type: 'number', - default: true, coerce: asOctal, describe: 'Mtime to apply to the new directory' } @@ -62,6 +66,7 @@ module.exports = { getIpfs, parents, cidVersion, + codec, hashAlg, flush, shardSplitThreshold, @@ -75,6 +80,7 @@ module.exports = { return ipfs.files.mkdir(path, { parents, cidVersion, + format: codec, hashAlg, flush, shardSplitThreshold, diff --git a/src/cli/mv.js b/src/cli/mv.js index d1d391c..764d19a 100644 --- a/src/cli/mv.js +++ b/src/cli/mv.js @@ -24,6 +24,31 @@ module.exports = { coerce: asBoolean, describe: 'Remove directories recursively' }, + 'cid-version': { + alias: ['cid-ver'], + type: 'number', + default: 0, + describe: 'Cid version to use. (experimental).' + }, + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, + 'hash-alg': { + alias: 'h', + type: 'string', + default: 'sha2-256', + describe: 'Hash function to use. 
Will set CID version to 1 if used' + }, + flush: { + alias: 'f', + type: 'boolean', + default: true, + coerce: asBoolean, + describe: 'Flush the changes to disk immediately' + }, 'shard-split-threshold': { type: 'number', default: 1000, @@ -38,6 +63,10 @@ module.exports = { getIpfs, parents, recursive, + cidVersion, + codec, + hashAlg, + flush, shardSplitThreshold } = argv @@ -47,6 +76,10 @@ module.exports = { return ipfs.files.mv(source, dest, { parents, recursive, + cidVersion, + format: codec, + hashAlg, + flush, shardSplitThreshold }) })()) diff --git a/src/cli/read.js b/src/cli/read.js index 02ae3f6..a0cf1b3 100644 --- a/src/cli/read.js +++ b/src/cli/read.js @@ -3,9 +3,6 @@ const pull = require('pull-stream/pull') const through = require('pull-stream/throughs/through') const onEnd = require('pull-stream/sinks/on-end') -const { - print -} = require('./utils') module.exports = { command: 'read ', @@ -29,6 +26,7 @@ module.exports = { const { path, getIpfs, + print, offset, length } = argv diff --git a/src/cli/stat.js b/src/cli/stat.js index 94ef972..c2c6bd3 100644 --- a/src/cli/stat.js +++ b/src/cli/stat.js @@ -1,8 +1,7 @@ 'use strict' const { - asBoolean, - print + asBoolean } = require('./utils') module.exports = { @@ -54,6 +53,7 @@ Mtime: `, const { path, getIpfs, + print, format, hash, size, diff --git a/src/cli/touch.js b/src/cli/touch.js index 9687353..32e47d1 100644 --- a/src/cli/touch.js +++ b/src/cli/touch.js @@ -10,6 +10,12 @@ module.exports = { describe: 'change file modification times', builder: { + mtime: { + alias: 'm', + type: 'number', + default: parseInt(Date.now() / 1000), + describe: 'Time to use as the new modification time' + }, flush: { alias: 'f', type: 'boolean', @@ -17,22 +23,28 @@ module.exports = { coerce: asBoolean, describe: 'Flush the changes to disk immediately' }, - 'shard-split-threshold': { - type: 'number', - default: 1000, - describe: 'If a directory has more links than this, it will be transformed into a 
hamt-sharded-directory' - }, 'cid-version': { alias: ['cid-ver'], type: 'number', default: 0, - describe: 'Cid version to use' + describe: 'Cid version to use. (experimental).' }, - mtime: { - alias: 'm', + codec: { + alias: 'c', + type: 'string', + default: 'dag-pb', + describe: 'If intermediate directories are created, use this codec to create them (experimental)' + }, + 'hash-alg': { + alias: 'h', + type: 'string', + default: 'sha2-256', + describe: 'Hash function to use. Will set CID version to 1 if used' + }, + 'shard-split-threshold': { type: 'number', - default: parseInt(Date.now() / 1000), - describe: 'Time to use as the new modification time' + default: 1000, + describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' } }, @@ -41,8 +53,10 @@ module.exports = { path, getIpfs, flush, - shardSplitThreshold, cidVersion, + codec, + hashAlg, + shardSplitThreshold, mtime } = argv @@ -51,8 +65,10 @@ module.exports = { return ipfs.files.touch(path, mtime, { flush, - shardSplitThreshold, - cidVersion + cidVersion, + format: codec, + hashAlg, + shardSplitThreshold }) })()) } diff --git a/src/cli/write.js b/src/cli/write.js index 80fc382..0fe7898 100644 --- a/src/cli/write.js +++ b/src/cli/write.js @@ -1,7 +1,8 @@ 'use strict' const { - asBoolean + asBoolean, + asOctal } = require('./utils') module.exports = { @@ -13,6 +14,7 @@ module.exports = { parents: { alias: 'p', type: 'boolean', + default: false, describe: 'Create any non-existent intermediate directories' }, create: { @@ -75,7 +77,8 @@ module.exports = { type: 'string', default: 'sha2-256' }, - format: { + codec: { + alias: ['c'], type: 'string', default: 'dag-pb' }, @@ -83,6 +86,16 @@ module.exports = { type: 'number', default: 1000, describe: 'If a directory has more links than this, it will be transformed into a hamt-sharded-directory' + }, + mode: { + type: 'int', + coerce: asOctal, + describe: 'The mode to use' + }, + mtime: { + alias: 'm', + type: 'number', 
+ describe: 'Time to use as the new modification time' } }, @@ -90,6 +103,7 @@ module.exports = { const { path, getIpfs, + getStdin, offset, length, create, @@ -98,18 +112,20 @@ module.exports = { reduceSingleLeafToSelf, cidVersion, hashAlg, - format, + codec, parents, progress, strategy, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime } = argv argv.resolve((async () => { const ipfs = await getIpfs() - await ipfs.files.write(path, process.stdin, { + await ipfs.files.write(path, getStdin(), { offset, length, create, @@ -118,12 +134,14 @@ module.exports = { reduceSingleLeafToSelf, cidVersion, hashAlg, - format, + format: codec, parents, progress, strategy, flush, - shardSplitThreshold + shardSplitThreshold, + mode, + mtime }) })()) } diff --git a/src/core/chmod.js b/src/core/chmod.js index 9265ab4..fcd41b7 100644 --- a/src/core/chmod.js +++ b/src/core/chmod.js @@ -16,7 +16,6 @@ const mh = require('multihashes') const defaultOptions = { flush: true, shardSplitThreshold: 1000, - cidVersion: 1, format: 'dag-pb', hashAlg: 'sha2-256' } @@ -45,7 +44,7 @@ module.exports = (context) => { const updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: cid.version, hashAlg: mh.names['sha2-256'], - hashOnly: !options.flush + onlyHash: !options.flush }) const trail = await toTrail(context, mfsDirectory, options) diff --git a/src/core/mv.js b/src/core/mv.js index 00ae295..d432fb6 100644 --- a/src/core/mv.js +++ b/src/core/mv.js @@ -9,6 +9,7 @@ const defaultOptions = { parents: false, recursive: false, flush: true, + cidVersion: 0, format: 'dag-pb', hashAlg: 'sha2-256', shardSplitThreshold: 1000 diff --git a/src/core/touch.js b/src/core/touch.js index 830355a..a0c8e63 100644 --- a/src/core/touch.js +++ b/src/core/touch.js @@ -51,7 +51,7 @@ module.exports = (context) => { updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: options.cidVersion, hashAlg: mh.names['sha2-256'], - hashOnly: !options.flush + onlyHash: !options.flush }) } else { 
if (cid.codec !== 'dag-pb') { @@ -70,7 +70,7 @@ module.exports = (context) => { updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: cid.version, hashAlg: mh.names['sha2-256'], - hashOnly: !options.flush + onlyHash: !options.flush }) } diff --git a/src/core/utils/add-link.js b/src/core/utils/add-link.js index 3bed208..089d0d1 100644 --- a/src/core/utils/add-link.js +++ b/src/core/utils/add-link.js @@ -109,7 +109,7 @@ const addToDirectory = async (context, options) => { const cid = await context.ipld.put(options.parent, format, { cidVersion: options.cidVersion, hashAlg, - hashOnly: !options.flush + onlyHash: !options.flush }) return { diff --git a/src/core/utils/hamt-utils.js b/src/core/utils/hamt-utils.js index f3bfe87..a267720 100644 --- a/src/core/utils/hamt-utils.js +++ b/src/core/utils/hamt-utils.js @@ -34,7 +34,7 @@ const updateHamtDirectory = async (context, links, bucket, options) => { const cid = await context.ipld.put(parent, format, { cidVersion: options.cidVersion, hashAlg, - hashOnly: !options.flush + onlyHash: !options.flush }) return { diff --git a/src/http/chmod.js b/src/http/chmod.js index 0b89366..3f1f58c 100644 --- a/src/http/chmod.js +++ b/src/http/chmod.js @@ -1,6 +1,19 @@ 'use strict' -const Joi = require('@hapi/joi') +const originalJoi = require('@hapi/joi') +const Joi = originalJoi.extend({ + name: 'octalNumber', + base: originalJoi.number().min(0), + coerce: (value, state, options) => { + const val = parseInt(value, 8) + + if (isNaN(val) || val < 0) { + throw new Error('Invalid octal number') + } + + return val + } +}) const mfsChmod = { method: 'POST', @@ -29,7 +42,7 @@ const mfsChmod = { }, query: Joi.object().keys({ arg: Joi.string(), - mode: Joi.number().integer().min(0), + mode: Joi.octalNumber(), flush: Joi.boolean().default(true) }) } diff --git a/test/cli/chmod.spec.js b/test/cli/chmod.spec.js new file mode 100644 index 0000000..ff0c43b --- /dev/null +++ b/test/cli/chmod.spec.js @@ -0,0 +1,178 @@ +/* eslint-env mocha */ 
+'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli chmod', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + chmod: sinon.stub() + } + } + }) + + it('should update the mode for a file', async () => { + await cli('files chmod 0777 /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode recursively', async () => { + await cli('files chmod 0777 --recursive /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: true, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode recursively (short option)', async () => { + await cli('files chmod 0777 -r /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: true, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode without flushing', async () => { + await cli('files chmod 0777 --flush false /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode without flushing (short option)', async () => { + await cli('files chmod 0777 -f false /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + 
expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different codec', async () => { + await cli('files chmod 0777 --codec dag-foo /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different codec (short option)', async () => { + await cli('files chmod 0777 -c dag-foo /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different hash algorithm', async () => { + await cli('files chmod 0777 --hash-alg sha3-256 /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different hash algorithm (short option)', async () => { + await cli('files chmod 0777 -h sha3-256 /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode with a shard split threshold', async () => { + await cli('files chmod 0777 --shard-split-threshold 10 /foo', { ipfs }) + + 
expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + '/foo', + parseInt('0777', 8), { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 10 + } + ]) + }) +}) diff --git a/test/cli/cp.spec.js b/test/cli/cp.spec.js new file mode 100644 index 0000000..31c4ef7 --- /dev/null +++ b/test/cli/cp.spec.js @@ -0,0 +1,170 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli cp', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + cp: sinon.stub() + } + } + }) + + it('should copy files', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files and create intermediate directrories', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp --parents ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: true, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files and create intermediate directrories (short option)', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp --parents ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: true, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files with a 
different codec', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp --codec dag-foo ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files with a different codec (short option)', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp -c dag-foo ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files with a different hash algorithm', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp --hash-alg sha3-256 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files with a different hash algorithm (short option)', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp -h sha3-256 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should copy files with a different shard split threshold', async () => { + const source = 'source' + const dest = 'source' + + await cli(`files cp --shard-split-threshold 10 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + 
expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 10 + } + ]) + }) +}) diff --git a/test/cli/flush.spec.js b/test/cli/flush.spec.js new file mode 100644 index 0000000..a5baa14 --- /dev/null +++ b/test/cli/flush.spec.js @@ -0,0 +1,40 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli flush', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + flush: sinon.stub() + } + } + }) + + it('should flush a path', async () => { + const path = '/foo' + + await cli(`files flush ${path}`, { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + path, + {} + ]) + }) + + it('should flush without a path', async () => { + await cli('files flush', { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + '/', + {} + ]) + }) +}) diff --git a/test/cli/ls.spec.js b/test/cli/ls.spec.js new file mode 100644 index 0000000..5ae966c --- /dev/null +++ b/test/cli/ls.spec.js @@ -0,0 +1,193 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const values = require('pull-stream/sources/values') + +describe('cli ls', () => { + let ipfs + let print + let output + + beforeEach(() => { + output = '' + ipfs = { + files: { + ls: sinon.stub().resolves([]) + } + } + print = (msg = '', newline = true) => { + output += newline ? 
msg + '\n' : msg + } + }) + + it('should list a path', async () => { + const path = '/foo' + + await cli(`files ls ${path}`, { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path + ]) + }) + + it('should list without a path', async () => { + await cli('files ls', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + '/' + ]) + }) + + it('should list a path with details', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls --long /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls -l /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls --long /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details (short option)', async () => { + const files = [{ + hash: 
'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.ls = sinon.stub().resolves(files) + + await cli('files ls -l /foo', { ipfs, print }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path without sorting', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls --sort false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].name) + }) + + it('should list a path without sorting (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls -s false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].name) + }) + + it('should list a path with details without sorting', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls --long --sort false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) + + it('should list a path with details without sorting (short option)', async () => { + const files = [{ + hash: 'file-name', + name: 'file-name', + size: 'file-size', + mode: 'file-mode', + mtime: 'file-mtime' + }] + + 
ipfs.files.lsPullStream = sinon.stub().returns(values(files)) + + await cli('files ls -l -s false /foo', { ipfs, print }) + + expect(ipfs.files.lsPullStream.callCount).to.equal(1) + expect(output).to.include(files[0].hash) + expect(output).to.include(files[0].name) + expect(output).to.include(files[0].size) + }) +}) diff --git a/test/cli/mkdir.spec.js b/test/cli/mkdir.spec.js new file mode 100644 index 0000000..b3eac9f --- /dev/null +++ b/test/cli/mkdir.spec.js @@ -0,0 +1,270 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli mkdir', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + mkdir: sinon.stub() + } + } + }) + + it('should make a directory', async () => { + await cli('files mkdir /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with parents', async () => { + await cli('files mkdir --parents /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: true, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with parents (short option)', async () => { + await cli('files mkdir -p /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: true, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a 
directory with a different cid version', async () => { + await cli('files mkdir --cid-version 5 /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 5, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with a different cid version (shortish option)', async () => { + await cli('files mkdir --cid-ver 5 /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 5, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with a different codec', async () => { + await cli('files mkdir --codec dag-foo /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with a different codec (short option)', async () => { + await cli('files mkdir -c dag-foo /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with a different hash algorithm', async () => { + await cli('files mkdir --hash-alg sha3-256 /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + 
cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory with a different hash algorithm (short option)', async () => { + await cli('files mkdir -h sha3-256 /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory without flushing', async () => { + await cli('files mkdir --flush false /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory without flushing (short option)', async () => { + await cli('files mkdir -f false /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory a different shard split threshold', async () => { + await cli('files mkdir --shard-split-threshold 10 /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 10, + mode: undefined, + mtime: undefined + } + ]) + }) + + it('should make a directory a different mode', async () => { + await cli('files mkdir --mode 0111 /foo', 
{ ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: parseInt('0111', 8), + mtime: undefined + } + ]) + }) + + it('should make a directory a different mtime', async () => { + await cli('files mkdir --mtime 5 /foo', { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + '/foo', { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: 5 + } + ]) + }) +}) diff --git a/test/cli/mv.spec.js b/test/cli/mv.spec.js new file mode 100644 index 0000000..bd010fd --- /dev/null +++ b/test/cli/mv.spec.js @@ -0,0 +1,312 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli mv', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + mv: sinon.stub() + } + } + }) + + it('should move an entry', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should move an entry and create parents', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --parents ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: true, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + 
shardSplitThreshold: 1000 + } + ]) + }) + + it('should move an entry and create parents (short option)', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv -p ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: true, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should move an entry recursively', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --recursive ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: true, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should move an entry recursively (short option)', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv -r ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: true, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory with a different cid version', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --cid-version 5 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 5, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory with a different cid version (shortish option)', async () => { + const source = '/src' + 
const dest = '/dest' + + await cli(`files mv --cid-ver 5 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 5, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory with a different codec', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --codec dag-foo ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory with a different codec (short option)', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv -c dag-foo ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory with a different hash algorithm', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --hash-alg sha3-256 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory with a different hash algorithm (short option)', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv -h sha3-256 ${source} ${dest}`, { ipfs }) 
+ + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory without flushing', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --flush false ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory without flushing (short option)', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv -f false ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should make a directory a different shard split threshold', async () => { + const source = '/src' + const dest = '/dest' + + await cli(`files mv --shard-split-threshold 10 ${source} ${dest}`, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 10 + } + ]) + }) +}) diff --git a/test/cli/read.spec.js b/test/cli/read.spec.js new file mode 100644 index 0000000..d739f99 --- /dev/null +++ b/test/cli/read.spec.js @@ -0,0 +1,104 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = 
require('sinon') +const values = require('pull-stream/sources/values') + +describe('cli read', () => { + let ipfs + let print + let output + + beforeEach(() => { + output = '' + ipfs = { + files: { + readPullStream: sinon.stub().returns(values(['hello world'])) + } + } + print = (msg = '', newline = true) => { + output += newline ? msg + '\n' : msg + } + }) + + it('should read a path', async () => { + const path = '/foo' + + await cli(`files read ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, { + offset: undefined, + length: undefined + } + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with an offset', async () => { + const path = '/foo' + const offset = 5 + + await cli(`files read --offset ${offset} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, { + offset, + length: undefined + } + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with an offset (short option)', async () => { + const path = '/foo' + const offset = 5 + + await cli(`files read -o ${offset} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, { + offset, + length: undefined + } + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with an length', async () => { + const path = '/foo' + const length = 5 + + await cli(`files read --length ${length} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, { + offset: undefined, + length + } + ]) + expect(output).to.equal('hello world') + }) + + it('should read a path with an length (short option)', async () => { + const path = '/foo' + 
const length = 5 + + await cli(`files read -l ${length} ${path}`, { ipfs, print }) + + expect(ipfs.files.readPullStream.callCount).to.equal(1) + expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ + path, { + offset: undefined, + length + } + ]) + expect(output).to.equal('hello world') + }) +}) diff --git a/test/cli/rm.spec.js b/test/cli/rm.spec.js new file mode 100644 index 0000000..e36de5b --- /dev/null +++ b/test/cli/rm.spec.js @@ -0,0 +1,57 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli rm', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + rm: sinon.stub().resolves() + } + } + }) + + it('should remove a path', async () => { + const path = '/foo' + + await cli(`files rm ${path}`, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, { + recursive: false + } + ]) + }) + + it('should remove a path recursively', async () => { + const path = '/foo' + + await cli(`files rm --recursive ${path}`, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, { + recursive: true + } + ]) + }) + + it('should remove a path recursively (short option)', async () => { + const path = '/foo' + + await cli(`files rm -r ${path}`, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, { + recursive: true + } + ]) + }) +}) diff --git a/test/cli/stat.spec.js b/test/cli/stat.spec.js new file mode 100644 index 0000000..6c275d4 --- /dev/null +++ b/test/cli/stat.spec.js @@ -0,0 +1,144 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli stat', () => { + let ipfs + let print + let output + + beforeEach(() => { + output = 
'' + ipfs = { + files: { + stat: sinon.stub().resolves({ + hash: 'stats-hash', + size: 'stats-size', + cumulativeSize: 'stats-cumulativeSize', + blocks: 'stats-blocks', + type: 'stats-type', + mode: 'stats-mode', + mtime: 'stats-mtime' + }) + } + } + print = (msg = '', newline = true) => { + output += newline ? msg + '\n' : msg + } + }) + + it('should stat a path', async () => { + const path = '/foo' + + await cli(`files stat ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: false + } + ]) + expect(output).to.include('CumulativeSize') + }) + + it('should stat a path with local', async () => { + const path = '/foo' + + await cli(`files stat --with-local ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: true + } + ]) + expect(output).to.include('CumulativeSize') + }) + + it('should stat a path with local (short option)', async () => { + const path = '/foo' + + await cli(`files stat -l ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: true + } + ]) + expect(output).to.include('CumulativeSize') + }) + + it('should stat a path and only show hashes', async () => { + const path = '/foo' + + await cli(`files stat --hash ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: false + } + ]) + expect(output).to.equal('stats-hash\n') + }) + + it('should stat a path and only show hashes (short option)', async () => { + const path = '/foo' + + await cli(`files stat -h ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: false + } + ]) + 
expect(output).to.equal('stats-hash\n') + }) + + it('should stat a path and only show sizes', async () => { + const path = '/foo' + + await cli(`files stat --size ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: false + } + ]) + expect(output).to.equal('stats-size\n') + }) + + it('should stat a path and only show sizes (short option)', async () => { + const path = '/foo' + + await cli(`files stat -s ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: false + } + ]) + expect(output).to.equal('stats-size\n') + }) + + it('should stat a path with format option', async () => { + const path = '/foo' + + await cli(`files stat --format ' ' ${path}`, { ipfs, print }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, { + withLocal: false + } + ]) + expect(output).to.equal('stats-mode stats-type\n') + }) +}) diff --git a/test/cli/touch.spec.js b/test/cli/touch.spec.js new file mode 100644 index 0000000..3b2a70b --- /dev/null +++ b/test/cli/touch.spec.js @@ -0,0 +1,170 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +describe('cli touch', () => { + let ipfs + + beforeEach(() => { + ipfs = { + files: { + touch: sinon.stub() + } + } + }) + + it('should update the mtime for a file', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode 
without flushing', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} --flush false ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode without flushing (short option)', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} -f false ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: false, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different codec', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} --codec dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different codec (short option)', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} -c dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-foo', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different hash algorithm', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} --hash-alg sha3-256 
${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode a different hash algorithm (short option)', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} -h sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha3-256', + flush: true, + shardSplitThreshold: 1000 + } + ]) + }) + + it('should update the mode with a shard split threshold', async () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + + await cli(`files touch -m ${mtime} --shard-split-threshold 10 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 10 + } + ]) + }) +}) diff --git a/test/cli/write.spec.js b/test/cli/write.spec.js new file mode 100644 index 0000000..da51c2c --- /dev/null +++ b/test/cli/write.spec.js @@ -0,0 +1,450 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + offset: undefined, + length: undefined, + create: false, + truncate: false, + rawLeaves: false, + reduceSingleLeafToSelf: false, + cidVersion: 0, + hashAlg: 'sha2-256', + format: 'dag-pb', + parents: false, + progress: undefined, + strategy: 'balanced', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + + Object.keys(modification).forEach(key => { + 
options[key] = modification[key] + }) + + return options +} + +describe('cli write', () => { + const stdin = 'stdin' + const getStdin = () => stdin + let ipfs + + beforeEach(() => { + ipfs = { + files: { + write: sinon.stub() + } + } + }) + + it('should write to a file', async () => { + const path = '/foo' + + await cli(`files write ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions() + ]) + }) + + it('should write to a file and create parents', async () => { + const path = '/foo' + + await cli(`files write --parents ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should write to a file and create parents (short option)', async () => { + const path = '/foo' + + await cli(`files write -p ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should write to a file and create it', async () => { + const path = '/foo' + + await cli(`files write --create ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + create: true + }) + ]) + }) + + it('should write to a file and create it (short option)', async () => { + const path = '/foo' + + await cli(`files write -e ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + create: true + }) + ]) + }) + + it('should write to a file with an offset', async () => { + const path = '/foo' + + await cli(`files write --offset 10 ${path}`, { ipfs, 
getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + offset: 10 + }) + ]) + }) + + it('should write to a file with an offset (short option)', async () => { + const path = '/foo' + + await cli(`files write -o 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + offset: 10 + }) + ]) + }) + + it('should write to a file with an length', async () => { + const path = '/foo' + + await cli(`files write --length 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + length: 10 + }) + ]) + }) + + it('should write to a file with a length (short option)', async () => { + const path = '/foo' + + await cli(`files write -l 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + length: 10 + }) + ]) + }) + + it('should write to a file and truncate it', async () => { + const path = '/foo' + + await cli(`files write --truncate ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + truncate: true + }) + ]) + }) + + it('should write to a file and truncate it (short option)', async () => { + const path = '/foo' + + await cli(`files write -t ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + truncate: true + }) + ]) + }) + + it('should write to a file with raw leaves', async () => { + const path = '/foo' + + await cli(`files write --raw-leaves ${path}`, { ipfs, 
getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + rawLeaves: true + }) + ]) + }) + + it('should write to a file with raw leaves (short option)', async () => { + const path = '/foo' + + await cli(`files write -r ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + rawLeaves: true + }) + ]) + }) + + it('should write to a file and reduce a single leaf to one node', async () => { + const path = '/foo' + + await cli(`files write --reduce-single-leaf-to-self ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + reduceSingleLeafToSelf: true + }) + ]) + }) + + it('should write to a file without flushing', async () => { + const path = '/foo' + + await cli(`files write --flush false ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should write to a file without flushing (short option)', async () => { + const path = '/foo' + + await cli(`files write -f false ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should write to a file with a different strategy', async () => { + const path = '/foo' + + await cli(`files write --strategy trickle ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + strategy: 'trickle' + }) + ]) + }) + + it('should write to a file with a different 
strategy (short option)', async () => { + const path = '/foo' + + await cli(`files write -s trickle ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + strategy: 'trickle' + }) + ]) + }) + + it('should write to a file with a different cid version', async () => { + const path = '/foo' + + await cli(`files write --cid-version 5 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + cidVersion: 5 + }) + ]) + }) + + it('should write to a file with a different cid version (shortish option)', async () => { + const path = '/foo' + + await cli(`files write --cid-ver 5 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + cidVersion: 5 + }) + ]) + }) + + it('should update the mode a different codec', async () => { + const path = '/foo' + + await cli(`files write --codec dag-foo ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode a different codec (short option)', async () => { + const path = '/foo' + + await cli(`files write -c dag-foo ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode a different hash algorithm', async () => { + const path = '/foo' + + await cli(`files write --hash-alg sha3-256 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + 
path, + stdin, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode a different hash algorithm (short option)', async () => { + const path = '/foo' + + await cli(`files write -h sha3-256 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode with a shard split threshold', async () => { + const path = '/foo' + + await cli(`files write --shard-split-threshold 10 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) + + it('should update the mode a different mode', async () => { + const path = '/foo' + + await cli(`files write --mode 0557 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + mode: parseInt('0557', 8) + }) + ]) + }) + + it('should update the mode a different mtime', async () => { + const path = '/foo' + + await cli(`files write --mtime 11 ${path}`, { ipfs, getStdin }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0).args).to.deep.equal([ + path, + stdin, + defaultOptions({ + mtime: 11 + }) + ]) + }) +}) diff --git a/test/chmod.spec.js b/test/core/chmod.spec.js similarity index 94% rename from test/chmod.spec.js rename to test/core/chmod.spec.js index 5f10cab..720b25b 100644 --- a/test/chmod.spec.js +++ b/test/core/chmod.spec.js @@ -1,8 +1,8 @@ /* eslint-env mocha */ 'use strict' -const expect = require('./helpers/chai') -const createMfs = require('./helpers/create-mfs') +const expect = require('../helpers/chai') +const createMfs = require('../helpers/create-mfs') describe('chmod', () => { let mfs diff --git 
a/test/cp.spec.js b/test/core/cp.spec.js similarity index 97% rename from test/cp.spec.js rename to test/core/cp.spec.js index 2b74642..e22e4e8 100644 --- a/test/cp.spec.js +++ b/test/core/cp.spec.js @@ -4,10 +4,10 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const streamToBuffer = require('./helpers/stream-to-buffer') -const streamToArray = require('./helpers/stream-to-array') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const streamToBuffer = require('../helpers/stream-to-buffer') +const streamToArray = require('../helpers/stream-to-array') const crypto = require('crypto') describe('cp', () => { diff --git a/test/flush.spec.js b/test/core/flush.spec.js similarity index 91% rename from test/flush.spec.js rename to test/core/flush.spec.js index fced064..494711d 100644 --- a/test/flush.spec.js +++ b/test/core/flush.spec.js @@ -4,7 +4,7 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') +const createMfs = require('../helpers/create-mfs') describe('flush', () => { let mfs diff --git a/test/ls.spec.js b/test/core/ls.spec.js similarity index 96% rename from test/ls.spec.js rename to test/core/ls.spec.js index eadd1c9..49d7eec 100644 --- a/test/ls.spec.js +++ b/test/core/ls.spec.js @@ -7,10 +7,10 @@ const expect = chai.expect const CID = require('cids') const { FILE_TYPES -} = require('../src') -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const streamToArray = require('./helpers/stream-to-array') +} = require('../../src') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = 
require('../helpers/create-sharded-directory') +const streamToArray = require('../helpers/stream-to-array') const crypto = require('crypto') describe('ls', () => { diff --git a/test/mkdir.spec.js b/test/core/mkdir.spec.js similarity index 96% rename from test/mkdir.spec.js rename to test/core/mkdir.spec.js index 5904ce2..d8a3685 100644 --- a/test/mkdir.spec.js +++ b/test/core/mkdir.spec.js @@ -5,9 +5,9 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const multihash = require('multihashes') -const createMfs = require('./helpers/create-mfs') -const cidAtPath = require('./helpers/cid-at-path') -const createShardedDirectory = require('./helpers/create-sharded-directory') +const createMfs = require('../helpers/create-mfs') +const cidAtPath = require('../helpers/cid-at-path') +const createShardedDirectory = require('../helpers/create-sharded-directory') const all = require('it-all') describe('mkdir', () => { diff --git a/test/mv.spec.js b/test/core/mv.spec.js similarity index 97% rename from test/mv.spec.js rename to test/core/mv.spec.js index 655b3b9..261bd20 100644 --- a/test/mv.spec.js +++ b/test/core/mv.spec.js @@ -4,9 +4,9 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const streamToBuffer = require('./helpers/stream-to-buffer') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const streamToBuffer = require('../helpers/stream-to-buffer') const crypto = require('crypto') describe('mv', () => { diff --git a/test/read.spec.js b/test/core/read.spec.js similarity index 95% rename from test/read.spec.js rename to test/core/read.spec.js index 5e00d27..252f490 100644 --- a/test/read.spec.js +++ b/test/core/read.spec.js @@ -4,10 +4,10 @@ const chai = require('chai') 
chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') const crypto = require('crypto') -const streamToBuffer = require('./helpers/stream-to-buffer') +const streamToBuffer = require('../helpers/stream-to-buffer') describe('read', () => { let mfs diff --git a/test/rm.spec.js b/test/core/rm.spec.js similarity index 97% rename from test/rm.spec.js rename to test/core/rm.spec.js index 0e662f0..b34aee6 100644 --- a/test/rm.spec.js +++ b/test/core/rm.spec.js @@ -4,13 +4,13 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const createTwoShards = require('./helpers/create-two-shards') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const createTwoShards = require('../helpers/create-two-shards') const crypto = require('crypto') const { FILE_SEPARATOR -} = require('../src/core/utils/constants') +} = require('../../src/core/utils/constants') describe('rm', () => { let mfs diff --git a/test/stat.spec.js b/test/core/stat.spec.js similarity index 97% rename from test/stat.spec.js rename to test/core/stat.spec.js index d074d75..8b4af83 100644 --- a/test/stat.spec.js +++ b/test/core/stat.spec.js @@ -5,8 +5,8 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect const crypto = require('crypto') -const createMfs = require('./helpers/create-mfs') -const createShardedDirectory = require('./helpers/create-sharded-directory') +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = 
require('../helpers/create-sharded-directory') const mc = require('multicodec') describe('stat', () => { diff --git a/test/touch.spec.js b/test/core/touch.spec.js similarity index 92% rename from test/touch.spec.js rename to test/core/touch.spec.js index 3091c2d..d63526b 100644 --- a/test/touch.spec.js +++ b/test/core/touch.spec.js @@ -1,9 +1,9 @@ /* eslint-env mocha */ 'use strict' -const expect = require('./helpers/chai') -const createMfs = require('./helpers/create-mfs') -const streamToBuffer = require('./helpers/stream-to-buffer') +const expect = require('../helpers/chai') +const createMfs = require('../helpers/create-mfs') +const streamToBuffer = require('../helpers/stream-to-buffer') const delay = require('delay') describe('touch', () => { diff --git a/test/write.spec.js b/test/core/write.spec.js similarity index 98% rename from test/write.spec.js rename to test/core/write.spec.js index 8a5091f..b0df007 100644 --- a/test/write.spec.js +++ b/test/core/write.spec.js @@ -7,12 +7,12 @@ const expect = chai.expect const isNode = require('detect-node') const multihash = require('multihashes') const util = require('util') -const createMfs = require('./helpers/create-mfs') -const cidAtPath = require('./helpers/cid-at-path') -const traverseLeafNodes = require('./helpers/traverse-leaf-nodes') -const createShard = require('./helpers/create-shard') -const createShardedDirectory = require('./helpers/create-sharded-directory') -const createTwoShards = require('./helpers/create-two-shards') +const createMfs = require('../helpers/create-mfs') +const cidAtPath = require('../helpers/cid-at-path') +const traverseLeafNodes = require('../helpers/traverse-leaf-nodes') +const createShard = require('../helpers/create-shard') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const createTwoShards = require('../helpers/create-two-shards') const crypto = require('crypto') const all = require('it-all') diff --git a/test/helpers/cli.js b/test/helpers/cli.js 
new file mode 100644 index 0000000..f196be6 --- /dev/null +++ b/test/helpers/cli.js @@ -0,0 +1,16 @@ +'use strict' + +const yargs = require('yargs') +const YargsPromise = require('yargs-promise') +const mfs = require('../../src/cli') + +module.exports = (command, { ipfs, print = () => {}, getStdin }) => { + const parser = new YargsPromise(mfs(yargs), { + getIpfs: () => ipfs, + print, + getStdin + }) + + return parser + .parse(command) +} From 07bdca1018d1e1e7d58c2f534cbec0ee395e286a Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 3 Dec 2019 18:47:29 +0000 Subject: [PATCH 06/15] fix: add missing dep --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index 81aee37..604f16e 100644 --- a/package.json +++ b/package.json @@ -53,6 +53,7 @@ "ipld": "~0.25.0", "it-all": "^1.0.1", "memdown": "^5.1.0", + "sinon": "^7.5.0", "temp-write": "^4.0.0", "yargs": "^15.0.2", "yargs-promise": "^1.1.0" From 6679fe594489b6c97a51fd08177807f641eca53d Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 3 Dec 2019 18:51:20 +0000 Subject: [PATCH 07/15] fix: downgrade repo --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 78a44b7..40e9f71 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,7 @@ "detect-webworker": "^1.0.0", "dirty-chai": "^2.0.1", "ipfs-block-service": "~0.16.0", - "ipfs-repo": "^0.30.1", + "ipfs-repo": "^0.29.1", "ipld": "~0.25.0", "it-all": "^1.0.1", "memdown": "^5.1.0", From 1ae8e7943cf984d77f30f7400fb4b78f84c0c384 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Tue, 3 Dec 2019 19:05:36 +0000 Subject: [PATCH 08/15] fix: fix tests after hashOnly turned to onlyHash --- package.json | 2 +- src/core/cp.js | 3 ++- src/core/mkdir.js | 3 ++- src/core/utils/create-node.js | 3 ++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 40e9f71..78a44b7 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,7 @@ 
"detect-webworker": "^1.0.0", "dirty-chai": "^2.0.1", "ipfs-block-service": "~0.16.0", - "ipfs-repo": "^0.29.1", + "ipfs-repo": "^0.30.1", "ipld": "~0.25.0", "it-all": "^1.0.1", "memdown": "^5.1.0", diff --git a/src/core/cp.js b/src/core/cp.js index 3160e17..8e70d36 100644 --- a/src/core/cp.js +++ b/src/core/cp.js @@ -152,7 +152,8 @@ const addSourceToParent = async (context, source, childName, parent, options) => name: childName, format: options.format, hashAlg: options.hashAlg, - cidVersion: options.cidVersion + cidVersion: options.cidVersion, + flush: options.flush }) parent.node = node diff --git a/src/core/mkdir.js b/src/core/mkdir.js index 0e39926..9b62aca 100644 --- a/src/core/mkdir.js +++ b/src/core/mkdir.js @@ -120,7 +120,8 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) hashAlg: options.hashAlg, cidVersion: options.cidVersion, mode: options.mode, - mtime: options.mtime + mtime: options.mtime, + flush: options.flush }) trail[trail.length - 1].cid = result.cid diff --git a/src/core/utils/create-node.js b/src/core/utils/create-node.js index ac01979..043784d 100644 --- a/src/core/utils/create-node.js +++ b/src/core/utils/create-node.js @@ -23,7 +23,8 @@ const createNode = async (context, type, options) => { const node = new DAGNode(metadata.marshal()) const cid = await context.ipld.put(node, format, { cidVersion: options.cidVersion, - hashAlg + hashAlg, + onlyHash: !options.flush }) return { From 0b409ae9df77f0f1b6331a9b06d95b7d0e8243f3 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 5 Dec 2019 10:53:57 +0000 Subject: [PATCH 09/15] test: add tests for http interface --- package.json | 14 +- src/http/chmod.js | 33 ++- src/http/cp.js | 7 +- src/http/flush.js | 8 +- src/http/ls.js | 4 +- src/http/mkdir.js | 11 +- src/http/mv.js | 15 +- src/http/read.js | 10 +- src/http/touch.js | 19 +- src/http/utils/joi.js | 22 ++ src/http/write.js | 11 +- test/browser.js | 3 + test/cli/chmod.js | 164 ++++++++++++++ 
test/cli/chmod.spec.js | 178 --------------- test/cli/{cp.spec.js => cp.js} | 132 ++++------- test/cli/{flush.spec.js => flush.js} | 5 +- test/cli/index.js | 16 ++ test/cli/{ls.spec.js => ls.js} | 7 +- test/cli/mkdir.js | 209 +++++++++++++++++ test/cli/mkdir.spec.js | 270 ---------------------- test/cli/{mv.spec.js => mv.js} | 247 +++++++------------- test/cli/{read.spec.js => read.js} | 57 +++-- test/cli/{rm.spec.js => rm.js} | 41 ++-- test/cli/{stat.spec.js => stat.js} | 76 +++---- test/cli/touch.js | 143 ++++++++++++ test/cli/touch.spec.js | 170 -------------- test/cli/{write.spec.js => write.js} | 9 +- test/core/{chmod.spec.js => chmod.js} | 0 test/core/{cp.spec.js => cp.js} | 0 test/core/{flush.spec.js => flush.js} | 0 test/core/index.js | 16 ++ test/core/{ls.spec.js => ls.js} | 0 test/core/{mkdir.spec.js => mkdir.js} | 0 test/core/{mv.spec.js => mv.js} | 0 test/core/read.js | 149 ++++++++++++ test/core/read.spec.js | 151 ------------ test/core/{rm.spec.js => rm.js} | 0 test/core/{stat.spec.js => stat.js} | 0 test/core/{touch.spec.js => touch.js} | 0 test/core/{write.spec.js => write.js} | 0 test/helpers/http.js | 15 ++ test/http/chmod.js | 130 +++++++++++ test/http/cp.js | 114 ++++++++++ test/http/flush.js | 45 ++++ test/http/index.js | 16 ++ test/http/ls.js | 155 +++++++++++++ test/http/mkdir.js | 172 ++++++++++++++ test/http/mv.js | 164 ++++++++++++++ test/http/read.js | 107 +++++++++ test/http/rm.js | 59 +++++ test/http/stat.js | 120 ++++++++++ test/http/touch.js | 114 ++++++++++ test/http/write.js | 315 ++++++++++++++++++++++++++ test/node.js | 5 + test/webworker.js | 3 + 55 files changed, 2589 insertions(+), 1142 deletions(-) create mode 100644 src/http/utils/joi.js create mode 100644 test/browser.js create mode 100644 test/cli/chmod.js delete mode 100644 test/cli/chmod.spec.js rename test/cli/{cp.spec.js => cp.js} (55%) rename test/cli/{flush.spec.js => flush.js} (93%) create mode 100644 test/cli/index.js rename test/cli/{ls.spec.js => ls.js} (98%) 
create mode 100644 test/cli/mkdir.js delete mode 100644 test/cli/mkdir.spec.js rename test/cli/{mv.spec.js => mv.js} (53%) rename test/cli/{read.spec.js => read.js} (76%) rename test/cli/{rm.spec.js => rm.js} (69%) rename test/cli/{stat.spec.js => stat.js} (83%) create mode 100644 test/cli/touch.js delete mode 100644 test/cli/touch.spec.js rename test/cli/{write.spec.js => write.js} (98%) rename test/core/{chmod.spec.js => chmod.js} (100%) rename test/core/{cp.spec.js => cp.js} (100%) rename test/core/{flush.spec.js => flush.js} (100%) create mode 100644 test/core/index.js rename test/core/{ls.spec.js => ls.js} (100%) rename test/core/{mkdir.spec.js => mkdir.js} (100%) rename test/core/{mv.spec.js => mv.js} (100%) create mode 100644 test/core/read.js delete mode 100644 test/core/read.spec.js rename test/core/{rm.spec.js => rm.js} (100%) rename test/core/{stat.spec.js => stat.js} (100%) rename test/core/{touch.spec.js => touch.js} (100%) rename test/core/{write.spec.js => write.js} (100%) create mode 100644 test/helpers/http.js create mode 100644 test/http/chmod.js create mode 100644 test/http/cp.js create mode 100644 test/http/flush.js create mode 100644 test/http/index.js create mode 100644 test/http/ls.js create mode 100644 test/http/mkdir.js create mode 100644 test/http/mv.js create mode 100644 test/http/read.js create mode 100644 test/http/rm.js create mode 100644 test/http/stat.js create mode 100644 test/http/touch.js create mode 100644 test/http/write.js create mode 100644 test/node.js create mode 100644 test/webworker.js diff --git a/package.json b/package.json index 78a44b7..096ad4d 100644 --- a/package.json +++ b/package.json @@ -5,8 +5,10 @@ "leadMaintainer": "Alex Potsides ", "main": "src/index.js", "browser": { + "@hapi/hapi": false, + "@hapi/joi": "joi-browser", "fs": false, - "@hapi/joi": "joi-browser" + "yargs": false }, "scripts": { "test": "aegir test", @@ -14,14 +16,14 @@ "test:cli": "aegir test -t node -f test/cli/**/*.js", "test:core": "aegir 
test -t node -f test/core/**/*.js", "test:http": "aegir test -t node -f test/http/**/*.js", - "test:browser": "aegir test -t browser -f test/core/**/*.js", - "test:webworker": "aegir test -t webworker -f test/core/**/*.js", + "test:browser": "aegir test -t browser", + "test:webworker": "aegir test -t webworker", "build": "aegir build", "lint": "aegir lint", "release": "aegir release", "release-minor": "aegir release --type minor", "release-major": "aegir release --type major", - "coverage": "aegir coverage", + "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", "dep-check": "aegir dep-check" }, "repository": { @@ -41,6 +43,7 @@ }, "homepage": "https://github.com/ipfs/js-ipfs-mfs#readme", "devDependencies": { + "@hapi/hapi": "^18.4.0", "aegir": "^20.0.0", "chai": "^4.2.0", "chai-as-promised": "^7.1.1", @@ -48,12 +51,15 @@ "detect-node": "^2.0.4", "detect-webworker": "^1.0.0", "dirty-chai": "^2.0.1", + "form-data": "^3.0.0", "ipfs-block-service": "~0.16.0", "ipfs-repo": "^0.30.1", "ipld": "~0.25.0", "it-all": "^1.0.1", "memdown": "^5.1.0", + "nyc": "^14.1.1", "sinon": "^7.5.0", + "stream-to-promise": "^2.2.0", "temp-write": "^4.0.0", "yargs": "^15.0.2", "yargs-promise": "^1.1.0" diff --git a/src/http/chmod.js b/src/http/chmod.js index 3f1f58c..3db9f35 100644 --- a/src/http/chmod.js +++ b/src/http/chmod.js @@ -1,19 +1,6 @@ 'use strict' -const originalJoi = require('@hapi/joi') -const Joi = originalJoi.extend({ - name: 'octalNumber', - base: originalJoi.number().min(0), - coerce: (value, state, options) => { - const val = parseInt(value, 8) - - if (isNaN(val) || val < 0) { - throw new Error('Invalid octal number') - } - - return val - } -}) +const Joi = require('./utils/joi') const mfsChmod = { method: 'POST', @@ -24,12 +11,20 @@ const mfsChmod = { } = request.server.app const { arg, + mode, + recursive, + codec, + hashAlg, flush, - mode + shardSplitThreshold } = request.query await ipfs.files.chmod(arg, mode, { - flush + recursive, + format: codec, + 
hashAlg, + flush, + shardSplitThreshold }) return h.response() @@ -43,7 +38,11 @@ const mfsChmod = { query: Joi.object().keys({ arg: Joi.string(), mode: Joi.octalNumber(), - flush: Joi.boolean().default(true) + recursive: Joi.boolean().default(false), + flush: Joi.boolean().default(true), + codec: Joi.string().default('dag-pb'), + hashAlg: Joi.string().default('sha2-256'), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) } } diff --git a/src/http/cp.js b/src/http/cp.js index f047af9..751fda0 100644 --- a/src/http/cp.js +++ b/src/http/cp.js @@ -12,6 +12,7 @@ const mfsCp = { const { arg, parents, + flush, format, hashAlg, shardSplitThreshold @@ -19,6 +20,7 @@ const mfsCp = { const args = arg.concat({ parents, + flush, format, hashAlg, shardSplitThreshold @@ -37,12 +39,15 @@ const mfsCp = { query: Joi.object().keys({ arg: Joi.array().items(Joi.string()).min(2), parents: Joi.boolean().default(false), + flush: Joi.boolean().default(true), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256') + hashAlg: Joi.string().default('sha2-256'), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) + .rename('codec', 'format') } } } diff --git a/src/http/flush.js b/src/http/flush.js index f362ace..47db6fe 100644 --- a/src/http/flush.js +++ b/src/http/flush.js @@ -2,6 +2,10 @@ const Joi = require('@hapi/joi') +const { + FILE_SEPARATOR +} = require('../core/utils/constants') + const mfsFlush = { method: 'POST', path: '/api/v0/files/flush', @@ -13,7 +17,7 @@ const mfsFlush = { arg } = request.query - await ipfs.files.flush.call(null, arg) + await ipfs.files.flush(arg || FILE_SEPARATOR, {}) return h.response() }, @@ -24,7 +28,7 @@ const mfsFlush = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.string().required() + arg: Joi.string() }) } } diff --git a/src/http/ls.js b/src/http/ls.js index 375a8d3..ff67ff2 100644 --- a/src/http/ls.js +++ b/src/http/ls.js @@ -10,7 +10,9 @@ 
const mapEntry = (entry) => { Name: entry.name, Type: entry.type, Size: entry.size, - Hash: entry.hash + Hash: entry.hash, + Mode: entry.mode, + Mtime: entry.mtime } } diff --git a/src/http/mkdir.js b/src/http/mkdir.js index 6796e1a..16f388b 100644 --- a/src/http/mkdir.js +++ b/src/http/mkdir.js @@ -1,6 +1,6 @@ 'use strict' -const Joi = require('@hapi/joi') +const Joi = require('./utils/joi') const mfsMkdir = { method: 'POST', @@ -11,6 +11,8 @@ const mfsMkdir = { } = request.server.app const { arg, + mode, + mtime, parents, format, hashAlg, @@ -20,6 +22,8 @@ const mfsMkdir = { } = request.query await ipfs.files.mkdir(arg, { + mode, + mtime, parents, format, hashAlg, @@ -38,6 +42,8 @@ const mfsMkdir = { }, query: Joi.object().keys({ arg: Joi.string().required(), + mode: Joi.octalNumber(), + mtime: Joi.number().integer(), parents: Joi.boolean().default(false), format: Joi.string().valid([ 'dag-pb', @@ -48,7 +54,8 @@ const mfsMkdir = { 0, 1 ]).default(0), - flush: Joi.boolean().default(true) + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) .rename('p', 'parents', { override: true, diff --git a/src/http/mv.js b/src/http/mv.js index aeee443..2ba60df 100644 --- a/src/http/mv.js +++ b/src/http/mv.js @@ -11,14 +11,20 @@ const mfsMv = { } = request.server.app const { arg, + recursive, parents, format, hashAlg, + cidVersion, + flush, shardSplitThreshold } = request.query const args = arg.concat({ + recursive, parents, + cidVersion, + flush, format, hashAlg, shardSplitThreshold @@ -36,12 +42,19 @@ const mfsMv = { }, query: Joi.object().keys({ arg: Joi.array().items(Joi.string()).min(2), + recursive: Joi.boolean().default(false), parents: Joi.boolean().default(false), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' ]).default('dag-pb'), - hashAlg: Joi.string().default('sha2-256') + hashAlg: Joi.string().default('sha2-256'), + cidVersion: Joi.number().integer().valid([ + 0, + 1 + ]).default(0), + flush: 
Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) } } diff --git a/src/http/read.js b/src/http/read.js index 6382a93..6be1aee 100644 --- a/src/http/read.js +++ b/src/http/read.js @@ -15,15 +15,13 @@ const mfsRead = { const { arg, offset, - length, - count + length } = request.query const responseStream = await new Promise((resolve, reject) => { const stream = ipfs.files.readReadableStream(arg, { offset, - length, - count + length }) stream.once('data', (chunk) => { @@ -61,6 +59,10 @@ const mfsRead = { override: true, ignoreUndefined: true }) + .rename('count', 'length', { + override: true, + ignoreUndefined: true + }) } } } diff --git a/src/http/touch.js b/src/http/touch.js index db26610..10f6488 100644 --- a/src/http/touch.js +++ b/src/http/touch.js @@ -14,13 +14,17 @@ const mfsTouch = { flush, shardSplitThreshold, cidVersion, + format, + hashAlg, mtime } = request.query await ipfs.files.touch(arg, mtime, { flush, shardSplitThreshold, - cidVersion + cidVersion, + format, + hashAlg }) return h.response() @@ -32,14 +36,19 @@ const mfsTouch = { stripUnknown: true }, query: Joi.object().keys({ - arg: Joi.array().items(Joi.string()).min(2), + arg: Joi.string().required(), mtime: Joi.number().integer().min(0), - flush: Joi.boolean().default(true), - shardSplitThreshold: Joi.number().integer().min(0).default(1000), + format: Joi.string().valid([ + 'dag-pb', + 'dag-cbor' + ]).default('dag-pb'), + hashAlg: Joi.string().default('sha2-256'), cidVersion: Joi.number().integer().valid([ 0, 1 - ]).default(0) + ]).default(0), + flush: Joi.boolean().default(true), + shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) } } diff --git a/src/http/utils/joi.js b/src/http/utils/joi.js new file mode 100644 index 0000000..4121dc0 --- /dev/null +++ b/src/http/utils/joi.js @@ -0,0 +1,22 @@ +'use strict' + +const originalJoi = require('@hapi/joi') +const Joi = originalJoi.extend({ + name: 'octalNumber', + base: 
originalJoi.number().min(0), + coerce: (value, state, options) => { + if (value === undefined) { + return + } + + const val = parseInt(value, 8) + + if (isNaN(val) || val < 0) { + throw new Error('Invalid octal number') + } + + return val + } +}) + +module.exports = Joi diff --git a/src/http/write.js b/src/http/write.js index 5962017..64d3a7e 100644 --- a/src/http/write.js +++ b/src/http/write.js @@ -18,6 +18,7 @@ const mfsWrite = { create, truncate, rawLeaves, + reduceSingleLeafToSelf, cidVersion, hashAlg, format, @@ -44,6 +45,7 @@ const mfsWrite = { create, truncate, rawLeaves, + reduceSingleLeafToSelf, cidVersion, hashAlg, format, @@ -51,7 +53,9 @@ const mfsWrite = { progress, strategy, flush, - shardSplitThreshold + shardSplitThreshold, + mode: entry.mode, + mtime: entry.mtime }) } } @@ -79,9 +83,7 @@ const mfsWrite = { 0, 1 ]).default(0), - hashAlg: Joi.string().valid([ - 'sha2-256' - ]).default('sha2-256'), + hashAlg: Joi.string().default('sha2-256'), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' @@ -94,6 +96,7 @@ const mfsWrite = { 'trickle' ]).default('trickle'), flush: Joi.boolean().default(true), + reduceSingleLeafToSelf: Joi.boolean().default(false), shardSplitThreshold: Joi.number().integer().min(0).default(1000) }) .rename('o', 'offset', { diff --git a/test/browser.js b/test/browser.js new file mode 100644 index 0000000..ed5d991 --- /dev/null +++ b/test/browser.js @@ -0,0 +1,3 @@ +'use strict' + +require('./core') diff --git a/test/cli/chmod.js b/test/cli/chmod.js new file mode 100644 index 0000000..c7bd8d5 --- /dev/null +++ b/test/cli/chmod.js @@ -0,0 +1,164 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + 
options[key] = modification[key] + }) + + return options +} + +describe('chmod', () => { + const path = '/foo' + const mode = '0777' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + chmod: sinon.stub() + } + } + }) + + it('should update the mode for a file', async () => { + await cli(`files chmod ${mode} ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions() + ]) + }) + + it('should update the mode recursively', async () => { + await cli(`files chmod ${mode} --recursive ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should update the mode recursively (short option)', async () => { + await cli(`files chmod ${mode} -r ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should update the mode without flushing', async () => { + await cli(`files chmod ${mode} --flush false ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mode without flushing (short option)', async () => { + await cli(`files chmod ${mode} -f false ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mode with a different codec', async () => { + await cli(`files chmod ${mode} --codec dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + 
expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode with a different codec (short option)', async () => { + await cli(`files chmod ${mode} -c dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode a with different hash algorithm', async () => { + await cli(`files chmod ${mode} --hash-alg sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode a with different hash algorithm (short option)', async () => { + await cli(`files chmod ${mode} -h sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode with a shard split threshold', async () => { + await cli('files chmod 0777 --shard-split-threshold 10 /foo', { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/cli/chmod.spec.js b/test/cli/chmod.spec.js deleted file mode 100644 index ff0c43b..0000000 --- a/test/cli/chmod.spec.js +++ /dev/null @@ -1,178 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') -const sinon = require('sinon') - -describe('cli chmod', () => { - let ipfs - - beforeEach(() => { - ipfs = { - files: { - chmod: sinon.stub() - } - } - }) - - it('should 
update the mode for a file', async () => { - await cli('files chmod 0777 /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode recursively', async () => { - await cli('files chmod 0777 --recursive /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: true, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode recursively (short option)', async () => { - await cli('files chmod 0777 -r /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: true, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode without flushing', async () => { - await cli('files chmod 0777 --flush false /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode without flushing (short option)', async () => { - await cli('files chmod 0777 -f false /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different codec', async () => { - await 
cli('files chmod 0777 --codec dag-foo /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different codec (short option)', async () => { - await cli('files chmod 0777 -c dag-foo /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different hash algorithm', async () => { - await cli('files chmod 0777 --hash-alg sha3-256 /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different hash algorithm (short option)', async () => { - await cli('files chmod 0777 -h sha3-256 /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode with a shard split threshold', async () => { - await cli('files chmod 0777 --shard-split-threshold 10 /foo', { ipfs }) - - expect(ipfs.files.chmod.callCount).to.equal(1) - expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ - '/foo', - parseInt('0777', 8), { - recursive: false, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 10 - } - ]) - }) -}) diff --git a/test/cli/cp.spec.js 
b/test/cli/cp.js similarity index 55% rename from test/cli/cp.spec.js rename to test/cli/cp.js index 31c4ef7..953fc16 100644 --- a/test/cli/cp.spec.js +++ b/test/cli/cp.js @@ -5,7 +5,25 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') -describe('cli cp', () => { +function defaultOptions (modification = {}) { + const options = { + parents: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('cp', () => { + const source = 'source' + const dest = 'dest' let ipfs beforeEach(() => { @@ -17,154 +35,104 @@ describe('cli cp', () => { }) it('should copy files', async () => { - const source = 'source' - const dest = 'source' - await cli(`files cp ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions() ]) }) - it('should copy files and create intermediate directrories', async () => { - const source = 'source' - const dest = 'source' - + it('should copy files and create intermediate directories', async () => { await cli(`files cp --parents ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: true, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + parents: true + }) ]) }) - it('should copy files and create intermediate directrories (short option)', async () => { - const source = 'source' - const dest = 'source' - + it('should copy files and create intermediate directories (short option)', async () => { await cli(`files cp --parents ${source} ${dest}`, { ipfs }) 
expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: true, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + parents: true + }) ]) }) it('should copy files with a different codec', async () => { - const source = 'source' - const dest = 'source' - await cli(`files cp --codec dag-foo ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + format: 'dag-foo' + }) ]) }) it('should copy files with a different codec (short option)', async () => { - const source = 'source' - const dest = 'source' - await cli(`files cp -c dag-foo ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + format: 'dag-foo' + }) ]) }) it('should copy files with a different hash algorithm', async () => { - const source = 'source' - const dest = 'source' - await cli(`files cp --hash-alg sha3-256 ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) ]) }) it('should copy files with a different hash algorithm (short option)', async () => { - const source = 'source' - const dest = 'source' - await cli(`files cp -h sha3-256 ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, 
{ - parents: false, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) ]) }) it('should copy files with a different shard split threshold', async () => { - const source = 'source' - const dest = 'source' - await cli(`files cp --shard-split-threshold 10 ${source} ${dest}`, { ipfs }) expect(ipfs.files.cp.callCount).to.equal(1) expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, + dest, + defaultOptions({ shardSplitThreshold: 10 - } + }) ]) }) }) diff --git a/test/cli/flush.spec.js b/test/cli/flush.js similarity index 93% rename from test/cli/flush.spec.js rename to test/cli/flush.js index a5baa14..f40a0d7 100644 --- a/test/cli/flush.spec.js +++ b/test/cli/flush.js @@ -5,7 +5,8 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') -describe('cli flush', () => { +describe('flush', () => { + const path = '/foo' let ipfs beforeEach(() => { @@ -17,8 +18,6 @@ describe('cli flush', () => { }) it('should flush a path', async () => { - const path = '/foo' - await cli(`files flush ${path}`, { ipfs }) expect(ipfs.files.flush.callCount).to.equal(1) diff --git a/test/cli/index.js b/test/cli/index.js new file mode 100644 index 0000000..0f1fff2 --- /dev/null +++ b/test/cli/index.js @@ -0,0 +1,16 @@ +/* eslint-env mocha */ +'use strict' + +describe('cli', () => { + require('./chmod') + require('./cp') + require('./flush') + require('./ls') + require('./mkdir') + require('./mv') + require('./read') + require('./rm') + require('./stat') + require('./touch') + require('./write') +}) diff --git a/test/cli/ls.spec.js b/test/cli/ls.js similarity index 98% rename from test/cli/ls.spec.js rename to test/cli/ls.js index 5ae966c..478f7fe 100644 --- a/test/cli/ls.spec.js +++ b/test/cli/ls.js @@ -5,8 +5,13 @@ const expect = require('../helpers/chai') 
const cli = require('../helpers/cli') const sinon = require('sinon') const values = require('pull-stream/sources/values') +const isNode = require('detect-node') + +describe('ls', () => { + if (!isNode) { + return + } -describe('cli ls', () => { let ipfs let print let output diff --git a/test/cli/mkdir.js b/test/cli/mkdir.js new file mode 100644 index 0000000..633c668 --- /dev/null +++ b/test/cli/mkdir.js @@ -0,0 +1,209 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = require('../helpers/cli') +const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('mkdir', () => { + if (!isNode) { + return + } + + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + mkdir: sinon.stub() + } + } + }) + + it('should make a directory', async () => { + await cli(`files mkdir ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + }) + + it('should make a directory with parents', async () => { + await cli(`files mkdir --parents ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should make a directory with parents (short option)', async () => { + await cli(`files mkdir -p ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should make a directory with a different 
cid version', async () => { + await cli(`files mkdir --cid-version 5 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + cidVersion: 5 + }) + ]) + }) + + it('should make a directory with a different cid version (shortish option)', async () => { + await cli(`files mkdir --cid-ver 5 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + cidVersion: 5 + }) + ]) + }) + + it('should make a directory with a different codec', async () => { + await cli(`files mkdir --codec dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should make a directory with a different codec (short option)', async () => { + await cli(`files mkdir -c dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should make a directory with a different hash algorithm', async () => { + await cli(`files mkdir --hash-alg sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should make a directory with a different hash algorithm (short option)', async () => { + await cli(`files mkdir -h sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should make a directory without flushing', async () => { + await cli(`files mkdir --flush false ${path}`, { ipfs }) + + 
expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should make a directory without flushing (short option)', async () => { + await cli(`files mkdir -f false ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should make a directory a different shard split threshold', async () => { + await cli(`files mkdir --shard-split-threshold 10 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) + + it('should make a directory a different mode', async () => { + await cli(`files mkdir --mode 0111 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mode: parseInt('0111', 8) + }) + ]) + }) + + it('should make a directory a different mtime', async () => { + await cli(`files mkdir --mtime 5 ${path}`, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime: 5 + }) + ]) + }) +}) diff --git a/test/cli/mkdir.spec.js b/test/cli/mkdir.spec.js deleted file mode 100644 index b3eac9f..0000000 --- a/test/cli/mkdir.spec.js +++ /dev/null @@ -1,270 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') -const sinon = require('sinon') - -describe('cli mkdir', () => { - let ipfs - - beforeEach(() => { - ipfs = { - files: { - mkdir: sinon.stub() - } - } - }) - - it('should make a directory', async () => { - await cli('files mkdir /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - 
expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with parents', async () => { - await cli('files mkdir --parents /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: true, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with parents (short option)', async () => { - await cli('files mkdir -p /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: true, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with a different cid version', async () => { - await cli('files mkdir --cid-version 5 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 5, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with a different cid version (shortish option)', async () => { - await cli('files mkdir --cid-ver 5 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 5, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with a different codec', 
async () => { - await cli('files mkdir --codec dag-foo /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with a different codec (short option)', async () => { - await cli('files mkdir -c dag-foo /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with a different hash algorithm', async () => { - await cli('files mkdir --hash-alg sha3-256 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory with a different hash algorithm (short option)', async () => { - await cli('files mkdir -h sha3-256 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory without flushing', async () => { - await cli('files mkdir --flush false /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 
'sha2-256', - flush: false, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory without flushing (short option)', async () => { - await cli('files mkdir -f false /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory a different shard split threshold', async () => { - await cli('files mkdir --shard-split-threshold 10 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 10, - mode: undefined, - mtime: undefined - } - ]) - }) - - it('should make a directory a different mode', async () => { - await cli('files mkdir --mode 0111 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: parseInt('0111', 8), - mtime: undefined - } - ]) - }) - - it('should make a directory a different mtime', async () => { - await cli('files mkdir --mtime 5 /foo', { ipfs }) - - expect(ipfs.files.mkdir.callCount).to.equal(1) - expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ - '/foo', { - parents: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000, - mode: undefined, - mtime: 5 - } - ]) - }) -}) diff --git a/test/cli/mv.spec.js b/test/cli/mv.js similarity index 53% rename from test/cli/mv.spec.js rename to test/cli/mv.js index bd010fd..ca47005 100644 --- 
a/test/cli/mv.spec.js +++ b/test/cli/mv.js @@ -4,8 +4,33 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} -describe('cli mv', () => { +describe('mv', () => { + if (!isNode) { + return + } + + const source = '/src' + const dest = '/dest' let ipfs beforeEach(() => { @@ -17,296 +42,182 @@ describe('cli mv', () => { }) it('should move an entry', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions() ]) }) it('should move an entry and create parents', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --parents ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: true, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + parents: true + }) ]) }) it('should move an entry and create parents (short option)', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv -p ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: true, - recursive: false, - 
cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + parents: true + }) ]) }) it('should move an entry recursively', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --recursive ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: true, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + recursive: true + }) ]) }) it('should move an entry recursively (short option)', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv -r ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: true, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + recursive: true + }) ]) }) it('should make a directory with a different cid version', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --cid-version 5 ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 5, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + cidVersion: 5 + }) ]) }) it('should make a directory with a different cid version (shortish option)', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --cid-ver 5 ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 
5, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + cidVersion: 5 + }) ]) }) it('should make a directory with a different codec', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --codec dag-foo ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + format: 'dag-foo' + }) ]) }) it('should make a directory with a different codec (short option)', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv -c dag-foo ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + format: 'dag-foo' + }) ]) }) it('should make a directory with a different hash algorithm', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --hash-alg sha3-256 ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) ]) }) it('should make a directory with a different hash algorithm (short option)', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv -h sha3-256 ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - 
parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) ]) }) it('should make a directory without flushing', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --flush false ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + flush: false + }) ]) }) it('should make a directory without flushing (short option)', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv -f false ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000 - } + dest, + defaultOptions({ + flush: false + }) ]) }) it('should make a directory a different shard split threshold', async () => { - const source = '/src' - const dest = '/dest' - await cli(`files mv --shard-split-threshold 10 ${source} ${dest}`, { ipfs }) expect(ipfs.files.mv.callCount).to.equal(1) expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ source, - dest, { - parents: false, - recursive: false, - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, + dest, + defaultOptions({ shardSplitThreshold: 10 - } + }) ]) }) }) diff --git a/test/cli/read.spec.js b/test/cli/read.js similarity index 76% rename from test/cli/read.spec.js rename to test/cli/read.js index d739f99..3c5620f 100644 --- a/test/cli/read.spec.js +++ b/test/cli/read.js @@ -5,8 +5,27 @@ const expect = require('../helpers/chai') const cli = 
require('../helpers/cli') const sinon = require('sinon') const values = require('pull-stream/sources/values') +const isNode = require('detect-node') -describe('cli read', () => { +function defaultOptions (modification = {}) { + const options = { + offset: undefined, + length: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('read', () => { + if (!isNode) { + return + } + + const path = '/foo' let ipfs let print let output @@ -24,38 +43,32 @@ describe('cli read', () => { }) it('should read a path', async () => { - const path = '/foo' - await cli(`files read ${path}`, { ipfs, print }) expect(ipfs.files.readPullStream.callCount).to.equal(1) expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ - path, { - offset: undefined, - length: undefined - } + path, + defaultOptions() ]) expect(output).to.equal('hello world') }) it('should read a path with an offset', async () => { - const path = '/foo' const offset = 5 await cli(`files read --offset ${offset} ${path}`, { ipfs, print }) expect(ipfs.files.readPullStream.callCount).to.equal(1) expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ - path, { - offset, - length: undefined - } + path, + defaultOptions({ + offset + }) ]) expect(output).to.equal('hello world') }) it('should read a path with an offset (short option)', async () => { - const path = '/foo' const offset = 5 await cli(`files read -o ${offset} ${path}`, { ipfs, print }) @@ -70,34 +83,32 @@ describe('cli read', () => { expect(output).to.equal('hello world') }) - it('should read a path with an length', async () => { - const path = '/foo' + it('should read a path with a length', async () => { const length = 5 await cli(`files read --length ${length} ${path}`, { ipfs, print }) expect(ipfs.files.readPullStream.callCount).to.equal(1) expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ - path, { - offset: undefined, + path, + defaultOptions({ 
length - } + }) ]) expect(output).to.equal('hello world') }) - it('should read a path with an length (short option)', async () => { - const path = '/foo' + it('should read a path with a length (short option)', async () => { const length = 5 await cli(`files read -l ${length} ${path}`, { ipfs, print }) expect(ipfs.files.readPullStream.callCount).to.equal(1) expect(ipfs.files.readPullStream.getCall(0).args).to.deep.equal([ - path, { - offset: undefined, + path, + defaultOptions({ length - } + }) ]) expect(output).to.equal('hello world') }) diff --git a/test/cli/rm.spec.js b/test/cli/rm.js similarity index 69% rename from test/cli/rm.spec.js rename to test/cli/rm.js index e36de5b..1370e4b 100644 --- a/test/cli/rm.spec.js +++ b/test/cli/rm.js @@ -4,8 +4,26 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') +const isNode = require('detect-node') -describe('cli rm', () => { +function defaultOptions (modification = {}) { + const options = { + recursive: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('rm', () => { + if (!isNode) { + return + } + + const path = '/foo' let ipfs beforeEach(() => { @@ -17,41 +35,36 @@ describe('cli rm', () => { }) it('should remove a path', async () => { - const path = '/foo' - await cli(`files rm ${path}`, { ipfs }) expect(ipfs.files.rm.callCount).to.equal(1) expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ - path, { - recursive: false - } + path, + defaultOptions() ]) }) it('should remove a path recursively', async () => { - const path = '/foo' - await cli(`files rm --recursive ${path}`, { ipfs }) expect(ipfs.files.rm.callCount).to.equal(1) expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ - path, { + path, + defaultOptions({ recursive: true - } + }) ]) }) it('should remove a path recursively (short option)', async () => { - const path = '/foo' - await cli(`files rm -r ${path}`, { ipfs }) 
expect(ipfs.files.rm.callCount).to.equal(1) expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ - path, { + path, + defaultOptions({ recursive: true - } + }) ]) }) }) diff --git a/test/cli/stat.spec.js b/test/cli/stat.js similarity index 83% rename from test/cli/stat.spec.js rename to test/cli/stat.js index 6c275d4..2ff7736 100644 --- a/test/cli/stat.spec.js +++ b/test/cli/stat.js @@ -4,8 +4,26 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') +const isNode = require('detect-node') -describe('cli stat', () => { +function defaultOptions (modification = {}) { + const options = { + withLocal: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('stat', () => { + if (!isNode) { + return + } + + const path = '/foo' let ipfs let print let output @@ -31,113 +49,93 @@ describe('cli stat', () => { }) it('should stat a path', async () => { - const path = '/foo' - await cli(`files stat ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { - withLocal: false - } + path, + defaultOptions() ]) expect(output).to.include('CumulativeSize') }) it('should stat a path with local', async () => { - const path = '/foo' - await cli(`files stat --with-local ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { + path, + defaultOptions({ withLocal: true - } + }) ]) expect(output).to.include('CumulativeSize') }) it('should stat a path with local (short option)', async () => { - const path = '/foo' - await cli(`files stat -l ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { + path, + defaultOptions({ withLocal: true - } + }) ]) expect(output).to.include('CumulativeSize') }) it('should stat a path 
and only show hashes', async () => { - const path = '/foo' - await cli(`files stat --hash ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { - withLocal: false - } + path, + defaultOptions() ]) expect(output).to.equal('stats-hash\n') }) it('should stat a path and only show hashes (short option)', async () => { - const path = '/foo' - await cli(`files stat -h ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { - withLocal: false - } + path, + defaultOptions() ]) expect(output).to.equal('stats-hash\n') }) it('should stat a path and only show sizes', async () => { - const path = '/foo' - await cli(`files stat --size ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { - withLocal: false - } + path, + defaultOptions() ]) expect(output).to.equal('stats-size\n') }) it('should stat a path and only show sizes (short option)', async () => { - const path = '/foo' - await cli(`files stat -s ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { - withLocal: false - } + path, + defaultOptions() ]) expect(output).to.equal('stats-size\n') }) it('should stat a path with format option', async () => { - const path = '/foo' - await cli(`files stat --format ' ' ${path}`, { ipfs, print }) expect(ipfs.files.stat.callCount).to.equal(1) expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ - path, { - withLocal: false - } + path, + defaultOptions() ]) expect(output).to.equal('stats-mode stats-type\n') }) diff --git a/test/cli/touch.js b/test/cli/touch.js new file mode 100644 index 0000000..6ad7a9d --- /dev/null +++ b/test/cli/touch.js @@ -0,0 +1,143 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const cli = 
require('../helpers/cli') +const sinon = require('sinon') +const isNode = require('detect-node') + +function defaultOptions (modification = {}) { + const options = { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('touch', () => { + if (!isNode) { + return + } + + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + let ipfs + + beforeEach(() => { + ipfs = { + files: { + touch: sinon.stub() + } + } + }) + + it('should update the mtime for a file', async () => { + await cli(`files touch -m ${mtime} ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions() + ]) + }) + + it('should update the mtime without flushing', async () => { + await cli(`files touch -m ${mtime} --flush false ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mtime without flushing (short option)', async () => { + await cli(`files touch -m ${mtime} -f false ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mtime with a different codec', async () => { + await cli(`files touch -m ${mtime} --codec dag-foo ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mtime with a different codec (short option)', async () => { + await cli(`files touch -m ${mtime} -c dag-foo ${path}`, { ipfs }) + + 
expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mtime with a different hash algorithm', async () => { + await cli(`files touch -m ${mtime} --hash-alg sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mtime with a different hash algorithm (short option)', async () => { + await cli(`files touch -m ${mtime} -h sha3-256 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mtime with a shard split threshold', async () => { + await cli(`files touch -m ${mtime} --shard-split-threshold 10 ${path}`, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/cli/touch.spec.js b/test/cli/touch.spec.js deleted file mode 100644 index 3b2a70b..0000000 --- a/test/cli/touch.spec.js +++ /dev/null @@ -1,170 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('../helpers/chai') -const cli = require('../helpers/cli') -const sinon = require('sinon') - -describe('cli touch', () => { - let ipfs - - beforeEach(() => { - ipfs = { - files: { - touch: sinon.stub() - } - } - }) - - it('should update the mtime for a file', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - 
format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode without flushing', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} --flush false ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode without flushing (short option)', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} -f false ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: false, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different codec', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} --codec dag-foo ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different codec (short option)', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} -c dag-foo ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-foo', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different hash algorithm', async () => { - 
const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} --hash-alg sha3-256 ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode a different hash algorithm (short option)', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} -h sha3-256 ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha3-256', - flush: true, - shardSplitThreshold: 1000 - } - ]) - }) - - it('should update the mode with a shard split threshold', async () => { - const path = '/foo' - const mtime = parseInt(Date.now() / 1000) - - await cli(`files touch -m ${mtime} --shard-split-threshold 10 ${path}`, { ipfs }) - - expect(ipfs.files.touch.callCount).to.equal(1) - expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ - path, - mtime, { - cidVersion: 0, - format: 'dag-pb', - hashAlg: 'sha2-256', - flush: true, - shardSplitThreshold: 10 - } - ]) - }) -}) diff --git a/test/cli/write.spec.js b/test/cli/write.js similarity index 98% rename from test/cli/write.spec.js rename to test/cli/write.js index da51c2c..0acc073 100644 --- a/test/cli/write.spec.js +++ b/test/cli/write.js @@ -4,6 +4,7 @@ const expect = require('../helpers/chai') const cli = require('../helpers/cli') const sinon = require('sinon') +const isNode = require('detect-node') function defaultOptions (modification = {}) { const options = { @@ -32,7 +33,11 @@ function defaultOptions (modification = {}) { return options } -describe('cli write', () => { +describe('write', () => { + if (!isNode) { + return + } + const stdin = 'stdin' const getStdin 
= () => stdin let ipfs @@ -148,7 +153,7 @@ describe('cli write', () => { ]) }) - it('should write to a file with an length', async () => { + it('should write to a file with a length', async () => { const path = '/foo' await cli(`files write --length 10 ${path}`, { ipfs, getStdin }) diff --git a/test/core/chmod.spec.js b/test/core/chmod.js similarity index 100% rename from test/core/chmod.spec.js rename to test/core/chmod.js diff --git a/test/core/cp.spec.js b/test/core/cp.js similarity index 100% rename from test/core/cp.spec.js rename to test/core/cp.js diff --git a/test/core/flush.spec.js b/test/core/flush.js similarity index 100% rename from test/core/flush.spec.js rename to test/core/flush.js diff --git a/test/core/index.js b/test/core/index.js new file mode 100644 index 0000000..f0baeb2 --- /dev/null +++ b/test/core/index.js @@ -0,0 +1,16 @@ +/* eslint-env mocha */ +'use strict' + +describe('core', () => { + require('./chmod') + require('./cp') + require('./flush') + require('./ls') + require('./mkdir') + require('./mv') + require('./read') + require('./rm') + require('./stat') + require('./touch') + require('./write') +}) diff --git a/test/core/ls.spec.js b/test/core/ls.js similarity index 100% rename from test/core/ls.spec.js rename to test/core/ls.js diff --git a/test/core/mkdir.spec.js b/test/core/mkdir.js similarity index 100% rename from test/core/mkdir.spec.js rename to test/core/mkdir.js diff --git a/test/core/mv.spec.js b/test/core/mv.js similarity index 100% rename from test/core/mv.spec.js rename to test/core/mv.js diff --git a/test/core/read.js b/test/core/read.js new file mode 100644 index 0000000..e94d68b --- /dev/null +++ b/test/core/read.js @@ -0,0 +1,149 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +chai.use(require('dirty-chai')) +const expect = chai.expect +const createMfs = require('../helpers/create-mfs') +const createShardedDirectory = require('../helpers/create-sharded-directory') +const crypto = 
require('crypto') +const streamToBuffer = require('../helpers/stream-to-buffer') + +describe('read', () => { + let mfs + const smallFile = crypto.randomBytes(13) + + before(async () => { + mfs = await createMfs() + }) + + it('reads a small file', async () => { + const filePath = '/small-file.txt' + + await mfs.write(filePath, smallFile, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(filePath)) + + expect(buffer).to.deep.equal(smallFile) + }) + + it('reads a file with an offset', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const offset = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + offset + })) + + expect(buffer).to.deep.equal(data.slice(offset)) + }) + + it('reads a file with a length', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + length + })) + + expect(buffer).to.deep.equal(data.slice(0, length)) + }) + + it('reads a file with a legacy count argument', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + count: length + })) + + expect(buffer).to.deep.equal(data.slice(0, length)) + }) + + it('reads a file with an offset and a length', async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const offset = 10 + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + offset, + length + })) + + expect(buffer).to.deep.equal(data.slice(offset, offset + length)) + }) + + it('reads a file with an offset and a legacy count argument', 
async () => { + const path = `/some-file-${Math.random()}.txt` + const data = crypto.randomBytes(100) + const offset = 10 + const length = 10 + + await mfs.write(path, data, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(path, { + offset, + count: length + })) + + expect(buffer).to.deep.equal(data.slice(offset, offset + length)) + }) + + it('refuses to read a directory', async () => { + const path = '/' + + try { + await streamToBuffer(mfs.read(path)) + throw new Error('Should have errored on trying to read a directory') + } catch (err) { + expect(err.code).to.equal('ERR_NOT_FILE') + } + }) + + it('refuses to read a non-existent file', async () => { + try { + await streamToBuffer(mfs.read(`/file-${Math.random()}.txt`)) + throw new Error('Should have errored on non-existent file') + } catch (err) { + expect(err.code).to.equal('ERR_NOT_FOUND') + } + }) + + it('reads file from inside a sharded directory', async () => { + const shardedDirPath = await createShardedDirectory(mfs) + const filePath = `${shardedDirPath}/file-${Math.random()}.txt` + const content = Buffer.from([0, 1, 2, 3, 4]) + + await mfs.write(filePath, content, { + create: true + }) + + const buffer = await streamToBuffer(mfs.read(filePath)) + + expect(buffer).to.deep.equal(content) + }) +}) diff --git a/test/core/read.spec.js b/test/core/read.spec.js deleted file mode 100644 index 252f490..0000000 --- a/test/core/read.spec.js +++ /dev/null @@ -1,151 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -chai.use(require('dirty-chai')) -const expect = chai.expect -const createMfs = require('../helpers/create-mfs') -const createShardedDirectory = require('../helpers/create-sharded-directory') -const crypto = require('crypto') -const streamToBuffer = require('../helpers/stream-to-buffer') - -describe('read', () => { - let mfs - const smallFile = crypto.randomBytes(13) - - before(async () => { - mfs = await createMfs() - }) - - describe('read', () => { - 
it('reads a small file', async () => { - const filePath = '/small-file.txt' - - await mfs.write(filePath, smallFile, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(filePath)) - - expect(buffer).to.deep.equal(smallFile) - }) - - it('reads a file with an offset', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset - })) - - expect(buffer).to.deep.equal(data.slice(offset)) - }) - - it('reads a file with a length', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - length - })) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - - it('reads a file with a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - count: length - })) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - - it('reads a file with an offset and a length', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(path, { - offset, - length - })) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - - it('reads a file with an offset and a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = crypto.randomBytes(100) - const offset = 10 - const length = 10 - - await mfs.write(path, data, { - create: true - }) - - const buffer = await 
streamToBuffer(mfs.read(path, { - offset, - count: length - })) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - - it('refuses to read a directory', async () => { - const path = '/' - - try { - await streamToBuffer(mfs.read(path)) - throw new Error('Should have errored on trying to read a directory') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FILE') - } - }) - - it('refuses to read a non-existent file', async () => { - try { - await streamToBuffer(mfs.read(`/file-${Math.random()}.txt`)) - throw new Error('Should have errored on non-existent file') - } catch (err) { - expect(err.code).to.equal('ERR_NOT_FOUND') - } - }) - - it('reads file from inside a sharded directory', async () => { - const shardedDirPath = await createShardedDirectory(mfs) - const filePath = `${shardedDirPath}/file-${Math.random()}.txt` - const content = Buffer.from([0, 1, 2, 3, 4]) - - await mfs.write(filePath, content, { - create: true - }) - - const buffer = await streamToBuffer(mfs.read(filePath)) - - expect(buffer).to.deep.equal(content) - }) - }) -}) diff --git a/test/core/rm.spec.js b/test/core/rm.js similarity index 100% rename from test/core/rm.spec.js rename to test/core/rm.js diff --git a/test/core/stat.spec.js b/test/core/stat.js similarity index 100% rename from test/core/stat.spec.js rename to test/core/stat.js diff --git a/test/core/touch.spec.js b/test/core/touch.js similarity index 100% rename from test/core/touch.spec.js rename to test/core/touch.js diff --git a/test/core/write.spec.js b/test/core/write.js similarity index 100% rename from test/core/write.spec.js rename to test/core/write.js diff --git a/test/helpers/http.js b/test/helpers/http.js new file mode 100644 index 0000000..e8f58fd --- /dev/null +++ b/test/helpers/http.js @@ -0,0 +1,15 @@ +'use strict' + +const Hapi = require('@hapi/hapi') +const routes = require('../../src/http') + +module.exports = (request, { ipfs }) => { + const server = Hapi.server() + server.app.ipfs = ipfs 
+ + for (const key in routes) { + server.route(routes[key]) + } + + return server.inject(request) +} diff --git a/test/http/chmod.js b/test/http/chmod.js new file mode 100644 index 0000000..da04536 --- /dev/null +++ b/test/http/chmod.js @@ -0,0 +1,130 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + recursive: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('chmod', () => { + const path = '/foo' + const mode = '0654' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + chmod: sinon.stub() + } + } + }) + + it('should update the mode for a file', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/chmod?arg=${path}&mode=${mode}` + }, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions() + ]) + }) + + it('should update the mode recursively', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&recursive=true` + }, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should update the mode without flushing', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&flush=false` + }, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mode a different codec', async () => { + await 
http({ + method: 'POST', + url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&codec=dag-foo` + }, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + format: 'dag-foo' + }) + ]) + }) + + it('should update the mode a different hash algorithm', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&hashAlg=sha3-256` + }, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mode with a shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/chmod?arg=${path}&mode=${mode}&shardSplitThreshold=10` + }, { ipfs }) + + expect(ipfs.files.chmod.callCount).to.equal(1) + expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ + path, + parseInt(mode, 8), + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/http/cp.js b/test/http/cp.js new file mode 100644 index 0000000..4b2a69c --- /dev/null +++ b/test/http/cp.js @@ -0,0 +1,114 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('cp', () => () => { + const source = 'source' + const dest = 'dest' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + cp: sinon.stub() + } + } + }) + + it('should copy files', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/cp?arg=${source}&arg=${dest}` + }, { ipfs }) + + 
expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions() + ]) + }) + + it('should copy files and create intermediate directories', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/cp?arg=${source}&arg=${dest}&parents=true` + }, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should copy files with a different codec', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/cp?arg=${source}&arg=${dest}&codec=dag-cbor` + }, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + format: 'dag-cbor' + }) + ]) + }) + + it('should copy files with a different hash algorithm', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/cp?arg=${source}&arg=${dest}&hashAlg=sha3-256` + }, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should copy files with a different shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/cp?arg=${source}&arg=${dest}&shardSplitThreshold=10` + }, { ipfs }) + + expect(ipfs.files.cp.callCount).to.equal(1) + expect(ipfs.files.cp.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/http/flush.js b/test/http/flush.js new file mode 100644 index 0000000..cfd28f4 --- /dev/null +++ b/test/http/flush.js @@ -0,0 +1,45 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +describe('flush', () => () => { + const path = '/foo' + let 
ipfs + + beforeEach(() => { + ipfs = { + files: { + flush: sinon.stub() + } + } + }) + + it('should flush a path', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/flush?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + path, + {} + ]) + }) + + it('should flush without a path', async () => { + await http({ + method: 'POST', + url: '/api/v0/files/flush' + }, { ipfs }) + + expect(ipfs.files.flush.callCount).to.equal(1) + expect(ipfs.files.flush.getCall(0).args).to.deep.equal([ + '/', + {} + ]) + }) +}) diff --git a/test/http/index.js b/test/http/index.js new file mode 100644 index 0000000..d9bb97a --- /dev/null +++ b/test/http/index.js @@ -0,0 +1,16 @@ +/* eslint-env mocha */ +'use strict' + +describe('http', () => { + require('./chmod') + require('./cp') + require('./flush') + require('./ls') + require('./mkdir') + require('./mv') + require('./read') + require('./rm') + require('./stat') + require('./touch') + require('./write') +}) diff --git a/test/http/ls.js b/test/http/ls.js new file mode 100644 index 0000000..0603ecc --- /dev/null +++ b/test/http/ls.js @@ -0,0 +1,155 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') +const PassThrough = require('stream').PassThrough + +function defaultOptions (modification = {}) { + const options = { + cidBase: 'base58btc', + long: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('ls', () => { + const path = '/foo' + const file = { + name: 'file-name', + type: 'file-type', + size: 'file-size', + hash: 'file-hash' + } + let ipfs + + beforeEach(() => { + ipfs = { + files: { + ls: sinon.stub().resolves([]) + } + } + }) + + it('should list a path', async () => { + ipfs.files.ls = sinon.stub().resolves([file]) + + const response = await 
http({ + method: 'POST', + url: `/api/v0/files/ls?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(response).to.have.nested.property('result.Entries.length', 1) + expect(response).to.have.nested.property('result.Entries[0].Name', file.name) + expect(response).to.have.nested.property('result.Entries[0].Type', file.type) + expect(response).to.have.nested.property('result.Entries[0].Size', file.size) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) + }) + + it('should list without a path', async () => { + await http({ + method: 'POST', + url: '/api/v0/files/ls' + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + '/', + defaultOptions() + ]) + }) + + it('should list a path with details', async () => { + const file = { + name: 'file-name', + type: 'file-type', + size: 'file-size', + hash: 'file-hash', + mode: 'file-mode', + mtime: 'file-mtime' + } + ipfs.files.ls = sinon.stub().resolves([file]) + + const response = await http({ + method: 'POST', + url: `/api/v0/files/ls?arg=${path}&long=true` + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + long: true + }) + ]) + expect(response).to.have.nested.property('result.Entries.length', 1) + expect(response).to.have.nested.property('result.Entries[0].Name', file.name) + expect(response).to.have.nested.property('result.Entries[0].Type', file.type) + expect(response).to.have.nested.property('result.Entries[0].Size', file.size) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) + expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) + expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime) + }) + + it('should stream a path', async () => { 
+ const stream = new PassThrough({ + objectMode: true + }) + stream.emit('data', file) + stream.end() + ipfs.files.lsReadableStream = sinon.stub().returns(stream) + await http({ + method: 'POST', + url: `/api/v0/files/ls?arg=${path}&stream=true` + }, { ipfs }) + + expect(ipfs.files.lsReadableStream.callCount).to.equal(1) + expect(ipfs.files.lsReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + }) + + it('should list a path with details', async () => { + const file = { + name: 'file-name', + type: 'file-type', + size: 'file-size', + hash: 'file-hash', + mode: 'file-mode', + mtime: 'file-mtime' + } + ipfs.files.ls = sinon.stub().resolves([file]) + + const response = await http({ + method: 'POST', + url: `/api/v0/files/ls?arg=${path}&long=true` + }, { ipfs }) + + expect(ipfs.files.ls.callCount).to.equal(1) + expect(ipfs.files.ls.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + long: true + }) + ]) + expect(response).to.have.nested.property('result.Entries.length', 1) + expect(response).to.have.nested.property('result.Entries[0].Name', file.name) + expect(response).to.have.nested.property('result.Entries[0].Type', file.type) + expect(response).to.have.nested.property('result.Entries[0].Size', file.size) + expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) + expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) + expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime) + }) +}) diff --git a/test/http/mkdir.js b/test/http/mkdir.js new file mode 100644 index 0000000..a369257 --- /dev/null +++ b/test/http/mkdir.js @@ -0,0 +1,172 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + 
shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('mkdir', () => { + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + mkdir: sinon.stub() + } + } + }) + + it('should make a directory', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + }) + + it('should make a directory with parents', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&parents=true` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should make a directory with a different cid version', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&cidVersion=1` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + cidVersion: 1 + }) + ]) + }) + + it('should make a directory with a different codec', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&format=dag-cbor` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + format: 'dag-cbor' + }) + ]) + }) + + it('should make a directory with a different hash algorithm', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&hashAlg=sha3-256` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + 
it('should make a directory without flushing', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&flush=false` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should make a directory a different shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&shardSplitThreshold=10` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) + + it('should make a directory a different mode', async () => { + const mode = '0513' + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&mode=${mode}` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mode: parseInt(mode, 8) + }) + ]) + }) + + it('should make a directory a different mtime', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mkdir?arg=${path}&mtime=5` + }, { ipfs }) + + expect(ipfs.files.mkdir.callCount).to.equal(1) + expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime: 5 + }) + ]) + }) +}) diff --git a/test/http/mv.js b/test/http/mv.js new file mode 100644 index 0000000..ae1e8b3 --- /dev/null +++ b/test/http/mv.js @@ -0,0 +1,164 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + parents: false, + recursive: false, + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) 
+ + return options +} + +describe('mv', () => { + const source = '/src' + const dest = '/dest' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + mv: sinon.stub() + } + } + }) + + it('should move an entry', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions() + ]) + }) + + it('should move an entry and create parents', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&parents=true` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + parents: true + }) + ]) + }) + + it('should move an entry recursively', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&recursive=true` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + recursive: true + }) + ]) + }) + + it('should make a directory with a different cid version', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&cidVersion=1` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + cidVersion: 1 + }) + ]) + }) + + it('should make a directory with a different codec', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&format=dag-cbor` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + format: 'dag-cbor' + }) + ]) + }) + + it('should make a directory with a different hash algorithm', async () => { + await http({ + 
method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&hashAlg=sha3-256` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + hashAlg: 'sha3-256' + }) + ]) + }) + + it('should make a directory without flushing', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&flush=false` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should make a directory a different shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/mv?arg=${source}&arg=${dest}&shardSplitThreshold=10` + }, { ipfs }) + + expect(ipfs.files.mv.callCount).to.equal(1) + expect(ipfs.files.mv.getCall(0).args).to.deep.equal([ + source, + dest, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/http/read.js b/test/http/read.js new file mode 100644 index 0000000..ad747da --- /dev/null +++ b/test/http/read.js @@ -0,0 +1,107 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') +const PassThrough = require('stream').PassThrough + +function defaultOptions (modification = {}) { + const options = { + offset: undefined, + length: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('read', () => { + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + readReadableStream: sinon.stub().callsFake(() => { + const stream = new PassThrough() + + setImmediate(() => { + stream.emit('data', Buffer.from('hello world')) + stream.end() + }) + + return stream + }) + } + } + }) + + it('should read a path', async () => { + const response = await http({ + 
method: 'POST', + url: `/api/v0/files/read?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(response).to.have.property('result', 'hello world') + }) + + it('should read a path with an offset', async () => { + const offset = 5 + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}&offset=${offset}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + offset + }) + ]) + expect(response).to.have.property('result', 'hello world') + }) + + it('should read a path with a length', async () => { + const length = 5 + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}&length=${length}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + length + }) + ]) + expect(response).to.have.property('result', 'hello world') + }) + + it('should read a path with count treated as length', async () => { + const length = 5 + const response = await http({ + method: 'POST', + url: `/api/v0/files/read?arg=${path}&count=${length}` + }, { ipfs }) + + expect(ipfs.files.readReadableStream.callCount).to.equal(1) + expect(ipfs.files.readReadableStream.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + length + }) + ]) + expect(response).to.have.property('result', 'hello world') + }) +}) diff --git a/test/http/rm.js b/test/http/rm.js new file mode 100644 index 0000000..6bc877e --- /dev/null +++ b/test/http/rm.js @@ -0,0 +1,59 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions 
(modification = {}) { + const options = { + recursive: false + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('rm', () => { + const path = '/foo' + let ipfs + + beforeEach(() => { + ipfs = { + files: { + rm: sinon.stub().resolves() + } + } + }) + + it('should remove a path', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/rm?arg=${path}` + }, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + }) + + it('should remove a path recursively', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/rm?arg=${path}&recursive=true` + }, { ipfs }) + + expect(ipfs.files.rm.callCount).to.equal(1) + expect(ipfs.files.rm.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + recursive: true + }) + ]) + }) +}) diff --git a/test/http/stat.js b/test/http/stat.js new file mode 100644 index 0000000..c8536a0 --- /dev/null +++ b/test/http/stat.js @@ -0,0 +1,120 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + withLocal: false, + hash: false, + size: false, + cidBase: 'base58btc' + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('stat', () => { + const path = '/foo' + const stats = { + hash: 'stats-hash', + size: 'stats-size', + cumulativeSize: 'stats-cumulativeSize', + blocks: 'stats-blocks', + type: 'stats-type', + mode: 'stats-mode', + mtime: 'stats-mtime' + } + let ipfs + + beforeEach(() => { + ipfs = { + files: { + stat: sinon.stub().resolves(stats) + } + } + }) + + it('should stat a path', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}` + }, { ipfs }) + + 
expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions() + ]) + expect(response).to.have.nested.property('result.CumulativeSize', stats.cumulativeSize) + }) + + it('should stat a path with local', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&withLocal=true` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + withLocal: true + }) + ]) + }) + + it('should stat a path and only show hashes', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&hash=true` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + hash: true + }) + ]) + expect(response).to.have.nested.property('result.Hash', stats.hash) + }) + + it('should stat a path and only show sizes', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&size=true` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + size: true + }) + ]) + expect(response).to.have.nested.property('result.Size', stats.size) + }) + + it('should stat a path and show hashes with a different base', async () => { + const response = await http({ + method: 'POST', + url: `/api/v0/files/stat?arg=${path}&cidBase=base64` + }, { ipfs }) + + expect(ipfs.files.stat.callCount).to.equal(1) + expect(ipfs.files.stat.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + cidBase: 'base64' + }) + ]) + expect(response).to.have.nested.property('result.Hash', stats.hash) + }) +}) diff --git a/test/http/touch.js b/test/http/touch.js new file mode 100644 index 0000000..ab1699a --- /dev/null +++ b/test/http/touch.js @@ -0,0 +1,114 @@ +/* eslint-env mocha 
*/ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') + +function defaultOptions (modification = {}) { + const options = { + cidVersion: 0, + format: 'dag-pb', + hashAlg: 'sha2-256', + flush: true, + shardSplitThreshold: 1000 + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +describe('touch', () => { + const path = '/foo' + const mtime = parseInt(Date.now() / 1000) + let ipfs + + beforeEach(() => { + ipfs = { + files: { + touch: sinon.stub() + } + } + }) + + it('should update the mtime for a file', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions() + ]) + }) + + it('should update the mtime without flushing', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&flush=false` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + flush: false + }) + ]) + }) + + it('should update the mtime with a different codec', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&format=dag-pb` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + format: 'dag-pb' + }) + ]) + }) + + it('should update the mtime with a different hash algorithm', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&hashAlg=sha3-256` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + 
hashAlg: 'sha3-256' + }) + ]) + }) + + it('should update the mtime with a shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&shardSplitThreshold=10` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + mtime, + defaultOptions({ + shardSplitThreshold: 10 + }) + ]) + }) +}) diff --git a/test/http/write.js b/test/http/write.js new file mode 100644 index 0000000..f096c2e --- /dev/null +++ b/test/http/write.js @@ -0,0 +1,315 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('../helpers/chai') +const http = require('../helpers/http') +const sinon = require('sinon') +const FormData = require('form-data') +const streamToPromise = require('stream-to-promise') + +function defaultOptions (modification = {}) { + const options = { + offset: undefined, + length: undefined, + create: false, + truncate: false, + rawLeaves: false, + reduceSingleLeafToSelf: false, + cidVersion: 0, + hashAlg: 'sha2-256', + format: 'dag-pb', + parents: false, + progress: undefined, + strategy: 'trickle', + flush: true, + shardSplitThreshold: 1000, + mode: undefined, + mtime: undefined + } + + Object.keys(modification).forEach(key => { + options[key] = modification[key] + }) + + return options +} + +async function send (text, headers = {}) { + const form = new FormData() + form.append('file-0', Buffer.from(text), { + header: headers + }) + + return { + headers: form.getHeaders(), + payload: await streamToPromise(form) + } +} + +describe('write', () => { + const path = '/foo' + let ipfs + let content + + beforeEach(() => { + content = Buffer.alloc(0) + + ipfs = { + files: { + write: sinon.stub().callsFake(async (path, input) => { + for await (const buf of input) { + content = Buffer.concat([content, buf]) + } + + content = content.toString('utf8') + }) + } + } + }) + + it('should write to a file', async () => { + await http({ + 
method: 'POST', + url: `/api/v0/files/write?arg=${path}`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions()) + expect(content).to.equal('hello world') + }) + + it('should write to a file and create parents', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&parents=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + parents: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file and create it', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&create=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + create: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with an offset', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&offset=10`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + offset: 10 + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a length', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&length=10`, + ...await 
send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + length: 10 + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file and truncate it', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&truncate=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + truncate: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with raw leaves', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&rawLeaves=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + rawLeaves: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file and reduce a single leaf to one node', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&reduceSingleLeafToSelf=true`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + reduceSingleLeafToSelf: true + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file without flushing', async () => { + await http({ + method: 'POST', + url: 
`/api/v0/files/write?arg=${path}&flush=false`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + flush: false + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a different strategy', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&strategy=flat`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + strategy: 'flat' + })) + expect(content).to.equal('hello world') + }) + + it('should write to a file with a different cid version', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&cidVersion=1`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + cidVersion: 1 + })) + expect(content).to.equal('hello world') + }) + + it('should update the mode a different codec', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&format=dag-cbor`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + format: 'dag-cbor' + })) + expect(content).to.equal('hello world') + }) + + it('should update the mode a different hash algorithm', async () => { + await http({ + method: 
'POST', + url: `/api/v0/files/write?arg=${path}&hashAlg=sha3-256`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + hashAlg: 'sha3-256' + })) + expect(content).to.equal('hello world') + }) + + it('should update the mode with a shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}&shardSplitThreshold=10`, + ...await send('hello world') + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + shardSplitThreshold: 10 + })) + expect(content).to.equal('hello world') + }) + + it('should update the mode a different mode', async () => { + const mode = '0577' + + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}`, + ...await send('hello world', { + mode + }) + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + mode: parseInt(mode, 8) + })) + expect(content).to.equal('hello world') + }) + + it('should update the mode a different mtime', async () => { + const mtime = 11 + + await http({ + method: 'POST', + url: `/api/v0/files/write?arg=${path}`, + ...await send('hello world', { + mtime + }) + }, { ipfs }) + + expect(ipfs.files.write.callCount).to.equal(1) + expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) + expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ + mtime + })) + expect(content).to.equal('hello world') + }) +}) diff --git 
a/test/node.js b/test/node.js new file mode 100644 index 0000000..3d35201 --- /dev/null +++ b/test/node.js @@ -0,0 +1,5 @@ +'use strict' + +require('./cli') +require('./core') +require('./http') diff --git a/test/webworker.js b/test/webworker.js new file mode 100644 index 0000000..ed5d991 --- /dev/null +++ b/test/webworker.js @@ -0,0 +1,3 @@ +'use strict' + +require('./core') From 18dc7668ce34208542174d8980cbbf9c72c39618 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 5 Dec 2019 11:13:17 +0000 Subject: [PATCH 10/15] fix: use multipart pr --- package.json | 2 +- test/cli/write.js | 22 +++++++++++----------- test/http/write.js | 14 +++++++------- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/package.json b/package.json index 096ad4d..8839920 100644 --- a/package.json +++ b/package.json @@ -72,7 +72,7 @@ "err-code": "^2.0.0", "hamt-sharding": "~0.0.2", "interface-datastore": "^0.8.0", - "ipfs-multipart": "~0.2.0", + "ipfs-multipart": "ipfs/js-ipfs-multipart#support-unixfs-metadata", "ipfs-unixfs": "^0.2.0", "ipfs-unixfs-exporter": "~0.39.0", "ipfs-unixfs-importer": "^0.42.0", diff --git a/test/cli/write.js b/test/cli/write.js index 0acc073..8d3b08d 100644 --- a/test/cli/write.js +++ b/test/cli/write.js @@ -288,7 +288,7 @@ describe('write', () => { ]) }) - it('should write to a file with a different strategy', async () => { + it('should write to a file with a specified strategy', async () => { const path = '/foo' await cli(`files write --strategy trickle ${path}`, { ipfs, getStdin }) @@ -303,7 +303,7 @@ describe('write', () => { ]) }) - it('should write to a file with a different strategy (short option)', async () => { + it('should write to a file with a specified strategy (short option)', async () => { const path = '/foo' await cli(`files write -s trickle ${path}`, { ipfs, getStdin }) @@ -318,7 +318,7 @@ describe('write', () => { ]) }) - it('should write to a file with a different cid version', async () => { + it('should write to a file with a 
specified cid version', async () => { const path = '/foo' await cli(`files write --cid-version 5 ${path}`, { ipfs, getStdin }) @@ -333,7 +333,7 @@ describe('write', () => { ]) }) - it('should write to a file with a different cid version (shortish option)', async () => { + it('should write to a file with a specified cid version (shortish option)', async () => { const path = '/foo' await cli(`files write --cid-ver 5 ${path}`, { ipfs, getStdin }) @@ -348,7 +348,7 @@ describe('write', () => { ]) }) - it('should update the mode a different codec', async () => { + it('should write to a file with a specified codec', async () => { const path = '/foo' await cli(`files write --codec dag-foo ${path}`, { ipfs, getStdin }) @@ -363,7 +363,7 @@ describe('write', () => { ]) }) - it('should update the mode a different codec (short option)', async () => { + it('should write to a file with a specified codec (short option)', async () => { const path = '/foo' await cli(`files write -c dag-foo ${path}`, { ipfs, getStdin }) @@ -378,7 +378,7 @@ describe('write', () => { ]) }) - it('should update the mode a different hash algorithm', async () => { + it('should write to a file with a specified hash algorithm', async () => { const path = '/foo' await cli(`files write --hash-alg sha3-256 ${path}`, { ipfs, getStdin }) @@ -393,7 +393,7 @@ describe('write', () => { ]) }) - it('should update the mode a different hash algorithm (short option)', async () => { + it('should write to a file with a specified hash algorithm (short option)', async () => { const path = '/foo' await cli(`files write -h sha3-256 ${path}`, { ipfs, getStdin }) @@ -408,7 +408,7 @@ describe('write', () => { ]) }) - it('should update the mode with a shard split threshold', async () => { + it('should write to a file with a specified shard split threshold', async () => { const path = '/foo' await cli(`files write --shard-split-threshold 10 ${path}`, { ipfs, getStdin }) @@ -423,7 +423,7 @@ describe('write', () => { ]) }) - 
it('should update the mode a different mode', async () => { + it('should write to a file with a specified mode', async () => { const path = '/foo' await cli(`files write --mode 0557 ${path}`, { ipfs, getStdin }) @@ -438,7 +438,7 @@ describe('write', () => { ]) }) - it('should update the mode a different mtime', async () => { + it('should write to a file with a specified mtime', async () => { const path = '/foo' await cli(`files write --mtime 11 ${path}`, { ipfs, getStdin }) diff --git a/test/http/write.js b/test/http/write.js index f096c2e..9c6fd06 100644 --- a/test/http/write.js +++ b/test/http/write.js @@ -200,7 +200,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should write to a file with a different strategy', async () => { + it('should write to a file with a specified strategy', async () => { await http({ method: 'POST', url: `/api/v0/files/write?arg=${path}&strategy=flat`, @@ -215,7 +215,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should write to a file with a different cid version', async () => { + it('should write to a file with a specified cid version', async () => { await http({ method: 'POST', url: `/api/v0/files/write?arg=${path}&cidVersion=1`, @@ -230,7 +230,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should update the mode a different codec', async () => { + it('should write to a file with a specified codec', async () => { await http({ method: 'POST', url: `/api/v0/files/write?arg=${path}&format=dag-cbor`, @@ -245,7 +245,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should update the mode a different hash algorithm', async () => { + it('should write to a file with a specified hash algorithm', async () => { await http({ method: 'POST', url: `/api/v0/files/write?arg=${path}&hashAlg=sha3-256`, @@ -260,7 +260,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should update the mode with a 
shard split threshold', async () => { + it('should write to a file with a specified shard split threshold', async () => { await http({ method: 'POST', url: `/api/v0/files/write?arg=${path}&shardSplitThreshold=10`, @@ -275,7 +275,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should update the mode a different mode', async () => { + it('shouldwrite to a file with a specified mode', async () => { const mode = '0577' await http({ @@ -294,7 +294,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('should update the mode a different mtime', async () => { + it('should write to a file with a specified mtime', async () => { const mtime = 11 await http({ From 8889f25b5c14cbe49a6cc09fb76a0e500393e5ab Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 5 Dec 2019 11:33:06 +0000 Subject: [PATCH 11/15] chore: remove unecessary browser overrides --- package.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/package.json b/package.json index 8839920..d630d40 100644 --- a/package.json +++ b/package.json @@ -5,10 +5,8 @@ "leadMaintainer": "Alex Potsides ", "main": "src/index.js", "browser": { - "@hapi/hapi": false, "@hapi/joi": "joi-browser", - "fs": false, - "yargs": false + "fs": false }, "scripts": { "test": "aegir test", From cdcf704abfc108052acee65437bbe839de0b2513 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Fri, 6 Dec 2019 15:25:22 +0000 Subject: [PATCH 12/15] chore: update ipfs-utils dep --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d630d40..5a0724a 100644 --- a/package.json +++ b/package.json @@ -74,7 +74,7 @@ "ipfs-unixfs": "^0.2.0", "ipfs-unixfs-exporter": "~0.39.0", "ipfs-unixfs-importer": "^0.42.0", - "ipfs-utils": "ipfs/js-ipfs-utils#support-unixfs-metadata", + "ipfs-utils": "^0.5.0", "ipld-dag-pb": "~0.18.0", "it-last": "^1.0.1", "joi-browser": "^13.4.0", From 0ee7296587d9320ecc763cd872a25ecdd477f110 Mon Sep 
17 00:00:00 2001 From: achingbrain Date: Mon, 23 Dec 2019 16:49:16 +0000 Subject: [PATCH 13/15] fix: fix up tests, add support for timespecs --- package.json | 8 +- src/cli/index.js | 10 +-- src/cli/mkdir.js | 9 ++- src/cli/touch.js | 11 ++- src/cli/utils.js | 7 +- src/cli/write.js | 6 +- src/core/chmod.js | 129 ++++++++++++++++++++++++++++++++ src/core/ls.js | 11 ++- src/core/stat.js | 52 +++++++------ src/core/touch.js | 20 ++--- src/core/utils/add-link.js | 19 ++--- src/core/utils/create-node.js | 14 ++-- src/core/utils/hamt-utils.js | 19 ++--- src/core/utils/with-mfs-root.js | 2 +- src/core/write.js | 20 ++++- src/http/chmod.js | 4 +- src/http/ls.js | 20 ++++- src/http/mkdir.js | 9 ++- src/http/stat.js | 4 +- src/http/touch.js | 10 ++- src/http/utils/joi.js | 22 ------ src/http/utils/parse-mtime.js | 20 +++++ test/cli/mkdir.js | 2 +- test/cli/touch.js | 38 +++++----- test/cli/write.js | 2 +- test/core/chmod.js | 107 +++++++++++++++++++++++--- test/core/touch.js | 12 +-- test/http/chmod.js | 12 +-- test/http/ls.js | 24 ++++-- test/http/mkdir.js | 6 +- test/http/touch.js | 55 +++++++++++--- test/http/write.js | 6 +- 32 files changed, 496 insertions(+), 194 deletions(-) delete mode 100644 src/http/utils/joi.js create mode 100644 src/http/utils/parse-mtime.js diff --git a/package.json b/package.json index 5a0724a..411846d 100644 --- a/package.json +++ b/package.json @@ -70,11 +70,11 @@ "err-code": "^2.0.0", "hamt-sharding": "~0.0.2", "interface-datastore": "^0.8.0", - "ipfs-multipart": "ipfs/js-ipfs-multipart#support-unixfs-metadata", - "ipfs-unixfs": "^0.2.0", + "ipfs-multipart": "^0.2.0", + "ipfs-unixfs": "ipfs/js-ipfs-unixfs#store-mtime-as-timespec", "ipfs-unixfs-exporter": "~0.39.0", - "ipfs-unixfs-importer": "^0.42.0", - "ipfs-utils": "^0.5.0", + "ipfs-unixfs-importer": "ipfs/js-ipfs-unixfs-importer#mtime-passed-as-timespec", + "ipfs-utils": "ipfs/js-ipfs-utils#format-mtime-as-timespec", "ipld-dag-pb": "~0.18.0", "it-last": "^1.0.1", "joi-browser": "^13.4.0", 
diff --git a/src/cli/index.js b/src/cli/index.js index 7186cf6..ae87d7a 100644 --- a/src/cli/index.js +++ b/src/cli/index.js @@ -1,9 +1,5 @@ 'use strict' -const { - print -} = require('./utils') - const command = { command: 'files ', @@ -14,15 +10,11 @@ const command = { }, handler (argv) { - print('Type `jsipfs files --help` for more instructions') + argv.print('Type `jsipfs files --help` for more instructions') } } module.exports = (yargs) => { return yargs - .config({ - print, - getStdin: () => process.stdin - }) .command(command) } diff --git a/src/cli/mkdir.js b/src/cli/mkdir.js index 00e3874..fcdfe09 100644 --- a/src/cli/mkdir.js +++ b/src/cli/mkdir.js @@ -2,7 +2,8 @@ const { asBoolean, - asOctal + asOctal, + asDateFromSeconds } = require('./utils') module.exports = { @@ -54,9 +55,9 @@ module.exports = { describe: 'Mode to apply to the new directory' }, mtime: { - type: 'number', - coerce: asOctal, - describe: 'Mtime to apply to the new directory' + type: 'date', + coerce: asDateFromSeconds, + describe: 'Mtime to apply to the new directory in seconds' } }, diff --git a/src/cli/touch.js b/src/cli/touch.js index 32e47d1..260a04d 100644 --- a/src/cli/touch.js +++ b/src/cli/touch.js @@ -1,7 +1,8 @@ 'use strict' const { - asBoolean + asBoolean, + asDateFromSeconds } = require('./utils') module.exports = { @@ -12,8 +13,9 @@ module.exports = { builder: { mtime: { alias: 'm', - type: 'number', - default: parseInt(Date.now() / 1000), + type: 'date', + coerce: asDateFromSeconds, + default: Date.now(), describe: 'Time to use as the new modification time' }, flush: { @@ -63,7 +65,8 @@ module.exports = { argv.resolve((async () => { const ipfs = await getIpfs() - return ipfs.files.touch(path, mtime, { + return ipfs.files.touch(path, { + mtime, flush, cidVersion, format: codec, diff --git a/src/cli/utils.js b/src/cli/utils.js index 9ebfcda..fc0dcf0 100644 --- a/src/cli/utils.js +++ b/src/cli/utils.js @@ -35,9 +35,14 @@ const asOctal = (value) => { return parseInt(value, 8) 
} +const asDateFromSeconds = (value) => { + return new Date(parseInt(value, 10) * 1000) +} + module.exports = { disablePrinting, print, asBoolean, - asOctal + asOctal, + asDateFromSeconds } diff --git a/src/cli/write.js b/src/cli/write.js index 0fe7898..754ef93 100644 --- a/src/cli/write.js +++ b/src/cli/write.js @@ -2,7 +2,8 @@ const { asBoolean, - asOctal + asOctal, + asDateFromSeconds } = require('./utils') module.exports = { @@ -94,7 +95,8 @@ module.exports = { }, mtime: { alias: 'm', - type: 'number', + type: 'date', + coerce: asDateFromSeconds, describe: 'Time to use as the new modification time' } }, diff --git a/src/core/chmod.js b/src/core/chmod.js index fcd41b7..2c77740 100644 --- a/src/core/chmod.js +++ b/src/core/chmod.js @@ -20,6 +20,124 @@ const defaultOptions = { hashAlg: 'sha2-256' } +function calculateModification (mode) { + let modification = 0 + + if (mode.includes('x')) { + modification += 1 + } + + if (mode.includes('w')) { + modification += 2 + } + + if (mode.includes('r')) { + modification += 4 + } + + return modification +} + +function calculateUGO (references, modification) { + let ugo = 0 + + if (references.includes('u')) { + ugo += (modification << 6) + } + + if (references.includes('g')) { + ugo += (modification << 3) + } + + if (references.includes('o')) { + ugo += (modification) + } + + return ugo +} + +function calculateSpecial (references, mode, modification) { + if (mode.includes('t')) { + modification += parseInt('1000', 8) + } + + if (mode.includes('s')) { + if (references.includes('u')) { + modification += parseInt('4000', 8) + } + + if (references.includes('g')) { + modification += parseInt('2000', 8) + } + } + + return modification +} + +// https://en.wikipedia.org/wiki/Chmod#Symbolic_modes +function parseSymbolicMode (input, originalMode) { + if (!originalMode) { + originalMode = 0 + } + + const match = input.match(/^(u?g?o?a?)(-?\+?=?)?(r?w?x?X?s?t?)$/) + + if (!match) { + throw new Error(`Invalid file mode: ${input}`) + } + 
+ let [ + _, // eslint-disable-line no-unused-vars + references, + operator, + mode + ] = match + + if (references === 'a' || !references) { + references = 'ugo' + } + + let modification = calculateModification(mode) + modification = calculateUGO(references, modification) + modification = calculateSpecial(references, mode, modification) + + if (operator === '=') { + if (references.includes('u')) { + // blank u bits + originalMode = originalMode & parseInt('7077', 8) + + // or them together + originalMode = originalMode | modification + } + + if (references.includes('g')) { + // blank g bits + originalMode = originalMode & parseInt('7707', 8) + + // or them together + originalMode = originalMode | modification + } + + if (references.includes('o')) { + // blank o bits + originalMode = originalMode & parseInt('7770', 8) + + // or them together + originalMode = originalMode | modification + } + + return originalMode + } + + if (operator === '+') { + return modification | originalMode + } + + if (operator === '-') { + return originalMode & ~modification + } +} + module.exports = (context) => { return async function mfsChmod (path, mode, options) { options = applyDefaultOptions(options, defaultOptions) @@ -38,6 +156,17 @@ module.exports = (context) => { let node = await context.ipld.get(cid) const metadata = UnixFS.unmarshal(node.Data) + + if (typeof mode === 'string' || mode instanceof String) { + if (mode.match(/^\d+$/g)) { + mode = parseInt(mode, 8) + } else { + mode = mode.split(',').reduce((curr, acc) => { + return parseSymbolicMode(acc, curr) + }, metadata.mode) + } + } + metadata.mode = mode node = new DAGNode(metadata.marshal(), node.Links) diff --git a/src/core/ls.js b/src/core/ls.js index c458988..f8380ce 100644 --- a/src/core/ls.js +++ b/src/core/ls.js @@ -25,14 +25,19 @@ const toOutput = (fsEntry) => { mtime = fsEntry.unixfs.mtime } - return { + const output = { cid: fsEntry.cid, name: fsEntry.name, type, size, - mode, - mtime + mode } + + if (mtime !== 
undefined) { + output.mtime = mtime + } + + return output } module.exports = (context) => { diff --git a/src/core/stat.js b/src/core/stat.js index 6bee887..7330463 100644 --- a/src/core/stat.js +++ b/src/core/stat.js @@ -57,37 +57,43 @@ const statters = { } }, 'dag-pb': (file) => { - let blocks = file.node.Links.length - let size = file.node.size - let cumulativeSize = file.node.size - let nodeType = null + const blocks = file.node.Links.length + const size = file.node.size + const cumulativeSize = file.node.size - if (file.unixfs) { - size = file.unixfs.fileSize() - nodeType = file.unixfs.type - - if (nodeType.includes('directory')) { - size = 0 - cumulativeSize = file.node.size - } - - if (nodeType === 'file') { - blocks = file.unixfs.blockSizes.length - } - } - - return { + const output = { cid: file.cid, size: size, cumulativeSize: cumulativeSize, blocks: blocks, - type: nodeType, local: undefined, sizeLocal: undefined, - withLocality: false, - mtime: file.unixfs.mtime, - mode: file.unixfs.mode + withLocality: false } + + if (file.unixfs) { + output.size = file.unixfs.fileSize() + output.type = file.unixfs.type + + if (file.unixfs.isDirectory()) { + output.size = 0 + output.cumulativeSize = file.node.size + } + + if (output.type === 'file') { + output.blocks = file.unixfs.blockSizes.length + } + + if (file.unixfs.mtime) { + output.mtime = file.unixfs.mtime + } + + if (file.unixfs.mode !== undefined && file.unixfs.mode !== null) { + output.mode = file.unixfs.mode + } + } + + return output }, 'dag-cbor': (file) => { return { diff --git a/src/core/touch.js b/src/core/touch.js index a0c8e63..3019d28 100644 --- a/src/core/touch.js +++ b/src/core/touch.js @@ -14,6 +14,7 @@ const mc = require('multicodec') const mh = require('multihashes') const defaultOptions = { + mtime: undefined, flush: true, shardSplitThreshold: 1000, cidVersion: 1, @@ -22,15 +23,12 @@ const defaultOptions = { } module.exports = (context) => { - return async function mfsTouch (path, mtime, 
options) { - if (!options && isNaN(mtime)) { - options = mtime - mtime = parseInt(Date.now() / 1000) - } - + return async function mfsTouch (path, options) { + options = options || {} options = applyDefaultOptions(options, defaultOptions) + options.mtime = options.mtime || new Date() - log(`Touching ${path}`) + log(`Touching ${path} mtime: ${options.mtime}`) const { cid, @@ -45,8 +43,10 @@ module.exports = (context) => { let cidVersion = options.cidVersion if (!exists) { - const metadata = new UnixFS('file') - metadata.mtime = mtime + const metadata = new UnixFS({ + type: 'file', + mtime: options.mtime + }) node = new DAGNode(metadata.marshal()) updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: options.cidVersion, @@ -63,7 +63,7 @@ module.exports = (context) => { node = await context.ipld.get(cid) const metadata = UnixFS.unmarshal(node.Data) - metadata.mtime = mtime + metadata.mtime = options.mtime node = new DAGNode(metadata.marshal(), node.Links) diff --git a/src/core/utils/add-link.js b/src/core/utils/add-link.js index 089d0d1..1e2193a 100644 --- a/src/core/utils/add-link.js +++ b/src/core/utils/add-link.js @@ -1,7 +1,8 @@ 'use strict' const { - DAGLink + DAGLink, + DAGNode } = require('ipld-dag-pb') const CID = require('cids') const log = require('debug')('ipfs:mfs:core:utils:add-link') @@ -93,14 +94,10 @@ const addToDirectory = async (context, options) => { options.parent.rmLink(options.name) options.parent.addLink(new DAGLink(options.name, options.size, options.cid)) + // Update mtime const node = UnixFS.unmarshal(options.parent.Data) - - // Update mtime if set previously - if (node.mtime) { - node.mtime = parseInt(Date.now() / 1000) - - options.parent.Data = UnixFS.unmarshal(node) - } + node.mtime = new Date() + options.parent = new DAGNode(node.marshal(), options.parent.Links) const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] @@ -163,11 +160,7 @@ const addFileToShardedDirectory = 
async (context, options) => { mode: node.mode }, options) shard._bucket = rootBucket - - // Update mtime if set previously - if (node.mtime) { - shard.mtime = parseInt(Date.now() / 1000) - } + shard.mtime = new Date() // load subshards until the bucket & position no longer changes const position = await rootBucket._findNewBucketAndPos(file.name) diff --git a/src/core/utils/create-node.js b/src/core/utils/create-node.js index 043784d..c0982ae 100644 --- a/src/core/utils/create-node.js +++ b/src/core/utils/create-node.js @@ -10,15 +10,11 @@ const mh = require('multihashes') const createNode = async (context, type, options) => { const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] - const metadata = new UnixFS(type) - - if (options.mode !== undefined) { - metadata.mode = options.mode - } - - if (options.mtime !== undefined) { - metadata.mtime = options.mtime - } + const metadata = new UnixFS({ + type, + mode: options.mode, + mtime: options.mtime + }) const node = new DAGNode(metadata.marshal()) const cid = await context.ipld.put(node, format, { diff --git a/src/core/utils/hamt-utils.js b/src/core/utils/hamt-utils.js index a267720..d630920 100644 --- a/src/core/utils/hamt-utils.js +++ b/src/core/utils/hamt-utils.js @@ -14,18 +14,15 @@ const last = require('it-last') const updateHamtDirectory = async (context, links, bucket, options) => { // update parent with new bit field const data = Buffer.from(bucket._children.bitField().reverse()) - const dir = new UnixFS('hamt-sharded-directory', data) - dir.fanout = bucket.tableSize() - dir.hashType = DirSharded.hashFn.code - const node = UnixFS.unmarshal(options.parent.Data) - - // Update mtime if set previously - if (node.mtime) { - node.mtime = parseInt(Date.now() / 1000) - - dir.Data = UnixFS.unmarshal(node) - } + const dir = new UnixFS({ + type: 'hamt-sharded-directory', + data, + fanout: bucket.tableSize(), + hashType: DirSharded.hashFn.code, + mode: node.mode, + 
 mtime: node.mtime + }) const format = mc[options.format.toUpperCase().replace(/-/g, '_')] const hashAlg = mh.names[options.hashAlg] diff --git a/src/core/utils/with-mfs-root.js b/src/core/utils/with-mfs-root.js index 38da0af..5cf6740 100644 --- a/src/core/utils/with-mfs-root.js +++ b/src/core/utils/with-mfs-root.js @@ -30,7 +30,7 @@ const loadMfsRoot = async (context) => { } log('Creating new MFS root') - const node = new DAGNode(new UnixFs('directory').marshal()) + const node = new DAGNode(new UnixFs({ type: 'directory' }).marshal()) cid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: 0, hashAlg: mh.names['sha2-256'] // why can't ipld look this up? diff --git a/src/core/write.js b/src/core/write.js index e68b3bf..2b13c4d 100644 --- a/src/core/write.js +++ b/src/core/write.js @@ -175,12 +175,28 @@ const write = async (context, source, destination, options) => { } }) + let mode + + if (options.mode !== undefined && options.mode !== null) { + mode = options.mode + } else if (destination && destination.unixfs) { + mode = destination.unixfs.mode + } + + let mtime + + if (options.mtime !== undefined && options.mtime !== null) { + mtime = options.mtime + } else if (destination && destination.unixfs) { + mtime = destination.unixfs.mtime + } + const result = await last(importer([{ content: content, // persist mode & mtime if set previously - mode: (destination.unixfs && destination.unixfs.mode) || options.mode, - mtime: (destination.unixfs && destination.unixfs.mtime) ? 
parseInt(new Date() / 1000) : options.mtime + mode, + mtime }], context.ipld, { progress: options.progress, hashAlg: options.hashAlg, diff --git a/src/http/chmod.js b/src/http/chmod.js index 3db9f35..8b6a967 100644 --- a/src/http/chmod.js +++ b/src/http/chmod.js @@ -1,6 +1,6 @@ 'use strict' -const Joi = require('./utils/joi') +const Joi = require('@hapi/joi') const mfsChmod = { method: 'POST', @@ -37,7 +37,7 @@ const mfsChmod = { }, query: Joi.object().keys({ arg: Joi.string(), - mode: Joi.octalNumber(), + mode: Joi.string(), recursive: Joi.boolean().default(false), flush: Joi.boolean().default(true), codec: Joi.string().default('dag-pb'), diff --git a/src/http/ls.js b/src/http/ls.js index ff67ff2..5b00562 100644 --- a/src/http/ls.js +++ b/src/http/ls.js @@ -6,14 +6,23 @@ const { } = require('stream') const mapEntry = (entry) => { - return { + const output = { Name: entry.name, Type: entry.type, Size: entry.size, Hash: entry.hash, - Mode: entry.mode, - Mtime: entry.mtime + Mode: entry.mode.toString(8).padStart(4, '0') } + + if (entry.mtime) { + output.Mtime = entry.mtime.secs + + if (entry.mtime.nsecs != null) { + output.MtimeNsecs = entry.mtime.nsecs + } + } + + return output } const mfsLs = { @@ -49,7 +58,10 @@ const mfsLs = { passThrough.end(entry ? 
JSON.stringify(mapEntry(entry)) + '\n' : undefined) }) - readableStream.once('error', reject) + readableStream.once('error', (err) => { + passThrough.end() + reject(err) + }) }) return h.response(responseStream).header('X-Stream-Output', '1') diff --git a/src/http/mkdir.js b/src/http/mkdir.js index 16f388b..b951f96 100644 --- a/src/http/mkdir.js +++ b/src/http/mkdir.js @@ -1,6 +1,7 @@ 'use strict' -const Joi = require('./utils/joi') +const Joi = require('@hapi/joi') +const parseMtime = require('./utils/parse-mtime') const mfsMkdir = { method: 'POST', @@ -13,6 +14,7 @@ const mfsMkdir = { arg, mode, mtime, + mtimeNsecs, parents, format, hashAlg, @@ -23,7 +25,7 @@ const mfsMkdir = { await ipfs.files.mkdir(arg, { mode, - mtime, + mtime: parseMtime(mtime, mtimeNsecs), parents, format, hashAlg, @@ -42,8 +44,9 @@ const mfsMkdir = { }, query: Joi.object().keys({ arg: Joi.string().required(), - mode: Joi.octalNumber(), + mode: Joi.string(), mtime: Joi.number().integer(), + mtimeNsecs: Joi.number().integer().min(0), parents: Joi.boolean().default(false), format: Joi.string().valid([ 'dag-pb', diff --git a/src/http/stat.js b/src/http/stat.js index 63724fc..4ed28a3 100644 --- a/src/http/stat.js +++ b/src/http/stat.js @@ -32,7 +32,9 @@ const mfsStat = { CumulativeSize: stats.cumulativeSize, WithLocality: stats.withLocality, Local: stats.local, - SizeLocal: stats.sizeLocal + SizeLocal: stats.sizeLocal, + Mtime: stats.mtime, + Mode: stats.mode.toString(8).padStart(4, '0') }) }, options: { diff --git a/src/http/touch.js b/src/http/touch.js index 10f6488..5c93fe5 100644 --- a/src/http/touch.js +++ b/src/http/touch.js @@ -1,6 +1,7 @@ 'use strict' const Joi = require('@hapi/joi') +const parseMtime = require('./utils/parse-mtime') const mfsTouch = { method: 'POST', @@ -16,10 +17,12 @@ const mfsTouch = { cidVersion, format, hashAlg, - mtime + mtime, + mtimeNsecs } = request.query - await ipfs.files.touch(arg, mtime, { + await ipfs.files.touch(arg, { + mtime: parseMtime(mtime, 
mtimeNsecs), flush, shardSplitThreshold, cidVersion, @@ -37,7 +40,8 @@ const mfsTouch = { }, query: Joi.object().keys({ arg: Joi.string().required(), - mtime: Joi.number().integer().min(0), + mtime: Joi.number().integer(), + mtimeNsecs: Joi.number().integer().min(0), format: Joi.string().valid([ 'dag-pb', 'dag-cbor' diff --git a/src/http/utils/joi.js b/src/http/utils/joi.js deleted file mode 100644 index 4121dc0..0000000 --- a/src/http/utils/joi.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict' - -const originalJoi = require('@hapi/joi') -const Joi = originalJoi.extend({ - name: 'octalNumber', - base: originalJoi.number().min(0), - coerce: (value, state, options) => { - if (value === undefined) { - return - } - - const val = parseInt(value, 8) - - if (isNaN(val) || val < 0) { - throw new Error('Invalid octal number') - } - - return val - } -}) - -module.exports = Joi diff --git a/src/http/utils/parse-mtime.js b/src/http/utils/parse-mtime.js new file mode 100644 index 0000000..863c53b --- /dev/null +++ b/src/http/utils/parse-mtime.js @@ -0,0 +1,20 @@ +'use strict' + +module.exports = (secs, nsecs) => { + if ((secs === undefined || secs === null) && (nsecs === undefined || nsecs === null)) { + return + } + + const mtime = {} + + if (nsecs || nsecs === 0) { + mtime.secs = 0 + mtime.nsecs = nsecs + } + + if (secs || secs === 0) { + mtime.secs = secs + } + + return mtime +} diff --git a/test/cli/mkdir.js b/test/cli/mkdir.js index 633c668..ea3a0f2 100644 --- a/test/cli/mkdir.js +++ b/test/cli/mkdir.js @@ -202,7 +202,7 @@ describe('mkdir', () => { expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ path, defaultOptions({ - mtime: 5 + mtime: new Date(5000) }) ]) }) diff --git a/test/cli/touch.js b/test/cli/touch.js index 6ad7a9d..56febab 100644 --- a/test/cli/touch.js +++ b/test/cli/touch.js @@ -8,6 +8,7 @@ const isNode = require('detect-node') function defaultOptions (modification = {}) { const options = { + mtime: null, cidVersion: 0, format: 'dag-pb', hashAlg: 
'sha2-256', @@ -28,7 +29,7 @@ describe('touch', () => { } const path = '/foo' - const mtime = parseInt(Date.now() / 1000) + const mtime = new Date(100000) let ipfs beforeEach(() => { @@ -40,102 +41,103 @@ describe('touch', () => { }) it('should update the mtime for a file', async () => { - await cli(`files touch -m ${mtime} ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, - defaultOptions() + defaultOptions({ + mtime + }) ]) }) it('should update the mtime without flushing', async () => { - await cli(`files touch -m ${mtime} --flush false ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} --flush false ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, flush: false }) ]) }) it('should update the mtime without flushing (short option)', async () => { - await cli(`files touch -m ${mtime} -f false ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} -f false ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, flush: false }) ]) }) it('should update the mtime with a different codec', async () => { - await cli(`files touch -m ${mtime} --codec dag-foo ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} --codec dag-foo ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, format: 'dag-foo' }) ]) }) it('should update the mtime with a different codec (short option)', async () => { - await cli(`files touch -m ${mtime} -c dag-foo ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} -c dag-foo 
${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, format: 'dag-foo' }) ]) }) it('should update the mtime with a different hash algorithm', async () => { - await cli(`files touch -m ${mtime} --hash-alg sha3-256 ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} --hash-alg sha3-256 ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, hashAlg: 'sha3-256' }) ]) }) it('should update the mtime with a different hash algorithm (short option)', async () => { - await cli(`files touch -m ${mtime} -h sha3-256 ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} -h sha3-256 ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, hashAlg: 'sha3-256' }) ]) }) it('should update the mtime with a shard split threshold', async () => { - await cli(`files touch -m ${mtime} --shard-split-threshold 10 ${path}`, { ipfs }) + await cli(`files touch -m ${mtime.getTime() / 1000} --shard-split-threshold 10 ${path}`, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime, shardSplitThreshold: 10 }) ]) diff --git a/test/cli/write.js b/test/cli/write.js index 8d3b08d..ce65a9a 100644 --- a/test/cli/write.js +++ b/test/cli/write.js @@ -448,7 +448,7 @@ describe('write', () => { path, stdin, defaultOptions({ - mtime: 11 + mtime: new Date(11000) }) ]) }) diff --git a/test/core/chmod.js b/test/core/chmod.js index 720b25b..182f893 100644 --- a/test/core/chmod.js +++ b/test/core/chmod.js @@ -11,37 +11,51 @@ describe('chmod', () => { mfs = await createMfs() }) + async function testChmod (initialMode, modification, expectedFinalMode) { 
+ const path = `/foo-${Date.now()}` + + await mfs.write(path, Buffer.from('Hello world'), { + create: true, + mtime: new Date(), + mode: initialMode + }) + await mfs.chmod(path, modification, { + flush: true + }) + + const updatedMode = (await mfs.stat(path)).mode + expect(updatedMode).to.equal(parseInt(expectedFinalMode, 8)) + } + it('should update the mode for a file', async () => { const path = `/foo-${Date.now()}` await mfs.write(path, Buffer.from('Hello world'), { create: true, - mtime: parseInt(new Date() / 1000) + mtime: new Date() }) - const targetMode = parseInt('0777', 8) const originalMode = (await mfs.stat(path)).mode - await mfs.chmod(path, targetMode, { + await mfs.chmod(path, '0777', { flush: true }) const updatedMode = (await mfs.stat(path)).mode expect(updatedMode).to.not.equal(originalMode) - expect(updatedMode).to.equal(targetMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) }) it('should update the mode for a directory', async () => { const path = `/foo-${Date.now()}` await mfs.mkdir(path) - const targetMode = parseInt('0777', 8) const originalMode = (await mfs.stat(path)).mode - await mfs.chmod(path, targetMode, { + await mfs.chmod(path, '0777', { flush: true }) const updatedMode = (await mfs.stat(path)).mode expect(updatedMode).to.not.equal(originalMode) - expect(updatedMode).to.equal(targetMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) }) it('should update the mode for a hamt-sharded-directory', async () => { @@ -52,14 +66,87 @@ describe('chmod', () => { create: true, shardSplitThreshold: 0 }) - const targetMode = parseInt('0777', 8) const originalMode = (await mfs.stat(path)).mode - await mfs.chmod(path, targetMode, { + await mfs.chmod(path, '0777', { flush: true }) const updatedMode = (await mfs.stat(path)).mode expect(updatedMode).to.not.equal(originalMode) - expect(updatedMode).to.equal(targetMode) + expect(updatedMode).to.equal(parseInt('0777', 8)) + }) + + it('should update modes with basic symbolic notation that adds 
bits', async () => { + await testChmod('0000', '+x', '0111') + await testChmod('0000', '+w', '0222') + await testChmod('0000', '+r', '0444') + await testChmod('0000', 'u+x', '0100') + await testChmod('0000', 'u+w', '0200') + await testChmod('0000', 'u+r', '0400') + await testChmod('0000', 'g+x', '0010') + await testChmod('0000', 'g+w', '0020') + await testChmod('0000', 'g+r', '0040') + await testChmod('0000', 'o+x', '0001') + await testChmod('0000', 'o+w', '0002') + await testChmod('0000', 'o+r', '0004') + await testChmod('0000', 'ug+x', '0110') + await testChmod('0000', 'ug+w', '0220') + await testChmod('0000', 'ug+r', '0440') + await testChmod('0000', 'ugo+x', '0111') + await testChmod('0000', 'ugo+w', '0222') + await testChmod('0000', 'ugo+r', '0444') + }) + + it('should update modes with basic symbolic notation that removes bits', async () => { + await testChmod('0111', '-x', '0000') + await testChmod('0222', '-w', '0000') + await testChmod('0444', '-r', '0000') + await testChmod('0100', 'u-x', '0000') + await testChmod('0200', 'u-w', '0000') + await testChmod('0400', 'u-r', '0000') + await testChmod('0010', 'g-x', '0000') + await testChmod('0020', 'g-w', '0000') + await testChmod('0040', 'g-r', '0000') + await testChmod('0001', 'o-x', '0000') + await testChmod('0002', 'o-w', '0000') + await testChmod('0004', 'o-r', '0000') + await testChmod('0110', 'ug-x', '0000') + await testChmod('0220', 'ug-w', '0000') + await testChmod('0440', 'ug-r', '0000') + await testChmod('0111', 'ugo-x', '0000') + await testChmod('0222', 'ugo-w', '0000') + await testChmod('0444', 'ugo-r', '0000') + }) + + it('should update modes with basic symbolic notation that overrides bits', async () => { + await testChmod('0777', '=x', '0111') + await testChmod('0777', '=w', '0222') + await testChmod('0777', '=r', '0444') + await testChmod('0777', 'u=x', '0177') + await testChmod('0777', 'u=w', '0277') + await testChmod('0777', 'u=r', '0477') + await testChmod('0777', 'g=x', '0717') + await 
testChmod('0777', 'g=w', '0727') + await testChmod('0777', 'g=r', '0747') + await testChmod('0777', 'o=x', '0771') + await testChmod('0777', 'o=w', '0772') + await testChmod('0777', 'o=r', '0774') + await testChmod('0777', 'ug=x', '0117') + await testChmod('0777', 'ug=w', '0227') + await testChmod('0777', 'ug=r', '0447') + await testChmod('0777', 'ugo=x', '0111') + await testChmod('0777', 'ugo=w', '0222') + await testChmod('0777', 'ugo=r', '0444') + }) + + it('should update modes with multiple symbolic notation', async () => { + await testChmod('0000', 'g+x,u+w', '0210') + }) + + it('should update modes with special symbolic notation', async () => { + await testChmod('0000', 'g+s', '2000') + await testChmod('0000', 'u+s', '4000') + await testChmod('0000', '+t', '1000') + await testChmod('0000', '+s', '6000') }) }) diff --git a/test/core/touch.js b/test/core/touch.js index d63526b..99bfe4d 100644 --- a/test/core/touch.js +++ b/test/core/touch.js @@ -18,7 +18,7 @@ describe('touch', () => { await mfs.write(path, Buffer.from('Hello world'), { create: true, - mtime: parseInt(new Date() / 1000) + mtime: new Date() }) const originalMtime = (await mfs.stat(path)).mtime await delay(1000) @@ -27,14 +27,14 @@ describe('touch', () => { }) const updatedMtime = (await mfs.stat(path)).mtime - expect(updatedMtime).to.be.greaterThan(originalMtime) + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) }) it('should update the mtime for a directory', async () => { const path = `/foo-${Date.now()}` await mfs.mkdir(path, { - mtime: parseInt(Date.now() / 1000) + mtime: new Date() }) const originalMtime = (await mfs.stat(path)).mtime await delay(1000) @@ -43,14 +43,14 @@ describe('touch', () => { }) const updatedMtime = (await mfs.stat(path)).mtime - expect(updatedMtime).to.be.greaterThan(originalMtime) + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) }) it('should update the mtime for a hamt-sharded-directory', async () => { const path = `/foo-${Date.now()}` 
await mfs.mkdir(path, { - mtime: parseInt(Date.now() / 1000) + mtime: new Date() }) await mfs.write(`${path}/foo.txt`, Buffer.from('Hello world'), { create: true, @@ -63,7 +63,7 @@ describe('touch', () => { }) const updatedMtime = (await mfs.stat(path)).mtime - expect(updatedMtime).to.be.greaterThan(originalMtime) + expect(updatedMtime.secs).to.be.greaterThan(originalMtime.secs) }) it('should create an empty file', async () => { diff --git a/test/http/chmod.js b/test/http/chmod.js index da04536..19ef1cf 100644 --- a/test/http/chmod.js +++ b/test/http/chmod.js @@ -43,7 +43,7 @@ describe('chmod', () => { expect(ipfs.files.chmod.callCount).to.equal(1) expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ path, - parseInt(mode, 8), + mode, defaultOptions() ]) }) @@ -57,7 +57,7 @@ describe('chmod', () => { expect(ipfs.files.chmod.callCount).to.equal(1) expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ path, - parseInt(mode, 8), + mode, defaultOptions({ recursive: true }) @@ -73,7 +73,7 @@ describe('chmod', () => { expect(ipfs.files.chmod.callCount).to.equal(1) expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ path, - parseInt(mode, 8), + mode, defaultOptions({ flush: false }) @@ -89,7 +89,7 @@ describe('chmod', () => { expect(ipfs.files.chmod.callCount).to.equal(1) expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ path, - parseInt(mode, 8), + mode, defaultOptions({ format: 'dag-foo' }) @@ -105,7 +105,7 @@ describe('chmod', () => { expect(ipfs.files.chmod.callCount).to.equal(1) expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ path, - parseInt(mode, 8), + mode, defaultOptions({ hashAlg: 'sha3-256' }) @@ -121,7 +121,7 @@ describe('chmod', () => { expect(ipfs.files.chmod.callCount).to.equal(1) expect(ipfs.files.chmod.getCall(0).args).to.deep.equal([ path, - parseInt(mode, 8), + mode, defaultOptions({ shardSplitThreshold: 10 }) diff --git a/test/http/ls.js b/test/http/ls.js index 0603ecc..cb7c602 100644 --- a/test/http/ls.js +++ 
b/test/http/ls.js @@ -25,7 +25,12 @@ describe('ls', () => { name: 'file-name', type: 'file-type', size: 'file-size', - hash: 'file-hash' + hash: 'file-hash', + mode: 'file-mode', + mtime: { + secs: 'file-mtime-secs', + nsecs: 'file-mtime-nsecs' + } } let ipfs @@ -77,7 +82,10 @@ describe('ls', () => { size: 'file-size', hash: 'file-hash', mode: 'file-mode', - mtime: 'file-mtime' + mtime: { + secs: 'file-mtime-secs', + nsecs: 'file-mtime-nsecs' + } } ipfs.files.ls = sinon.stub().resolves([file]) @@ -99,7 +107,8 @@ describe('ls', () => { expect(response).to.have.nested.property('result.Entries[0].Size', file.size) expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) - expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime) + expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime.secs) + expect(response).to.have.nested.property('result.Entries[0].MtimeNsecs', file.mtime.nsecs) }) it('should stream a path', async () => { @@ -128,7 +137,10 @@ describe('ls', () => { size: 'file-size', hash: 'file-hash', mode: 'file-mode', - mtime: 'file-mtime' + mtime: { + secs: 'file-mtime-secs', + nsecs: 'file-mtime-nsecs' + } } ipfs.files.ls = sinon.stub().resolves([file]) @@ -144,12 +156,14 @@ describe('ls', () => { long: true }) ]) + expect(response).to.have.nested.property('result.Entries.length', 1) expect(response).to.have.nested.property('result.Entries[0].Name', file.name) expect(response).to.have.nested.property('result.Entries[0].Type', file.type) expect(response).to.have.nested.property('result.Entries[0].Size', file.size) expect(response).to.have.nested.property('result.Entries[0].Hash', file.hash) expect(response).to.have.nested.property('result.Entries[0].Mode', file.mode) - expect(response).to.have.nested.property('result.Entries[0].Mtime', file.mtime) + expect(response).to.have.nested.property('result.Entries[0].Mtime', 
file.mtime.secs) + expect(response).to.have.nested.property('result.Entries[0].MtimeNsecs', file.mtime.nsecs) }) }) diff --git a/test/http/mkdir.js b/test/http/mkdir.js index a369257..0e97a7b 100644 --- a/test/http/mkdir.js +++ b/test/http/mkdir.js @@ -150,7 +150,7 @@ describe('mkdir', () => { expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ path, defaultOptions({ - mode: parseInt(mode, 8) + mode: mode }) ]) }) @@ -165,7 +165,9 @@ describe('mkdir', () => { expect(ipfs.files.mkdir.getCall(0).args).to.deep.equal([ path, defaultOptions({ - mtime: 5 + mtime: { + secs: 5 + } }) ]) }) diff --git a/test/http/touch.js b/test/http/touch.js index ab1699a..9acecef 100644 --- a/test/http/touch.js +++ b/test/http/touch.js @@ -7,6 +7,7 @@ const sinon = require('sinon') function defaultOptions (modification = {}) { const options = { + mtime: null, cidVersion: 0, format: 'dag-pb', hashAlg: 'sha2-256', @@ -23,7 +24,7 @@ function defaultOptions (modification = {}) { describe('touch', () => { const path = '/foo' - const mtime = parseInt(Date.now() / 1000) + const mtime = new Date(1000000) let ipfs beforeEach(() => { @@ -37,28 +38,33 @@ describe('touch', () => { it('should update the mtime for a file', async () => { await http({ method: 'POST', - url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}` + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}` }, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, - defaultOptions() + defaultOptions({ + mtime: { + secs: 1000 + } + }) ]) }) it('should update the mtime without flushing', async () => { await http({ method: 'POST', - url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&flush=false` + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&flush=false` }, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime: { + 
secs: 1000 + }, flush: false }) ]) @@ -67,14 +73,16 @@ describe('touch', () => { it('should update the mtime with a different codec', async () => { await http({ method: 'POST', - url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&format=dag-pb` + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&format=dag-pb` }, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime: { + secs: 1000 + }, format: 'dag-pb' }) ]) @@ -83,14 +91,16 @@ describe('touch', () => { it('should update the mtime with a different hash algorithm', async () => { await http({ method: 'POST', - url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&hashAlg=sha3-256` + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&hashAlg=sha3-256` }, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime: { + secs: 1000 + }, hashAlg: 'sha3-256' }) ]) @@ -99,14 +109,35 @@ describe('touch', () => { it('should update the mtime with a shard split threshold', async () => { await http({ method: 'POST', - url: `/api/v0/files/touch?arg=${path}&mtime=${mtime}&shardSplitThreshold=10` + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&shardSplitThreshold=10` + }, { ipfs }) + + expect(ipfs.files.touch.callCount).to.equal(1) + expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ + path, + defaultOptions({ + mtime: { + secs: 1000 + }, + shardSplitThreshold: 10 + }) + ]) + }) + + it('should update the mtime with nanoseconds with a shard split threshold', async () => { + await http({ + method: 'POST', + url: `/api/v0/files/touch?arg=${path}&mtime=${mtime.getTime() / 1000}&mtimeNsecs=100&shardSplitThreshold=10` }, { ipfs }) expect(ipfs.files.touch.callCount).to.equal(1) expect(ipfs.files.touch.getCall(0).args).to.deep.equal([ path, - mtime, defaultOptions({ + mtime: { + 
secs: 1000, + nsecs: 100 + }, shardSplitThreshold: 10 }) ]) diff --git a/test/http/write.js b/test/http/write.js index 9c6fd06..19f5ac8 100644 --- a/test/http/write.js +++ b/test/http/write.js @@ -275,7 +275,7 @@ describe('write', () => { expect(content).to.equal('hello world') }) - it('shouldwrite to a file with a specified mode', async () => { + it('should write to a file with a specified mode', async () => { const mode = '0577' await http({ @@ -308,7 +308,9 @@ describe('write', () => { expect(ipfs.files.write.callCount).to.equal(1) expect(ipfs.files.write.getCall(0)).to.have.nested.property('args[0]', path) expect(ipfs.files.write.getCall(0)).to.have.nested.deep.property('args[2]', defaultOptions({ - mtime + mtime: { + secs: 11 + } })) expect(content).to.equal('hello world') }) From 234e11c17634d429044c18bda6cfe5eef387ffb5 Mon Sep 17 00:00:00 2001 From: achingbrain Date: Mon, 23 Dec 2019 18:36:54 +0000 Subject: [PATCH 14/15] chore: use multipart pr --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 411846d..0fe0d71 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,7 @@ "err-code": "^2.0.0", "hamt-sharding": "~0.0.2", "interface-datastore": "^0.8.0", - "ipfs-multipart": "^0.2.0", + "ipfs-multipart": "ipfs/js-ipfs-multipart#store-mtime-as-timespec", "ipfs-unixfs": "ipfs/js-ipfs-unixfs#store-mtime-as-timespec", "ipfs-unixfs-exporter": "~0.39.0", "ipfs-unixfs-importer": "ipfs/js-ipfs-unixfs-importer#mtime-passed-as-timespec", From d55b59568f297dfe3600a0e8f9af6999cf5b53be Mon Sep 17 00:00:00 2001 From: achingbrain Date: Thu, 9 Jan 2020 14:37:38 +0000 Subject: [PATCH 15/15] fix: support optional mtimes --- package.json | 24 ++++++++++++------------ src/core/chmod.js | 4 +++- src/core/touch.js | 2 +- src/http/stat.js | 3 ++- 4 files changed, 18 insertions(+), 15 deletions(-) diff --git a/package.json b/package.json index 0fe0d71..2413edd 100644 --- a/package.json +++ b/package.json @@ -55,8 +55,8 
@@ "ipld": "~0.25.0", "it-all": "^1.0.1", "memdown": "^5.1.0", - "nyc": "^14.1.1", - "sinon": "^7.5.0", + "nyc": "^15.0.0", + "sinon": "^8.0.4", "stream-to-promise": "^2.2.0", "temp-write": "^4.0.0", "yargs": "^15.0.2", @@ -65,22 +65,22 @@ "dependencies": { "@hapi/boom": "^7.4.2", "@hapi/joi": "^15.1.0", - "cids": "~0.7.1", + "cids": "^0.7.1", "debug": "^4.1.0", "err-code": "^2.0.0", - "hamt-sharding": "~0.0.2", + "hamt-sharding": "^1.0.0", "interface-datastore": "^0.8.0", - "ipfs-multipart": "ipfs/js-ipfs-multipart#store-mtime-as-timespec", - "ipfs-unixfs": "ipfs/js-ipfs-unixfs#store-mtime-as-timespec", - "ipfs-unixfs-exporter": "~0.39.0", - "ipfs-unixfs-importer": "ipfs/js-ipfs-unixfs-importer#mtime-passed-as-timespec", - "ipfs-utils": "ipfs/js-ipfs-utils#format-mtime-as-timespec", - "ipld-dag-pb": "~0.18.0", + "ipfs-multipart": "^0.3.0", + "ipfs-unixfs": "^0.3.0", + "ipfs-unixfs-exporter": "^0.40.0", + "ipfs-unixfs-importer": "^0.43.0", + "ipfs-utils": "^0.4.2", + "ipld-dag-pb": "^0.18.0", "it-last": "^1.0.1", "joi-browser": "^13.4.0", "mortice": "^2.0.0", - "multicodec": "~0.5.3", - "multihashes": "~0.4.14", + "multicodec": "^1.0.0", + "multihashes": "^0.4.14", "once": "^1.4.0", "pull-stream": "^3.6.9" }, diff --git a/src/core/chmod.js b/src/core/chmod.js index 2c77740..3b32c07 100644 --- a/src/core/chmod.js +++ b/src/core/chmod.js @@ -17,7 +17,9 @@ const defaultOptions = { flush: true, shardSplitThreshold: 1000, format: 'dag-pb', - hashAlg: 'sha2-256' + hashAlg: 'sha2-256', + cidVersion: 0, + recursive: false } function calculateModification (mode) { diff --git a/src/core/touch.js b/src/core/touch.js index 3019d28..1517773 100644 --- a/src/core/touch.js +++ b/src/core/touch.js @@ -17,7 +17,7 @@ const defaultOptions = { mtime: undefined, flush: true, shardSplitThreshold: 1000, - cidVersion: 1, + cidVersion: 0, format: 'dag-pb', hashAlg: 'sha2-256' } diff --git a/src/http/stat.js b/src/http/stat.js index 4ed28a3..d8b5563 100644 --- a/src/http/stat.js +++ 
b/src/http/stat.js @@ -33,7 +33,8 @@ const mfsStat = { WithLocality: stats.withLocality, Local: stats.local, SizeLocal: stats.sizeLocal, - Mtime: stats.mtime, + Mtime: stats.mtime ? stats.mtime.secs : undefined, + MtimeNsecs: stats.mtime ? stats.mtime.nsecs : undefined, Mode: stats.mode.toString(8).padStart(4, '0') }) },