
Commit 782c862

Merge pull request #8 from noffle/readme

README and API improvements

2 parents: 5a400b4 + b9cd36b

7 files changed, +110 −169 lines

README.md (+79 −1)
@@ -1,12 +1,90 @@
 IPFS Data Importing
 ===================

+> Import data into an IPFS DAG Service.
+
 [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
 [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
 [![Build Status](https://travis-ci.org/ipfs/js-ipfs-data-importing.svg?style=flat-square)](https://travis-ci.org/ipfs/js-ipfs-data-importing)
 ![](https://img.shields.io/badge/coverage-%3F-yellow.svg?style=flat-square)
 [![Dependency Status](https://david-dm.org/ipfs/js-ipfs-data-importing.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-data-importing)
 [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)

-> JavaScript implementation of the layout and chunking mechanisms used by IPFS
+## Example
+
+Let's create a little directory to import:
+```sh
+$ cd /tmp
+$ mkdir foo
+$ echo 'hello' > foo/bar
+$ echo 'warld' > foo/quux
+```
+
+And write the importing logic:
+```js
+// Dependencies to create a DAG Service (where the dir will be imported into)
+var memStore = require('abstract-blob-store')
+var ipfsRepo = require('ipfs-repo')
+var ipfsBlocks = require('ipfs-blocks')
+var ipfsMerkleDag = require('ipfs-merkle-dag')
+
+var repo = new ipfsRepo('', { stores: memStore })
+var blocks = new ipfsBlocks.BlockService(repo)
+var dag = new ipfsMerkleDag.DAGService(blocks)
+
+
+var ipfsData = require('ipfs-data-importing')
+
+// Import /tmp/foo
+ipfsData.import('/tmp/foo', dag, {
+  recursive: true
+}, done)
+
+// A root DAG Node is received upon completion
+function done (err, rootStat) {
+  if (err) { throw err }
+  console.log(rootStat)
+}
+```
+
+When run, the stat of the root DAG Node is printed:
+
+```
+{ Hash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
+  Size: 59843,
+  Name: 'foo' }
+```
+
+## API
+
+```js
+var importer = require('ipfs-data-importing')
+```
+
+### importer.import(target, dagService, opts, cb)
+
+`target` can be a `string`, `Buffer`, or `Stream`. When it's a string, the file
+or directory structure rooted on the filesystem at `target` is imported, with
+the hierarchy preserved. If a Buffer or Stream, a single DAG node will be
+imported representing the buffer or stream's contents.
+
+Uses the [DAG Service](https://github.com/vijayee/js-ipfs-merkle-dag/) instance
+`dagService`. Accepts the following `opts`:
+
+- `recursive`: whether to recurse into directories. Defaults to `false`.
+
+Calls the callback `cb(err, stat)` on completion or error, where `stat` is an
+object with the `Hash`, `Size`, and `Name` of the root
+[`DAGNode`](https://github.com/vijayee/js-ipfs-merkle-dag/).
+
+## install
+
+With [npm](https://npmjs.org/) installed, run
+
+```
+$ npm install ipfs-data-importing
+```
+
+## license

+ISC
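
The new README only exercises the directory path. For completeness, here is a minimal sketch of the Buffer path described in the API section, reusing the `dag` service from the README example; the buffer contents are illustrative:

```js
var ipfsData = require('ipfs-data-importing')

// With the new signature, opts may be omitted entirely for a Buffer import
ipfsData.import(new Buffer('hello warld\n'), dag, function (err, stat) {
  if (err) { throw err }
  // stat carries the Hash and Size of the single DAG node created
  console.log(stat)
})
```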

package.json (+1)
@@ -27,6 +27,7 @@
   },
   "homepage": "https://github.com/diasdavid/js-ipfs-data-importing#readme",
   "devDependencies": {
+    "block-stream2": "^1.1.0",
     "brfs": "^1.4.3",
     "bs58": "^3.0.0",
     "buffer-loader": "0.0.1",

src/chunker-fixed-size.js (+3 −43)
@@ -1,45 +1,5 @@
-var through2 = require('through2')
+var chunker = require('block-stream2')

-exports = module.exports = FixedSizeChunker
-
-// The difference of this chunker compared to other fixed size chunkers
-// available, is that it doesn't add padding the last chunk
-
-function FixedSizeChunker (size) {
-  var stream = through2(transform, flush)
-
-  var buf = new Buffer(0)
-
-  function transform (chunk, enc, cb) {
-    var that = this
-
-    buf = Buffer.concat([buf, chunk])
-
-    if (buf.length >= size) {
-      slice()
-    }
-
-    function slice () {
-      var chunk = new Buffer(size, 'binary')
-      var newBuf = new Buffer(buf.length - size, 'binary')
-      buf.copy(chunk, 0, 0, size)
-      buf.copy(newBuf, 0, size, buf.length)
-      buf = newBuf
-      that.push(chunk)
-
-      if (buf.length >= size) {
-        return slice()
-      }
-    }
-
-    cb()
-  }
-
-  function flush (cb) {
-    // last chunk
-    this.push(buf)
-    cb()
-  }
-
-  return stream
+exports = module.exports = function (size) {
+  return chunker({ size: size, zeroPadding: false })
 }
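
block-stream2 preserves the one behavior the removed comment called out: the final chunk is emitted at its natural length rather than zero-padded up to `size`. A quick sketch of that behavior (hypothetical usage; the require path assumes a caller inside src/):

```js
var fixedSizeChunker = require('./chunker-fixed-size')

var chunks = fixedSizeChunker(256)
chunks.on('data', function (chunk) {
  // 600 bytes in => chunk lengths 256, 256, 88; the trailing
  // 88-byte chunk is pushed as-is instead of being padded to 256
  console.log(chunk.length)
})
chunks.end(new Buffer(600))
```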

src/index.js (+17 −26)
@@ -12,34 +12,29 @@ exports = module.exports
 const CHUNK_SIZE = 262144

 // Use a layout + chunkers to convert a directory (or file) to the layout format
-exports.import = function (options, callback) {
-  // options.path : what to import
-  // options.buffer : import a buffer
-  // options.filename : optional file name for buffer
-  // options.stream : import a stream
+exports.import = (target, dagService, options, callback) => {
+  if (typeof options === 'function') { callback = options; options = {} }
+
+  if (!target) { return callback(new Error('must specify target')) }
+  if (!dagService) { return callback(new Error('must specify dag service')) }
+
   // options.recursive : follow dirs
   // options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker }
-  // options.dag-service : instance of block service
-  const dagService = options.dagService

-  if (options.buffer) {
-    if (!Buffer.isBuffer(options.buffer)) {
-      return callback(new Error('buffer importer must take a buffer'))
-    }
-    bufferImporter(options.buffer, callback)
-  } else if (options.stream) {
-    if (!(typeof options.stream.on === 'function')) {
-      return callback(new Error('stream importer must take a readable stream'))
-    }
+  options = options || {}
+
+  if (Buffer.isBuffer(target)) {
+    bufferImporter(target, callback)
+  } else if (typeof target.on === 'function') {
     // TODO Create Stream Importer
     // streamImporter(options.stream, callback)
     return callback(new Error('stream importer has not been built yet'))
-  } else if (options.path) {
-    const stats = fs.statSync(options.path)
+  } else if (typeof target === 'string') {
+    const stats = fs.statSync(target)
     if (stats.isFile()) {
-      fileImporter(options.path, callback)
+      fileImporter(target, callback)
     } else if (stats.isDirectory() && options.recursive) {
-      dirImporter(options.path, callback)
+      dirImporter(target, callback)
     } else {
       return callback(new Error('recursive must be true to add a directory'))
     }
@@ -219,13 +214,10 @@ exports.import = function (options, callback) {
       if (err) {
         return log.err(err)
       }
-      // an optional file name provided
-      const fileName = options.filename

       callback(null, {
         Hash: parentNode.multihash(),
-        Size: parentNode.size(),
-        Name: fileName
+        Size: parentNode.size()
       }) && cb()
     })
   }))
@@ -241,8 +233,7 @@ exports.import = function (options, callback) {

       callback(null, {
         Hash: fileNode.multihash(),
-        Size: fileNode.size(),
-        Name: options.filename
+        Size: fileNode.size()
       })
     })
   }
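
The net effect of the src/index.js change on callers: the import target and the DAG Service move out of the options object into positional arguments, and `options` becomes optional. A rough before/after sketch (`ds`, `done`, and `someBuffer` are illustrative names, with `ds` a DAGService instance and `done` a node-style callback):

```js
// Before this commit: everything packed into one options object
importer.import({ path: '/tmp/foo', dagService: ds, recursive: true }, done)

// After: target first, DAG Service second, opts optional
importer.import('/tmp/foo', ds, { recursive: true }, done)
importer.import(someBuffer, ds, done)
```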

tests/buffer-test.js (+3 −9)
@@ -24,6 +24,7 @@ const marbuf = require('buffer!./test-data/200Bytes.txt.block')
 module.exports = function (repo) {
   describe('chunker: fixed size', function () {
     this.timeout(10000)
+
     it('256 Bytes chunks', function (done) {
       var counter = 0
       fileStream()
@@ -84,10 +85,7 @@ module.exports = function (repo) {
       var bs = new BlockService(repo)
       var ds = new DAGService(bs)
       var buf = smallBuf
-      importer.import({
-        buffer: buf,
-        dagService: ds
-      }, function (err, stat) {
+      importer.import(buf, ds, function (err, stat) {
         expect(err).to.not.exist
         ds.get(stat.Hash, function (err, node) {
           expect(err).to.not.exist
@@ -105,11 +103,7 @@ module.exports = function (repo) {
       var buf = bigBuf
       var bs = new BlockService(repo)
       var ds = new DAGService(bs)
-      importer.import({
-        buffer: buf,
-        dagService: ds,
-        filename: 'Test.txt'
-      }, function (err, stat) {
+      importer.import(buf, ds, function (err, stat) {
         expect(err).to.not.exist
         ds.get(stat.Hash, function (err, node) {
           expect(err).to.not.exist

tests/test-fixed-size-chunker.js (−64)

This file was deleted.
