
Commit 4bf004b

README and API improvements.

1 parent 5a400b4 · commit 4bf004b

File tree: 3 files changed, +99 -33 lines

- README.md
- src/index.js
- tests/test-import.js

README.md

Lines changed: 79 additions & 1 deletion
```diff
@@ -1,12 +1,90 @@
 IPFS Data Importing
 ===================
 
+> Import data into an IPFS DAG Service.
+
 [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io)
 [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs)
 [![Build Status](https://travis-ci.org/ipfs/js-ipfs-data-importing.svg?style=flat-square)](https://travis-ci.org/ipfs/js-ipfs-data-importing)
 ![](https://img.shields.io/badge/coverage-%3F-yellow.svg?style=flat-square)
 [![Dependency Status](https://david-dm.org/ipfs/js-ipfs-data-importing.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-data-importing)
 [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)
 
-> JavaScript implementation of the layout and chunking mechanisms used by IPFS
+## Example
+
+Let's create a little directory to import:
+```sh
+$ cd /tmp
+$ mkdir foo
+$ echo 'hello' > foo/bar
+$ echo 'warld' > foo/quux
+```
+
+And write the importing logic:
+```js
+// Dependencies to create a DAG Service (where the dir will be imported into)
+var memStore = require('abstract-blob-store')
+var ipfsRepo = require('ipfs-repo')
+var ipfsBlocks = require('ipfs-blocks')
+var ipfsMerkleDag = require('ipfs-merkle-dag')
+
+var repo = new ipfsRepo('', { stores: memStore })
+var blocks = new ipfsBlocks.BlockService(repo)
+var dag = new ipfsMerkleDag.DAGService(blocks)
+
+
+var ipfsData = require('ipfs-data-importing')
+
+// Import /tmp/foo
+ipfsData.import('/tmp/foo', dag, {
+  recursive: true
+}, done)
+
+// A root DAG Node is received upon completion
+function done (err, rootStat) {
+  if (err) { throw err }
+  console.log(rootStat)
+}
+```
+
+When run, the stat of the root DAG Node is printed:
+
+```
+{ Hash: <Buffer 12 20 bd e2 2b 57 3f 6f bd 7c cc 5a 11 7f 28 6c a2 9a 9f c0 90 e1 d4 16 d0 5f 42 81 ec 0c 2a 7f 7f 93>,
+  Size: 59843,
+  Name: 'foo' }
+```
+
+## API
+
+```js
+var importer = require('ipfs-data-importing')
+```
+
+### importer.import(target, dagService, opts, cb)
+
+`target` can be a `string`, `Buffer`, or `Stream`. When it's a string, the file
+or directory structure rooted on the filesystem at `target` is imported, with
+the hierarchy preserved. If a Buffer or Stream, a single DAG node will be
+imported representing the buffer or stream's contents.
+
+Uses the [DAG Service](https://github.com/vijayee/js-ipfs-merkle-dag/) instance
+`dagService`. Accepts the following `opts`:
+
+- `recursive`: whether to recurse into directories. Defaults to `false`.
+
+Calls the callback `cb(err, stat)` on completion or error, where `stat` is an
+object with the `Hash`, `Size`, and `Name` of the root
+[`DAGNode`](https://github.com/vijayee/js-ipfs-merkle-dag/).
+
+## install
+
+With [npm](https://npmjs.org/) installed, run
+
+```
+$ npm install ipfs-data-importing
+```
+
+## license
 
+ISC
```
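
The API section above says `target` may also be a `Buffer` or a `Stream`, but the example only covers a directory. Below is a minimal sketch of the Buffer case, reusing the `dag` service set up in the README example; the trailing `dag.get` call mirrors how the tests read a node back and is illustrative, not documented API:

```js
var ipfsData = require('ipfs-data-importing')

// A Buffer target produces a single DAG node for its contents
var buf = new Buffer('hello world\n')

ipfsData.import(buf, dag, {}, function (err, stat) {
  if (err) { throw err }
  console.log(stat) // { Hash: <Buffer ...>, Size: ... }

  // Read the imported node back out of the DAG Service
  dag.get(stat.Hash, function (err, node) {
    if (err) { throw err }
    console.log(node.size())
  })
})
```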

src/index.js

Lines changed: 14 additions & 13 deletions
```diff
@@ -12,15 +12,20 @@ exports = module.exports
 const CHUNK_SIZE = 262144
 
 // Use a layout + chunkers to convert a directory (or file) to the layout format
-exports.import = function (options, callback) {
-  // options.path : what to import
-  // options.buffer : import a buffer
-  // options.filename : optional file name for buffer
-  // options.stream : import a stream
+exports.import = (dagService, options, callback) => {
+  if (typeof options === 'function') { callback = options; options = {} }
+  if (!dagService) { return callback(new Error('no dag service provided')) }
+
   // options.recursive : follow dirs
   // options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker }
-  // options.dag-service : instance of block service
-  const dagService = options.dagService
+  // options.path : import a file hierarchy from a path
+  // options.stream : import a stream
+  // options.buffer : import a buffer
+
+  // TODO: make first param be 'target' and check type to decide how to import
+  //   path
+  //   stream
+  //   buffer
 
   if (options.buffer) {
     if (!Buffer.isBuffer(options.buffer)) {
@@ -219,13 +224,10 @@
       if (err) {
         return log.err(err)
       }
-      // an optional file name provided
-      const fileName = options.filename
 
       callback(null, {
        Hash: parentNode.multihash(),
-        Size: parentNode.size(),
-        Name: fileName
+        Size: parentNode.size()
      }) && cb()
    })
  }))
@@ -241,8 +243,7 @@
 
     callback(null, {
       Hash: fileNode.multihash(),
-      Size: fileNode.size(),
-      Name: options.filename
+      Size: fileNode.size()
     })
   })
 }
```
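
Note that this hunk keeps the `(dagService, options, callback)` order, while the README and tests already pass the import target positionally; the TODO above tracks that convergence. A hypothetical call matching the signature as shown here, with `dag` being the DAG Service from the README example:

```js
var importer = require('ipfs-data-importing')

importer.import(dag, {
  path: '/tmp/foo', // or buffer: <Buffer>, or stream: <Stream>
  recursive: true
}, function (err, stat) {
  if (err) { throw err }
  console.log(stat) // { Hash: <Buffer ...>, Size: <number> }
})
```

Because of the `typeof options === 'function'` guard, `options` can be omitted entirely and the callback passed in its place.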

tests/test-import.js

Lines changed: 6 additions & 19 deletions
```diff
@@ -54,10 +54,7 @@ describe('layout: importer', function () {
   })
 
   it('import a small file', (done) => {
-    importer.import({
-      path: small,
-      dagService: ds
-    }, function (err, stat) {
+    importer.import(small, ds, function (err, stat) {
       expect(err).to.not.exist
       ds.get(stat.Hash, (err, node) => {
         expect(err).to.not.exist
@@ -72,10 +69,7 @@ describe('layout: importer', function () {
   })
 
   it('import a big file', (done) => {
-    importer.import({
-      path: big,
-      dagService: ds
-    }, function (err, stat) {
+    importer.import(big, ds, function (err, stat) {
       expect(err).to.not.exist
       ds.get(stat.Hash, (err, node) => {
         expect(err).to.not.exist
@@ -120,9 +114,7 @@ describe('layout: importer', function () {
   })
 
   it('import a small directory', (done) => {
-    importer.import({
-      path: dirSmall,
-      dagService: ds,
+    importer.import(dirSmall, ds, {
       recursive: true
     }, function (err, stats) {
       expect(err).to.not.exist
@@ -149,9 +141,7 @@ describe('layout: importer', function () {
   })
 
   it('import a big directory', (done) => {
-    importer.import({
-      path: dirBig,
-      dagService: ds,
+    importer.import(dirBig, ds, {
       recursive: true
     }, function (err, stats) {
       expect(err).to.not.exist
@@ -178,9 +168,7 @@ describe('layout: importer', function () {
   })
 
   it('import a nested directory', (done) => {
-    importer.import({
-      path: dirNested,
-      dagService: ds,
+    importer.import(dirNested, ds, {
       recursive: true
     }, function (err, stats) {
       expect(err).to.not.exist
@@ -225,8 +213,7 @@ describe('layout: importer', function () {
     var buf = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt'))
     importer.import({
       buffer: buf,
-      dagService: ds,
-      filename: 'Test.txt'
+      dagService: ds
     }, function (err, stat) {
       expect(err).to.not.exist
       ds.get(stat.Hash, (err, node) => {
```
