diff --git a/package.json b/package.json
index f0ff3949a..6f8915b59 100644
--- a/package.json
+++ b/package.json
@@ -30,11 +30,12 @@
     "url": "https://github.com/ipfs/js-ipfs-api"
   },
   "devDependencies": {
-    "aegir": "^5.0.1",
+    "aegir": "^6.0.0",
     "chai": "^3.5.0",
     "gulp": "^3.9.1",
-    "interface-ipfs-core": "^0.5.0",
+    "interface-ipfs-core": "^0.6.0",
     "ipfsd-ctl": "^0.14.0",
+    "passthrough-counter": "^1.0.0",
     "pre-commit": "^1.1.3",
     "stream-equal": "^0.1.8",
     "stream-http": "^2.3.1",
diff --git a/src/add-to-dagnode-transform.js b/src/add-to-dagnode-transform.js
index 46e92b6fb..f70d869f9 100644
--- a/src/add-to-dagnode-transform.js
+++ b/src/add-to-dagnode-transform.js
@@ -8,6 +8,7 @@ module.exports = function (err, res, send, done) {
   if (err) {
     return done(err)
   }
+
   async.map(res, function map (entry, next) {
     getDagNode(send, entry.Hash, function (err, node) {
       if (err) {
diff --git a/src/api/add-files.js b/src/api/add-files.js
index c5362f643..26f71097d 100644
--- a/src/api/add-files.js
+++ b/src/api/add-files.js
@@ -1,5 +1,6 @@
 'use strict'
 
+const isNode = require('detect-node')
 const addToDagNodesTransform = require('../add-to-dagnode-transform')
 
 module.exports = (send) => {
@@ -9,6 +10,10 @@
       opts = {}
     }
 
+    if (!isNode) {
+      return cb(new Error('Recursive uploads are not supported in the browser'))
+    }
+
     if (typeof (path) !== 'string') {
       return cb(new Error('"path" must be a string'))
     }
diff --git a/src/api/cat.js b/src/api/cat.js
index 8b61f66a1..06e9bac90 100644
--- a/src/api/cat.js
+++ b/src/api/cat.js
@@ -1,8 +1,7 @@
 'use strict'
 
-const bs58 = require('bs58')
-const isIPFS = require('is-ipfs')
 const promisify = require('promisify-es6')
+const cleanMultihash = require('../clean-multihash')
 
 module.exports = (send) => {
   const cat = promisify((multihash, callback) => {
@@ -15,13 +14,3 @@
   })
   return cat
 }
-
-function cleanMultihash (multihash) {
-  if (!isIPFS.multihash(multihash)) {
-    throw new Error('not valid multihash')
-  }
-  if (Buffer.isBuffer(multihash)) {
-    return bs58.encode(multihash)
-  }
-  return multihash
-}
diff --git a/src/api/get.js b/src/api/get.js
new file mode 100644
index 000000000..b1b833d9d
--- /dev/null
+++ b/src/api/get.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const tarStreamToObjects = require('../tar-stream-to-objects')
+const cleanMultihash = require('../clean-multihash')
+const promisify = require('promisify-es6')
+
+module.exports = (send) => {
+  return promisify(function get (path, opts, cb) {
+    if (typeof opts === 'function' && !cb) {
+      cb = opts
+      opts = {}
+    }
+
+    // opts is the real callback -- 'cb' is being injected by promisify
+    if (typeof opts === 'function' && typeof cb === 'function') {
+      cb = opts
+      opts = {}
+    }
+
+    try {
+      path = cleanMultihash(path)
+    } catch (err) {
+      return cb(err)
+    }
+
+    const sendWithTransform = send.withTransform(tarStreamToObjects)
+
+    return sendWithTransform('get', path, opts, null, cb)
+  })
+}
diff --git a/src/clean-multihash.js b/src/clean-multihash.js
new file mode 100644
index 000000000..bbf3f9a39
--- /dev/null
+++ b/src/clean-multihash.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const bs58 = require('bs58')
+const isIPFS = require('is-ipfs')
+
+module.exports = function (multihash) {
+  if (!isIPFS.multihash(multihash)) {
+    throw new Error('not valid multihash')
+  }
+  if (Buffer.isBuffer(multihash)) {
+    return bs58.encode(multihash)
+  }
+  return multihash
+}
+
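The new `get` command resolves a multihash to a stream of `{ path, content }` objects, where `content` is a Readable stream for files and `null` for directories. A minimal consumer sketch, assuming a client constructed the usual way with this library (the multihash is the one the tests below use):

    const ipfsAPI = require('ipfs-api')
    const concat = require('concat-stream')

    const ipfs = ipfsAPI('localhost', '5001')

    ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, stream) => {
      if (err) throw err
      stream.on('data', (file) => {
        if (!file.content) return // directory entry, no content stream
        file.content.pipe(concat((buf) => {
          console.log(file.path, '->', buf.length, 'bytes')
        }))
      })
    })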
diff --git a/src/load-commands.js b/src/load-commands.js
index e8dc92657..ebd832de7 100644
--- a/src/load-commands.js
+++ b/src/load-commands.js
@@ -13,6 +13,7 @@ function requireCommands () {
     dht: require('./api/dht'),
     diag: require('./api/diag'),
     id: require('./api/id'),
+    get: require('./api/get'),
     log: require('./api/log'),
     ls: require('./api/ls'),
     mount: require('./api/mount'),
@@ -33,6 +34,12 @@
     const files = require('./api/files')(send)
     files.add = require('./api/add')(send)
     files.createAddStream = require('./api/add-stream.js')(send)
+    files.get = require('./api/get')(send)
+
+    // aliases
+    cmds.add = files.add
+    cmds.createAddStream = files.createAddStream
+    cmds.get = files.get
 
     return files
   }
diff --git a/src/request-api.js b/src/request-api.js
index bef4fafcf..a5c01ea4c 100644
--- a/src/request-api.js
+++ b/src/request-api.js
@@ -4,6 +4,7 @@
 const Wreck = require('wreck')
 const Qs = require('qs')
 const ndjson = require('ndjson')
 const getFilesStream = require('./get-files-stream')
+const Counter = require('passthrough-counter')
 
 const isNode = require('detect-node')
@@ -11,13 +12,19 @@
 function parseChunkedJson (res, cb) {
   const parsed = []
+  const c = new Counter()
   res
+    .pipe(c)
     .pipe(ndjson.parse())
-    .on('data', parsed.push.bind(parsed))
-    .on('end', () => cb(null, parsed))
+    .on('data', (obj) => {
+      parsed.push(obj)
+    })
+    .on('end', () => {
+      cb(null, parsed)
+    })
 }
 
-function onRes (buffer, cb) {
+function onRes (buffer, cb, uri) {
   return (err, res) => {
     if (err) {
       return cb(err)
@@ -42,10 +49,14 @@
       })
     }
 
-    if (stream && !buffer) return cb(null, res)
+    if (stream && !buffer) {
+      return cb(null, res)
+    }
 
     if (chunkedObjects) {
-      if (isJson) return parseChunkedJson(res, cb)
+      if (isJson) {
+        return parseChunkedJson(res, cb)
+      }
       return Wreck.read(res, null, cb)
     }
 
@@ -56,6 +67,11 @@
 function requestAPI (config, path, args, qs, files, buffer, cb) {
   qs = qs || {}
+
+  if (Array.isArray(files)) {
+    qs.recursive = true
+  }
+
   if (Array.isArray(path)) path = path.join('/')
   if (args && !Array.isArray(args)) args = [args]
   if (args) qs.arg = args
 
@@ -67,10 +83,6 @@
     delete qs.r
   }
 
-  if (!isNode && qs.recursive && path === 'add') {
-    return cb(new Error('Recursive uploads are not supported in the browser'))
-  }
-
   qs['stream-channels'] = true
 
   let stream
@@ -104,7 +116,7 @@
     opts.payload = stream
   }
 
-  return Wreck.request(opts.method, opts.uri, opts, onRes(buffer, cb))
+  return Wreck.request(opts.method, opts.uri, opts, onRes(buffer, cb, opts.uri))
 }
 
 // -- Interface
@@ -128,9 +140,9 @@
     return requestAPI(config, path, args, qs, files, buffer, cb)
   }
 
-  // Wraps the 'send' function such that an asynchronous transform may be
-  // applied to its result before passing it on to either its callback or
-  // promise.
+  // Wraps the 'send' function such that an asynchronous
+  // transform may be applied to its result before
+  // passing it on to either its callback or promise.
   send.withTransform = function (transform) {
     return function (path, args, qs, files, buffer, cb) {
       if (typeof buffer === 'function') {
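`send.withTransform` is what lets a command post-process the raw HTTP response before the caller sees it; a transform has the same `(err, res, send, done)` shape as `add-to-dagnode-transform.js` above. A toy sketch of a hypothetical command module built on it (the `version` path and the response shape are illustrative, not part of this change):

    // hypothetical command factory built on send.withTransform
    module.exports = (send) => {
      const sendWithTransform = send.withTransform(function (err, res, send, done) {
        if (err) {
          return done(err)
        }
        // rewrite the result before it reaches the caller's callback/promise
        done(null, { version: res.Version })
      })

      return function version (cb) {
        // same argument shape as send: (path, args, qs, files, buffer, cb)
        return sendWithTransform('version', null, null, null, true, cb)
      }
    }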
diff --git a/src/tar-stream-to-objects.js b/src/tar-stream-to-objects.js
new file mode 100644
index 000000000..b817d5612
--- /dev/null
+++ b/src/tar-stream-to-objects.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const tar = require('tar-stream')
+const Readable = require('readable-stream')
+
+// transform a tar stream into a readable stream of
+// { path: 'string', content: Readable } objects
+module.exports = function (err, res, send, done) {
+  if (err) {
+    return done(err)
+  }
+
+  const ex = tar.extract()
+  res.pipe(ex)
+
+  const objStream = new Readable({ objectMode: true })
+  objStream._read = function noop () {}
+
+  ex.on('entry', function (header, stream, next) {
+    objStream.push({
+      path: header.name,
+      content: header.type !== 'directory' ? stream : null
+    })
+    next()
+  })
+  ex.on('finish', () => {
+    objStream.push(null)
+  })
+
+  done(null, objStream)
+}
+
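The transform above can be exercised on its own by feeding it a tar stream built in memory with tar-stream's `pack` side; a small sketch (test scaffolding only, run from the repo root):

    const tar = require('tar-stream')
    const tarStreamToObjects = require('./src/tar-stream-to-objects')

    const pack = tar.pack()
    pack.entry({ name: 'hello.txt' }, 'hello world')
    pack.finalize()

    // the transform has the (err, res, send, done) shape; send is unused here
    tarStreamToObjects(null, pack, null, (err, objStream) => {
      if (err) throw err
      objStream.on('data', (file) => {
        console.log('entry:', file.path) // => 'hello.txt'
        file.content.resume() // drain the entry so the extractor can finish
      })
    })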
diff --git a/tasks/daemons.js b/tasks/daemons.js
index a341312dc..ad1c1909d 100644
--- a/tasks/daemons.js
+++ b/tasks/daemons.js
@@ -1,4 +1,5 @@
 'use strict'
+/* eslint max-nested-callbacks: ["error", 8] */ // TODO reduce nestedness
 
 const gulp = require('gulp')
 const fs = require('fs')
diff --git a/test/api/block.spec.js b/test/api/block.spec.js
index 4fdef4646..5fedd8f0e 100644
--- a/test/api/block.spec.js
+++ b/test/api/block.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 
 'use strict'
diff --git a/test/api/bootstrap.spec.js b/test/api/bootstrap.spec.js
index 5728d0dbd..8cecb7978 100644
--- a/test/api/bootstrap.spec.js
+++ b/test/api/bootstrap.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 
 'use strict'
diff --git a/test/api/config.spec.js b/test/api/config.spec.js
index 21ca82d8e..86914958e 100644
--- a/test/api/config.spec.js
+++ b/test/api/config.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 
 'use strict'
diff --git a/test/api/files.spec.js b/test/api/files.spec.js
index 7a5cc616b..731a37395 100644
--- a/test/api/files.spec.js
+++ b/test/api/files.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 
 'use strict'
diff --git a/test/api/get.spec.js b/test/api/get.spec.js
new file mode 100644
index 000000000..77a9f7451
--- /dev/null
+++ b/test/api/get.spec.js
@@ -0,0 +1,132 @@
+/* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
+/* globals apiClients */
+
+'use strict'
+
+const expect = require('chai').expect
+const isNode = require('detect-node')
+const fs = require('fs')
+const concat = require('concat-stream')
+const through = require('through2')
+const streamEqual = require('stream-equal')
+
+const path = require('path')
+
+const testfile = fs.readFileSync(path.join(__dirname, '/../testfile.txt'))
+
+let testfileBig
+
+if (isNode) {
+  const tfbPath = path.join(__dirname, '/../15mb.random')
+  testfileBig = fs.createReadStream(tfbPath, { bufferSize: 128 })
+}
+
+describe('.get', () => {
+  it('get with no compression args', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, res) => {
+        expect(err).to.not.exist
+
+        // accumulate the files and their content
+        var files = []
+        res.pipe(through.obj((file, enc, next) => {
+          file.content.pipe(concat((content) => {
+            files.push({
+              path: file.path,
+              content: content
+            })
+            next()
+          }))
+        }, () => {
+          expect(files).to.be.length(1)
+          expect(files[0].content.toString()).to.contain(testfile.toString())
+          done()
+        }))
+      })
+  })
+
+  it('get with archive true', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {archive: true}, (err, res) => {
+        expect(err).to.not.exist
+
+        // accumulate the files and their content
+        var files = []
+        res.pipe(through.obj((file, enc, next) => {
+          file.content.pipe(concat((content) => {
+            files.push({
+              path: file.path,
+              content: content
+            })
+            next()
+          }))
+        }, () => {
+          expect(files).to.be.length(1)
+          expect(files[0].content.toString()).to.contain(testfile.toString())
+          done()
+        }))
+      })
+  })
+
+  it('get err with out of range compression level', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {compress: true, 'compression-level': 10}, (err, res) => {
+        expect(err).to.exist
+        expect(err.toString()).to.equal('Error: Compression level must be between 1 and 9')
+        done()
+      })
+  })
+
+  it('get with compression level', (done) => {
+    apiClients.a
+      .get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', {compress: true, 'compression-level': 1}, (err, res) => {
+        expect(err).to.not.exist
+        done()
+      })
+  })
+
+  it('get BIG file', (done) => {
+    if (!isNode) {
+      return done()
+    }
+
+    apiClients.a.get('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq', (err, files) => {
+      expect(err).to.not.exist
+
+      files.on('data', (file) => {
+        // Do not blow out the memory of nodejs :)
+        streamEqual(file.content, testfileBig, (err, equal) => {
+          expect(err).to.not.exist
+          expect(equal).to.be.true
+          done()
+        })
+      })
+    })
+  })
+
+  describe('promise', () => {
+    it('get', (done) => {
+      apiClients.a.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+        .then((files) => {
+          files.on('data', (file) => {
+            let buf = ''
+            file.content
+              .on('error', (err) => {
+                throw err
+              })
+              .on('data', (data) => {
+                buf += data.toString()
+              })
+              .on('end', () => {
+                expect(buf).to.contain(testfile.toString())
+                done()
+              })
+          })
+        })
+        .catch((err) => {
+          expect(err).to.not.exist
+        })
+    })
+  })
+})
diff --git a/test/api/log.spec.js b/test/api/log.spec.js
index 6c46ba36f..65ac3c0ac 100644
--- a/test/api/log.spec.js
+++ b/test/api/log.spec.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* eslint max-nested-callbacks: ["error", 8] */
 /* globals apiClients */
 
 'use strict'
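For completeness, a sketch of the most common end-to-end use the tests don't show: unpacking a fetched tree to disk. It assumes entries arrive parent-before-child, as tar archives are ordered, and that `ipfs` is a client as in the earlier sketch:

    const fs = require('fs')

    ipfs.get('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP', (err, stream) => {
      if (err) throw err
      stream.on('data', (file) => {
        if (!file.content) {
          // directory entries carry no content stream
          fs.mkdirSync(file.path)
          return
        }
        file.content.pipe(fs.createWriteStream(file.path))
      })
    })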