|
| 1 | +'use strict' |
| 2 | + |
| 3 | +const { importer } = require('ipfs-unixfs-engine') |
| 4 | +const pull = require('pull-stream') |
| 5 | +const toPull = require('stream-to-pull-stream') |
| 6 | +const waterfall = require('async/waterfall') |
| 7 | +const isStream = require('is-stream') |
| 8 | +const isSource = require('is-pull-stream').isSource |
| 9 | +const CID = require('cids') |
| 10 | +const { parseChunkerString } = require('./utils') |
| 11 | + |
// Path prefix added to every entry when `wrapWithDirectory` is set; it is
// stripped off again in `prepareFile` (via `substring(WRAPPER.length)`) so
// the wrapping directory itself becomes the root of the returned paths.
const WRAPPER = 'wrapper/'

// Default progress callback used when the caller does not supply one.
function noop () {}
/**
 * Turn an importer result ({ path, multihash, ... }) into the public
 * { path, hash, size } shape returned to `add` callers.
 *
 * @param {Object} self - the IPFS node (used for `self.object.get`)
 * @param {Object} opts - add options (`cidVersion`, `onlyHash`,
 *   `wrapWithDirectory`, ...)
 * @param {Object} file - a single importer output entry
 * @param {Function} callback - called with (err, { path, hash, size })
 */
function prepareFile (self, opts, file, callback) {
  opts = opts || {}

  let cid = new CID(file.multihash)
  if (opts.cidVersion === 1) {
    cid = cid.toV1()
  }

  // In hash-only mode nothing was written, so there is no node to fetch;
  // fall back to the importer entry itself.
  const fetchNode = (cb) => {
    if (opts.onlyHash) {
      return cb(null, file)
    }
    self.object.get(file.multihash, Object.assign({}, opts, { preload: false }), cb)
  }

  fetchNode((err, node) => {
    if (err) {
      return callback(err)
    }

    const b58Hash = cid.toBaseEncodedString()
    // Raw buffers report their byte length; DAG nodes carry a `size` field.
    const size = Buffer.isBuffer(node) ? node.length : node.size

    callback(null, {
      // When wrapping, strip the internal wrapper prefix so the wrapper dir
      // reads as the root; otherwise fall back to the hash for pathless input.
      path: opts.wrapWithDirectory
        ? file.path.substring(WRAPPER.length)
        : (file.path || b58Hash),
      hash: b58Hash,
      size
    })
  })
}
| 48 | + |
/**
 * Normalize the many accepted `add` input shapes into an array of
 * `{ path, content: <pull-stream source> }` entries.
 *
 * Accepts a single item or an array of: Buffer, readable stream,
 * pull-stream source, or an entry object whose `content` is any of those.
 *
 * @param {Object} opts - add options (`wrapWithDirectory`)
 * @param {*} content - caller-supplied input
 * @returns {Array<Object>} normalized entries
 * @throws {Error} when wrapping with a directory and an entry has no path
 */
function normalizeContent (opts, content) {
  const entries = Array.isArray(content) ? content : [content]

  return entries.map((entry) => {
    // Bare inputs get wrapped into an entry object with an empty path and
    // their content converted to a pull-stream source.
    if (Buffer.isBuffer(entry)) {
      entry = { path: '', content: pull.values([entry]) }
    } else if (isStream.readable(entry)) {
      entry = { path: '', content: toPull.source(entry) }
    } else if (isSource(entry)) {
      entry = { path: '', content: entry }
    }

    // Entry objects may also carry their content as a Buffer or readable
    // stream; convert those too. Content that is already a function is
    // treated as a pull-stream source and left untouched.
    if (entry && entry.content && typeof entry.content !== 'function') {
      if (Buffer.isBuffer(entry.content)) {
        entry.content = pull.values([entry.content])
      }

      if (isStream.readable(entry.content)) {
        entry.content = toPull.source(entry.content)
      }
    }

    if (opts.wrapWithDirectory) {
      // A wrapper directory needs a real path to place each entry under.
      if (!entry.path) {
        throw new Error('Must provide a path when wrapping with a directory')
      }
      entry.path = WRAPPER + entry.path
    }

    return entry
  })
}
| 90 | + |
/**
 * Trigger a preload for root-level results of an add, then pass the file
 * through unchanged (used as a pull-stream map stage).
 *
 * @param {Object} self - the IPFS node (used for `self._preload`)
 * @param {Object} opts - add options (`wrapWithDirectory`, `onlyHash`, `preload`)
 * @param {Object} file - a prepared { path, hash, size } entry
 * @returns {Object} the same `file`, untouched
 */
function preloadFile (self, opts, file) {
  // When wrapping, only the wrapper itself (empty path) is the root entry;
  // otherwise every entry whose path has no '/' is a root entry.
  let atRoot
  if (opts.wrapWithDirectory) {
    atRoot = file.path === ''
  } else {
    atRoot = !file.path.includes('/')
  }

  // Skip preloading in hash-only mode or when explicitly disabled.
  if (atRoot && !opts.onlyHash && opts.preload !== false) {
    self._preload(file.hash)
  }

  return file
}
| 104 | + |
/**
 * Pin root-level results of an add, then pass the file through
 * (used as a pull-stream asyncMap stage).
 *
 * Pinning defaults to on, and only applies to the root dir of a recursive
 * add or the single file of a direct add.
 *
 * @param {Object} self - the IPFS node (used for `self.pin.add`)
 * @param {Object} opts - add options (`pin`, `onlyHash`, `hashAlg`)
 * @param {Object} file - a prepared { path, hash, size } entry
 * @param {Function} cb - called with (err, file)
 */
function pinFile (self, opts, file, cb) {
  const wantPin = 'pin' in opts ? opts.pin : true
  const atRoot = !file.path.includes('/')

  if (wantPin && atRoot && !opts.onlyHash && !opts.hashAlg) {
    // Preloading of root entries is handled separately in `preloadFile`.
    return self.pin.add(file.hash, { preload: false }, (err) => cb(err, file))
  }

  cb(null, file)
}
| 117 | + |
module.exports = function (self) {
  // Internal add func that gets used by all add funcs
  return function addPullStream (options = {}) {
    // Parse the chunker description string up front. If it is invalid the
    // pipeline cannot be built, so return a through-stream that rethrows the
    // parse error for the first item pulled through it — this surfaces the
    // error to the consumer instead of throwing synchronously here.
    let chunkerOptions
    try {
      chunkerOptions = parseChunkerString(options.chunker)
    } catch (err) {
      return pull.map(() => { throw err })
    }

    // Merge order matters: defaults < caller options < parsed chunker opts.
    // Sharding is only enabled (finite split threshold) behind the
    // EXPERIMENTAL flag.
    const opts = Object.assign({}, {
      shardSplitThreshold: self._options.EXPERIMENTAL.sharding
        ? 1000
        : Infinity
    }, options, chunkerOptions)

    // CID v0 is for multihashes encoded with sha2-256, so any custom hash
    // algorithm forces CID v1.
    if (opts.hashAlg && opts.cidVersion !== 1) {
      opts.cidVersion = 1
    }

    // Wrap the caller's progress callback so it receives the cumulative
    // byte count rather than per-chunk deltas.
    let total = 0

    const prog = opts.progress || noop
    const progress = (bytes) => {
      total += bytes
      prog(total)
    }

    opts.progress = progress
    return pull(
      // normalize input to [{ path, content: <pull source> }] entries
      pull.map(normalizeContent.bind(null, opts)),
      pull.flatten(),
      // import into IPLD, then shape/preload/pin each resulting entry
      importer(self._ipld, opts),
      pull.asyncMap(prepareFile.bind(null, self, opts)),
      pull.map(preloadFile.bind(null, self, opts)),
      pull.asyncMap(pinFile.bind(null, self, opts))
    )
  }
}
0 commit comments