@@ -7,8 +7,7 @@ const through = require('pull-through')
 const parallel = require('async/parallel')
 const waterfall = require('async/waterfall')
 const dagPB = require('ipld-dag-pb')
-const CID = require('cids')
-const multihash = require('multihashing-async')
+const persist = require('../utils/persist')
 
 const reduce = require('./reduce')
 
@@ -27,7 +26,7 @@ const defaultOptions = {
 
 module.exports = function builder (createChunker, ipld, createReducer, _options) {
   const options = extend({}, defaultOptions, _options)
-  options.cidVersion = options.cidVersion || options.cidVersion
+  options.cidVersion = options.cidVersion || defaultOptions.cidVersion
   options.hashAlg = options.hashAlg || defaultOptions.hashAlg
 
   if (options.hashAlg !== 'sha2-256') {
@@ -71,33 +70,17 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
     const d = new UnixFS('directory')
 
     waterfall([
-      (cb) => DAGNode.create(d.marshal(), [], options.hashAlg, cb),
-      (node, cb) => {
-        if (options.onlyHash) {
-          return cb(null, node)
-        }
-
-        const cid = new CID(options.cidVersion, 'dag-pb', node.multihash)
-
-        node = new DAGNode(
-          node.data,
-          node.links,
-          node.serialized,
-          cid
-        )
-
-        ipld.put(node, {
-          cid
-        }, (err) => cb(err, node))
-      }
-    ], (err, node) => {
+      (cb) => DAGNode.create(d.marshal(), [], cb),
+      (node, cb) => persist(node, ipld, options, cb)
+    ], (err, result) => {
       if (err) {
         return callback(err)
       }
+
       callback(null, {
         path: item.path,
-        multihash: node.multihash,
-        size: node.size
+        multihash: result.cid.buffer,
+        size: result.node.size
       })
     })
   }
@@ -134,55 +117,42 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
       }),
       pull.asyncMap((buffer, callback) => {
         if (options.rawLeaves) {
-          return multihash(buffer, options.hashAlg, (error, hash) => {
-            if (error) {
-              return callback(error)
-            }
-
-            return callback(null, {
-              multihash: hash,
-              size: buffer.length,
-              leafSize: buffer.length,
-              cid: new CID(1, 'raw', hash),
-              data: buffer
-            })
+          return callback(null, {
+            size: buffer.length,
+            leafSize: buffer.length,
+            data: buffer
           })
         }
 
         const file = new UnixFS(options.leafType, buffer)
 
-        DAGNode.create(file.marshal(), [], options.hashAlg, (err, node) => {
+        DAGNode.create(file.marshal(), [], (err, node) => {
           if (err) {
             return callback(err)
           }
 
           callback(null, {
-            multihash: node.multihash,
             size: node.size,
             leafSize: file.fileSize(),
-            cid: new CID(options.cidVersion, 'dag-pb', node.multihash),
             data: node
           })
         })
       }),
       pull.asyncMap((leaf, callback) => {
-        if (options.onlyHash) {
-          return callback(null, leaf)
-        }
+        persist(leaf.data, ipld, options, (error, results) => {
+          if (error) {
+            return callback(error)
+          }
 
-        ipld.put(leaf.data, {
-          cid: leaf.cid
-        }, (error) => callback(error, leaf))
-      }),
-      pull.map((leaf) => {
-        return {
-          path: file.path,
-          multihash: leaf.cid.buffer,
-          size: leaf.size,
-          leafSize: leaf.leafSize,
-          name: '',
-          cid: leaf.cid
-        }
+          callback(null, {
+            size: leaf.size,
+            leafSize: leaf.leafSize,
+            data: results.node,
+            multihash: results.cid.buffer,
+            path: leaf.path,
+            name: ''
+          })
+        })
       }),
       through( // mark as single node if only one single node
         function onData (data) {
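
For context, the new `../utils/persist` dependency is only visible in this diff through its call sites: it takes a node (a raw leaf `Buffer` or a dag-pb `DAGNode`), the `ipld` instance and the builder options, and calls back with `{ cid, node }` (hence `result.cid.buffer` and `result.node.size` above). Below is a minimal sketch of what such a helper could look like, assuming it simply centralises the CID construction, `onlyHash` short-circuit and `ipld.put` call that this diff removes from the inline code; it is an inference from usage, not the module's actual source.

```js
// Hypothetical sketch of ../utils/persist, reconstructed only from how it is
// called in this diff: persist(node, ipld, options, cb) -> { cid, node }.
const CID = require('cids')
const multihash = require('multihashing-async')

module.exports = function persist (node, ipld, options, callback) {
  // Raw leaves are plain Buffers and get a CIDv1 'raw' CID; dag-pb nodes keep
  // the configured cidVersion, mirroring the branches removed in this diff.
  // Assumption: DAGNodes expose their encoded bytes as `node.serialized`,
  // as in the removed code above.
  const isRawLeaf = Buffer.isBuffer(node)
  const bytes = isRawLeaf ? node : node.serialized
  const codec = isRawLeaf ? 'raw' : 'dag-pb'
  const version = isRawLeaf ? 1 : options.cidVersion

  multihash(bytes, options.hashAlg, (error, hash) => {
    if (error) {
      return callback(error)
    }

    const cid = new CID(version, codec, hash)

    if (options.onlyHash) {
      // Same behaviour as the removed onlyHash checks: compute the CID, skip the write
      return callback(null, { cid, node })
    }

    ipld.put(node, { cid }, (error) => callback(error, { cid, node }))
  })
}
```

Whatever its exact internals, the effect of the refactor is that directory nodes, file leaves and raw leaves now share a single place where `cidVersion`, `hashAlg`, raw-leaf codec and `onlyHash` handling are decided, instead of three slightly different inline copies.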