@@ -72,20 +72,21 @@ export const updateHamtDirectory = async (context, links, bucket, options) => {
 }
 
 /**
+ * @param {MfsContext} context
  * @param {PBLink[]} links
  * @param {Bucket<any>} rootBucket
  * @param {Bucket<any>} parentBucket
  * @param {number} positionAtParent
  */
-export const recreateHamtLevel = async (links, rootBucket, parentBucket, positionAtParent) => {
+export const recreateHamtLevel = async (context, links, rootBucket, parentBucket, positionAtParent) => {
   // recreate this level of the HAMT
   const bucket = new Bucket({
     hash: rootBucket._options.hash,
     bits: rootBucket._options.bits
   }, parentBucket, positionAtParent)
   parentBucket._putObjectAt(positionAtParent, bucket)
 
-  await addLinksToHamtBucket(links, bucket, rootBucket)
+  await addLinksToHamtBucket(context, links, bucket, rootBucket)
 
   return bucket
 }
@@ -99,28 +100,57 @@ export const recreateInitialHamtLevel = async (links) => {
     bits: hamtBucketBits
   })
 
-  await addLinksToHamtBucket(links, bucket, bucket)
+  // populate sub bucket but do not recurse as we do not want to pull whole shard in
+  await Promise.all(
+    links.map(async link => {
+      const linkName = (link.Name || '')
+
+      if (linkName.length === 2) {
+        const pos = parseInt(linkName, 16)
+
+        const subBucket = new Bucket({
+          hash: bucket._options.hash,
+          bits: bucket._options.bits
+        }, bucket, pos)
+        bucket._putObjectAt(pos, subBucket)
+
+        return Promise.resolve()
+      }
+
+      return bucket.put(linkName.substring(2), {
+        size: link.Tsize,
+        cid: link.Hash
+      })
+    })
+  )
 
   return bucket
 }
 
 /**
+ * @param {MfsContext} context
  * @param {PBLink[]} links
  * @param {Bucket<any>} bucket
  * @param {Bucket<any>} rootBucket
  */
-export const addLinksToHamtBucket = async (links, bucket, rootBucket) => {
+export const addLinksToHamtBucket = async (context, links, bucket, rootBucket) => {
   await Promise.all(
-    links.map(link => {
+    links.map(async link => {
       const linkName = (link.Name || '')
 
       if (linkName.length === 2) {
+        log('Populating sub bucket', linkName)
         const pos = parseInt(linkName, 16)
+        const block = await context.repo.blocks.get(link.Hash)
+        const node = dagPB.decode(block)
 
-        bucket._putObjectAt(pos, new Bucket({
+        const subBucket = new Bucket({
           hash: rootBucket._options.hash,
           bits: rootBucket._options.bits
-        }, bucket, pos))
+        }, bucket, pos)
+        bucket._putObjectAt(pos, subBucket)
+
+        await addLinksToHamtBucket(context, node.Links, subBucket, rootBucket)
 
         return Promise.resolve()
       }
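For reference, both helpers rely on the same shard-link naming convention: a link name of exactly two characters is a sub-shard pointer whose hex value is the child bucket slot, while longer names carry that two-character prefix followed by the real entry name. A minimal sketch of the decoding (the example name is assumed, not taken from this change):

  const linkName = '0Ffile.txt'                               // assumed example link name
  const isSubShard = linkName.length === 2                    // two chars => sub-shard pointer
  const bucketSlot = parseInt(linkName.substring(0, 2), 16)   // '0F' -> 15
  const entryName = isSubShard ? '' : linkName.substring(2)   // -> 'file.txt'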
@@ -213,7 +243,7 @@ export const generatePath = async (context, fileName, rootNode) => {
       if (!path[i + 1]) {
         log(`Loaded new subshard ${segment.prefix}`)
 
-        await recreateHamtLevel(node.Links, rootBucket, segment.bucket, parseInt(segment.prefix, 16))
+        await recreateHamtLevel(context, node.Links, rootBucket, segment.bucket, parseInt(segment.prefix, 16))
         const position = await rootBucket._findNewBucketAndPos(fileName)
 
         // i--
@@ -229,7 +259,7 @@ export const generatePath = async (context, fileName, rootNode) => {
       const nextSegment = path[i + 1]
 
       // add intermediate links to bucket
-      await addLinksToHamtBucket(node.Links, nextSegment.bucket, rootBucket)
+      await addLinksToHamtBucket(context, node.Links, nextSegment.bucket, rootBucket)
 
       nextSegment.node = node
     }
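Both generatePath call sites now thread context through so the HAMT helpers can read sub-shard blocks lazily. The only part of the MfsContext the new code paths rely on is the blockstore plus the dagPB decoding already imported in this file, roughly as sketched below (loadShardChild is a made-up name for illustration; the two calls inside it are the ones the patch itself uses):

  // sketch of the context dependency introduced by this change
  const loadShardChild = async (context, link) => {
    const block = await context.repo.blocks.get(link.Hash) // raw block bytes for the sub-shard
    return dagPB.decode(block)                             // dag-pb node whose Links are replayed into the bucket
  }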