@@ -10,14 +10,20 @@ const mocha = require('mocha');
10
10
const assert = require ( 'assert' ) ;
11
11
const mongodb = require ( 'mongodb' ) ;
12
12
const { v4 : uuid } = require ( 'uuid' ) ;
13
+ const _ = require ( 'lodash' ) ;
14
+ const crypto = require ( 'crypto' ) ;
15
+ const stream = require ( 'stream' ) ;
13
16
17
+ const ObjectIO = require ( '../../sdk/object_io' ) ;
14
18
const P = require ( '../../util/promise' ) ;
15
19
const config = require ( '../../../config' ) ;
16
20
const MDStore = require ( '../../server/object_services/md_store' ) . MDStore ;
17
21
const coretest = require ( './coretest' ) ;
18
22
const lifecycle = require ( '../../server/bg_services/lifecycle' ) ;
19
23
const http_utils = require ( '../../util/http_utils' ) ;
20
24
const commonTests = require ( '../lifecycle/common' ) ;
25
// Pseudo-random test-payload source: an AES-128-GCM cipher keyed with a fresh
// random seed; generator.update(Buffer.alloc(n)) yields n bytes of keystream
// (random-looking, but cheap and non-repeating across calls).
const seed = crypto.randomBytes(16);
const generator = crypto.createCipheriv('aes-128-gcm', seed, Buffer.alloc(12));
21
27
22
28
const { rpc_client, EMAIL } = coretest ;
23
29
const Bucket = 'first.bucket' ;
@@ -27,6 +33,10 @@ const TagName2 = 'tagname2';
27
33
const TagValue = 'tagvalue' ;
28
34
const TagValue2 = 'tagvalue2' ;
29
35
36
// Shared ObjectIO client used by the multipart-upload tests below.
// NOTE(review): set_verification_mode() presumably makes IO verify data
// integrity — confirm against sdk/object_io.
const object_io = new ObjectIO();
object_io.set_verification_mode();
38
+
39
+
30
40
mocha . describe ( 'lifecycle' , ( ) => {
31
41
32
42
let s3 ;
@@ -85,6 +95,12 @@ mocha.describe('lifecycle', () => {
85
95
mocha . it ( 'test and prefix size' , async ( ) => {
86
96
await commonTests . test_and_prefix_size ( Bucket , Key , s3 ) ;
87
97
} ) ;
98
+ mocha . it ( 'test version' , async ( ) => {
99
+ await commonTests . test_version ( Bucket , Key , s3 ) ;
100
+ } ) ;
101
+ mocha . it ( 'test multipath' , async ( ) => {
102
+ await commonTests . test_multipart ( Bucket , Key , s3 ) ;
103
+ } ) ;
88
104
} ) ;
89
105
90
106
mocha . describe ( 'bucket-lifecycle-bg-worker' , function ( ) {
@@ -216,4 +232,131 @@ mocha.describe('lifecycle', () => {
216
232
217
233
console . log ( '✅ The lifecycle test was completed successfully' ) ;
218
234
} ) ;
235
+
236
mocha.describe('bucket-lifecycle-multipart-upload', function() {
    this.timeout(60000);
    const multipart_bucket = 'test-multipart-bucket';

    mocha.after(async function() {
        //TODO Delete bucket
        //await rpc_client.bucket.delete_bucket({ name: multipart_bucket });
    });

    /**
     * Creates `bucket`, starts an object upload for `key`, uploads
     * `num_parts` multiparts of `part_size` bytes each, then back-dates the
     * multipart records by `age` days directly in MDStore and asserts that
     * list_multiparts reflects both the count and the back-dated time.
     */
    async function create_mock_multipart_upload(key, bucket, age, part_size, num_parts) {
        await rpc_client.bucket.create_bucket({ name: bucket });
        const content_type = 'test/test';
        const total_size = num_parts * part_size;
        const payload = generator.update(Buffer.alloc(total_size));
        const { obj_id } = await rpc_client.object.create_object_upload({ bucket, key, content_type });

        const mp_list_before = await rpc_client.object.list_multiparts({ obj_id, bucket, key });
        coretest.log('list_multiparts before', mp_list_before);
        assert.strictEqual(mp_list_before.multiparts.length, 0);

        const multiparts_ids = [];
        const slice_for_part = part_index => payload.slice(part_index * part_size, (part_index + 1) * part_size);
        const upload_one_part = async (part_index, part_data, split, finish) => {
            const resp = await object_io.upload_multipart({
                client: rpc_client,
                obj_id,
                bucket,
                key,
                num: part_index + 1,
                size: part_data.length,
                source_stream: readable_buffer(part_data, split, finish),
            });
            console.log("upload_multipart", resp);
            multiparts_ids.push(new mongodb.ObjectId(resp.multipart_id));
        };

        // upload the real multiparts we want to complete with (in parallel)
        const uploads = [];
        for (let i = 0; i < num_parts; ++i) {
            uploads.push(upload_one_part(i, slice_for_part(i)));
        }
        await Promise.all(uploads);

        // go back in time
        const create_time = new Date();
        create_time.setDate(create_time.getDate() - age);
        const update = { create_time };

        console.log('create_mock_multipart_upload bucket', bucket, 'obj_id', obj_id, 'multiparts_ids', multiparts_ids);
        await MDStore.instance().update_multiparts_by_ids(multiparts_ids, update);

        const mp_list_after = await rpc_client.object.list_multiparts({ obj_id, bucket, key });
        coretest.log('mp_list_after after', mp_list_after);
        assert.strictEqual(mp_list_after.multiparts.length, num_parts);
        const actual_create_time = mp_list_after.multiparts[0].last_modified;
        assert.strictEqual(actual_create_time, create_time.getTime(),
            `object create_time/getTime actual ${actual_create_time} !== expected ${create_time.getTime()}`);
    }

    mocha.it('lifecyle - listMultiPart verify', async () => {
        await create_mock_multipart_upload('test-lifecycle-multipart', multipart_bucket, 3, 45, 7);
    });
});
294
+
295
mocha.describe('bucket-lifecycle-version', function() {
    this.timeout(60000);
    const version_bucket = 'test-version-bucket';

    mocha.after(async function() {
        //TODO Delete bucket
        //await rpc_client.bucket.delete_bucket({ name: version_bucket });
    });

    /**
     * Creates a versioning-enabled `bucket`, uploads `version_count` versions
     * of `version_key`, back-dates all but the two most recent versions by
     * `age` days (when age > 0) directly in MDStore, then asserts that
     * list_object_versions still returns every version.
     */
    async function create_mock_version(version_key, bucket, age, version_count) {
        await rpc_client.bucket.create_bucket({ name: bucket });
        // BUGFIX: this call was previously not awaited (a floating promise),
        // so the first uploads could race with versioning being enabled and
        // silently create non-versioned objects.
        await rpc_client.bucket.update_bucket({
            name: bucket,
            versioning: 'ENABLED'
        });

        const obj_upload_ids = [];
        for (let i = 0; i < version_count; ++i) {
            const content_type = 'application/octet_stream';
            const { obj_id } = await rpc_client.object.create_object_upload({ bucket, key: version_key, content_type });
            await rpc_client.object.complete_object_upload({ obj_id, bucket, key: version_key });
            // collect ids of all but the two most recent versions — only
            // those get back-dated below
            if (i < version_count - 2) {
                obj_upload_ids.push(new mongodb.ObjectId(obj_id));
            }
        }

        // go back in time
        if (age > 0) {
            const create_time = new Date();
            create_time.setDate(create_time.getDate() - age);
            const update = {
                create_time,
            };
            console.log('blow_version_objects: bucket', bucket, 'multiparts_ids', obj_upload_ids, " obj_upload_ids length: ", obj_upload_ids.length, "update :", update);
            const update_result = await MDStore.instance().update_objects_by_ids(obj_upload_ids, update);
            console.log('blow_version_objects: update_objects_by_ids', update_result);
        }

        const obj_params = {
            bucket,
        };
        const list_obj = await rpc_client.object.list_object_versions(obj_params);
        console.log("List updated objects : ", list_obj);
        assert.strictEqual(list_obj.objects.length, version_count,
            `object total count ${list_obj.objects.length} !== expected ${version_count}`);
    }

    mocha.it('lifecyle - version expiration', async () => {
        await create_mock_version('test-lifecycle-version', version_bucket, 30, 10);
    });
});
343
+
344
/**
 * Builds a Readable stream that serves `data` in roughly `split` chunks.
 * `finish` selects termination behavior: 'end' (default) ends the stream
 * normally after the data; 'fail' emits an error instead of EOF.
 */
function readable_buffer(data, split = 1, finish = 'end') {
    const chunk_limit = Math.ceil(data.length / split);
    let offset = 0;
    return new stream.Readable({
        read() {
            if (offset >= data.length) {
                if (finish === 'fail') {
                    this.emit('error', new Error('TEST_OBJECT_IO FAIL ON FINISH'));
                } else {
                    this.push(null);
                }
                return;
            }
            const take = Math.min(data.length - offset, chunk_limit);
            const chunk = data.slice(offset, offset + take);
            offset += take;
            setImmediate(() => this.push(chunk));
        }
    });
}
219
362
} ) ;
0 commit comments