
Commit dd317b1

Merge pull request #8844 from naveenpaul1/lifecycle-md-multipart
lifecycle | lifecycle pre-test setup
2 parents: 111ef29 + f9f117e

4 files changed: +412 −1 lines


src/sdk/object_io.js

Lines changed: 3 additions & 1 deletion
@@ -290,7 +290,9 @@ class ObjectIO {
                 await this._upload_stream(params, complete_params);
             }
             dbg.log0('upload_multipart: complete upload', complete_params);
-            return params.client.object.complete_multipart(complete_params);
+            const multipart_params = await params.client.object.complete_multipart(complete_params);
+            multipart_params.multipart_id = complete_params.multipart_id;
+            return multipart_params;
         } catch (err) {
             dbg.warn('upload_multipart: failed', complete_params, err);
             // we leave the cleanup of failed multiparts to complete_object_upload or abort_object_upload
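With this change upload_multipart resolves with the complete_multipart reply and also carries the multipart_id of the uploaded part, so callers can find the part's metadata record. A minimal caller sketch, assuming an initialized ObjectIO instance and RPC client (variable names are illustrative):

    // Sketch: capture the multipart_id that upload_multipart now returns.
    const resp = await object_io.upload_multipart({
        client: rpc_client,
        obj_id,
        bucket,
        key,
        num: 1,
        size: part_data.length,
        source_stream: part_stream,
    });
    console.log('uploaded part, multipart_id =', resp.multipart_id);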

src/test/lifecycle/common.js

Lines changed: 66 additions & 0 deletions
@@ -389,6 +389,51 @@ function duplicate_id_lifecycle_configuration(Bucket, Key) {
     };
 }
 
+function version_lifecycle_configuration(Bucket, Key, Days, ExpiredDeleteMarker, NewnonCurrentVersion, NonCurrentDays) {
+    const ID = 'rule_id';
+    return {
+        Bucket,
+        LifecycleConfiguration: {
+            Rules: [{
+                ID,
+                Filter: {
+                    Prefix: Key,
+                },
+                Expiration: {
+                    Days: Days,
+                    ExpiredObjectDeleteMarker: ExpiredDeleteMarker,
+                },
+                NoncurrentVersionExpiration: {
+                    NewerNoncurrentVersions: NewnonCurrentVersion,
+                    NoncurrentDays: NonCurrentDays,
+                },
+                Status: 'Enabled',
+            }, ],
+        },
+    };
+}
+exports.version_lifecycle_configuration = version_lifecycle_configuration;
+
+function multipart_lifecycle_configuration(Bucket, Key, Days) {
+    const ID = 'rule_id';
+    return {
+        Bucket,
+        LifecycleConfiguration: {
+            Rules: [{
+                ID,
+                Filter: {
+                    Prefix: Key,
+                },
+                AbortIncompleteMultipartUpload: {
+                    DaysAfterInitiation: Days,
+                },
+                Status: 'Enabled',
+            }, ],
+        },
+    };
+}
+exports.multipart_lifecycle_configuration = multipart_lifecycle_configuration;
+
 async function put_get_lifecycle_configuration(Bucket, putLifecycleParams, s3) {
     const putLifecycleResult = await s3.putBucketLifecycleConfiguration(putLifecycleParams);
     console.log('put lifecycle params:', putLifecycleParams, 'result', putLifecycleResult);
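These helpers build the parameter object expected by the AWS SDK's putBucketLifecycleConfiguration call, in the same shape put_get_lifecycle_configuration already uses. A minimal usage sketch, assuming an S3 client pointed at the test endpoint (bucket name and prefix below are illustrative):

    // Sketch: apply and read back an abort-incomplete-multipart rule of 10 days.
    const params = multipart_lifecycle_configuration('first.bucket', 'test-prefix/', 10);
    await s3.putBucketLifecycleConfiguration(params);
    const res = await s3.getBucketLifecycleConfiguration({ Bucket: params.Bucket });
    console.log(res.Rules[0].AbortIncompleteMultipartUpload); // expected: { DaysAfterInitiation: 10 }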
@@ -409,6 +454,27 @@ async function put_get_lifecycle_configuration(Bucket, putLifecycleParams, s3) {
     return getLifecycleResult;
 }
 
+exports.test_multipart = async function(Bucket, Key, s3) {
+    const putLifecycleParams = multipart_lifecycle_configuration(Bucket, Key, 10);
+    const getLifecycleResult = await put_get_lifecycle_configuration(Bucket, putLifecycleParams, s3);
+
+    const expirationDays = getLifecycleResult.Rules[0].AbortIncompleteMultipartUpload.DaysAfterInitiation;
+    const expectedExpirationDays = putLifecycleParams.LifecycleConfiguration.Rules[0]
+        .AbortIncompleteMultipartUpload.DaysAfterInitiation;
+    console.log('get lifecycle multipart expiration:', expirationDays, ' expected:', expectedExpirationDays);
+    assert(expirationDays === expectedExpirationDays, 'Multipart expiration days do not match');
+};
+
+exports.test_version = async function(Bucket, Key, s3) {
+    const putLifecycleParams = version_lifecycle_configuration(Bucket, Key, 10, true, 5, 10);
+    const getLifecycleResult = await put_get_lifecycle_configuration(Bucket, putLifecycleParams, s3);
+
+    const expirationDays = getLifecycleResult.Rules[0].Expiration.Days;
+    const expectedExpirationDays = putLifecycleParams.LifecycleConfiguration.Rules[0].Expiration.Days;
+    console.log('get lifecycle version expiration:', expirationDays, ' expected:', expectedExpirationDays);
+    assert(expirationDays === expectedExpirationDays, 'Expiration days do not match');
+};
+
 exports.test_rules_length = async function(Bucket, Key, s3) {
     const putLifecycleParams = rules_length_lifecycle_configuration(Bucket, Key);
     const getLifecycleResult = await put_get_lifecycle_configuration(Bucket, putLifecycleParams, s3);
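For reference, getBucketLifecycleConfiguration returns AbortIncompleteMultipartUpload.DaysAfterInitiation as a plain number, which is what the assertions above compare. An approximate response shape for the multipart rule (a sketch, not captured output):

    // Illustrative only: the kind of object the assertions read from getLifecycleResult.
    const expected_shape = {
        Rules: [{
            ID: 'rule_id',
            Filter: { Prefix: 'test-prefix/' },
            Status: 'Enabled',
            AbortIncompleteMultipartUpload: { DaysAfterInitiation: 10 },
        }],
    };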

src/test/unit_tests/test_lifecycle.js

Lines changed: 143 additions & 0 deletions
@@ -10,14 +10,20 @@ const mocha = require('mocha');
 const assert = require('assert');
 const mongodb = require('mongodb');
 const { v4: uuid } = require('uuid');
+const _ = require('lodash');
+const crypto = require('crypto');
+const stream = require('stream');
 
+const ObjectIO = require('../../sdk/object_io');
 const P = require('../../util/promise');
 const config = require('../../../config');
 const MDStore = require('../../server/object_services/md_store').MDStore;
 const coretest = require('./coretest');
 const lifecycle = require('../../server/bg_services/lifecycle');
 const http_utils = require('../../util/http_utils');
 const commonTests = require('../lifecycle/common');
+const seed = crypto.randomBytes(16);
+const generator = crypto.createCipheriv('aes-128-gcm', seed, Buffer.alloc(12));
 
 const { rpc_client, EMAIL } = coretest;
 const Bucket = 'first.bucket';
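The seed/generator pair above serves as a cheap pseudo-random data source: encrypting a zero-filled buffer with AES-128-GCM yields incompressible bytes of exactly the requested length. A standalone sketch of the same idea:

    // Sketch: produce `size` pseudo-random bytes by encrypting zeros (GCM preserves length).
    const crypto = require('crypto');
    const cipher = crypto.createCipheriv('aes-128-gcm', crypto.randomBytes(16), Buffer.alloc(12));
    const random_bytes = cipher.update(Buffer.alloc(1024));
    console.log(random_bytes.length); // 1024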
@@ -27,6 +33,10 @@ const TagName2 = 'tagname2';
 const TagValue = 'tagvalue';
 const TagValue2 = 'tagvalue2';
 
+const object_io = new ObjectIO();
+object_io.set_verification_mode();
+
+
 mocha.describe('lifecycle', () => {
 
     let s3;
@@ -85,6 +95,12 @@ mocha.describe('lifecycle', () => {
         mocha.it('test and prefix size', async () => {
             await commonTests.test_and_prefix_size(Bucket, Key, s3);
         });
+        mocha.it('test version', async () => {
+            await commonTests.test_version(Bucket, Key, s3);
+        });
+        mocha.it('test multipart', async () => {
+            await commonTests.test_multipart(Bucket, Key, s3);
+        });
     });
 
     mocha.describe('bucket-lifecycle-bg-worker', function() {
@@ -216,4 +232,131 @@ mocha.describe('lifecycle', () => {
 
         console.log('✅ The lifecycle test was completed successfully');
     });
+
+    mocha.describe('bucket-lifecycle-multipart-upload', function() {
+        this.timeout(60000);
+        const multipart_bucket = 'test-multipart-bucket';
+        mocha.after(async function() {
+            //TODO Delete bucket
+            //await rpc_client.bucket.delete_bucket({ name: multipart_bucket });
+        });
+        async function create_mock_multipart_upload(key, bucket, age, part_size, num_parts) {
+            await rpc_client.bucket.create_bucket({ name: bucket });
+            const content_type = 'test/test';
+            const size = num_parts * part_size;
+            const data = generator.update(Buffer.alloc(size));
+            const { obj_id } = await rpc_client.object.create_object_upload({ bucket, key, content_type });
+            const mp_list_before = await rpc_client.object.list_multiparts({ obj_id, bucket, key });
+            coretest.log('list_multiparts before', mp_list_before);
+            assert.strictEqual(mp_list_before.multiparts.length, 0);
+            const multiparts_ids = [];
+
+            const get_part_slice = i => data.slice(i * part_size, (i + 1) * part_size);
+            const upload_multipart = async (i, mp_data, split, finish) => {
+                const resp = await object_io.upload_multipart({
+                    client: rpc_client,
+                    obj_id,
+                    bucket,
+                    key,
+                    num: i + 1,
+                    size: mp_data.length,
+                    source_stream: readable_buffer(mp_data, split, finish),
+                });
+                console.log('upload_multipart', resp);
+                multiparts_ids.push(new mongodb.ObjectId(resp.multipart_id));
+            };
+            // upload the real multiparts we want to complete with
+            await Promise.all(_.times(num_parts,
+                i => upload_multipart(i, get_part_slice(i))
+            ));
+
+            // go back in time
+            const create_time = new Date();
+            create_time.setDate(create_time.getDate() - age);
+            const update = {
+                create_time,
+            };
+
+            console.log('create_mock_multipart_upload bucket', bucket, 'obj_id', obj_id, 'multiparts_ids', multiparts_ids);
+            await MDStore.instance().update_multiparts_by_ids(multiparts_ids, update);
+
+            const mp_list_after = await rpc_client.object.list_multiparts({ obj_id, bucket, key });
+            coretest.log('list_multiparts after', mp_list_after);
+            assert.strictEqual(mp_list_after.multiparts.length, num_parts);
+            const actual_create_time = mp_list_after.multiparts[0].last_modified;
+            assert.strictEqual(actual_create_time, create_time.getTime(), `object create_time/getTime actual ${actual_create_time} !== expected ${create_time.getTime()}`);
+        }
+
+        mocha.it('lifecycle - listMultiPart verify', async () => {
+            await create_mock_multipart_upload('test-lifecycle-multipart', multipart_bucket, 3, 45, 7);
+        });
+    });
+
+    mocha.describe('bucket-lifecycle-version', function() {
+        this.timeout(60000);
+        const version_bucket = 'test-version-bucket';
+        mocha.after(async function() {
+            //TODO Delete bucket
+            //await rpc_client.bucket.delete_bucket({ name: version_bucket });
+        });
+
+        async function create_mock_version(version_key, bucket, age, version_count) {
+            await rpc_client.bucket.create_bucket({ name: bucket });
+            await rpc_client.bucket.update_bucket({
+                name: bucket,
+                versioning: 'ENABLED'
+            });
+
+            const obj_upload_ids = [];
+            for (let i = 0; i < version_count; ++i) {
+                const content_type = 'application/octet_stream';
+                const { obj_id } = await rpc_client.object.create_object_upload({ bucket, key: version_key, content_type });
+                await rpc_client.object.complete_object_upload({ obj_id, bucket, key: version_key });
+                if (i < version_count - 2) {
+                    obj_upload_ids.push(new mongodb.ObjectId(obj_id));
+                }
+            }
+            // go back in time
+            if (age > 0) {
+                const create_time = new Date();
+                create_time.setDate(create_time.getDate() - age);
+                const update = {
+                    create_time,
+                };
+                console.log('blow_version_objects: bucket', bucket, 'obj_upload_ids', obj_upload_ids, 'obj_upload_ids length:', obj_upload_ids.length, 'update:', update);
+                const update_result = await MDStore.instance().update_objects_by_ids(obj_upload_ids, update);
+                console.log('blow_version_objects: update_objects_by_ids', update_result);
+            }
+
+            const obj_params = {
+                bucket,
+            };
+            const list_obj = await rpc_client.object.list_object_versions(obj_params);
+            console.log('List updated objects:', list_obj);
+            assert.strictEqual(list_obj.objects.length, version_count, `object total count ${list_obj.objects.length} !== expected ${version_count}`);
+        }
+
+        mocha.it('lifecycle - version expiration', async () => {
+            await create_mock_version('test-lifecycle-version', version_bucket, 30, 10);
+        });
+    });
+
+    function readable_buffer(data, split = 1, finish = 'end') {
+        const max = Math.ceil(data.length / split);
+        let pos = 0;
+        return new stream.Readable({
+            read() {
+                if (pos < data.length) {
+                    const len = Math.min(data.length - pos, max);
+                    const buf = data.slice(pos, pos + len);
+                    pos += len;
+                    setImmediate(() => this.push(buf));
+                } else if (finish === 'fail') {
+                    this.emit('error', new Error('TEST_OBJECT_IO FAIL ON FINISH'));
+                } else {
+                    this.push(null);
+                }
+            }
+        });
+    }
 });
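The readable_buffer helper chunks a buffer into roughly `split` pushes and can be told to error instead of ending. A standalone sketch of how a consumer sees it (the counting writable below is illustrative only):

    // Sketch: drain a 64-byte buffer split into 4 chunks and count the writes.
    const { Writable } = require('stream');
    let chunks = 0;
    readable_buffer(Buffer.alloc(64), 4)
        .pipe(new Writable({ write(buf, enc, cb) { chunks += 1; cb(); } }))
        .on('finish', () => console.log('received', chunks, 'chunks')); // expected: 4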
