
Commit 46554f9

Merge pull request #9042 from naveenpaul1/unit-test-aws-sdkv3
SDK | Upgrade AWS SDK to v3 - Unit tests
2 parents: 03764e8 + b288fe1 · commit 46554f9

15 files changed: +549 −595 lines

src/server/bg_services/replication_server.js

Lines changed: 3 additions & 3 deletions

@@ -52,7 +52,7 @@ async function delete_objects(req) {
             Delete: {
                 Objects: batch.map(key => ({ Key: key }))
             }
-        }).promise();
+        });

         res.Deleted?.forEach(obj => delete_done_list.push(obj.Key));
     } catch (err) {
@@ -83,7 +83,7 @@ async function copy_objects_mixed_types(req) {
                 Key: key
             };
             try {
-                await noobaa_con.copyObject(params).promise();
+                await noobaa_con.copyObject(params);
                 copy_res.num_of_objects += 1;
                 // The size of the object can be in either Size or ContentLength, depending on whether
                 // the request was ListObjectVersions or HeadObject
@@ -100,7 +100,7 @@ async function copy_objects_mixed_types(req) {
                 Key: key
             };
             try {
-                await noobaa_con.copyObject(params).promise();
+                await noobaa_con.copyObject(params);
                 copy_res.num_of_objects += 1;
                 copy_res.size_of_objects += keys_diff_map[key][i].Size;
             } catch (err) {
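
The pattern in this file is the core v2 to v3 change: the v3 S3 client's methods return promises directly, so every trailing .promise() call is dropped. A minimal sketch of that call style, assuming a configured client (bucket, key, and copy source below are placeholders, not values from the repo):

    const { S3 } = require('@aws-sdk/client-s3');

    const s3 = new S3({ region: 'us-east-1' }); // illustrative config

    async function copy_one(bucket, key, copy_source) {
        // v2: await s3.copyObject(params).promise();
        // v3: the method itself resolves with the command output.
        await s3.copyObject({ Bucket: bucket, Key: key, CopySource: copy_source });
    }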

src/server/utils/bucket_diff.js

Lines changed: 10 additions & 12 deletions

@@ -2,7 +2,7 @@
 'use strict';

 const _ = require('lodash');
-const AWS = require('aws-sdk');
+const { S3 } = require('@aws-sdk/client-s3');

 const SensitiveString = require('../../util/sensitive_string');
 const replication_utils = require('../utils/replication_utils');
@@ -15,10 +15,10 @@ class BucketDiff {
  * first_bucket: string;
  * second_bucket: string;
  * version: boolean;
- * s3_params?: AWS.S3.ClientConfiguration
- * connection?: AWS.S3
- * for_replication: boolean
- * for_deletion: boolean
+ * s3_params?: object;
+ * connection?: import('@aws-sdk/client-s3').S3;
+ * for_replication: boolean;
+ * for_deletion: boolean;
  * }} params
  */
 constructor(params) {
@@ -40,7 +40,7 @@ class BucketDiff {
             this.s3 = connection;
         } else {
             if (!s3_params) throw new Error('Expected s3_params');
-            this.s3 = new AWS.S3(s3_params);
+            this.s3 = new S3(s3_params);
         }
         // special treatment when we want the diff for replication purpose.
         this.for_replication = for_replication;
@@ -147,10 +147,10 @@ class BucketDiff {
             };
             if (this.version) {
                 params.KeyMarker = continuation_token;
-                return await this.s3.listObjectVersions(params).promise();
+                return await this.s3.listObjectVersions(params);
             } else {
                 if (continuation_token) params.ContinuationToken = continuation_token;
-                return await this.s3.listObjectsV2(params).promise();
+                return await this.s3.listObjectsV2(params);
             }
         } catch (err) {
             dbg.error('BucketDiff _list_objects: error:', err);
@@ -159,8 +159,7 @@ class BucketDiff {
     }

     /**
-     * @param {import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectVersionsOutput, AWS.AWSError> |
-     * import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectsV2Output, AWS.AWSError>} list
+     * @param { import("@aws-sdk/client-s3").ListObjectVersionsOutput | import("@aws-sdk/client-s3").ListObjectsV2Output} list
      *
      * _object_grouped_by_key_and_omitted will return the objects grouped by key.
     * When we have versioning enabled, if there is more than one key, it omits
@@ -203,8 +202,7 @@ class BucketDiff {
     /**
      * @param {_.Dictionary<any[]>} list
      *
-     * @param {import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectVersionsOutput, AWS.AWSError> |
-     * import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectsV2Output, AWS.AWSError>} list_objects_response
+     * @param { import("@aws-sdk/client-s3").ListObjectVersionsOutput | import("@aws-sdk/client-s3").ListObjectsV2Output } list_objects_response
      * if the list is truncated on a version list, returns the the next key marker as the last key in the omitted objects list
      * if it is a list without versions, return NextContinuationToken.
      */
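
The JSDoc changes reflect that v3 calls resolve to plain output shapes (ListObjectVersionsOutput, ListObjectsV2Output) rather than v2's PromiseResult wrapper. A rough sketch of the listing call under those assumptions (bucket name and page size are illustrative, not taken from BucketDiff):

    const { S3 } = require('@aws-sdk/client-s3');

    const s3 = new S3({ forcePathStyle: true }); // illustrative config

    // Resolves to a ListObjectsV2Output; NextContinuationToken on the result drives paging.
    async function list_page(bucket, continuation_token) {
        const params = { Bucket: bucket, MaxKeys: 1000 };
        if (continuation_token) params.ContinuationToken = continuation_token;
        return s3.listObjectsV2(params);
    }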

src/server/utils/replication_utils.js

Lines changed: 2 additions & 2 deletions

@@ -77,7 +77,7 @@ function update_replication_prom_report(bucket_name, replication_policy_id, repl
 /**
  * @param {any} bucket_name
  * @param {string} key
- * @param {AWS.S3} s3
+ * @param {import('@aws-sdk/client-s3').S3} s3
  * @param {string} version_id
  */
 async function get_object_md(bucket_name, key, s3, version_id) {
@@ -90,7 +90,7 @@ async function get_object_md(bucket_name, key, s3, version_id) {

     dbg.log1('get_object_md params:', params);
     try {
-        const head = await s3.headObject(params).promise();
+        const head = await s3.headObject(params);
         //for namespace s3 we are omitting the 'noobaa-namespace-s3-bucket' as it will be defer between buckets
         if (head?.Metadata) head.Metadata = _.omit(head.Metadata, 'noobaa-namespace-s3-bucket');
         dbg.log1('get_object_md: finished successfully', head);
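
With v3, the client type is referenced in JSDoc as import('@aws-sdk/client-s3').S3, and headObject resolves straight to the head output. A minimal sketch along those lines (bucket, key, and version id are placeholders):

    const { S3 } = require('@aws-sdk/client-s3');

    /**
     * @param {import('@aws-sdk/client-s3').S3} s3
     * @param {string} bucket
     * @param {string} key
     * @param {string} [version_id]
     */
    async function head_metadata(s3, bucket, key, version_id) {
        const params = { Bucket: bucket, Key: key };
        if (version_id) params.VersionId = version_id;
        const head = await s3.headObject(params); // no .promise() in v3
        return head.Metadata;
    }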

src/test/system_tests/test_bucket_access.js

Lines changed: 29 additions & 26 deletions

@@ -15,7 +15,7 @@ const rpc = api.new_rpc();
 const test_utils = require('./test_utils');

 const fs = require('fs');
-const AWS = require('aws-sdk');
+const { S3 } = require('@aws-sdk/client-s3');
 const crypto = require('crypto');
 const assert = require('assert');

@@ -150,12 +150,15 @@ async function setup() {
 function get_new_server(user) {
     const access_key = user.access_keys.access_key;
     const secret_key = user.access_keys.secret_key;
-    return new AWS.S3({
+    return new S3({
         endpoint: target_s3_endpoint,
-        s3ForcePathStyle: true,
-        accessKeyId: access_key.unwrap(),
-        secretAccessKey: secret_key.unwrap(),
-        maxRedirects: 10,
+        forcePathStyle: true,
+        credentials: {
+            accessKeyId: access_key.unwrap(),
+            secretAccessKey: secret_key.unwrap(),
+        },
+        // v3: Deprecated. SDK does not follow redirects to avoid unintentional cross-region requests.
+        //maxRedirects: 10,
     });
 }

@@ -194,8 +197,8 @@ async function test_bucket_write_allowed() {
         Key: file_name,
         Body: fs.createReadStream(file_name)
     };
-    await server.upload(params1).promise();
-    await server.upload(params2).promise();
+    await server.putObject({Bucket: params1.Bucket, Key: params1.Key, Body: params1.Body });
+    await server.putObject({Bucket: params2.Bucket, Key: params2.Key, Body: params2.Body });

     file_name = await ops.generate_random_file(1);
     // upload with full_access_user to both buckets:
@@ -205,7 +208,7 @@ async function test_bucket_write_allowed() {
         Key: file_name,
         Body: fs.createReadStream(file_name)
     };
-    await server.upload(params).promise();
+    await server.putObject({Bucket: params.Bucket, Key: params.Key, Body: params.Body});
     console.log('test_bucket_write_allowed PASSED');
 }

@@ -218,13 +221,13 @@ async function test_bucket_read_allowed() {
         Key: file_name,
         Body: fs.createReadStream(file_name)
     };
-    await server.upload(params1).promise();
+    await server.putObject({Bucket: params1.Bucket, Key: params1.Key, Body: params1.Body });
     const server2 = get_new_server(bucket1_user);
     const params2 = {
         Bucket: 'bucket1',
         Key: file_name
     };
-    await server2.getObject(params2).promise();
+    await server2.getObject(params2);
     console.log('test_bucket_read_allowed PASSED');
 }

@@ -238,13 +241,13 @@ async function test_bucket_list_allowed() {
         Key: file_name,
         Body: fs.createReadStream(file_name)
     };
-    await server.upload(params1).promise();
+    await server.putObject(params1);

     const server2 = get_new_server(bucket1_user);
     const params2 = {
         Bucket: 'bucket1'
     };
-    await server2.listObjects(params2).promise();
+    await server2.listObjects(params2);

 }

@@ -260,7 +263,7 @@ async function test_bucket_write_denied() {
         Body: fs.createReadStream(file_name)
     };
     try {
-        await server.upload(params1).promise();
+        await server.putObject(params1);

         throw new Error('expecting upload to fail with statusCode 403- AccessDenied');

@@ -279,14 +282,14 @@ async function test_bucket_read_denied() {
         Key: file_name,
         Body: fs.createReadStream(file_name)
     };
-    await server.upload(params1).promise();
+    await server.putObject(params1);
     const server2 = get_new_server(bucket1_user);
     const params2 = {
         Bucket: 'bucket2',
         Key: file_name
     };
     try {
-        await server2.getObject(params2).promise();
+        await server2.getObject(params2);
         throw new Error('expecting read to fail with statusCode 403- AccessDenied');
     } catch (err) {
         assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
@@ -304,14 +307,14 @@ async function test_bucket_list_denied() {
         Key: file_name,
         Body: fs.createReadStream(file_name)
     };
-    await server.upload(params1).promise();
+    await server.putObject(params1);

     const server2 = get_new_server(bucket1_user);
     const params2 = {
         Bucket: 'bucket2'
     };
     try {
-        await server2.listObjects(params2).promise();
+        await server2.listObjects(params2);
         throw new Error('expecting read to fail with statusCode 403- AccessDenied');
     } catch (err) {
         assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
@@ -326,7 +329,7 @@ async function test_create_bucket_add_creator_permissions() {
     const params = {
         Bucket: unique_bucket_name
     };
-    await server.createBucket(params).promise();
+    await server.createBucket(params);

     // Owners have full access to the bucket
     const bucket = await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
@@ -338,12 +341,12 @@ async function test_delete_bucket_deletes_permissions() {
     const server = get_new_server(full_access_user);
     const unique_bucket_name = 'bucket' + crypto.randomUUID();

-    await server.createBucket({ Bucket: unique_bucket_name }).promise();
+    await server.createBucket({ Bucket: unique_bucket_name });

     const bucket = await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
     assert(bucket.owner_account.email.unwrap() === full_access_user.email, 'expecting full_access_user to have permissions to access ' + unique_bucket_name);

-    await server.deleteBucket({ Bucket: unique_bucket_name }).promise();
+    await server.deleteBucket({ Bucket: unique_bucket_name });

     try {
         await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
@@ -356,7 +359,7 @@ async function test_delete_bucket_deletes_permissions() {
 async function test_no_s3_access() {
     console.log(`Starting test_no_s3_access`);
     const server = get_new_server(no_access_user);
-    const data = await server.listBuckets().promise();
+    const data = await server.listBuckets();
     assert(data.Buckets.length === 0, 'expecting an empty bucket list for no_access_user');
 }

@@ -378,21 +381,21 @@ async function test_ip_restrictions() {

     await client.account.update_account(single_ip_restriction);
     try {
-        await server.listBuckets().promise();
+        await server.listBuckets();
     } catch (err) {
         assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
     }
     await client.account.update_account(no_ip_restriction);
-    let data = await server.listBuckets().promise();
+    let data = await server.listBuckets();
     assert(data.Buckets.length !== 0, 'expecting none empty bucket list for none-restricted IP');
     await client.account.update_account(range_ip_restriction);
     try {
-        await server.listBuckets().promise();
+        await server.listBuckets();
     } catch (err) {
         assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
     }
     await client.account.update_account(no_ip_restriction);
-    data = await server.listBuckets().promise();
+    data = await server.listBuckets();
     assert(data.Buckets.length !== 0, 'expecting none empty bucket list for none-restricted IP');
 }
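
The get_new_server change captures the main constructor differences: credentials move under a credentials object, s3ForcePathStyle becomes forcePathStyle, and maxRedirects has no v3 counterpart. A minimal sketch of the same shape, assuming placeholder endpoint and keys rather than the test fixtures:

    const { S3 } = require('@aws-sdk/client-s3');

    // v2: new AWS.S3({ endpoint, s3ForcePathStyle: true, accessKeyId, secretAccessKey, maxRedirects: 10 })
    const s3 = new S3({
        endpoint: 'http://localhost:6001',   // placeholder endpoint
        forcePathStyle: true,                // was s3ForcePathStyle
        credentials: {
            accessKeyId: 'ACCESS_KEY',       // placeholder
            secretAccessKey: 'SECRET_KEY',   // placeholder
        },
        // maxRedirects is dropped; the v3 SDK does not follow redirects
    });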

src/test/system_tests/test_build_chunks.js

Lines changed: 6 additions & 12 deletions

@@ -10,7 +10,7 @@ dbg.set_process_name('test_build_chunks');

 const _ = require('lodash');
 const fs = require('fs');
-const AWS = require('aws-sdk');
+const { S3 } = require('@aws-sdk/client-s3');
 // const util = require('util');
 // const crypto = require('crypto');

@@ -112,23 +112,17 @@ async function setup_case(

 async function upload_random_file(size_mb, bucket_name, extension, content_type) {
     const filename = await ops.generate_random_file(size_mb, extension);
-    const s3bucket = new AWS.S3({
+    const s3bucket = new S3({
         endpoint: TEST_CTX.s3_endpoint,
         credentials: {
             accessKeyId: '123',
             secretAccessKey: 'abc'
         },
-        s3ForcePathStyle: true,
-        sslEnabled: false
+        forcePathStyle: true,
+        tls: false
     });
-
-    await P.ninvoke(s3bucket, 'upload', {
-        Bucket: bucket_name,
-        Key: filename,
-        Body: fs.createReadStream(filename),
-        ContentType: content_type
-    });
-
+    await s3bucket.putObject({Bucket: bucket_name, Key: filename,
+        Body: fs.createReadStream(filename), ContentType: content_type});
     return filename;
 }
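
Alongside the upload-to-putObject switch, two client options are renamed here: s3ForcePathStyle becomes forcePathStyle and sslEnabled becomes tls. A rough sketch of an equivalent v3 upload, using an in-memory body and placeholder endpoint/credentials rather than the test fixtures:

    const { S3 } = require('@aws-sdk/client-s3');

    const s3bucket = new S3({
        endpoint: 'http://127.0.0.1:6001',                            // placeholder endpoint
        credentials: { accessKeyId: '123', secretAccessKey: 'abc' },  // placeholder keys
        forcePathStyle: true,                                         // was s3ForcePathStyle in v2
        tls: false,                                                   // was sslEnabled in v2
    });

    async function upload_text(bucket, key, text, content_type) {
        // putObject replaces the v2 upload() helper for this simple, single-part case.
        await s3bucket.putObject({ Bucket: bucket, Key: key, Body: Buffer.from(text), ContentType: content_type });
    }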

src/test/system_tests/test_cloud_pools.js

Lines changed: 11 additions & 9 deletions

@@ -5,23 +5,24 @@ const api = require('../../api');
 const rpc = api.new_rpc();
 const util = require('util');
 const _ = require('lodash');
-const AWS = require('aws-sdk');
+const { S3 } = require('@aws-sdk/client-s3');
 const argv = require('minimist')(process.argv);
 const P = require('../../util/promise');
 const basic_server_ops = require('../utils/basic_server_ops');
 const dotenv = require('../../util/dotenv');
 dotenv.load();
 const test_utils = require('./test_utils');

-const s3 = new AWS.S3({
+const s3 = new S3({
     // endpoint: 'https://s3.amazonaws.com',
     credentials: {
         accessKeyId: process.env.AWS_ACCESS_KEY_ID,
         secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
     },
-    s3ForcePathStyle: true,
-    sslEnabled: false,
-    signatureVersion: 'v4',
+    forcePathStyle: true,
+    tls: false,
+    // signatureVersion is Deprecated in SDK v3
+    //signatureVersion: 'v4',
     // region: 'eu-central-1'
 });

@@ -295,15 +296,16 @@ function run_test() {
             .then(() => block_ids);
         })
         .then(function(block_ids) {
-            return P.ninvoke(new AWS.S3({
+            return P.ninvoke(new S3({
                 endpoint: 'http://' + TEST_CTX.source_ip,
                 credentials: {
                     accessKeyId: argv.access_key || '123',
                     secretAccessKey: argv.secret_key || 'abc'
                 },
-                s3ForcePathStyle: true,
-                sslEnabled: false,
-                signatureVersion: 'v4',
+                forcePathStyle: true,
+                tls: false,
+                // signatureVersion is Deprecated in SDK v3
+                //signatureVersion: 'v4',
                 // region: 'eu-central-1'
             }), 'deleteObject', {
                 Bucket: TEST_CTX.source_bucket,
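
The P.ninvoke wrapper keeps working here because the aggregated v3 S3 client methods also accept a Node-style callback as the last argument, while awaiting the method directly is the plainer v3 form. A small sketch of both call shapes (bucket and key are placeholders, and the client config is illustrative):

    const { S3 } = require('@aws-sdk/client-s3');

    const s3 = new S3({ forcePathStyle: true, tls: false }); // illustrative config

    // Callback form, which is what a ninvoke-style wrapper relies on:
    s3.deleteObject({ Bucket: 'some-bucket', Key: 'some-key' }, err => {
        if (err) console.error('deleteObject failed:', err);
    });

    // Direct promise form:
    async function delete_one(bucket, key) {
        await s3.deleteObject({ Bucket: bucket, Key: key });
    }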
