SDK | Upgrade AWS SDK to v3 - Unit tests #9042

Open · wants to merge 1 commit into base: master
6 changes: 3 additions & 3 deletions src/server/bg_services/replication_server.js
@@ -52,7 +52,7 @@ async function delete_objects(req) {
Delete: {
Objects: batch.map(key => ({ Key: key }))
}
}).promise();
});

res.Deleted?.forEach(obj => delete_done_list.push(obj.Key));
} catch (err) {
@@ -83,7 +83,7 @@ async function copy_objects_mixed_types(req) {
Key: key
};
try {
await noobaa_con.copyObject(params).promise();
await noobaa_con.copyObject(params);
copy_res.num_of_objects += 1;
// The size of the object can be in either Size or ContentLength, depending on whether
// the request was ListObjectVersions or HeadObject
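As the comment above notes, the object size comes from a different field depending on which call produced the entry; a minimal sketch of that fallback, using a hypothetical helper name:

// Hypothetical helper: Size is set on ListObjectVersions entries,
// ContentLength on HeadObject responses.
function get_entry_size(entry) {
    return entry.Size ?? entry.ContentLength ?? 0;
}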
@@ -100,7 +100,7 @@ async function copy_objects_mixed_types(req) {
Key: key
};
try {
await noobaa_con.copyObject(params).promise();
await noobaa_con.copyObject(params);
copy_res.num_of_objects += 1;
copy_res.size_of_objects += keys_diff_map[key][i].Size;
} catch (err) {
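The pattern in this file is the same throughout: the v3 aggregated client methods already return promises, so the trailing .promise() is dropped and the request parameters stay as they were. A minimal sketch of the delete path under that assumption (client config, bucket, and keys are placeholders):

const { S3 } = require('@aws-sdk/client-s3');

async function delete_batch_sketch(bucket, batch) {
    const s3 = new S3({ region: 'us-east-1' }); // placeholder config
    // v2: await s3.deleteObjects(params).promise();
    // v3: the call itself resolves to the response.
    const res = await s3.deleteObjects({
        Bucket: bucket,
        Delete: { Objects: batch.map(key => ({ Key: key })) }
    });
    return res.Deleted?.map(obj => obj.Key) || [];
}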
23 changes: 11 additions & 12 deletions src/server/utils/bucket_diff.js
@@ -2,7 +2,7 @@
'use strict';

const _ = require('lodash');
const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');

const SensitiveString = require('../../util/sensitive_string');
const replication_utils = require('../utils/replication_utils');
@@ -15,10 +15,10 @@ class BucketDiff {
* first_bucket: string;
* second_bucket: string;
* version: boolean;
* s3_params?: AWS.S3.ClientConfiguration
* connection?: AWS.S3
* for_replication: boolean
* for_deletion: boolean
* s3_params?: object;
* connection?: import('@aws-sdk/client-s3').S3;
* for_replication: boolean;
* for_deletion: boolean;
* }} params
*/
constructor(params) {
@@ -40,7 +40,7 @@ class BucketDiff {
this.s3 = connection;
} else {
if (!s3_params) throw new Error('Expected s3_params');
this.s3 = new AWS.S3(s3_params);
this.s3 = new S3(s3_params);
}
// special treatment when we want the diff for replication purpose.
this.for_replication = for_replication;
@@ -147,10 +147,11 @@ class BucketDiff {
};
if (this.version) {
params.KeyMarker = continuation_token;
return await this.s3.listObjectVersions(params).promise();
return await this.s3.listObjectVersions(params);
} else {
if (continuation_token) params.ContinuationToken = continuation_token;
return await this.s3.listObjectsV2(params).promise();
return await this.s3.listObjectsV2(params);
}
} catch (err) {
dbg.error('BucketDiff _list_objects: error:', err);
@@ -159,8 +160,7 @@
}

/**
* @param {import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectVersionsOutput, AWS.AWSError> |
* import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectsV2Output, AWS.AWSError>} list
* @param { import("@aws-sdk/client-s3").ListObjectVersionsCommandOutput | import("@aws-sdk/client-s3").ListObjectsV2CommandOutput} list
*
* _object_grouped_by_key_and_omitted will return the objects grouped by key.
* When we have versioning enabled, if there is more than one key, it omits
@@ -203,8 +203,7 @@ class BucketDiff {
/**
* @param {_.Dictionary<any[]>} list
*
* @param {import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectVersionsOutput, AWS.AWSError> |
* import("aws-sdk/lib/request").PromiseResult<AWS.S3.ListObjectsV2Output, AWS.AWSError>} list_objects_response
* @param { import("@aws-sdk/client-s3").ListObjectVersionsOutput | import("@aws-sdk/client-s3").ListObjectsV2Output } list_objects_response
* if the list is truncated on a versioned list, returns the next key marker as the last key in the omitted objects list;
* if it is a list without versions, returns NextContinuationToken.
*/
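A simplified sketch of the continuation logic the JSDoc above describes, i.e. which token resumes the next page for each list flavor; this is not the actual helper, which derives the marker from the omitted objects list:

// Sketch only: pick the value that resumes listing on the next call.
function next_token_sketch(list_objects_response, version) {
    if (!list_objects_response.IsTruncated) return undefined;
    return version ?
        list_objects_response.NextKeyMarker : // ListObjectVersions
        list_objects_response.NextContinuationToken; // ListObjectsV2
}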
4 changes: 2 additions & 2 deletions src/server/utils/replication_utils.js
@@ -77,7 +77,7 @@ function update_replication_prom_report(bucket_name, replication_policy_id, repl
/**
* @param {any} bucket_name
* @param {string} key
* @param {AWS.S3} s3
* @param {import('@aws-sdk/client-s3').S3} s3
* @param {string} version_id
*/
async function get_object_md(bucket_name, key, s3, version_id) {
@@ -90,7 +90,7 @@ async function get_object_md(bucket_name, key, s3, version_id) {

dbg.log1('get_object_md params:', params);
try {
const head = await s3.headObject(params).promise();
const head = await s3.headObject(params);
// for namespace s3 we omit the 'noobaa-namespace-s3-bucket' metadata key, as it will differ between buckets
if (head?.Metadata) head.Metadata = _.omit(head.Metadata, 'noobaa-namespace-s3-bucket');
dbg.log1('get_object_md: finished successfully', head);
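One behavioral difference for callers of get_object_md: v2 errors exposed err.code and err.statusCode, while v3 service exceptions expose err.name and err.$metadata.httpStatusCode. A hedged sketch of a not-found check under v3 (the helper name is made up):

async function object_exists_sketch(s3, bucket, key) {
    try {
        await s3.headObject({ Bucket: bucket, Key: key });
        return true;
    } catch (err) {
        // v3: the HTTP status lives on the exception metadata.
        if (err.$metadata?.httpStatusCode === 404) return false;
        throw err;
    }
}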
55 changes: 29 additions & 26 deletions src/test/system_tests/test_bucket_access.js
@@ -15,7 +15,7 @@ const rpc = api.new_rpc();
const test_utils = require('./test_utils');

const fs = require('fs');
const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');
const crypto = require('crypto');
const assert = require('assert');

@@ -150,12 +150,15 @@ async function setup() {
function get_new_server(user) {
const access_key = user.access_keys.access_key;
const secret_key = user.access_keys.secret_key;
return new AWS.S3({
return new S3({
endpoint: target_s3_endpoint,
s3ForcePathStyle: true,
accessKeyId: access_key.unwrap(),
secretAccessKey: secret_key.unwrap(),
maxRedirects: 10,
forcePathStyle: true,
credentials: {
accessKeyId: access_key.unwrap(),
secretAccessKey: secret_key.unwrap(),
},
// maxRedirects is deprecated in v3; the SDK does not follow redirects, to avoid unintentional cross-region requests.
//maxRedirects: 10,
});
}
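One detail the hunk above does not show: a v3 client still resolves a region from its configuration or the environment even when a custom endpoint is set, so tests run outside an AWS-configured shell may need to pin one. A hedged variant of the constructor above, where the region value is an arbitrary placeholder:

function get_new_server_with_region(user) {
    return new S3({
        endpoint: target_s3_endpoint,
        region: 'us-east-1', // assumption: any static region satisfies the v3 config resolver
        forcePathStyle: true,
        credentials: {
            accessKeyId: user.access_keys.access_key.unwrap(),
            secretAccessKey: user.access_keys.secret_key.unwrap(),
        },
    });
}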

@@ -194,8 +197,8 @@ async function test_bucket_write_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.upload(params2).promise();
await server.putObject(params1);
await server.putObject(params2);

file_name = await ops.generate_random_file(1);
// upload with full_access_user to both buckets:
@@ -205,7 +208,7 @@ async function test_bucket_write_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params).promise();
await server.putObject(params);
console.log('test_bucket_write_allowed PASSED');
}

@@ -218,13 +221,13 @@ async function test_bucket_read_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);
const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket1',
Key: file_name
};
await server2.getObject(params2).promise();
await server2.getObject(params2);
console.log('test_bucket_read_allowed PASSED');
}

@@ -238,13 +241,13 @@ async function test_bucket_list_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);

const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket1'
};
await server2.listObjects(params2).promise();
await server2.listObjects(params2);

}

@@ -260,7 +263,7 @@ async function test_bucket_write_denied() {
Body: fs.createReadStream(file_name)
};
try {
await server.upload(params1).promise();
await server.putObject(params1);

throw new Error('expecting upload to fail with status 403 - AccessDenied');

@@ -279,14 +282,14 @@ async function test_bucket_read_denied() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);
const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket2',
Key: file_name
};
try {
await server2.getObject(params2).promise();
await server2.getObject(params2);
throw new Error('expecting read to fail with status 403 - AccessDenied');
} catch (err) {
assert(err.$metadata?.httpStatusCode === 403, 'expecting read to fail with status 403 - AccessDenied');
@@ -304,14 +307,14 @@ async function test_bucket_list_denied() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);

const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket2'
};
try {
await server2.listObjects(params2).promise();
await server2.listObjects(params2);
throw new Error('expecting read to fail with status 403 - AccessDenied');
} catch (err) {
assert(err.$metadata?.httpStatusCode === 403, 'expecting read to fail with status 403 - AccessDenied');
@@ -326,7 +329,7 @@ async function test_create_bucket_add_creator_permissions() {
const params = {
Bucket: unique_bucket_name
};
await server.createBucket(params).promise();
await server.createBucket(params);

// Owners have full access to the bucket
const bucket = await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
@@ -338,12 +341,12 @@ async function test_delete_bucket_deletes_permissions() {
const server = get_new_server(full_access_user);
const unique_bucket_name = 'bucket' + crypto.randomUUID();

await server.createBucket({ Bucket: unique_bucket_name }).promise();
await server.createBucket({ Bucket: unique_bucket_name });

const bucket = await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
assert(bucket.owner_account.email.unwrap() === full_access_user.email, 'expecting full_access_user to have permissions to access ' + unique_bucket_name);

await server.deleteBucket({ Bucket: unique_bucket_name }).promise();
await server.deleteBucket({ Bucket: unique_bucket_name });

try {
await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
@@ -356,7 +359,7 @@ async function test_delete_bucket_deletes_permissions() {
async function test_no_s3_access() {
console.log(`Starting test_no_s3_access`);
const server = get_new_server(no_access_user);
const data = await server.listBuckets().promise();
const data = await server.listBuckets();
assert((data.Buckets || []).length === 0, 'expecting an empty bucket list for no_access_user');
}

@@ -378,21 +381,21 @@ async function test_ip_restrictions() {

await client.account.update_account(single_ip_restriction);
try {
await server.listBuckets().promise();
await server.listBuckets();
} catch (err) {
assert(err.$metadata?.httpStatusCode === 403, 'expecting listBuckets to fail with status 403 - AccessDenied');
}
await client.account.update_account(no_ip_restriction);
let data = await server.listBuckets().promise();
let data = await server.listBuckets();
assert(data.Buckets.length !== 0, 'expecting a non-empty bucket list for a non-restricted IP');
await client.account.update_account(range_ip_restriction);
try {
await server.listBuckets().promise();
await server.listBuckets();
} catch (err) {
assert(err.$metadata?.httpStatusCode === 403, 'expecting listBuckets to fail with status 403 - AccessDenied');
}
await client.account.update_account(no_ip_restriction);
data = await server.listBuckets().promise();
data = await server.listBuckets();
assert(data.Buckets.length !== 0, 'expecting a non-empty bucket list for a non-restricted IP');
}

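Since several assertions in this file key off the HTTP status of a rejected call, it may help to centralize the v3 error shape once; a hedged sketch of a shared helper (the name is made up), which accepts either the error name or the status:

const assert = require('assert');

// Hypothetical helper: v3 service exceptions carry the error code in err.name
// and the HTTP status in err.$metadata.httpStatusCode.
async function assert_access_denied(promise) {
    await assert.rejects(promise, err =>
        err.name === 'AccessDenied' || err.$metadata?.httpStatusCode === 403);
}

Usage would then read, for example, await assert_access_denied(server2.getObject(params2));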
18 changes: 6 additions & 12 deletions src/test/system_tests/test_build_chunks.js
@@ -10,7 +10,7 @@ dbg.set_process_name('test_build_chunks');

const _ = require('lodash');
const fs = require('fs');
const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');
// const util = require('util');
// const crypto = require('crypto');

@@ -112,23 +112,17 @@ async function setup_case(

async function upload_random_file(size_mb, bucket_name, extension, content_type) {
const filename = await ops.generate_random_file(size_mb, extension);
const s3bucket = new AWS.S3({
const s3bucket = new S3({
endpoint: TEST_CTX.s3_endpoint,
credentials: {
accessKeyId: '123',
secretAccessKey: 'abc'
},
s3ForcePathStyle: true,
sslEnabled: false
forcePathStyle: true,
tls: false
});

await P.ninvoke(s3bucket, 'upload', {
Bucket: bucket_name,
Key: filename,
Body: fs.createReadStream(filename),
ContentType: content_type
});

await s3bucket.putObject({
    Bucket: bucket_name,
    Key: filename,
    Body: fs.createReadStream(filename),
    ContentType: content_type
});
return filename;
}

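Worth flagging for the hunk above: v2's upload() performed a managed multipart upload and accepted streams of unknown length, whereas v3's putObject generally needs the content length to be known when the body is a stream. If that ever bites these tests, the closest v3 equivalent is Upload from @aws-sdk/lib-storage; a hedged sketch, assuming that package is available:

const fs = require('fs');
const { Upload } = require('@aws-sdk/lib-storage');

async function upload_stream_sketch(s3bucket, bucket_name, filename, content_type) {
    // Managed multipart upload, the v3 counterpart of v2's s3.upload().
    const upload = new Upload({
        client: s3bucket,
        params: {
            Bucket: bucket_name,
            Key: filename,
            Body: fs.createReadStream(filename),
            ContentType: content_type,
        },
    });
    await upload.done();
    return filename;
}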
20 changes: 11 additions & 9 deletions src/test/system_tests/test_cloud_pools.js
@@ -5,23 +5,24 @@ const api = require('../../api');
const rpc = api.new_rpc();
const util = require('util');
const _ = require('lodash');
const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');
const argv = require('minimist')(process.argv);
const P = require('../../util/promise');
const basic_server_ops = require('../utils/basic_server_ops');
const dotenv = require('../../util/dotenv');
dotenv.load();
const test_utils = require('./test_utils');

const s3 = new AWS.S3({
const s3 = new S3({
// endpoint: 'https://s3.amazonaws.com',
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
},
s3ForcePathStyle: true,
sslEnabled: false,
signatureVersion: 'v4',
forcePathStyle: true,
tls: false,
// signatureVersion is deprecated in SDK v3 (SigV4 is the default)
//signatureVersion: 'v4',
// region: 'eu-central-1'
});

@@ -295,15 +296,16 @@ function run_test() {
.then(() => block_ids);
})
.then(function(block_ids) {
return P.ninvoke(new AWS.S3({
return P.ninvoke(new S3({
endpoint: 'http://' + TEST_CTX.source_ip,
credentials: {
accessKeyId: argv.access_key || '123',
secretAccessKey: argv.secret_key || 'abc'
},
s3ForcePathStyle: true,
sslEnabled: false,
signatureVersion: 'v4',
forcePathStyle: true,
tls: false,
// signatureVersion is deprecated in SDK v3 (SigV4 is the default)
//signatureVersion: 'v4',
// region: 'eu-central-1'
}), 'deleteObject', {
Bucket: TEST_CTX.source_bucket,
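The hunk above is truncated, but the visible change keeps the P.ninvoke callback wrapper around the new client. Since v3 methods already return promises, the wrapper can likely be dropped; a hedged sketch of a direct call built from the surrounding config (the region is a placeholder):

const { S3 } = require('@aws-sdk/client-s3');

async function delete_source_object_sketch(source_ip, access_key, secret_key, bucket, key) {
    const s3 = new S3({
        endpoint: 'http://' + source_ip,
        region: 'us-east-1', // assumption: placeholder for the v3 config resolver
        credentials: {
            accessKeyId: access_key || '123',
            secretAccessKey: secret_key || 'abc',
        },
        forcePathStyle: true,
        tls: false,
    });
    // v3: no callback and no .promise() needed.
    return s3.deleteObject({ Bucket: bucket, Key: key });
}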