Skip to content

Commit 966bd12

Browse files
authored
Clean up imports and close file (#5711)
1 parent 8c76491 commit 966bd12

File tree

9 files changed

+24
-24
lines changed

9 files changed

+24
-24
lines changed

awsbatch-cli/src/awsbatch/awsbstat.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
1111
# See the License for the specific language governing permissions and limitations under the License.
1212

13-
import collections
1413
import re
1514
import sys
1615
from builtins import range
@@ -257,7 +256,7 @@ def __init__(self, log, boto3_factory):
257256
:param boto3_factory: an initialized Boto3ClientFactory object
258257
"""
259258
self.log = log
260-
mapping = collections.OrderedDict(
259+
mapping = OrderedDict(
261260
[
262261
("jobId", "id"),
263262
("jobName", "name"),

cli/src/pcluster/api/awslambda/entrypoint.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
99
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
1010
# limitations under the License.
11-
import os
1211
from os import environ
1312
from typing import Any, Dict
1413

@@ -53,7 +52,7 @@ def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, A
5352
xray_recorder.configure(service="ParallelCluster Flask App")
5453
XRayMiddleware(pcluster_api.flask_app, xray_recorder)
5554
# Setting default region to region where lambda function is executed
56-
os.environ["AWS_DEFAULT_REGION"] = os.environ["AWS_REGION"]
55+
environ["AWS_DEFAULT_REGION"] = environ["AWS_REGION"]
5756
return handle_request(pcluster_api.app, event, context)
5857
except Exception as e:
5958
logger.critical("Unexpected exception: %s", e, exc_info=True)

cli/src/pcluster/cli/commands/cluster_logs.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,7 @@
1111
import re
1212
from typing import List
1313

14-
import argparse
15-
from argparse import ArgumentParser, Namespace
14+
from argparse import ArgumentParser, ArgumentTypeError, Namespace
1615

1716
from pcluster import utils
1817
from pcluster.cli.commands.common import CliCommand, ExportLogsCommand
@@ -98,5 +97,5 @@ def __init__(self, accepted_filters: list):
9897

9998
def __call__(self, value):
10099
if not self._pattern.match(value):
101-
raise argparse.ArgumentTypeError(f"filters parameter must be in the form {self._pattern.pattern} ")
100+
raise ArgumentTypeError(f"filters parameter must be in the form {self._pattern.pattern} ")
102101
return value

cli/src/pcluster/cli/commands/common.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,7 @@
1111
from abc import ABC, abstractmethod
1212
from functools import partial
1313

14-
import argparse
15-
from argparse import ArgumentParser
14+
from argparse import ArgumentParser, ArgumentTypeError
1615

1716
from pcluster import utils
1817
from pcluster.cli.exceptions import ParameterException
@@ -64,7 +63,7 @@ def __init__(self, subparsers, region_arg: bool = True, expects_extra_args: bool
6463
parser.set_defaults(func=self.execute, expects_extra_args=expects_extra_args)
6564

6665
@abstractmethod
67-
def register_command_args(self, parser: argparse.ArgumentParser) -> None:
66+
def register_command_args(self, parser: ArgumentParser) -> None:
6867
"""Register CLI arguments."""
6968
pass
7069

@@ -87,7 +86,7 @@ def __call__(self, value):
8786
try:
8887
return to_utc_datetime(value)
8988
except Exception as e:
90-
raise argparse.ArgumentTypeError(
89+
raise ArgumentTypeError(
9190
"Start time and end time filters must be in the ISO 8601 UTC format: YYYY-MM-DDThh:mm:ssZ "
9291
f"(e.g. 1984-09-15T19:20:30Z or 1984-09-15). {e}"
9392
)

cli/src/pcluster/cli/commands/configure/command.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,7 @@
1212

1313
from typing import List
1414

15-
import argparse
16-
from argparse import Namespace
15+
from argparse import ArgumentParser, Namespace
1716

1817
from pcluster.cli.commands.common import CliCommand
1918

@@ -29,7 +28,7 @@ class ConfigureCommand(CliCommand):
2928
def __init__(self, subparsers):
3029
super().__init__(subparsers, name=self.name, help=self.help, description=self.description)
3130

32-
def register_command_args(self, parser: argparse.ArgumentParser) -> None: # noqa: D102
31+
def register_command_args(self, parser: ArgumentParser) -> None: # noqa: D102
3332
parser.add_argument("-c", "--config", help="Path to output the generated config file.", required=True)
3433

3534
def execute(self, args: Namespace, extra_args: List[str]) -> None: # noqa: D102 #pylint: disable=unused-argument

cli/src/pcluster/templates/cdk_builder_utils.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,7 @@
1111

1212
# pylint: disable=too-many-lines
1313
import abc
14-
import hashlib
15-
from hashlib import sha1
14+
from hashlib import sha1, sha256
1615
from typing import List, Union
1716

1817
import pkg_resources
@@ -289,7 +288,7 @@ def add_cluster_iam_resource_prefix(stack_name, config, name: str, iam_type: str
289288
if iam_name_prefix:
290289
# Creating a Globally Unique Hash using Region, Type, Name and stack name
291290
resource_hash = (
292-
hashlib.sha256((name + stack_name + iam_type + config.region).encode("utf-8")).hexdigest()[:12].upper()
291+
sha256((name + stack_name + iam_type + config.region).encode("utf-8")).hexdigest()[:12].upper()
293292
)
294293
full_resource_name = iam_name_prefix + name + "-" + resource_hash
295294
if iam_path:
@@ -347,7 +346,7 @@ def generate_launch_template_version_cfn_parameter_hash(queue, compute_resource)
347346
# A nosec comment is appended to the following line in order to disable the B324 check.
348347
# The sha1 is used just as a hashing function.
349348
# [B324:hashlib] Use of weak MD4, MD5, or SHA1 hash for security. Consider usedforsecurity=False
350-
return hashlib.sha1((queue + compute_resource).encode()).hexdigest()[0:16].capitalize() # nosec nosemgrep
349+
return sha1((queue + compute_resource).encode()).hexdigest()[0:16].capitalize() # nosec nosemgrep
351350

352351

353352
class NodeIamResourcesBase(Construct):

tests/integration-tests/tests/networking/test_cluster_networking.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,15 +13,20 @@
1313

1414
import boto3
1515
import pytest
16-
import utils
1716
from assertpy import assert_that
1817
from cfn_stacks_factory import CfnStack
1918
from constants import OSU_BENCHMARK_VERSION
2019
from fabric import Connection
2120
from remote_command_executor import RemoteCommandExecutor
2221
from troposphere import Template
2322
from troposphere.ec2 import EIP
24-
from utils import generate_stack_name, get_compute_nodes_instance_ids, get_username_for_os, render_jinja_template
23+
from utils import (
24+
check_pcluster_list_cluster_log_streams,
25+
generate_stack_name,
26+
get_compute_nodes_instance_ids,
27+
get_username_for_os,
28+
render_jinja_template,
29+
)
2530

2631
from tests.common.assertions import (
2732
assert_lambda_vpc_settings_are_correct,
@@ -161,7 +166,7 @@ def test_cluster_in_no_internet_subnet(
161166
_check_hostname(remote_command_executor)
162167
_run_prolog_epilog_jobs(remote_command_executor, slurm_commands)
163168
_run_mpi_jobs(mpi_variants, remote_command_executor, test_datadir, slurm_commands, cluster, region)
164-
utils.check_pcluster_list_cluster_log_streams(cluster, os)
169+
check_pcluster_list_cluster_log_streams(cluster, os)
165170
assert_no_errors_in_logs(remote_command_executor, scheduler)
166171
logging.info("Checking compute node is scaled down after scaledown idle time")
167172
wait_for_num_instances_in_cluster(cluster.cfn_name, region, 1)

util/sync_buckets.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ def _aws_credentials_type(value):
8787
"algorithm is expected to be found in the source bucket. This file is used to perform checksum validation "
8888
"and is also uploaded to the destination bucket",
8989
choices=list(HashingAlgorithm),
90-
type=lambda value: HashingAlgorithm(value),
90+
type=HashingAlgorithm,
9191
required=False,
9292
)
9393
parser.add_argument(

util/upload-cfn-templates.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ def upload_to_s3(args, region, aws_credentials=None):
8989
key = "{key_path}{name}-{version}{extension}".format(
9090
key_path=key_path, name=t, version=args.version, extension=template_ext
9191
)
92-
data = open(template_name, "rb")
92+
9393
for bucket in buckets:
9494
try:
9595
if aws_credentials:
@@ -110,7 +110,8 @@ def upload_to_s3(args, region, aws_credentials=None):
110110
exist = False
111111

112112
if (exist and args.override and not args.dryrun) or (not exist and not args.dryrun):
113-
put_object_to_s3(s3_client, bucket, key, region, data, template_name)
113+
with open(template_name, "rb") as data:
114+
put_object_to_s3(s3_client, bucket, key, region, data, template_name)
114115
else:
115116
print(
116117
"Not uploading %s to bucket %s, object exists %s, override is %s, dryrun is %s"

0 commit comments

Comments (0)