
Commit b06bb87

Enhance code format validation (#1717)
Signed-off-by: tdruez <tdruez@nexb.com>
1 parent aa2c786 commit b06bb87

11 files changed: +60, -62 lines changed

pyproject.toml

Lines changed: 3 additions & 1 deletion
@@ -164,9 +164,12 @@ select = [
     "D",  # pydocstyle
     "F",  # Pyflakes
     "UP",  # pyupgrade
+    "DJ",  # flake8-django
     "S",  # flake8-bandit
     "I",  # isort
     "C9",  # McCabe complexity
+    "FIX",  # flake8-fix
+    "FURB",  # refurb
 ]
 ignore = ["D1", "D203", "D205", "D212", "D400", "D415"]
@@ -188,6 +191,5 @@ max-complexity = 10
 [tool.ruff.lint.per-file-ignores]
 # Allow the usage of assert in the test_spdx file.
 "**/test_spdx.py*" = ["S101"]
-"scanpipe/pipes/spdx.py" = ["UP006", "UP035"]
 # Allow complexity in management commands
 "scanpipe/management/commands/*" = ["C901"]

scanpipe/models.py

Lines changed: 29 additions & 30 deletions
@@ -431,6 +431,9 @@ class ExtraDataFieldMixin(models.Model):
         help_text=_("Optional mapping of extra data key/values."),
     )

+    class Meta:
+        abstract = True
+
     def update_extra_data(self, data):
         """Update the `extra_data` field with the provided `data` dict."""
         if not isinstance(data, dict):
@@ -439,9 +442,6 @@ def update_extra_data(self, data):
         self.extra_data.update(data)
         self.save(update_fields=["extra_data"])

-    class Meta:
-        abstract = True
-

 class UpdateMixin:
     """
@@ -635,6 +635,10 @@ def save(self, *args, **kwargs):
         if global_webhook and is_new and not is_clone and not skip_global_webhook:
             self.setup_global_webhook()

+    def get_absolute_url(self):
+        """Return this project's details URL."""
+        return reverse("project_detail", args=[self.slug])
+
     def setup_global_webhook(self):
         """
         Create a global webhook subscription instance from values defined in the
@@ -1180,8 +1184,7 @@ def write_input_file(self, file_object):
         file_path = Path(self.input_path / filename)

         with open(file_path, "wb+") as f:
-            for chunk in file_object.chunks():
-                f.write(chunk)
+            f.writelines(file_object.chunks())

     def copy_input_from(self, input_location):
         """
@@ -1428,10 +1431,6 @@ def add_error(
             object_instance,
         )

-    def get_absolute_url(self):
-        """Return this project's details URL."""
-        return reverse("project_detail", args=[self.slug])
-
     @cached_property
     def resource_count(self):
         """Return the number of resources related to this project."""
@@ -2533,21 +2532,6 @@ class Compliance(models.TextChoices):
     class Meta:
         abstract = True

-    @classmethod
-    def from_db(cls, db, field_names, values):
-        """
-        Store the ``license_expression_field`` on loading this instance from the
-        database value.
-        The cached value is then used to detect changes on `save()`.
-        """
-        new = super().from_db(db, field_names, values)
-
-        if cls.license_expression_field in field_names:
-            field_index = field_names.index(cls.license_expression_field)
-            new._loaded_license_expression = values[field_index]
-
-        return new
-
     def save(self, codebase=None, *args, **kwargs):
         """
         Injects policies, if the feature is enabled, when the
@@ -2566,6 +2550,21 @@ def save(self, codebase=None, *args, **kwargs):

         super().save(*args, **kwargs)

+    @classmethod
+    def from_db(cls, db, field_names, values):
+        """
+        Store the ``license_expression_field`` on loading this instance from the
+        database value.
+        The cached value is then used to detect changes on `save()`.
+        """
+        new = super().from_db(db, field_names, values)
+
+        if cls.license_expression_field in field_names:
+            field_index = field_names.index(cls.license_expression_field)
+            new._loaded_license_expression = values[field_index]
+
+        return new
+
     @property
     def policy_index(self):
         return self.project.policy_index
@@ -2790,6 +2789,9 @@ class Meta:
     def __str__(self):
         return self.path

+    def get_absolute_url(self):
+        return reverse("resource_detail", args=[self.project.slug, self.path])
+
     @property
     def location_path(self):
         """Return the location of the resource as a Path instance."""
@@ -2949,9 +2951,6 @@ def extracted_from(self, codebase=None):
         archive_path, _, _ = self.path.rpartition("-extract")
         return self.project.get_resource(archive_path)

-    def get_absolute_url(self):
-        return reverse("resource_detail", args=[self.project.slug, self.path])
-
     def get_raw_url(self):
         """Return the URL to access the RAW content of the resource."""
         return reverse("resource_raw", args=[self.project.slug, self.path])
@@ -3143,14 +3142,14 @@ class VulnerabilityMixin(models.Model):

     affected_by_vulnerabilities = models.JSONField(blank=True, default=list)

+    class Meta:
+        abstract = True
+
     @property
     def is_vulnerable(self):
         """Returns True if this instance is affected by vulnerabilities."""
         return bool(self.affected_by_vulnerabilities)

-    class Meta:
-        abstract = True
-

 class VulnerabilityQuerySetMixin:
     def vulnerable(self):
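
Moving each `class Meta` above the methods and pulling the `get_absolute_url()` definitions up beside `save()` and `__str__()` matches the model layout that the newly enabled DJ (flake8-django) rules expect, which mirrors the Django style guide ordering: fields first, then Meta, `__str__()`, `save()`, `get_absolute_url()`, and finally custom methods and properties. A minimal sketch with a hypothetical model (the "ticket_detail" URL name is illustrative, not from the codebase):

    from django.db import models
    from django.urls import reverse


    class Ticket(models.Model):
        # 1. fields
        title = models.CharField(max_length=100)

        # 2. Meta
        class Meta:
            ordering = ["title"]

        # 3. string representation
        def __str__(self):
            return self.title

        # 4. save()
        def save(self, *args, **kwargs):
            self.title = self.title.strip()
            super().save(*args, **kwargs)

        # 5. get_absolute_url()
        def get_absolute_url(self):
            return reverse("ticket_detail", args=[self.pk])

        # 6. custom properties and methods
        @property
        def is_empty(self):
            return not self.title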

scanpipe/pipelines/analyze_root_filesystem.py

Lines changed: 0 additions & 1 deletion
@@ -111,7 +111,6 @@ def match_not_analyzed_to_application_packages(self):
         Match files with "not-yet-analyzed" status to files already belonging to
         application packages.
         """
-        # TODO: do it one rootfs at a time e.g. for rfs in self.root_filesystems:
         rootfs.match_not_analyzed(
             self.project,
             reference_status=flag.APPLICATION_PACKAGE,

scanpipe/pipes/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -166,8 +166,7 @@ def _clean_package_data(package_data):
     package_data = package_data.copy()
     if release_date := package_data.get("release_date"):
         if type(release_date) is str:
-            if release_date.endswith("Z"):
-                release_date = release_date[:-1]
+            release_date = release_date.removesuffix("Z")
             package_data["release_date"] = datetime.fromisoformat(release_date).date()

     # Strip leading "codebase/" to make path compatible with
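
str.removesuffix() (Python 3.9+) strips the suffix only when it is present and returns the string unchanged otherwise, so it replaces the endswith()/slice pair without changing behavior. A small standalone sketch (the timestamp value is illustrative):

    from datetime import datetime

    release_date = "2024-05-02T10:30:00Z"
    release_date = release_date.removesuffix("Z")        # "2024-05-02T10:30:00"
    print(datetime.fromisoformat(release_date).date())   # 2024-05-02

    # A string without the suffix is returned as-is:
    print("2024-05-02".removesuffix("Z"))                # 2024-05-02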

scanpipe/pipes/federatedcode.py

Lines changed: 1 addition & 2 deletions
@@ -156,8 +156,7 @@ def add_scan_result(project, repo, package_scan_file, logger=None):
     write_to.parent.mkdir(parents=True, exist_ok=True)
     results_generator = JSONResultsGenerator(project)
     with open(write_to, encoding="utf-8", mode="w") as file:
-        for chunk in results_generator:
-            file.write(chunk)
+        file.writelines(results_generator)

     return relative_scan_file_path
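
file.writelines() accepts any iterable of strings and writes each item without appending newlines, so passing the results generator directly is equivalent to the removed loop. A standalone sketch with a hypothetical generator standing in for JSONResultsGenerator:

    def generate_chunks():
        # Stand-in for JSONResultsGenerator: yields pieces of a JSON document.
        yield '{"headers": [], '
        yield '"packages": []}'

    with open("results.json", encoding="utf-8", mode="w") as file:
        # Same effect as: for chunk in generate_chunks(): file.write(chunk)
        file.writelines(generate_chunks())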

scanpipe/pipes/js.py

Lines changed: 1 addition & 1 deletion
@@ -69,7 +69,7 @@ def is_source_mapping_in_minified(resource, map_file_name):
     lines = resource.file_content.split("\n")
     total_lines = len(lines)
     # Get the last 5 lines.
-    tail = 5 if total_lines > 5 else total_lines
+    tail = min(total_lines, 5)
     return any(source_mapping in line for line in reversed(lines[-tail:]))
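
min(total_lines, 5) is the kind of simplification refurb suggests for "clamp to at most 5", and it is equivalent to the conditional expression it replaces for any non-negative count:

    for total_lines in (0, 3, 5, 12):
        assert min(total_lines, 5) == (5 if total_lines > 5 else total_lines)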

scanpipe/pipes/spdx.py

Lines changed: 14 additions & 17 deletions
@@ -28,7 +28,6 @@
 from datetime import datetime
 from datetime import timezone
 from pathlib import Path
-from typing import List  # Python 3.8 compatibility

 SPDX_SPEC_VERSION = "2.3"
 SPDX_LICENSE_LIST_VERSION = "3.20"
@@ -272,7 +271,7 @@ class ExtractedLicensingInfo:

     name: str = ""
     comment: str = ""
-    see_alsos: List[str] = field(default_factory=list)
+    see_alsos: list[str] = field(default_factory=list)

     def as_dict(self):
         """Return the data as a serializable dict."""
@@ -332,9 +331,9 @@ class Package:
     comment: str = ""
     license_comments: str = ""

-    checksums: List[Checksum] = field(default_factory=list)
-    external_refs: List[ExternalRef] = field(default_factory=list)
-    attribution_texts: List[str] = field(default_factory=list)
+    checksums: list[Checksum] = field(default_factory=list)
+    external_refs: list[ExternalRef] = field(default_factory=list)
+    attribution_texts: list[str] = field(default_factory=list)

     def as_dict(self):
         """Return the data as a serializable dict."""
@@ -380,9 +379,7 @@ def date_to_iso(date_str):
    if not date_str:
        return

-    if date_str.endswith("Z"):
-        date_str = date_str[:-1]
-
+    date_str = date_str.removesuffix("Z")
    as_datetime = datetime.fromisoformat(date_str)
    return as_datetime.isoformat(timespec="seconds") + "Z"
@@ -427,18 +424,18 @@ class File:

     spdx_id: str
     name: str
-    checksums: List[Checksum] = field(default_factory=list)
+    checksums: list[Checksum] = field(default_factory=list)

     license_concluded: str = "NOASSERTION"
     copyright_text: str = "NOASSERTION"
-    license_in_files: List[str] = field(default_factory=list)
-    contributors: List[str] = field(default_factory=list)
+    license_in_files: list[str] = field(default_factory=list)
+    contributors: list[str] = field(default_factory=list)
     notice_text: str = ""
     # Supported values:
     # SOURCE | BINARY | ARCHIVE | APPLICATION | AUDIO | IMAGE | TEXT | VIDEO |
     # DOCUMENTATION | SPDX | OTHER
-    types: List[str] = field(default_factory=list)
-    attribution_texts: List[str] = field(default_factory=list)
+    types: list[str] = field(default_factory=list)
+    attribution_texts: list[str] = field(default_factory=list)
     comment: str = ""
     license_comments: str = ""
@@ -534,16 +531,16 @@ class Document:
     name: str
     namespace: str
     creation_info: CreationInfo
-    packages: List[Package]
+    packages: list[Package]

     spdx_id: str = "SPDXRef-DOCUMENT"
     version: str = SPDX_SPEC_VERSION
     data_license: str = "CC0-1.0"
     comment: str = ""

-    files: List[File] = field(default_factory=list)
-    extracted_licenses: List[ExtractedLicensingInfo] = field(default_factory=list)
-    relationships: List[Relationship] = field(default_factory=list)
+    files: list[File] = field(default_factory=list)
+    extracted_licenses: list[ExtractedLicensingInfo] = field(default_factory=list)
+    relationships: list[Relationship] = field(default_factory=list)

     def as_dict(self):
         """Return the SPDX document as a serializable dict."""

scanpipe/tests/pipes/test_output.py

Lines changed: 3 additions & 3 deletions
@@ -234,7 +234,7 @@ def test_scanpipe_pipes_outputs_to_xlsx(self):
             "MESSAGES",
             "TODOS",
         ]
-        self.assertEqual(expected_sheet_names, workbook.get_sheet_names())
+        self.assertEqual(expected_sheet_names, workbook.sheetnames)

     def test_scanpipe_pipes_outputs_get_xlsx_report(self):
         project_qs = None
@@ -259,15 +259,15 @@ def test_scanpipe_pipes_outputs_get_xlsx_report(self):
         expected_sheet_names = [
             "PACKAGES",
         ]
-        self.assertEqual(expected_sheet_names, workbook.get_sheet_names())
+        self.assertEqual(expected_sheet_names, workbook.sheetnames)

         model_short_name = "todo"
         output_file = output.get_xlsx_report(project_qs, model_short_name)
         workbook = openpyxl.load_workbook(output_file, read_only=True, data_only=True)
         expected_sheet_names = [
             "TODOS",
         ]
-        self.assertEqual(expected_sheet_names, workbook.get_sheet_names())
+        self.assertEqual(expected_sheet_names, workbook.sheetnames)

     def test_scanpipe_pipes_outputs_get_xlsx_fields_order(self):
         output_file = output.to_xlsx(project=make_project())
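
openpyxl deprecated Workbook.get_sheet_names() in favor of the sheetnames property, so the assertions above now use the supported API. A small sketch (the sheet name is illustrative):

    import openpyxl

    workbook = openpyxl.Workbook()
    workbook.create_sheet("PACKAGES")

    # The sheetnames property replaces the deprecated get_sheet_names() method.
    print(workbook.sheetnames)  # ['Sheet', 'PACKAGES']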

scanpipe/tests/test_api.py

Lines changed: 5 additions & 2 deletions
@@ -665,6 +665,9 @@ def test_scanpipe_api_project_action_results_download_output_formats(self):
             "application/octet-stream",
         ]
         self.assertIn(response["Content-Type"], expected)
+        # Forces Django to finish the response and close the file
+        # to prevent a "ResourceWarning: unclosed file"
+        self.assertTrue(response.getvalue().startswith(b"PK"))

     def test_scanpipe_api_project_action_pipelines(self):
         url = reverse("project-pipelines")
@@ -704,9 +707,9 @@ def test_scanpipe_api_project_action_report(self):

         output_file = io.BytesIO(b"".join(response.streaming_content))
         workbook = openpyxl.load_workbook(output_file, read_only=True, data_only=True)
-        self.assertEqual(["PACKAGES"], workbook.get_sheet_names())
+        self.assertEqual(["PACKAGES"], workbook.sheetnames)

-        todos_sheet = workbook.get_sheet_by_name("PACKAGES")
+        todos_sheet = workbook["PACKAGES"]
         rows = list(todos_sheet.values)
         self.assertEqual(2, len(rows))
         self.assertEqual("project", rows[0][0])  # header row

scanpipe/tests/test_commands.py

Lines changed: 2 additions & 2 deletions
@@ -1280,8 +1280,8 @@ def test_scanpipe_management_command_report(self):
         self.assertIn(f"Report generated at {output_file}", out.getvalue())

         workbook = openpyxl.load_workbook(output_file, read_only=True, data_only=True)
-        self.assertEqual(["TODOS"], workbook.get_sheet_names())
-        todos_sheet = workbook.get_sheet_by_name("TODOS")
+        self.assertEqual(["TODOS"], workbook.sheetnames)
+        todos_sheet = workbook["TODOS"]
         header = list(todos_sheet.values)[0]

         self.assertNotIn("extra_data", header)
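
Alongside sheetnames, dictionary-style access (workbook["TODOS"]) replaces the deprecated get_sheet_by_name(), and iterating worksheet.values yields the rows as tuples. A small sketch with illustrative data:

    import openpyxl

    workbook = openpyxl.Workbook()
    todos_sheet = workbook.create_sheet("TODOS")
    todos_sheet.append(["project", "status"])  # header row
    todos_sheet.append(["my-project", "requires-review"])

    # Subscript access replaces the deprecated get_sheet_by_name("TODOS").
    header = list(workbook["TODOS"].values)[0]
    print(header)  # ('project', 'status')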
