
Commit 2311513

GreenK173 authored
Fix coding style and PEP8 violations (#86)

* fix coding style

---------

Co-authored-by: JKB <j.kuben@era.aero>
Co-authored-by: Teque5 <teque5@gmail.com>

1 parent d88719a commit 2311513

File tree: 6 files changed, +54 -31 lines changed

sigmf/error.py

Lines changed: 1 addition & 5 deletions

@@ -8,21 +8,17 @@
 class SigMFError(Exception):
-    """ SigMF base exception."""
-    pass
+    """SigMF base exception."""


 class SigMFValidationError(SigMFError):
     """Exceptions related to validating SigMF metadata."""
-    pass


 class SigMFAccessError(SigMFError):
     """Exceptions related to accessing the contents of SigMF metadata, notably
     when expected fields are missing or accessing out of bounds captures."""
-    pass


 class SigMFFileError(SigMFError):
     """Exceptions related to reading or writing SigMF files or archives."""
-    pass
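
The deleted `pass` statements were redundant: a docstring is itself a valid class body, so each exception subclass needs nothing more. A minimal sketch of the pattern (hypothetical names, not part of this commit):

    class BaseError(Exception):
        """A docstring alone is a complete class body; no `pass` required."""


    class SpecificError(BaseError):
        """Subclasses behave identically with or without `pass`."""


    # raising and catching work as usual
    try:
        raise SpecificError("something went wrong")
    except BaseError as err:
        print(err)  # -> something went wrong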

sigmf/schema.py

Lines changed: 7 additions & 10 deletions

@@ -4,27 +4,24 @@
 #
 # SPDX-License-Identifier: LGPL-3.0-or-later

-'''Schema IO'''
+"""Schema IO"""

 import json
 import os

 from . import utils

-SCHEMA_META = 'schema-meta.json'
-SCHEMA_COLLECTION = 'schema-collection.json'
+SCHEMA_META = "schema-meta.json"
+SCHEMA_COLLECTION = "schema-collection.json"


 def get_schema(version=None, schema_file=SCHEMA_META):
-    '''
+    """
     Load JSON Schema to for either a `sigmf-meta` or `sigmf-collection`.

     TODO: In the future load specific schema versions.
-    '''
-    schema_path = os.path.join(
-        utils.get_schema_path(os.path.dirname(utils.__file__)),
-        schema_file
-    )
-    with open(schema_path, 'rb') as handle:
+    """
+    schema_path = os.path.join(utils.get_schema_path(os.path.dirname(utils.__file__)), schema_file)
+    with open(schema_path, "rb") as handle:
         schema = json.load(handle)
     return schema
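
For context on the collapsed `schema_path` expression: `get_schema()` locates the schema JSON bundled with the installed package and parses it. A brief usage sketch (assumes the sigmf package is installed; the printed keys are illustrative):

    from sigmf import schema

    # load the default sigmf-meta schema bundled with the package
    meta_schema = schema.get_schema()
    print(sorted(meta_schema.keys()))

    # the collection schema can be requested explicitly
    collection_schema = schema.get_schema(schema_file=schema.SCHEMA_COLLECTION)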

sigmf/sigmf_hash.py

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@
 #
 # SPDX-License-Identifier: LGPL-3.0-or-later

-'''Hashing Functions'''
+"""Hashing Functions"""

 import hashlib
 import os

sigmf/sigmffile.py

Lines changed: 42 additions & 13 deletions

@@ -205,7 +205,7 @@ def __next__(self):
         raise StopIteration

     def __getitem__(self, sli):
-        mem = self._memmap[sli] # matches behavior of numpy.ndarray.__getitem__()
+        mem = self._memmap[sli]  # matches behavior of numpy.ndarray.__getitem__()

         if self._return_type is None:
             return mem
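
The comment-spacing fix above touches `__getitem__`, which defers to the underlying `numpy.memmap`, so slicing a `SigMFFile` behaves like slicing an ndarray. A hedged usage sketch (the file name is hypothetical):

    from sigmf import sigmffile

    rec = sigmffile.fromfile("example.sigmf-meta")  # hypothetical recording
    first_hundred = rec[:100]  # same semantics as numpy.ndarray.__getitem__()
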
@@ -334,7 +334,7 @@ def add_capture(self, start_index, metadata=None):
         # sort captures by start_index
         self._metadata[self.CAPTURE_KEY] = sorted(
             capture_list,
-            key=lambda item: item[self.START_INDEX_KEY]
+            key=lambda item: item[self.START_INDEX_KEY],
         )

     def get_captures(self):
@@ -375,13 +375,17 @@ def get_capture_byte_boundarys(self, index):
         compliant or noncompliant SigMF Recordings.
         """
         if index >= len(self.get_captures()):
-            raise SigMFAccessError("Invalid captures index {} (only {} captures in Recording)".format(index, len(self.get_captures())))
+            raise SigMFAccessError(
+                "Invalid captures index {} (only {} captures in Recording)".format(index, len(self.get_captures()))
+            )

         start_byte = 0
         prev_start_sample = 0
         for ii, capture in enumerate(self.get_captures()):
             start_byte += capture.get(self.HEADER_BYTES_KEY, 0)
-            start_byte += (self.get_capture_start(ii) - prev_start_sample) * self.get_sample_size() * self.get_num_channels()
+            start_byte += (
+                (self.get_capture_start(ii) - prev_start_sample) * self.get_sample_size() * self.get_num_channels()
+            )
             prev_start_sample = self.get_capture_start(ii)
             if ii >= index:
                 break

@@ -390,7 +394,11 @@ def get_capture_byte_boundarys(self, index):
         if index == len(self.get_captures()) - 1:  # last captures...data is the rest of the file
             end_byte = path.getsize(self.data_file) - self.get_global_field(self.TRAILING_BYTES_KEY, 0)
         else:
-            end_byte += (self.get_capture_start(index+1) - self.get_capture_start(index)) * self.get_sample_size() * self.get_num_channels()
+            end_byte += (
+                (self.get_capture_start(index + 1) - self.get_capture_start(index))
+                * self.get_sample_size()
+                * self.get_num_channels()
+            )
         return (start_byte, end_byte)

     def add_annotation(self, start_index, length=None, metadata=None):
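
The reflowed expressions are easier to check by hand: a capture's start byte advances by (samples since the previous capture) times the sample size times the channel count, plus any per-capture header bytes. A standalone sketch of the same arithmetic under a simplifying assumption (no core:header_bytes or core:trailing_bytes; the helper name is hypothetical, not library API):

    def capture_byte_boundaries(capture_starts, total_size, sample_size, num_channels):
        # hypothetical standalone version of the loop above, assuming no
        # core:header_bytes and no core:trailing_bytes (a simplification)
        bounds = []
        for idx, start_sample in enumerate(capture_starts):
            start_byte = start_sample * sample_size * num_channels
            if idx == len(capture_starts) - 1:
                end_byte = total_size  # last capture runs to the end of the file
            else:
                end_byte = capture_starts[idx + 1] * sample_size * num_channels
            bounds.append((start_byte, end_byte))
        return bounds

    # two captures of 8-byte complex64 samples on one channel
    print(capture_byte_boundaries([0, 1000], total_size=16000, sample_size=8, num_channels=1))
    # [(0, 8000), (8000, 16000)]
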
@@ -409,7 +417,7 @@ def add_annotation(self, start_index, length=None, metadata=None):
         # sort annotations by start_index
         self._metadata[self.ANNOTATION_KEY] = sorted(
             self._metadata[self.ANNOTATION_KEY],
-            key=lambda item: item[self.START_INDEX_KEY]
+            key=lambda item: item[self.START_INDEX_KEY],
         )

     def get_annotations(self, index=None):
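
Both `sorted()` call sites gain a trailing comma after the `key=` argument. Under Black this "magic trailing comma" pins the call in its exploded, one-argument-per-line form, so adding an argument later is a one-line diff. A minimal sketch (in the SigMF spec `START_INDEX_KEY` resolves to `core:sample_start`):

    capture_list = [{"core:sample_start": 1024}, {"core:sample_start": 0}]
    captures = sorted(
        capture_list,
        key=lambda item: item["core:sample_start"],  # trailing comma keeps this layout
    )
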
@@ -466,13 +474,18 @@ def _count_samples(self):
         header_bytes = sum([c.get(self.HEADER_BYTES_KEY, 0) for c in self.get_captures()])
         file_size = path.getsize(self.data_file) if self.data_size_bytes is None else self.data_size_bytes
         file_data_size = file_size - self.get_global_field(self.TRAILING_BYTES_KEY, 0) - header_bytes  # bytes
-        sample_size = self.get_sample_size() # size of a sample in bytes
+        sample_size = self.get_sample_size()  # size of a sample in bytes
         num_channels = self.get_num_channels()
         sample_count = file_data_size // sample_size // num_channels
         if file_data_size % (sample_size * num_channels) != 0:
-            warnings.warn(f"File `{self.data_file}` does not contain an integer number of samples across channels. It may be invalid data.")
+            warnings.warn(
+                f"File `{self.data_file}` does not contain an integer number of samples across channels. "
+                "It may be invalid data."
+            )
         if self._get_sample_count_from_annotations() > sample_count:
-            warnings.warn(f"File `{self.data_file}` ends before the final annotation in the corresponding SigMF metadata.")
+            warnings.warn(
+                f"File `{self.data_file}` ends before the final annotation in the corresponding SigMF metadata."
+            )
         self.sample_count = sample_count
         return sample_count
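
A worked instance of the `_count_samples()` arithmetic with hypothetical values: the payload is the file size minus trailing and header bytes, and the sample count divides that payload by bytes-per-sample and channel count; the modulo test guards the first warning above.

    file_size = 80_006       # bytes on disk
    trailing_bytes = 0       # core:trailing_bytes
    header_bytes = 6         # sum of core:header_bytes over captures
    sample_size = 8          # e.g. one complex64 sample
    num_channels = 2

    file_data_size = file_size - trailing_bytes - header_bytes    # 80_000
    sample_count = file_data_size // sample_size // num_channels  # 5_000
    assert file_data_size % (sample_size * num_channels) == 0     # no warning emitted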

@@ -503,17 +516,27 @@ def calculate_hash(self):
         """
         old_hash = self.get_global_field(self.HASH_KEY)
         if self.data_file is not None:
-            new_hash = sigmf_hash.calculate_sha512(self.data_file, offset=self.data_offset, size=self.data_size_bytes)
+            new_hash = sigmf_hash.calculate_sha512(
+                filename=self.data_file,
+                offset=self.data_offset,
+                size=self.data_size_bytes,
+            )
         else:
-            new_hash = sigmf_hash.calculate_sha512(fileobj=self.data_buffer, offset=self.data_offset, size=self.data_size_bytes)
+            new_hash = sigmf_hash.calculate_sha512(
+                fileobj=self.data_buffer,
+                offset=self.data_offset,
+                size=self.data_size_bytes,
+            )
         if old_hash is not None:
             if old_hash != new_hash:
                 raise SigMFFileError("Calculated file hash does not match associated metadata.")

         self.set_global_field(self.HASH_KEY, new_hash)
         return new_hash

-    def set_data_file(self, data_file=None, data_buffer=None, skip_checksum=False, offset=0, size_bytes=None, map_readonly=True):
+    def set_data_file(
+        self, data_file=None, data_buffer=None, skip_checksum=False, offset=0, size_bytes=None, map_readonly=True
+    ):
         """
         Set the datafile path, then recalculate sample count. If not skipped,
         update the hash and return the hash string.
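
The keyword-argument call sites make it explicit which of `calculate_sha512`'s parameters each value binds to. As a rough sketch of what such a helper might do when given a filename, offset, and size (an assumption for illustration; the real implementation lives in sigmf/sigmf_hash.py):

    import hashlib

    def sha512_of_slice(filename, offset=0, size=None, chunk_size=65536):
        # hypothetical stand-in for sigmf_hash.calculate_sha512, not library code:
        # hash `size` bytes of `filename` starting at `offset`
        digest = hashlib.sha512()
        with open(filename, "rb") as handle:
            handle.seek(offset)
            remaining = size
            while remaining is None or remaining > 0:
                want = chunk_size if remaining is None else min(chunk_size, remaining)
                data = handle.read(want)
                if not data:
                    break
                digest.update(data)
                if remaining is not None:
                    remaining -= len(data)
        return digest.hexdigest()
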
@@ -728,7 +751,13 @@ class SigMFCollection(SigMFMetafile):
     STREAMS_KEY = "core:streams"
     COLLECTION_KEY = "collection"
     VALID_COLLECTION_KEYS = [
-        AUTHOR_KEY, COLLECTION_DOI_KEY, DESCRIPTION_KEY, EXTENSIONS_KEY, LICENSE_KEY, STREAMS_KEY, VERSION_KEY
+        AUTHOR_KEY,
+        COLLECTION_DOI_KEY,
+        DESCRIPTION_KEY,
+        EXTENSIONS_KEY,
+        LICENSE_KEY,
+        STREAMS_KEY,
+        VERSION_KEY,
     ]
     VALID_KEYS = {COLLECTION_KEY: VALID_COLLECTION_KEYS}

tests/test_archivereader.py

Lines changed: 1 addition & 1 deletion

@@ -84,7 +84,7 @@ def test_access_data_without_untar(self):


 def test_archiveread_data_file_unchanged(test_sigmffile):
-    with tempfile.NamedTemporaryFile(suffix='.sigmf') as temp:
+    with tempfile.NamedTemporaryFile(suffix=".sigmf") as temp:
         input_samples = test_sigmffile.read_samples()
         test_sigmffile.archive(temp.name)

tests/test_utils.py

Lines changed: 2 additions & 1 deletion

@@ -13,15 +13,16 @@
 from sigmf import utils


+# fmt: off
 @pytest.mark.parametrize("time_str, expected", [
     ("1955-07-04T05:15:00Z", datetime(year=1955, month=7, day=4, hour=5, minute=15, second=00, microsecond=0, tzinfo=timezone.utc)),
     ("2956-08-05T06:15:12Z", datetime(year=2956, month=8, day=5, hour=6, minute=15, second=12, microsecond=0, tzinfo=timezone.utc)),
     ("3957-09-06T07:15:12.345Z", datetime(year=3957, month=9, day=6, hour=7, minute=15, second=12, microsecond=345000, tzinfo=timezone.utc)),
     ("4958-10-07T08:15:12.0345Z", datetime(year=4958, month=10, day=7, hour=8, minute=15, second=12, microsecond=34500, tzinfo=timezone.utc)),
     ("5959-11-08T09:15:12.000000Z", datetime(year=5959, month=11, day=8, hour=9, minute=15, second=12, microsecond=0, tzinfo=timezone.utc)),
     ("6960-12-09T10:15:12.123456789123Z", datetime(year=6960, month=12, day=9, hour=10, minute=15, second=12, microsecond=123456, tzinfo=timezone.utc)),
-
 ])
+# fmt: on
 def test_parse_simple_iso8601(time_str: str, expected: datetime) -> None:
     """Ensure various times are represented as expected"""
     date_struct = utils.parse_iso8601_datetime(time_str)
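
`# fmt: off` and `# fmt: on` are formatter directives honored by Black; they protect the hand-aligned parametrize table from being reflowed on the next format pass. The same pattern works for any block whose manual alignment carries meaning, e.g. (hypothetical table):

    # fmt: off
    ROUND_TRIP_CASES = [
        (1,    "one"),
        (10,   "ten"),
        (100,  "hundred"),
    ]
    # fmt: on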
