diff --git a/.github/workflows/flake8_python_linter.yml b/.github/workflows/flake8_python_linter.yml
index 8814adc3c..c723df090 100644
--- a/.github/workflows/flake8_python_linter.yml
+++ b/.github/workflows/flake8_python_linter.yml
@@ -32,7 +32,7 @@ jobs:
         # E241: Multiple spaces after ','
         # E251: Unexpected spaces around keyword / parameter equals
         # E272: Multiple spaces before keyword
-        flake8_args: "--ignore=W503,E202,E203,E221,E241,E251,E272,E126,E131,E121,E111,E114,E127
+        flake8_args: "--ignore=W503,E202,E203,E221,E241,E251,E272,E126,E131,E121,E111,E127
          --max-line-length 120
          --exclude python/react-series-data-viewer/protocol_buffers/"
         github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/python/ingest_eeg_bids_datasets.py b/python/ingest_eeg_bids_datasets.py
index e62e78d0c..ffe451f22 100755
--- a/python/ingest_eeg_bids_datasets.py
+++ b/python/ingest_eeg_bids_datasets.py
@@ -75,8 +75,8 @@ def main():
     # ---------------------------------------------------------------------------------------------
     assembly_bids_path = config_db_obj.get_config("EEGAssemblyBIDS")
     if not assembly_bids_path:
-      data_dir = config_db_obj.get_config("dataDirBasepath")
-      assembly_bids_path = os.path.join(data_dir, 'assembly_bids')
+        data_dir = config_db_obj.get_config("dataDirBasepath")
+        assembly_bids_path = os.path.join(data_dir, 'assembly_bids')

     # ---------------------------------------------------------------------------------------------
     # Get all EEG upload with status = Extracted
@@ -86,10 +86,10 @@ def main():
             " WHERE Status = 'Extracted'" \

     if upload_id:
-      query = query + " AND UploadID = %s"
-      eeg_dataset_list = db.pselect(query, (upload_id,))
+        query = query + " AND UploadID = %s"
+        eeg_dataset_list = db.pselect(query, (upload_id,))
     else:
-      eeg_dataset_list = db.pselect(query, ())
+        eeg_dataset_list = db.pselect(query, ())

     if not eeg_dataset_list:
         print('No new EEG datasets to ingest.')
@@ -100,84 +100,84 @@ def main():

     # ---------------------------------------------------------------------------------------------
     for eeg_dataset in eeg_dataset_list:
-      uploadid = str(eeg_dataset['UploadID'])
+        uploadid = str(eeg_dataset['UploadID'])

-      query = "SELECT s.CandID, c.PSCID, s.Visit_label " \
+        query = "SELECT s.CandID, c.PSCID, s.Visit_label " \
                 " FROM session s " \
                 " JOIN candidate c ON c.CandID = s.CandID " \
                 " WHERE s.ID = %s" \

-      session_data = db.pselect(query, (eeg_dataset['SessionID'],))
-
-      if not session_data:
-        print('Session ID ' + eeg_dataset['SessionID'] + ' associated with UploadID ' + uploadid + ' does not exist.')
-        sys.exit(INVALID_ARG)
-
-      candid = session_data[0]['CandID']
-      pscid = session_data[0]['PSCID']
-      visit = session_data[0]['Visit_label']
-
-      # Subject id
-      subjectid = None
-      # BIDS subject id is either the pscid or the candid
-
-      # Try the candid
-      if os.path.isdir(
-        os.path.join(assembly_bids_path, 'sub-' + str(candid))
-      ):
-        subjectid = str(candid)
-
-      # Try the pscid, case insensitive
-      if not subjectid:
-        gen = (
-          dir for dir in os.listdir(assembly_bids_path)
-          if dir.lower() == 'sub-' + pscid.lower()
-        )
-        subjectid = next(gen, None)
-
-      # No match
-      if not subjectid:
-        print('No BIDS dataset matching candidate ' + pscid + ' ' + str(candid) + ' found.')
-        continue
-
-      # Visit
-      path = os.path.join(assembly_bids_path, 'sub-' + subjectid, 'ses-' + visit)
-      if not os.path.isdir(path):
-        print('No BIDS dataset matching visit ' + visit + ' for candidate ' + pscid + ' ' + str(candid) + ' found.')
-        continue
-
-      script = os.environ['LORIS_MRI'] + '/python/bids_import.py'
-      # Assume eeg and raw data for now
-      eeg_path = os.path.join(path, 'eeg')
-      command = 'python ' + script + ' -p ' + profile + ' -d ' + eeg_path + ' --nobidsvalidation --nocopy --type raw'
-
-      try:
-        result = subprocess.run(command, shell = True, capture_output=True)
-
-        if result.stdout:
-          print(result.stdout.decode('utf-8'))
-
-        if result.stderr:
-          print(
-            f'ERROR: EEG Dataset with uploadID {uploadid} ingestion log:\n ' + result.stderr.decode('utf-8')
-          )
-
-        if result.returncode == 0:
-          db.update(
-            "UPDATE electrophysiology_uploader SET Status = 'Ingested' WHERE UploadID = %s",
+        session_data = db.pselect(query, (eeg_dataset['SessionID'],))
+
+        if not session_data:
+            print(f'Session ID {eeg_dataset["SessionID"]} associated with UploadID {uploadid} does not exist.')
+            sys.exit(INVALID_ARG)
+
+        candid = session_data[0]['CandID']
+        pscid = session_data[0]['PSCID']
+        visit = session_data[0]['Visit_label']
+
+        # Subject id
+        subjectid = None
+        # BIDS subject id is either the pscid or the candid
+
+        # Try the candid
+        if os.path.isdir(
+            os.path.join(assembly_bids_path, 'sub-' + str(candid))
+        ):
+            subjectid = str(candid)
+
+        # Try the pscid, case insensitive
+        if not subjectid:
+            gen = (
+                dir for dir in os.listdir(assembly_bids_path)
+                if dir.lower() == 'sub-' + pscid.lower()
+            )
+            subjectid = next(gen, None)
+
+        # No match
+        if not subjectid:
+            print('No BIDS dataset matching candidate ' + pscid + ' ' + str(candid) + ' found.')
+            continue
+
+        # Visit
+        path = os.path.join(assembly_bids_path, 'sub-' + subjectid, 'ses-' + visit)
+        if not os.path.isdir(path):
+            print(f'No BIDS dataset matching visit {visit} for candidate {pscid} {candid} found.')
+            continue
+
+        script = os.environ['LORIS_MRI'] + '/python/bids_import.py'
+        # Assume eeg and raw data for now
+        eeg_path = os.path.join(path, 'eeg')
+        command = 'python ' + script + ' -p ' + profile + ' -d ' + eeg_path + ' --nobidsvalidation --nocopy --type raw'
+
+        try:
+            result = subprocess.run(command, shell = True, capture_output=True)
+
+            if result.stdout:
+                print(result.stdout.decode('utf-8'))
+
+            if result.stderr:
+                print(
+                    f'ERROR: EEG Dataset with uploadID {uploadid} ingestion log:\n ' + result.stderr.decode('utf-8')
+                )
+
+            if result.returncode == 0:
+                db.update(
+                    "UPDATE electrophysiology_uploader SET Status = 'Ingested' WHERE UploadID = %s",
+                    (uploadid,)
+                )
+                print('EEG Dataset with uploadID ' + uploadid + ' successfully ingested')
+                continue
+
+        except OSError:
+            print('ERROR: ' + script + ' not found')
+
+        db.update(
+            "UPDATE electrophysiology_uploader SET Status = 'Failed Ingestion' WHERE UploadID = %s",
             (uploadid,)
-          )
-          print('EEG Dataset with uploadID ' + uploadid + ' successfully ingested')
-          continue
-
-      except OSError:
-        print('ERROR: ' + script + ' not found')
-
-      db.update(
-        "UPDATE electrophysiology_uploader SET Status = 'Failed Ingestion' WHERE UploadID = %s",
-        (uploadid,)
-      )
-
+        )
+
     # TODO: reupload of archive after ingestion
     # Delete if already exist

diff --git a/python/lib/bidsreader.py b/python/lib/bidsreader.py
index 688bf628e..9e4c2e3fe 100755
--- a/python/lib/bidsreader.py
+++ b/python/lib/bidsreader.py
@@ -23,7 +23,7 @@

 #         or bids_pack_version[1] > 12
 #         or (bids_pack_version[1] == 12 and bids_pack_version[2] > 0)):
-# from bids import BIDSLayoutIndexer
+#     from bids import BIDSLayoutIndexer


 __license__ = "GPLv3"
diff --git a/python/lib/database_lib/bids_event_mapping.py b/python/lib/database_lib/bids_event_mapping.py
index 83eb6d42e..2893a604d 100644
--- a/python/lib/database_lib/bids_event_mapping.py
+++ b/python/lib/database_lib/bids_event_mapping.py
@@ -67,4 +67,4 @@ def insert(self, target_id, property_name, property_value, hed_tag_id,
             column_names=column_names,
             values=values,
             get_last_id=True
-        )
\ No newline at end of file
+        )
diff --git a/python/lib/database_lib/physiological_coord_system.py b/python/lib/database_lib/physiological_coord_system.py
index d13d5e33a..4c2c57c0a 100644
--- a/python/lib/database_lib/physiological_coord_system.py
+++ b/python/lib/database_lib/physiological_coord_system.py
@@ -279,4 +279,4 @@ def insert_coord_system_point_3d_relation(self, coord_system_id: int,
                 'Name'
             ),
             values=values_to_insert
-        )
\ No newline at end of file
+        )
diff --git a/python/lib/database_lib/physiological_modality.py b/python/lib/database_lib/physiological_modality.py
index 874b24c23..4112c9c5d 100644
--- a/python/lib/database_lib/physiological_modality.py
+++ b/python/lib/database_lib/physiological_modality.py
@@ -37,4 +37,4 @@ def grep_id_from_modality_value(self, physiological_modality):
             where_field_name='PhysiologicalModality',
             where_value=physiological_modality,
             insert_if_not_found=False
-        )
\ No newline at end of file
+        )
diff --git a/python/lib/database_lib/physiological_output_type.py b/python/lib/database_lib/physiological_output_type.py
index 7d09ed148..2441571b1 100644
--- a/python/lib/database_lib/physiological_output_type.py
+++ b/python/lib/database_lib/physiological_output_type.py
@@ -37,4 +37,4 @@ def grep_id_from_output_type(self, physiological_output_type):
             where_field_name='OutputTypeName',
             where_value=physiological_output_type,
             insert_if_not_found=False
-        )
\ No newline at end of file
+        )
diff --git a/python/lib/database_lib/physiological_parameter_file.py b/python/lib/database_lib/physiological_parameter_file.py
index 3ada1e925..0f5b5fb80 100644
--- a/python/lib/database_lib/physiological_parameter_file.py
+++ b/python/lib/database_lib/physiological_parameter_file.py
@@ -42,4 +42,4 @@ def insert(self, physiological_file_id, project_id, parameter_type_id, value):
             column_names = ('PhysiologicalFileID', 'ProjectID', 'ParameterTypeID', 'Value'),
             values = (physiological_file_id, project_id, parameter_type_id, value),
             get_last_id = True
-        )
\ No newline at end of file
+        )
diff --git a/python/lib/database_lib/point_3d.py b/python/lib/database_lib/point_3d.py
index eab6724ca..3445e814f 100644
--- a/python/lib/database_lib/point_3d.py
+++ b/python/lib/database_lib/point_3d.py
@@ -118,4 +118,4 @@ def grep_or_insert_point_by_coordinates(self, x: float, y: float, z: float):
         p = self.grep_point_by_coordinates(x, y, z)
         if p is None:
             p = self.insert_point_by_coordinates(x, y, z)
-        return p
\ No newline at end of file
+        return p
diff --git a/python/lib/imaging.py b/python/lib/imaging.py
index 3c3331437..727c13b2b 100644
--- a/python/lib/imaging.py
+++ b/python/lib/imaging.py
@@ -168,7 +168,7 @@ def insert_parameter_file(self, file_id, parameter_name, value):
         """

         # convert list values into strings that could be inserted into parameter_file
-        if type(value) == list:
+        if type(value) is list:
             if value and type(value[0]) in [float, int]:
                 value = [str(f) for f in value]
                 value = f"[{', '.join(value)}]"
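
A note on the imaging.py hunk above: pycodestyle's E721 check discourages comparing types with ==, and an identity check with "is" keeps the exact-type semantics, while isinstance() would also accept subclasses. A minimal sketch of the difference (illustration only, not part of the patch; the Coordinates subclass is made up):

    # Exact type match, as in insert_parameter_file(): only a plain list passes.
    value = [1.5, 2]
    if type(value) is list:
        value = f"[{', '.join(str(v) for v in value)}]"
    print(value)                            # prints: [1.5, 2]

    class Coordinates(list):                # hypothetical list subclass
        pass

    print(type(Coordinates()) is list)      # False: subclasses are excluded
    print(isinstance(Coordinates(), list))  # True: isinstance() accepts subclasses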