 import numpy as np
 import re
 import datetime
+import logging
+
+logger = logging.getLogger(__name__)


 """
@@ -155,6 +158,9 @@ def load_probe_data(self):
                         float(rec.duration))
                     probe.recording_info['recording_files'].append(
                         rec.absolute_foldername / 'continuous' / continuous_info['folder_name'])
+                elif continuous_type == 'lfp':
+                    probe.recording_info['recording_lfp_files'].append(
+                        rec.absolute_foldername / 'continuous' / continuous_info['folder_name'])

                 meta = getattr(probe, continuous_type + '_meta')
                 if not meta:
@@ -229,7 +235,8 @@ def __init__(self, processor, probe_index=0):
         self.recording_info = {'recording_count': 0,
                                'recording_datetimes': [],
                                'recording_durations': [],
-                               'recording_files': []}
+                               'recording_files': [],
+                               'recording_lfp_files': []}

         self._ap_timeseries = None
         self._ap_timestamps = None
@@ -303,3 +310,73 @@ def extract_spike_waveforms(self, spikes, channel_ind, n_wf=500, wf_win=(-32, 32
             return spike_wfs
         else:  # if no spike found, return NaN of size (sample x channel x 1)
             return np.full((len(range(*wf_win)), len(channel_ind), 1), np.nan)
+
+    def compress(self):
+        from mtscomp import compress as mts_compress
+
+        ap_dirs = self.recording_info['recording_files']
+        lfp_dirs = self.recording_info['recording_lfp_files']
+
+        meta_mapping = {'ap': self.ap_meta, 'lfp': self.lfp_meta}
+
+        compressed_files = []
+        for continuous_dir, continuous_type in zip(
+                ap_dirs + lfp_dirs,
+                ['ap'] * len(ap_dirs) + ['lfp'] * len(lfp_dirs)):
+            dat_fp = continuous_dir / 'continuous.dat'
+            if not dat_fp.exists():
+                raise FileNotFoundError(f'Compression error - "{dat_fp}" does not exist')
+            cdat_fp = continuous_dir / 'continuous.cdat'
+            ch_fp = continuous_dir / 'continuous.ch'
+
+            if cdat_fp.exists():
+                assert ch_fp.exists()
+                logger.info(f'Compressed file exists ({cdat_fp}), skipping...')
+                continue
+
+            try:
+                mts_compress(dat_fp, cdat_fp, ch_fp,
+                             sample_rate=meta_mapping[continuous_type]['sample_rate'],
+                             n_channels=meta_mapping[continuous_type]['num_channels'],
+                             dtype=np.memmap(dat_fp).dtype)
+            except Exception as e:
+                cdat_fp.unlink(missing_ok=True)
+                ch_fp.unlink(missing_ok=True)
+                raise e
+            else:
+                compressed_files.append((cdat_fp, ch_fp))
+
+        return compressed_files
+
+    def decompress(self):
+        from mtscomp import decompress as mts_decompress
+
+        ap_dirs = self.recording_info['recording_files']
+        lfp_dirs = self.recording_info['recording_lfp_files']
+
+        decompressed_files = []
+        for continuous_dir, continuous_type in zip(
+                ap_dirs + lfp_dirs,
+                ['ap'] * len(ap_dirs) + ['lfp'] * len(lfp_dirs)):
+            dat_fp = continuous_dir / 'continuous.dat'
+
+            if dat_fp.exists():
+                logger.info(f'Decompressed file exists ({dat_fp}), skipping...')
+                continue
+
+            cdat_fp = continuous_dir / 'continuous.cdat'
+            ch_fp = continuous_dir / 'continuous.ch'
+
+            if not cdat_fp.exists():
+                raise FileNotFoundError(f'Decompression error - "{cdat_fp}" does not exist')
+
+            try:
+                decomp_arr = mts_decompress(cdat_fp, ch_fp)
+                decomp_arr.tofile(dat_fp)
+            except Exception as e:
+                dat_fp.unlink(missing_ok=True)
+                raise e
+            else:
+                decompressed_files.append(dat_fp)
+
+        return decompressed_files
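For reference, a minimal usage sketch of the new methods after a session has been loaded. The loader class name, its probes attribute, and the session path below are assumptions for illustration and are not part of this change; what the diff does establish is that compress() writes a continuous.cdat/continuous.ch pair next to each raw continuous.dat via mtscomp, and decompress() rebuilds the continuous.dat files from those pairs.

# Hypothetical usage sketch: the OpenEphys loader name, the `probes`
# attribute, and the session path are assumptions, not part of this diff.
import pathlib

from openephys import OpenEphys  # assumed loader class exposing load_probe_data()

session_dir = pathlib.Path('/data/subject0/2021-01-01_00-00-00')  # illustrative path
loader = OpenEphys(session_dir)

for serial_number, probe in loader.probes.items():
    # Compress every continuous.dat tracked in recording_files / recording_lfp_files.
    compressed = probe.compress()   # list of (continuous.cdat, continuous.ch) pairs
    print(serial_number, compressed)

    # Restore the raw continuous.dat files from the compressed pairs when needed.
    restored = probe.decompress()   # list of recreated continuous.dat paths
    print(serial_number, restored)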