@@ -310,11 +310,15 @@ def _update_module_status(self, updated_module_status={}):
                     modules_status = json.load(f)
                 modules_status = {**modules_status, **updated_module_status}
             else:
-                # handle cases where hash is changed(different paramset) and trying to rerun processing
-                # delete all files in kilosort output folder, all will be regenerated when kilosort is rerun
-                # recreate /json_configs directory after all kilosort output files are deleted
-                shutil.rmtree(self._ks_output_dir)
-                self._json_directory.mkdir(parents=True, exist_ok=True)
+                # handle cases of processing rerun on different parameters (the hash changes)
+                # delete outdated files
+                outdated_files = [
+                    f
+                    for f in self._json_directory.glob("*")
+                    if f.is_file() and f.name != self._module_input_json.name
+                ]
+                for f in outdated_files:
+                    f.unlink()

                 modules_status = {
                     module: {"start_time": None, "completion_time": None, "duration": None}
@@ -608,11 +612,15 @@ def _update_module_status(self, updated_module_status={}):
                     modules_status = json.load(f)
                 modules_status = {**modules_status, **updated_module_status}
             else:
-                # handle cases where hash is changed(different paramset) and trying to rerun processing
-                # delete all files in kilosort output folder, all will be regenerated when kilosort is rerun
-                # recreate /json_configs directory after all kilosort output files are deleted
-                shutil.rmtree(self._ks_output_dir)
-                self._json_directory.mkdir(parents=True, exist_ok=True)
+                # handle cases of processing rerun on different parameters (the hash changes)
+                # delete outdated files
+                outdated_files = [
+                    f
+                    for f in self._json_directory.glob("*")
+                    if f.is_file() and f.name != self._module_input_json.name
+                ]
+                for f in outdated_files:
+                    f.unlink()

                 modules_status = {
                     module: {"start_time": None, "completion_time": None, "duration": None}
@@ -759,8 +767,8 @@ def _write_channel_map_file(
         )

         if is_0_based:
-            channel_ind += 1
-            shank_ind += 1
+            channel_ind = channel_ind + 1
+            shank_ind = shank_ind + 1

         channel_count = len(channel_ind)
         chanMap0ind = np.arange(0, channel_count, dtype="float64")
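The distinction in this hunk is that += on a NumPy array modifies it in place (and therefore the caller's array as well), while x = x + 1 rebinds the name to a new array and leaves the original untouched. A small illustration with a hypothetical array:

import numpy as np

caller_ind = np.array([0, 1, 2])
channel_ind = caller_ind
channel_ind += 1               # in place: caller_ind is now array([1, 2, 3]) too

caller_ind = np.array([0, 1, 2])
channel_ind = caller_ind
channel_ind = channel_ind + 1  # new array: caller_ind stays array([0, 1, 2])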
@@ -769,8 +777,7 @@ def _write_channel_map_file(

         # channels to exclude
         mask = get_noise_channels(ap_band_file, channel_count, sample_rate, bit_volts)
-        bad_channel_ind = np.where(mask is False)[0]
-        connected[bad_channel_ind] = 0
+        connected = np.where(mask is False, 0, connected)

         mdict = {
             "chanMap": chanMap,
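For reference, a small sketch of zeroing out flagged channels with np.where, under the assumption that the noise mask is a boolean array with True marking usable channels (names and data here are made up, not the pipeline's); the sketch uses the elementwise ~mask test:

import numpy as np

connected = np.ones(5, dtype="float64")
mask = np.array([True, False, True, True, False])  # assumed: True = usable channel

# set connected to 0 wherever the mask flags a channel as noise
connected = np.where(~mask, 0, connected)          # -> [1., 0., 1., 1., 0.]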