 import json
 import numpy as np
 import pynwb
-import datajoint as dj
+# import datajoint as dj
 from element_interface.utils import find_full_path
 from hdmf.backends.hdf5 import H5DataIO
 from hdmf.data_utils import GenericDataChunkIterator
...
 from spikeinterface import extractors
 from tqdm import tqdm
 import warnings
-from ... import probe, ephys_no_curation
+from ... import probe
+from ... import ephys_no_curation as ephys

-assert probe.schema.is_activated(), 'probe not yet activated'
+# assert probe.schema.is_activated(), 'probe not yet activated'

-assert ephys_no_curation.schema.is_activated, \
-    "The ephys module must be activated before export."
+# assert ephys.schema.is_activated, \
+#     "The ephys module must be activated before export."


 class DecimalEncoder(json.JSONEncoder):
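The hunk above comments out the module-level activation asserts. A minimal sketch, assuming DataJoint's standard `Schema.is_activated()` API and the `probe`/`ephys` modules imported above, of how the same guard could instead run at call time; the helper name `_check_schemas_activated` is hypothetical and not part of this commit:

```python
# Hypothetical helper (not in this commit): defer the schema-activation check
# from import time to call time, keeping the original error messages.
def _check_schemas_activated():
    if not probe.schema.is_activated():
        raise RuntimeError("The probe module must be activated before export.")
    if not ephys.schema.is_activated():
        raise RuntimeError("The ephys module must be activated before export.")
```

Calling such a helper at the top of the export entry point would keep the module import free of side effects while preserving the original checks.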
@@ -48,7 +49,7 @@ def __init__(self, lfp_electrodes_query, chunk_length: int = 10000):

         first_record = (
             self.lfp_electrodes_query & dict(electrode=self.electrodes[0])
-        ).fetch1(as_dict=True)
+        ).fetch1()

         self.n_channels = len(self.electrodes)
         self.n_tt = len(first_record["lfp"])
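The hunk above drops the `as_dict=True` flag. A small illustration, with a placeholder restriction standing in for `self.lfp_electrodes_query`, of why the plain call is sufficient: DataJoint's `fetch1()` with no arguments already returns the single matching row as a dict keyed by attribute name.

```python
# Illustration only, not from the commit; the query object and electrode value
# are placeholders for the restriction used in the iterator's __init__.
first_record = (lfp_electrodes_query & {"electrode": 0}).fetch1()
lfp_trace = first_record["lfp"]    # one channel's stored LFP samples
n_timepoints = len(lfp_trace)      # corresponds to self.n_tt above
```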
@@ -166,7 +167,8 @@ def create_units_table(
     nwbfile: pynwb.NWBFile,
     paramset_record,
     name="units",
-    desc="data on spiking units"):
+    desc="data on spiking units"
+):
     """

     ephys.CuratedClustering.Unit::unit -> units.id
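A hedged usage sketch, not taken from this commit, of how a units table like the one `create_units_table` builds is typically attached to an NWB file with pynwb; the session metadata, unit ids, and spike times below are placeholders.

```python
from datetime import datetime, timezone

import numpy as np
import pynwb

# Placeholder NWB file; a real export populates metadata from the pipeline.
nwbfile = pynwb.NWBFile(
    session_description="demo session",
    identifier="demo-0001",
    session_start_time=datetime.now(timezone.utc),
)

# One row per spiking unit; ephys.CuratedClustering.Unit::unit maps to units.id.
units = pynwb.misc.Units(name="units", description="data on spiking units")
units.add_unit(id=1, spike_times=np.array([0.01, 0.35, 1.20]))
units.add_unit(id=2, spike_times=np.array([0.05, 0.80]))
nwbfile.units = units
```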