Commit a2758f4

Fix issues in filter updates and add script for active zone segmentation
1 parent 58f3e6b commit a2758f4

2 files changed: +69 -3 lines changed
Lines changed: 66 additions & 0 deletions
@@ -0,0 +1,66 @@
import os
from glob import glob

import h5py
import numpy as np

from scipy.ndimage import binary_closing
from skimage.measure import label
from synaptic_reconstruction.ground_truth.shape_refinement import edge_filter
from tqdm import tqdm

ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/final_Imig2014_seg_manComp"  # noqa

OUTPUT_AZ = "./boundary_az"


def filter_az(path):
    # Check if we have the output already.
    ds, fname = os.path.split(path)
    ds = os.path.basename(ds)
    out_path = os.path.join(OUTPUT_AZ, ds, fname)
    os.makedirs(os.path.join(OUTPUT_AZ, ds), exist_ok=True)
    if os.path.exists(out_path):
        return

    with h5py.File(path, "r") as f:
        raw = f["raw"][:]
        az = f["AZ/segment_from_AZmodel_v3"][:]
        vesicles = f["/vesicles/segment_from_combined_vesicles"][:]

    # Compute the sato filter of the raw data, smooth it afterwards.
    # This will highlight dark ridge-like structures, and so
    # will yield high values for the plasma membrane.
    hmap = edge_filter(raw, sigma=1.0, method="sato", per_slice=True, n_threads=8)

    # Filter the active zone by combining a bunch of things:
    # 1. Find a mask with high values in the ridge filter.
    threshold_hmap = 0.5
    az_filtered = hmap > threshold_hmap
    # 2. Intersect it with the active zone predictions.
    az_filtered = np.logical_and(az_filtered, az)
    # 3. Intersect it with the negative vesicle mask.
    az_filtered = np.logical_and(az_filtered, vesicles == 0)

    # Postprocessing of the filtered active zone:
    # 1. Apply connected components and only keep the largest component.
    az_filtered = label(az_filtered)
    ids, sizes = np.unique(az_filtered, return_counts=True)
    ids, sizes = ids[1:], sizes[1:]
    az_filtered = (az_filtered == ids[np.argmax(sizes)]).astype("uint8")
    # 2. Apply binary closing.
    az_filtered = np.logical_or(az_filtered, binary_closing(az_filtered, iterations=4)).astype("uint8")

    # Save the result.
    with h5py.File(out_path, "a") as f:
        f.create_dataset("filtered_az", data=az_filtered, compression="gzip")


def main():
    files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True))
    for ff in tqdm(files):
        filter_az(ff)


if __name__ == "__main__":
    main()
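
For readers skimming the commit, here is a small, self-contained sketch of the masking and postprocessing logic in filter_az, run on synthetic arrays instead of tomogram data. The shapes, threshold, and stand-in masks below are illustrative assumptions only, not part of the commit:

import numpy as np
from scipy.ndimage import binary_closing
from skimage.measure import label

# Synthetic stand-ins for the ridge filter response, AZ prediction and vesicle mask.
rng = np.random.default_rng(0)
hmap = rng.random((32, 32))                   # stand-in for the sato/edge filter output
az = np.zeros((32, 32), dtype=bool)
az[8:24, 8:24] = True                         # stand-in for the AZ prediction
vesicles = np.zeros((32, 32), dtype="uint8")
vesicles[20:24, 20:24] = 1                    # stand-in vesicle segmentation

# Combine masks as in filter_az: high ridge response, inside the AZ, outside vesicles.
az_filtered = (hmap > 0.5) & az & (vesicles == 0)

# Keep only the largest connected component, then union with a binary closing.
labeled = label(az_filtered)
ids, sizes = np.unique(labeled, return_counts=True)
ids, sizes = ids[1:], sizes[1:]               # drop the background label 0
az_filtered = (labeled == ids[np.argmax(sizes)])
az_filtered = np.logical_or(az_filtered, binary_closing(az_filtered, iterations=4)).astype("uint8")
print(az_filtered.shape, az_filtered.sum())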

synaptic_reconstruction/ground_truth/shape_refinement.py

Lines changed: 3 additions & 3 deletions
@@ -35,7 +35,7 @@ def _sato_filter(raw, sigma, max_window=16):
 def edge_filter(
     data: np.ndarray,
     sigma: float,
-    method: str = "sobel",
+    method: str = "sato",
     per_slice: bool = True,
     n_threads: Optional[int] = None,
 ) -> np.ndarray:
@@ -60,11 +60,11 @@ def edge_filter(
     if method in FILTERS[1:] and vigra is None:
         raise ValueError(f"Filter {method} requires vigra.")

-    if per_slice and data.ndim == 2:
+    if per_slice and data.ndim == 3:
         n_threads = mp.cpu_count() if n_threads is None else n_threads
         filter_func = partial(edge_filter, sigma=sigma, method=method, per_slice=False)
         with futures.ThreadPoolExecutor(n_threads) as tp:
-            edge_map = tp.map(filter_func, data)
+            edge_map = list(tp.map(filter_func, data))
         edge_map = np.stack(edge_map)
         return edge_map
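
A short note on the two fixes above (not part of the diff itself): per-slice filtering iterates over the 2D slices of a 3D volume, so the guard is corrected from data.ndim == 2 to data.ndim == 3; and ThreadPoolExecutor.map returns a lazy iterator rather than a list, while np.stack expects a proper sequence of arrays (recent NumPy versions reject a bare iterator), which is presumably why the results are materialized with list(...). A minimal sketch of that pattern, with a hypothetical _square worker standing in for the edge filter:

from concurrent import futures

import numpy as np


def _square(block):
    return block * block


# Stand-in for the per-slice case: a list of 2D slices from a 3D volume.
slices = [np.full((4, 4), i, dtype="float32") for i in range(3)]

with futures.ThreadPoolExecutor(2) as tp:
    # Executor.map returns a lazy iterator; materialize it with list(...) so that
    # all slices are computed here and np.stack receives an actual sequence.
    results = list(tp.map(_square, slices))

stacked = np.stack(results)
print(stacked.shape)  # (3, 4, 4)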
