Clean up #76

Merged: 4 commits, Dec 6, 2024
2 changes: 1 addition & 1 deletion .github/workflows/build_docs.yaml
@@ -38,7 +38,7 @@ jobs:

- name: Generate Documentation
shell: bash -l {0}
run: pdoc synaptic_reconstruction -d google -o doc/
run: pdoc synapse_net -d google -o doc/

- name: Verify Documentation Output
run: ls -la doc/
3 changes: 2 additions & 1 deletion .gitignore
@@ -10,4 +10,5 @@ slurm/
scripts/cooper/evaluation_results/
scripts/cooper/training/copy_testset.py
scripts/rizzoli/upsample_data.py
scripts/cooper/training/find_rec_testset.py
scripts/cooper/training/find_rec_testset.py
synapse-net-models/
2 changes: 1 addition & 1 deletion build_doc.py
@@ -11,6 +11,6 @@

if args.out:
cmd.extend(["--out", "tmp/"])
cmd.append("synaptic_reconstruction")
cmd.append("synapse_net")

run(cmd)
4 changes: 2 additions & 2 deletions examples/domain_adaptation.py
@@ -13,8 +13,8 @@
from glob import glob

from sklearn.model_selection import train_test_split
from synaptic_reconstruction.training import mean_teacher_adaptation
from synaptic_reconstruction.tools.util import get_model_path
from synapse_net.training import mean_teacher_adaptation
from synapse_net.tools.util import get_model_path


def main():
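Most of this PR is the package rename from `synaptic_reconstruction` to `synapse_net`, applied to every import in the examples and scripts. A minimal sketch of the corresponding update in downstream code, using only module paths that appear in the hunk above (it assumes the renamed package is installed; no new functions are introduced):

```python
# Minimal sketch of the import update this PR applies across the repository.
# Old imports, pre-rename (kept here as comments for comparison):
#   from synaptic_reconstruction.training import mean_teacher_adaptation
#   from synaptic_reconstruction.tools.util import get_model_path

# New imports after the rename to synapse_net:
from synapse_net.training import mean_teacher_adaptation
from synapse_net.tools.util import get_model_path

# Both names should now resolve from synapse_net submodules.
print(mean_teacher_adaptation.__module__)
print(get_model_path.__module__)
```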
2 changes: 1 addition & 1 deletion examples/network_training.py
@@ -12,7 +12,7 @@
from glob import glob

from sklearn.model_selection import train_test_split
from synaptic_reconstruction.training import supervised_training
from synapse_net.training import supervised_training


def main():
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools>=64.0", "wheel"]
build-backend = "setuptools.build_meta"
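The new `pyproject.toml` only declares the setuptools build backend, which is enough for a PEP 517 build or an editable install of the renamed package. A small, hypothetical smoke test after installing it (for example with `pip install -e .`; the check below is illustrative, not part of this PR):

```python
# Hypothetical post-install check for the renamed package.
import importlib

pkg = importlib.import_module("synapse_net")  # new package name introduced in this PR
print("synapse_net imported from:", pkg.__file__)

# Once the rename is complete, the old top-level name should no longer resolve.
try:
    importlib.import_module("synaptic_reconstruction")
    print("old package name still importable (leftover install?)")
except ImportError:
    print("old package name 'synaptic_reconstruction' is gone, as expected")
```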
5 changes: 5 additions & 0 deletions scripts/README.md
@@ -0,0 +1,5 @@
# SynapseNet Scripts

This folder contains scripts for several different projects that were either part of the SynapseNet publication, or that are part of a collaboration with other research groups.

We will add an overview of the different scripts soon.
6 changes: 3 additions & 3 deletions scripts/cooper/analysis/active_zone_analysis.py
@@ -8,9 +8,9 @@

from scipy.ndimage import binary_closing
from skimage.measure import label
from synaptic_reconstruction.ground_truth.shape_refinement import edge_filter
from synaptic_reconstruction.morphology import skeletonize_object
from synaptic_reconstruction.distance_measurements import measure_segmentation_to_object_distances
from synapse_net.ground_truth.shape_refinement import edge_filter
from synapse_net.morphology import skeletonize_object
from synapse_net.distance_measurements import measure_segmentation_to_object_distances
from tqdm import tqdm

from compute_skeleton_area import calculate_surface_area
2 changes: 1 addition & 1 deletion scripts/cooper/analysis/check_size_export.py
@@ -2,7 +2,7 @@


def test_export():
from synaptic_reconstruction.imod.to_imod import write_segmentation_to_imod_as_points
from synapse_net.imod.to_imod import write_segmentation_to_imod_as_points
from subprocess import run

mrc_path = "20241108_3D_Imig_DATA_2014/!_M13DKO_TOMO_DATA_Imig2014_mrc-mod-FM/A_M13DKO_080212_CTRL4.8_crop/A_M13DKO_080212_CTRL4.8_crop.mrc" # noqa
2 changes: 1 addition & 1 deletion scripts/cooper/analysis/export_az_to_imod.py
@@ -7,7 +7,7 @@
import h5py
import pandas as pd

from synaptic_reconstruction.imod.to_imod import write_segmentation_to_imod
from synapse_net.imod.to_imod import write_segmentation_to_imod
from scipy.ndimage import binary_dilation, binary_closing


37 changes: 37 additions & 0 deletions scripts/cooper/analysis/export_vesicles_to_imod.py
@@ -0,0 +1,37 @@
import os
from glob import glob

import h5py

from synapse_net.imod.to_imod import write_segmentation_to_imod_as_points


def export_all_to_imod(check_input=True, check_export=True):
files = sorted(glob("./proofread_az/**/*.h5", recursive=True))
mrc_root = "./mrc_files"
output_folder = "./vesicle_export"

for ff in files:
ds, fname = os.path.split(ff)
ds = os.path.basename(ds)
out_folder = os.path.join(output_folder, ds)
out_path = os.path.join(out_folder, fname.replace(".h5", ".mod"))
if os.path.exists(out_path):
continue

os.makedirs(out_folder, exist_ok=True)
mrc_path = os.path.join(mrc_root, ds, fname.replace(".h5", ".rec"))
assert os.path.exists(mrc_path), mrc_path

with h5py.File(ff, "r") as f:
seg = f["vesicles"][:]

write_segmentation_to_imod_as_points(mrc_path, seg, out_path, min_radius=7, radius_factor=0.7)


def main():
export_all_to_imod()


if __name__ == "__main__":
main()
2 changes: 1 addition & 1 deletion scripts/cooper/analysis/measure_distances.py
@@ -5,7 +5,7 @@
import pandas as pd
from tqdm import tqdm

from synaptic_reconstruction.distance_measurements import measure_segmentation_to_object_distances
from synapse_net.distance_measurements import measure_segmentation_to_object_distances


RESOLUTION = (1.554,) * 3
2 changes: 1 addition & 1 deletion scripts/cooper/analysis/measure_vesicle_sizes.py
@@ -5,7 +5,7 @@
import pandas as pd
from tqdm import tqdm

from synaptic_reconstruction.imod.to_imod import convert_segmentation_to_spheres
from synapse_net.imod.to_imod import convert_segmentation_to_spheres

RESOLUTION = (1.554,) * 3

4 changes: 2 additions & 2 deletions scripts/cooper/analysis/proofread_bad_azs.py
@@ -8,8 +8,8 @@

from magicgui import magicgui
from tqdm import tqdm
from synaptic_reconstruction.morphology import skeletonize_object
from synaptic_reconstruction.ground_truth.shape_refinement import edge_filter
from synapse_net.morphology import skeletonize_object
from synapse_net.ground_truth.shape_refinement import edge_filter


def proofread_az(raw_path, seg_path):
2 changes: 1 addition & 1 deletion scripts/cooper/export_mask_to_imod.py
@@ -1,6 +1,6 @@
import argparse

from synaptic_reconstruction.imod.to_imod import write_segmentation_to_imod
from synapse_net.imod.to_imod import write_segmentation_to_imod


def export_mask_to_imod(args):
2 changes: 1 addition & 1 deletion scripts/cooper/export_vesicles_to_imod.py
@@ -1,7 +1,7 @@
import argparse
from functools import partial

from synaptic_reconstruction.imod.to_imod import export_helper, write_segmentation_to_imod_as_points
from synapse_net.imod.to_imod import export_helper, write_segmentation_to_imod_as_points


def export_vesicles_to_imod(args):
2 changes: 1 addition & 1 deletion scripts/cooper/extract_mask_from_imod.py
@@ -2,7 +2,7 @@
import os
from pathlib import Path

from synaptic_reconstruction.imod.export import get_label_names, export_segmentation
from synapse_net.imod.export import get_label_names, export_segmentation


def extract_mask_from_imod(input_path, mod_file, name, output_folder, interpolation):
2 changes: 1 addition & 1 deletion scripts/cooper/full_reconstruction/segment_compartments.py
@@ -2,7 +2,7 @@
from glob import glob

import h5py
from synaptic_reconstruction.inference.compartments import segment_compartments
from synapse_net.inference.compartments import segment_compartments
from tqdm import tqdm

ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/04_full_reconstruction" # noqa
2 changes: 1 addition & 1 deletion scripts/cooper/full_reconstruction/segment_mitochondria.py
@@ -2,7 +2,7 @@
from glob import glob

import h5py
from synaptic_reconstruction.inference.mitochondria import segment_mitochondria
from synapse_net.inference.mitochondria import segment_mitochondria
from tqdm import tqdm

ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/04_full_reconstruction" # noqa
2 changes: 1 addition & 1 deletion scripts/cooper/ground_truth/2D-data/extract_vesicles.py
@@ -6,7 +6,7 @@
import numpy as np
from elf.io import open_file
from magicgui import magicgui
from synaptic_reconstruction.imod.export import export_point_annotations
from synapse_net.imod.export import export_point_annotations

EXPORT_FOLDER = "./exported"

4 changes: 2 additions & 2 deletions scripts/cooper/ground_truth/az/proofread_az.py
@@ -8,8 +8,8 @@
from magicgui import magicgui

from scipy.ndimage import binary_dilation, binary_closing
from synaptic_reconstruction.morphology import skeletonize_object
from synaptic_reconstruction.ground_truth.shape_refinement import edge_filter
from synapse_net.morphology import skeletonize_object
from synapse_net.ground_truth.shape_refinement import edge_filter


def proofread_az(raw_path, seg_path):
1 change: 1 addition & 0 deletions scripts/cooper/ground_truth/compartments/.gitignore
@@ -0,0 +1 @@
annotations/
@@ -4,7 +4,7 @@
import h5py
import napari

from synaptic_reconstruction.inference.compartments import _segment_compartments_3d
from synapse_net.inference.compartments import _segment_compartments_3d


def check_pred(path, pred_path, name):
@@ -10,7 +10,7 @@
from skimage.measure import label, regionprops
from skimage.morphology import remove_small_holes
from skimage.segmentation import watershed
from synaptic_reconstruction.ground_truth.shape_refinement import edge_filter
from synapse_net.ground_truth.shape_refinement import edge_filter
from tqdm import tqdm


4 changes: 2 additions & 2 deletions scripts/cooper/ground_truth/compartments/run_prediction_04.py
@@ -4,8 +4,8 @@
import h5py
from tqdm import tqdm

from synaptic_reconstruction.inference.util import _Scaler
from synaptic_reconstruction.inference.compartments import segment_compartments
from synapse_net.inference.util import _Scaler
from synapse_net.inference.compartments import segment_compartments

INPUT_ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/ground_truth/04Dataset_for_vesicle_eval" # noqa
# MODEL_PATH = "/mnt/lustre-emmy-hdd/projects/nim00007/compartment_models/compartment_model_3d.pt"
2 changes: 1 addition & 1 deletion scripts/cooper/ground_truth/explore_imod_annotations.py
@@ -4,7 +4,7 @@
from glob import glob

import numpy as np
from synaptic_reconstruction.imod import get_label_names
from synapse_net.imod import get_label_names
from tqdm import tqdm

ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/original_imod_data/20240909_cp_datatransfer" # noqa
2 changes: 1 addition & 1 deletion scripts/cooper/ground_truth/vesicles/compare_version.py
@@ -3,7 +3,7 @@


def main():
root_old = "/scratch-grete/projects/nim00007/data/synaptic_reconstruction/train_data_cooper"
root_old = "/scratch-grete/projects/nim00007/data/synapse_net/train_data_cooper"
root_new = "/projects/extern/nhr/nhr_ni/nim00007/dir.project/data/synaptic-reconstruction/cooper/original_imod_data/20240909_cp_datatransfer" # noqa

old_folders = sorted(glob(os.path.join(root_old, "0*"))) + sorted(glob(os.path.join(root_old, "1*")))
2 changes: 1 addition & 1 deletion scripts/cooper/ground_truth/vesicles/vesicle_extraction.py
@@ -2,7 +2,7 @@
from glob import glob
from pathlib import Path

from synaptic_reconstruction.ground_truth import extract_vesicle_training_data
from synapse_net.ground_truth import extract_vesicle_training_data

ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/original_imod_data/20240909_cp_datatransfer" # noqa
OUT_ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/extracted/20240909_cp_datatransfer" # noqa
10 changes: 5 additions & 5 deletions scripts/cooper/ground_truth/vesicles/vesicle_postprocessing.py
@@ -9,12 +9,12 @@
from scipy.ndimage import binary_dilation
from tqdm import tqdm

from synaptic_reconstruction.inference.vesicles import segment_vesicles
# from synaptic_reconstruction.ground_truth import find_additional_objects
from synaptic_reconstruction.inference.util import _get_file_paths
from synaptic_reconstruction.ground_truth.shape_refinement import refine_vesicle_shapes, edge_filter
from synapse_net.inference.vesicles import segment_vesicles
# from synapse_net.ground_truth import find_additional_objects
from synapse_net.inference.util import _get_file_paths
from synapse_net.ground_truth.shape_refinement import refine_vesicle_shapes, edge_filter

MODEL_PATH = "/scratch-grete/projects/nim00007/data/synaptic_reconstruction/models/cooper/vesicles/3D-UNet-for-Vesicle-Segmentation-vesicles-010508model_v1r45_0105mr45_0105mr45.zip" # noqa
MODEL_PATH = "/scratch-grete/projects/nim00007/data/synapse_net/models/cooper/vesicles/3D-UNet-for-Vesicle-Segmentation-vesicles-010508model_v1r45_0105mr45_0105mr45.zip" # noqa


def extract_gt_bounding_box(raw, vesicle_gt, halo=[2, 32, 32]):
4 changes: 2 additions & 2 deletions scripts/cooper/run_compartment_segmentation.py
@@ -1,8 +1,8 @@
import argparse
from functools import partial

from synaptic_reconstruction.inference.compartments import segment_compartments
from synaptic_reconstruction.inference.util import inference_helper, parse_tiling
from synapse_net.inference.compartments import segment_compartments
from synapse_net.inference.util import inference_helper, parse_tiling


def run_compartment_segmentation(args):
4 changes: 2 additions & 2 deletions scripts/cooper/run_cristae_segmentation.py
@@ -1,8 +1,8 @@
import argparse
from functools import partial

from synaptic_reconstruction.inference.cristae import segment_cristae
from synaptic_reconstruction.inference.util import inference_helper, parse_tiling
from synapse_net.inference.cristae import segment_cristae
from synapse_net.inference.util import inference_helper, parse_tiling


def run_cristae_segmentation(args):
4 changes: 2 additions & 2 deletions scripts/cooper/run_mitochondria_segmentation.py
@@ -1,8 +1,8 @@
import argparse
from functools import partial

from synaptic_reconstruction.inference.mitochondria import segment_mitochondria
from synaptic_reconstruction.inference.util import inference_helper, parse_tiling
from synapse_net.inference.mitochondria import segment_mitochondria
from synapse_net.inference.util import inference_helper, parse_tiling


def run_mitochondria_segmentation(args):
4 changes: 2 additions & 2 deletions scripts/cooper/run_vesicle_segmentation.py
@@ -1,8 +1,8 @@
import argparse
from functools import partial

from synaptic_reconstruction.inference.vesicles import segment_vesicles
from synaptic_reconstruction.inference.util import inference_helper, parse_tiling
from synapse_net.inference.vesicles import segment_vesicles
from synapse_net.inference.util import inference_helper, parse_tiling


def run_vesicle_segmentation(args):
8 changes: 4 additions & 4 deletions scripts/cooper/training/train_AZ.py
@@ -8,11 +8,11 @@

from sklearn.model_selection import train_test_split

from synaptic_reconstruction.training import supervised_training
from synaptic_reconstruction.training import semisupervised_training
from synapse_net.training import supervised_training
from synapse_net.training import semisupervised_training

TRAIN_ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/exported_imod_objects"
OUTPUT_ROOT = "/mnt/lustre-emmy-hdd/usr/u12095/synaptic_reconstruction/training_AZ_v1"
OUTPUT_ROOT = "/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/training_AZ_v1"


def _require_train_val_test_split(datasets):
@@ -112,7 +112,7 @@ def train(key, ignore_label = None, training_2D = False, testset = True):
sampler = torch_em.data.sampler.MinInstanceSampler(min_num_instances=1),
n_samples_train=None, n_samples_val=25,
check=check,
save_root="/mnt/lustre-emmy-hdd/usr/u12095/synaptic_reconstruction/AZ_models",
save_root="/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/AZ_models",
n_iterations=int(5e3),
ignore_label= ignore_label,
label_transform=torch_em.transform.label.labels_to_binary,
2 changes: 1 addition & 1 deletion scripts/cooper/training/train_compartments.py
@@ -11,7 +11,7 @@
from skimage.morphology import disk
from scipy.ndimage import binary_dilation, distance_transform_edt

from synaptic_reconstruction.training import supervised_training
from synapse_net.training import supervised_training

TRAIN_ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/ground_truth/compartments"
# TRAIN_ROOT = "/home/pape/Work/my_projects/synaptic-reconstruction/scripts/cooper/ground_truth/compartments/output/compartment_gt" # noqa
8 changes: 4 additions & 4 deletions scripts/cooper/training/train_vesicles.py
@@ -5,11 +5,11 @@

from sklearn.model_selection import train_test_split

from synaptic_reconstruction.training import supervised_training
from synaptic_reconstruction.training import semisupervised_training
from synapse_net.training import supervised_training
from synapse_net.training import semisupervised_training

TRAIN_ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/vesicles_processed_v2"
OUTPUT_ROOT = "/mnt/lustre-emmy-hdd/usr/u12095/synaptic_reconstruction/training_v2"
OUTPUT_ROOT = "/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/training_v2"


def _require_train_val_test_split(datasets):
@@ -111,7 +111,7 @@ def train(key, ignore_label = None, training_2D = False, testset = True):
patch_shape=patch_shape, batch_size=batch_size,
n_samples_train=None, n_samples_val=25,
check=check,
save_root="/mnt/lustre-emmy-hdd/usr/u12095/synaptic_reconstruction/models_v2",
save_root="/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/models_v2",
ignore_label= ignore_label,
)
