From f08ad89f2a697547f9834aba32d5f6f832c93c08 Mon Sep 17 00:00:00 2001 From: Constantin Pape Date: Thu, 17 Apr 2025 09:27:18 +0200 Subject: [PATCH 01/29] Add SBD to CryoVesNet evaluation --- .../baselines/cryo_ves_net/evaluate_cooper.py | 28 +++++++++++++++---- .../baselines/cryo_ves_net/evaluate_cryo.py | 15 ++++++++-- .../cryo_ves_net/evaluate_endbulb.py | 20 +++++++++---- .../cryo_ves_net/evaluate_inner_ear.py | 20 +++++++++---- 4 files changed, 63 insertions(+), 20 deletions(-) diff --git a/scripts/baselines/cryo_ves_net/evaluate_cooper.py b/scripts/baselines/cryo_ves_net/evaluate_cooper.py index ed123f4..71e1ff6 100644 --- a/scripts/baselines/cryo_ves_net/evaluate_cooper.py +++ b/scripts/baselines/cryo_ves_net/evaluate_cooper.py @@ -6,6 +6,7 @@ import numpy as np import pandas as pd from elf.evaluation.matching import matching +from elf.evaluation.dice import symmetric_best_dice_score from tqdm import tqdm INPUT_ROOT = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/vesicles_processed_v2/testsets" # noqa @@ -25,11 +26,11 @@ ] -def evaluate_dataset(ds_name): +def evaluate_dataset(ds_name, force): result_folder = "./results/cooper" os.makedirs(result_folder, exist_ok=True) result_path = os.path.join(result_folder, f"{ds_name}.csv") - if os.path.exists(result_path): + if os.path.exists(result_path) and not force: results = pd.read_csv(result_path) return results @@ -44,6 +45,9 @@ def evaluate_dataset(ds_name): mask_key = None pred_files = sorted(glob(os.path.join(OUTPUT_ROOT, ds_name, "**/*.h5"), recursive=True)) + if ds_name == "04": + pred_names = [os.path.basename(path) for path in pred_files] + input_files = [path for path in input_files if os.path.basename(path) in pred_names] assert len(input_files) == len(pred_files), f"{len(input_files)}, {len(pred_files)}" results = { @@ -52,12 +56,13 @@ def evaluate_dataset(ds_name): "precision": [], "recall": [], "f1-score": [], + "sbd-score": [], } for inf, predf in tqdm(zip(input_files, pred_files), total=len(input_files), desc=f"Evaluate {ds_name}"): fname = os.path.basename(inf) sub_res_path = os.path.join(result_folder, f"{ds_name}_{fname}.json") - if os.path.exists(sub_res_path): + if os.path.exists(sub_res_path) and not force: print("Loading scores from", sub_res_path) with open(sub_res_path, "r") as f: scores = json.load(f) @@ -89,6 +94,8 @@ def evaluate_dataset(ds_name): gt[mask == 0] = 0 scores = matching(seg, gt) + sbd_score = symmetric_best_dice_score(seg, gt) + scores["sbd"] = sbd_score with open(sub_res_path, "w") as f: json.dump(scores, f) @@ -98,6 +105,7 @@ def evaluate_dataset(ds_name): results["precision"].append(scores["precision"]) results["recall"].append(scores["recall"]) results["f1-score"].append(scores["f1"]) + results["sbd-score"].append(scores["sbd"]) results = pd.DataFrame(results) results.to_csv(result_path, index=False) @@ -105,9 +113,11 @@ def evaluate_dataset(ds_name): def main(): + force = False + all_results = {} for ds in DATASETS: - result = evaluate_dataset(ds) + result = evaluate_dataset(ds, force=force) all_results[ds] = result groups = { @@ -123,16 +133,24 @@ def main(): } for name, datasets in groups.items(): - f1_scores = [] + f1_scores, sbd_scores = [], [] for ds in datasets: this_f1_scores = all_results[ds]["f1-score"].values.tolist() + this_sbd_scores = all_results[ds]["sbd-score"].values.tolist() f1_scores.extend(this_f1_scores) + sbd_scores.extend(this_sbd_scores) mean_f1 = np.mean(f1_scores) std_f1 = np.std(f1_scores) + print("F1-Score") print(name, ":", mean_f1, "+-", 
std_f1) + mean_sbd = np.mean(sbd_scores) + std_sbd = np.std(sbd_scores) + print("SBD-Score") + print(name, ":", mean_sbd, "+-", std_sbd) + if __name__ == "__main__": main() diff --git a/scripts/baselines/cryo_ves_net/evaluate_cryo.py b/scripts/baselines/cryo_ves_net/evaluate_cryo.py index 45da0d0..968c72b 100644 --- a/scripts/baselines/cryo_ves_net/evaluate_cryo.py +++ b/scripts/baselines/cryo_ves_net/evaluate_cryo.py @@ -4,17 +4,18 @@ import h5py import pandas as pd from elf.evaluation.matching import matching +from elf.evaluation.dice import symmetric_best_dice_score INPUT_FOLDER = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/fernandez-busnadiego/vesicle_gt/v3" # noqa OUTPUT_FOLDER = "./predictions/cryo" -def evaluate_dataset(ds_name="cryo"): +def evaluate_dataset(ds_name="cryo", force=False): result_folder = "./results/cryo" os.makedirs(result_folder, exist_ok=True) result_path = os.path.join(result_folder, f"{ds_name}.csv") - if os.path.exists(result_path): + if os.path.exists(result_path) and not force: results = pd.read_csv(result_path) return results @@ -28,6 +29,7 @@ def evaluate_dataset(ds_name="cryo"): "precision": [], "recall": [], "f1-score": [], + "sbd-score": [], } for inf, predf in zip(input_files, pred_files): fname = os.path.basename(inf) @@ -39,12 +41,15 @@ def evaluate_dataset(ds_name="cryo"): assert gt.shape == seg.shape scores = matching(seg, gt) + sbd_score = symmetric_best_dice_score(seg, gt) + scores["sbd"] = sbd_score results["dataset"].append(ds_name) results["file"].append(fname) results["precision"].append(scores["precision"]) results["recall"].append(scores["recall"]) results["f1-score"].append(scores["f1"]) + results["sbd-score"].append(scores["sbd"]) results = pd.DataFrame(results) results.to_csv(result_path, index=False) @@ -52,9 +57,13 @@ def evaluate_dataset(ds_name="cryo"): def main(): - result = evaluate_dataset() + force = False + result = evaluate_dataset(force=force) print(result) + print("F1-Score") print(result["f1-score"].mean()) + print("SBD-Score") + print(result["sbd-score"].mean()) if __name__ == "__main__": diff --git a/scripts/baselines/cryo_ves_net/evaluate_endbulb.py b/scripts/baselines/cryo_ves_net/evaluate_endbulb.py index c30e4b1..ad44eb3 100644 --- a/scripts/baselines/cryo_ves_net/evaluate_endbulb.py +++ b/scripts/baselines/cryo_ves_net/evaluate_endbulb.py @@ -4,17 +4,19 @@ import h5py import pandas as pd from elf.evaluation.matching import matching +from elf.evaluation.dice import symmetric_best_dice_score +from tqdm import tqdm INPUT_FOLDER = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/wichmann/extracted/endbulb_of_held/Automatische_Segmentierung_Dataset_Validierung" # noqa OUTPUT_FOLDER = "./predictions/endbulb" -def evaluate_dataset(ds_name="endbulb"): +def evaluate_dataset(ds_name="endbulb", force=False): result_folder = "./results/endbulb" os.makedirs(result_folder, exist_ok=True) result_path = os.path.join(result_folder, f"{ds_name}.csv") - if os.path.exists(result_path): + if os.path.exists(result_path) and not force: results = pd.read_csv(result_path) return results @@ -28,8 +30,9 @@ def evaluate_dataset(ds_name="endbulb"): "precision": [], "recall": [], "f1-score": [], + "sbd-score": [], } - for inf, predf in zip(input_files, pred_files): + for inf, predf in tqdm(zip(input_files, pred_files), total=len(input_files), desc="Run evaluation"): fname = os.path.basename(inf) with h5py.File(inf, "r") as f: @@ -39,12 +42,14 @@ def evaluate_dataset(ds_name="endbulb"): assert gt.shape == 
seg.shape scores = matching(seg, gt) + sbd_score = symmetric_best_dice_score(seg, gt) results["dataset"].append(ds_name) results["file"].append(fname) results["precision"].append(scores["precision"]) results["recall"].append(scores["recall"]) results["f1-score"].append(scores["f1"]) + results["sbd-score"].append(sbd_score) results = pd.DataFrame(results) results.to_csv(result_path, index=False) @@ -52,11 +57,14 @@ def evaluate_dataset(ds_name="endbulb"): def main(): - result = evaluate_dataset() + force = False + result = evaluate_dataset(force=force) print(result) print() - print(result["f1-score"].mean()) - print(result["f1-score"].std()) + print("F1-Score") + print(result["f1-score"].mean(), "+-", result["f1-score"].std()) + print("SBD-Score") + print(result["sbd-score"].mean(), "+-", result["sbd-score"].std()) if __name__ == "__main__": diff --git a/scripts/baselines/cryo_ves_net/evaluate_inner_ear.py b/scripts/baselines/cryo_ves_net/evaluate_inner_ear.py index 930cfc9..626ffad 100644 --- a/scripts/baselines/cryo_ves_net/evaluate_inner_ear.py +++ b/scripts/baselines/cryo_ves_net/evaluate_inner_ear.py @@ -4,17 +4,19 @@ import h5py import pandas as pd from elf.evaluation.matching import matching +from elf.evaluation.dice import symmetric_best_dice_score +from tqdm import tqdm INPUT_FOLDER = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/moser/vesicle_gt" # noqa OUTPUT_FOLDER = "./predictions/inner_ear" -def evaluate_dataset(ds_name="inner_ear"): +def evaluate_dataset(ds_name="inner_ear", force=False): result_folder = "./results/inner_ear" os.makedirs(result_folder, exist_ok=True) result_path = os.path.join(result_folder, f"{ds_name}.csv") - if os.path.exists(result_path): + if os.path.exists(result_path) and not force: results = pd.read_csv(result_path) return results @@ -28,8 +30,9 @@ def evaluate_dataset(ds_name="inner_ear"): "precision": [], "recall": [], "f1-score": [], + "sbd-score": [], } - for inf, predf in zip(input_files, pred_files): + for inf, predf in tqdm(zip(input_files, pred_files), total=len(input_files), desc="Run evaluation"): fname = os.path.basename(inf) with h5py.File(inf, "r") as f: @@ -39,12 +42,14 @@ def evaluate_dataset(ds_name="inner_ear"): assert gt.shape == seg.shape scores = matching(seg, gt) + sbd_score = symmetric_best_dice_score(seg, gt) results["dataset"].append(ds_name) results["file"].append(fname) results["precision"].append(scores["precision"]) results["recall"].append(scores["recall"]) results["f1-score"].append(scores["f1"]) + results["sbd-score"].append(sbd_score) results = pd.DataFrame(results) results.to_csv(result_path, index=False) @@ -52,11 +57,14 @@ def evaluate_dataset(ds_name="inner_ear"): def main(): - result = evaluate_dataset() + force = False + result = evaluate_dataset(force=force) print(result) print() - print(result["f1-score"].mean()) - print(result["f1-score"].std()) + print("F1-Score") + print(result["f1-score"].mean(), "+-", result["f1-score"].std()) + print("SBD-Score") + print(result["sbd-score"].mean(), "+-", result["sbd-score"].std()) if __name__ == "__main__": From 7dd696284b4f91681493eae273a3fd325d10ae5e Mon Sep 17 00:00:00 2001 From: Constantin Pape Date: Sun, 25 May 2025 14:22:11 +0200 Subject: [PATCH 02/29] Work on revision of AZ model WIP --- scripts/cooper/revision/generate_az_eval_data.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 scripts/cooper/revision/generate_az_eval_data.py diff --git a/scripts/cooper/revision/generate_az_eval_data.py 
b/scripts/cooper/revision/generate_az_eval_data.py new file mode 100644 index 0000000..c5fdcd2 --- /dev/null +++ b/scripts/cooper/revision/generate_az_eval_data.py @@ -0,0 +1,13 @@ +from synapse_net.sample_data import get_sample_data +from synapse_net.inference import run_segmentation, get_model +from elf.io import open_file + + +sample_data = get_sample_data("tem_tomo") +tomo = open_file(sample_data, "r")["data"][:] + +model = get_model("active_zone") +seg = run_segmentation(tomo, model, "active_zone") + +with open_file("./pred.h5", "a") as f: + f.create_dataset("pred", data=seg, compression="gzip") From 39d69d5af9bdac0f8b17165923290447d4641da8 Mon Sep 17 00:00:00 2001 From: Constantin Pape Date: Sun, 25 May 2025 16:00:42 +0200 Subject: [PATCH 03/29] Implement AZ evaluation WIP --- .../cooper/revision/generate_az_eval_data.py | 28 +++++-- scripts/cooper/revision/run_az_evaluation.py | 65 +++++++++++++++ synapse_net/ground_truth/az_evaluation.py | 79 +++++++++++++++++++ 3 files changed, 167 insertions(+), 5 deletions(-) create mode 100644 scripts/cooper/revision/run_az_evaluation.py create mode 100644 synapse_net/ground_truth/az_evaluation.py diff --git a/scripts/cooper/revision/generate_az_eval_data.py b/scripts/cooper/revision/generate_az_eval_data.py index c5fdcd2..c0df1b7 100644 --- a/scripts/cooper/revision/generate_az_eval_data.py +++ b/scripts/cooper/revision/generate_az_eval_data.py @@ -1,13 +1,31 @@ from synapse_net.sample_data import get_sample_data -from synapse_net.inference import run_segmentation, get_model from elf.io import open_file sample_data = get_sample_data("tem_tomo") tomo = open_file(sample_data, "r")["data"][:] -model = get_model("active_zone") -seg = run_segmentation(tomo, model, "active_zone") -with open_file("./pred.h5", "a") as f: - f.create_dataset("pred", data=seg, compression="gzip") +def run_prediction(): + from synapse_net.inference import run_segmentation, get_model + + model = get_model("active_zone") + seg = run_segmentation(tomo, model, "active_zone") + + with open_file("./pred.h5", "a") as f: + f.create_dataset("pred", data=seg, compression="gzip") + + +def check_prediction(): + import napari + + with open_file("./pred.h5", "r") as f: + pred = f["pred"][:] + + v = napari.Viewer() + v.add_image(tomo) + v.add_labels(pred) + napari.run() + + +check_prediction() diff --git a/scripts/cooper/revision/run_az_evaluation.py b/scripts/cooper/revision/run_az_evaluation.py new file mode 100644 index 0000000..ab95633 --- /dev/null +++ b/scripts/cooper/revision/run_az_evaluation.py @@ -0,0 +1,65 @@ +import argparse +import os +from glob import glob + + +def _get_paths(seg_root, gt_root, image_root=None): + seg_paths = sorted(glob(os.path.join(seg_root, "**/*.h5"), recursive=True)) + gt_paths = sorted(glob(os.path.join(gt_root, "**/*.h5"), recursive=True)) + assert len(seg_paths) == len(gt_paths) + + if image_root is None: + image_paths = [None] * len(seg_paths) + else: + image_paths = sorted(glob(os.path.join(image_root, "**/*.mrc"), recursive=True)) + assert len(image_paths) == len(seg_paths) + + return seg_paths, gt_paths, image_paths + + +# TODO extend this +def run_az_evaluation(args): + from synapse_net.ground_truth.az_evaluation import az_evaluation + + seg_paths, gt_paths, _ = _get_paths(args.seg_root, args.gt_root) + result = az_evaluation(seg_paths, gt_paths, seg_key="seg", gt_key="gt") + + print(result) + + +def visualize_az_evaluation(args): + from elf.visualisation.metric_visualization import run_metric_visualization + from 
synapse_net.ground_truth.az_evaluation import _postprocess + from elf.io import open_file + + seg_paths, gt_paths, image_paths = _get_paths(args.seg_root, args.gt_root, args.image_root) + for seg_path, gt_path, image_path in zip(seg_paths, gt_paths, image_paths): + image = None if image_path is None else open_file(image_path, "r")["data"][:] + + with open_file(seg_path, "r") as f: + seg = f["seg"][:] + with open_file(gt_path, "r") as f: + gt = f["gt"][:] + + seg = _postprocess(seg, apply_cc=True, min_component_size=100) + gt = _postprocess(gt, apply_cc=True, min_component_size=100) + + run_metric_visualization(image, seg, gt) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-s", "--seg_root", required=True) + parser.add_argument("-g", "--gt_root", required=True) + parser.add_argument("-i", "--image_root") + parser.add_argument("--visualize", action="store_true") + args = parser.parse_args() + + if args.visualize: + visualize_az_evaluation(args) + else: + run_az_evaluation(args) + + +if __name__ == "__main__": + main() diff --git a/synapse_net/ground_truth/az_evaluation.py b/synapse_net/ground_truth/az_evaluation.py new file mode 100644 index 0000000..fa5a804 --- /dev/null +++ b/synapse_net/ground_truth/az_evaluation.py @@ -0,0 +1,79 @@ +import os +from typing import List + +import h5py +import pandas as pd +import numpy as np + +from elf.evaluation.matching import _compute_scores, _compute_tps +from elf.evaluation import dice_score +from skimage.measure import label +from tqdm import tqdm + + +def _postprocess(data, apply_cc, min_component_size): + if apply_cc: + data = label(data) + ids, sizes = np.unique(data, return_counts=True) + filter_ids = ids[sizes < min_component_size] + data[np.isin(data, filter_ids)] = 0 + return data + + +def _single_az_evaluation(seg, gt, apply_cc, min_component_size): + assert seg.shape == gt.shape, f"{seg.shape}, {gt.shape}" + seg = _postprocess(seg, apply_cc, min_component_size) + gt = _postprocess(gt, apply_cc, min_component_size) + + dice = dice_score(seg > 0, gt > 0) + + n_true, n_matched, n_pred, scores = _compute_scores(seg, gt, criterion="iou", ignore_label=0) + tp = _compute_tps(scores, n_matched, threshold=0.5) + fp = n_pred - tp + fn = n_true - tp + + return {"tp": tp, "fp": fp, "fn": fn, "dice": dice} + + +# TODO further post-processing? +def az_evaluation( + seg_paths: List[str], + gt_paths: List[str], + seg_key: str, + gt_key: str, + apply_cc: bool = True, + min_component_size: int = 100, # TODO +) -> pd.DataFrame: + """Evaluate active zone segmentations against ground-truth annotations. + + Args: + seg_paths: The filepaths to the segmentations, stored as hd5 files. + gt_paths: The filepaths to the ground-truth annotatons, stored as hdf5 files. + seg_key: The internal path to the data in the segmentation hdf5 file. + gt_key: The internal path to the data in the ground-truth hdf5 file. + apply_cc: Whether to apply connected components before evaluation. + min_component_size: Minimum component size for filtering the segmentation and annotations before evaluation. + + Returns: + A data frame with the evaluation results per tomogram. 
+ """ + assert len(seg_paths) == len(gt_paths) + + results = { + "tomo_name": [], + "tp": [], + "fp": [], + "fn": [], + "dice": [], + } + for seg_path, gt_path in tqdm(zip(seg_paths, gt_paths), total=len(seg_paths), desc="Run AZ Eval"): + with h5py.File(seg_path, "r") as f: + seg = f[seg_key][:] + with h5py.File(gt_path, "r") as f: + gt = f[gt_key][:] + # TODO more post-processing params + result = _single_az_evaluation(seg, gt, apply_cc, min_component_size) + results["tomo_name"].append(os.path.basename(seg_path)) + for res in ("tp", "fp", "fn", "dice"): + results[res].append(result[res]) + return pd.DataFrame(results) From d7f71a0a0fdb9c05c5fd2ee7143b00e91c175836 Mon Sep 17 00:00:00 2001 From: Constantin Pape Date: Wed, 28 May 2025 09:38:24 +0200 Subject: [PATCH 04/29] Update AZ training data --- scripts/cooper/revision/assort_new_az_data.py | 210 ++++++++++++++++++ .../cooper/revision/check_training_data.py | 37 +++ scripts/cooper/revision/eval_AZ.sh | 8 + scripts/cooper/revision/evaluate_result.py | 48 ++++ scripts/cooper/revision/prediction.py | 90 ++++++++ scripts/cooper/revision/run_az_evaluation.py | 57 +++-- scripts/cooper/revision/thin_az_gt.py | 76 +++++++ scripts/cooper/revision/train_az.py | 141 ++++++++++++ scripts/cooper/revision/visualize_AZ_eval.sh | 7 + synapse_net/ground_truth/az_evaluation.py | 174 +++++++++++++-- synapse_net/inference/compartments.py | 2 +- 11 files changed, 819 insertions(+), 31 deletions(-) create mode 100644 scripts/cooper/revision/assort_new_az_data.py create mode 100644 scripts/cooper/revision/check_training_data.py create mode 100755 scripts/cooper/revision/eval_AZ.sh create mode 100644 scripts/cooper/revision/evaluate_result.py create mode 100644 scripts/cooper/revision/prediction.py create mode 100644 scripts/cooper/revision/thin_az_gt.py create mode 100644 scripts/cooper/revision/train_az.py create mode 100755 scripts/cooper/revision/visualize_AZ_eval.sh diff --git a/scripts/cooper/revision/assort_new_az_data.py b/scripts/cooper/revision/assort_new_az_data.py new file mode 100644 index 0000000..13b7430 --- /dev/null +++ b/scripts/cooper/revision/assort_new_az_data.py @@ -0,0 +1,210 @@ +import os +from glob import glob + +import h5py +import numpy as np +from tqdm import tqdm +from skimage.transform import resize + +ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data" +INTER_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" + + +def _check_data(files, label_folder, check_thinned): + for ff in files: + with h5py.File(ff, "r") as f: + shape = f["raw"].shape + az = f["labels/az"][:] + n_az = az.max() + + if check_thinned: + label_file = os.path.join(label_folder, os.path.basename(ff)) + with h5py.File(label_file, "r") as f: + az_thin = f["labels/az_thin2"][:] + n_az_thin = az_thin.max() + else: + n_az_thin = None + + print(os.path.basename(ff), ":", shape, ":", n_az, ":", n_az_thin) + + +def assort_tem(): + old_name = "01data_withoutInvertedFiles_minusSVseg_corrected" + new_name = "tem" + + raw_folder = os.path.join(ROOT, old_name) + label_folder = os.path.join(INTER_ROOT, old_name) + output_folder = os.path.join(OUTPUT_ROOT, new_name) + os.makedirs(output_folder, exist_ok=True) + + files = glob(os.path.join(raw_folder, "*.h5")) + for ff in tqdm(files): + with h5py.File(ff, "r") as f: + raw = f["raw"][:] + az = f["labels/az"][:] + + label_path = os.path.join(label_folder, os.path.basename(ff)) + with h5py.File(label_path, "r") as f: 
+ az_thin = f["labels/az_thin2"][:] + + z_range1 = np.where(az != 0)[0] + z_range2 = np.where(az != 0)[0] + z_range = slice( + np.min(np.concatenate([z_range1, z_range2])), + np.max(np.concatenate([z_range1, z_range2])) + 1, + ) + raw, az, az_thin = raw[z_range], az[z_range], az_thin[z_range] + + out_path = os.path.join(output_folder, os.path.basename(ff)) + with h5py.File(out_path, "a") as f: + f.create_dataset("raw", data=raw, compression="lzf") + f.create_dataset("labels/az_thin", data=az_thin, compression="lzf") + f.create_dataset("labels/az", data=az, compression="lzf") + + +def assort_chemical_fixation(): + old_name = "12_chemical_fix_cryopreparation_minusSVseg_corrected" + new_name = "chemical_fixation" + + raw_folder = os.path.join(ROOT, old_name) + label_folder = os.path.join(INTER_ROOT, old_name) + output_folder = os.path.join(OUTPUT_ROOT, new_name) + os.makedirs(output_folder, exist_ok=True) + + label_key = "labels/az_thin2" + + files = glob(os.path.join(raw_folder, "*.h5")) + for ff in tqdm(files): + with h5py.File(ff, "r") as f: + raw = f["raw"][:] + az = f["labels/az"][:] + + label_path = os.path.join(label_folder, os.path.basename(ff)) + with h5py.File(label_path, "r") as f: + az_thin = f[label_key][:] + + z_range1 = np.where(az != 0)[0] + z_range2 = np.where(az != 0)[0] + z_range = slice( + np.min(np.concatenate([z_range1, z_range2])), + np.max(np.concatenate([z_range1, z_range2])) + 1, + ) + raw, az, az_thin = raw[z_range], az[z_range], az_thin[z_range] + + out_path = os.path.join(output_folder, os.path.basename(ff)) + with h5py.File(out_path, "a") as f: + f.create_dataset("raw", data=raw, compression="lzf") + f.create_dataset("labels/az_thin", data=az_thin, compression="lzf") + f.create_dataset("labels/az", data=az, compression="lzf") + + +def assort_stem(): + old_names = [ + "04_hoi_stem_examples_fidi_and_sarah_corrected", + "04_hoi_stem_examples_minusSVseg_cropped_corrected", + "06_hoi_wt_stem750_fm_minusSVseg_cropped_corrected", + ] + new_names = ["stem", "stem_cropped", "stem_cropped"] + for old_name, new_name in zip(old_names, new_names): + print(old_name) + raw_folder = os.path.join(ROOT, f"{old_name}_rescaled_tomograms") + label_folder = os.path.join(INTER_ROOT, old_name) + files = glob(os.path.join(raw_folder, "*.h5")) + + # _check_data(files, label_folder, check_thinned=True) + # continue + + output_folder = os.path.join(OUTPUT_ROOT, new_name) + os.makedirs(output_folder, exist_ok=True) + for ff in tqdm(files): + with h5py.File(ff, "r") as f: + raw = f["raw"][:] + az = f["labels/az"][:] + + label_path = os.path.join(label_folder, os.path.basename(ff)) + with h5py.File(label_path, "r") as f: + az_thin = f["labels/az_thin2"][:] + az_thin = resize(az_thin, az.shape, order=0, anti_aliasing=False, preserve_range=True).astype(az_thin.dtype) + assert az_thin.shape == az.shape + + out_path = os.path.join(output_folder, os.path.basename(ff)) + with h5py.File(out_path, "a") as f: + f.create_dataset("raw", data=raw, compression="lzf") + f.create_dataset("labels/az_thin", data=az_thin, compression="lzf") + f.create_dataset("labels/az", data=az, compression="lzf") + + +def assort_wichmann(): + old_name = "wichmann_withAZ_rescaled_tomograms" + new_name = "endbulb_of_held" + + raw_folder = os.path.join(ROOT, old_name) + output_folder = os.path.join(OUTPUT_ROOT, new_name) + os.makedirs(output_folder, exist_ok=True) + + files = glob(os.path.join(raw_folder, "*.h5")) + + output_folder = os.path.join(OUTPUT_ROOT, new_name) + os.makedirs(output_folder, exist_ok=True) + for ff in 
tqdm(files): + with h5py.File(ff, "r") as f: + raw = f["raw"][:] + az = f["labels/az"][:] + + output_file = os.path.join(output_folder, os.path.basename(ff)) + with h5py.File(output_file, "a") as f: + f.create_dataset("raw", data=raw, compression="lzf") + f.create_dataset("labels/az", data=az, compression="lzf") + f.create_dataset("labels/az_thin", data=az, compression="lzf") + + +def crop_wichmann(): + input_name = "endbulb_of_held" + output_name = "endbulb_of_held_cropped" + + input_folder = os.path.join(OUTPUT_ROOT, input_name) + output_folder = os.path.join(OUTPUT_ROOT, output_name) + os.makedirs(output_folder, exist_ok=True) + files = glob(os.path.join(input_folder, "*.h5")) + + min_shape = (32, 512, 512) + + for ff in tqdm(files): + with h5py.File(ff, "r") as f: + az = f["labels/az"][:] + bb = np.where(az != 0) + bb = tuple(slice(int(b.min()), int(b.max()) + 1) for b in bb) + pad_width = [max(sh - (b.stop - b.start), 0) // 2 for b, sh in zip(bb, min_shape)] + bb = tuple( + slice(max(b.start - pw, 0), min(b.stop + pw, sh)) for b, pw, sh in zip(bb, pad_width, az.shape) + ) + az = az[bb] + raw = f["raw"][bb] + + # import napari + # v = napari.Viewer() + # v.add_image(raw) + # v.add_labels(az) + # v.add_labels(az_thin) + # napari.run() + + output_path = os.path.join(output_folder, os.path.basename(ff).replace(".h5", "_cropped.h5")) + with h5py.File(output_path, "a") as f: + f.create_dataset("raw", data=raw, compression="lzf") + f.create_dataset("labels/az", data=az, compression="lzf") + f.create_dataset("labels/az_thin", data=az, compression="lzf") + + +def main(): + # assort_tem() + # assort_chemical_fixation() + + # assort_stem() + + # assort_wichmann() + crop_wichmann() + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/check_training_data.py b/scripts/cooper/revision/check_training_data.py new file mode 100644 index 0000000..8534cca --- /dev/null +++ b/scripts/cooper/revision/check_training_data.py @@ -0,0 +1,37 @@ +import argparse +import os +from glob import glob + +import napari +import h5py + +ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" +all_names = [ + "chemical_fixation", + "tem", + "stem", + "stem_cropped", + "endbulb_of_held", + "endbulb_of_held_cropped", +] + + +parser = argparse.ArgumentParser() +parser.add_argument("-n", "--names", nargs="+", default=all_names) +args = parser.parse_args() +names = args.names + + +for ds in names: + paths = glob(os.path.join(ROOT, ds, "*.h5")) + for p in paths: + with h5py.File(p, "r") as f: + raw = f["raw"][:] + az = f["labels/az"][:] + az_thin = f["labels/az_thin"][:] + v = napari.Viewer() + v.add_image(raw) + v.add_labels(az) + v.add_labels(az_thin) + v.title = os.path.basename(p) + napari.run() diff --git a/scripts/cooper/revision/eval_AZ.sh b/scripts/cooper/revision/eval_AZ.sh new file mode 100755 index 0000000..520e7c2 --- /dev/null +++ b/scripts/cooper/revision/eval_AZ.sh @@ -0,0 +1,8 @@ +python run_az_evaluation.py \ + -s /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/segmentations \ + -g /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data \ + --seg_key /AZ/segment_from_AZmodel_TEM_STEM_ChemFix_v1 \ + --criterion iop \ + -o v1 + # --dataset 01 \ + # --seg_key AZ/segment_from_AZmodel_TEM_STEM_ChemFix_wichmann_v2 \ diff --git a/scripts/cooper/revision/evaluate_result.py b/scripts/cooper/revision/evaluate_result.py new file mode 100644 index 0000000..b8b3fa0 --- /dev/null +++ b/scripts/cooper/revision/evaluate_result.py @@ -0,0 +1,48 @@ +import argparse +import 
pandas as pd + +parser = argparse.ArgumentParser() +parser.add_argument("result_path") +args = parser.parse_args() + +results = pd.read_excel(args.result_path) + + +def summarize_results(res): + print("Dice-Score:", res["dice"].mean(), "+-", res["dice"].std()) + tp, fp, fn = float(res["tp"].sum()), float(res["fp"].sum()), float(res["fn"].sum()) + precision = tp / (tp + fp) + recall = tp / (tp + fn) + f1_score = 2 * tp / (2 * tp + fn + fp) + print("Precision:", precision) + print("Recall:", recall) + print("F1-Score:", f1_score) + + +# # Compute the results for Chemical Fixation. +results_chem_fix = results[results.dataset.str.startswith("12")] +if results_chem_fix.size > 0: + print("Chemical Fixation Results:") + summarize_results(results_chem_fix) +# +# # Compute the results for STEM (=04). +results_stem = results[results.dataset.str.startswith(("04", "06"))] +if results_stem.size > 0: + print() + print("STEM Results:") + summarize_results(results_stem) +# +# # Compute the results for TEM (=01). +results_tem = results[results.dataset.str.startswith("01")] +if results_tem.size > 0: + print() + print("TEM Results:") + summarize_results(results_tem) + +# +# Compute the results for Wichmann. +results_wichmann = results[results.dataset.str.startswith("wichmann")] +if results_wichmann.size > 0: + print() + print("Wichmann Results:") + summarize_results(results_wichmann) diff --git a/scripts/cooper/revision/prediction.py b/scripts/cooper/revision/prediction.py new file mode 100644 index 0000000..ff5c026 --- /dev/null +++ b/scripts/cooper/revision/prediction.py @@ -0,0 +1,90 @@ +import os +from glob import glob + +import h5py +from synapse_net.inference.inference import get_model, compute_scale_from_voxel_size +from synapse_net.inference.compartments import segment_compartments +from synapse_net.inference.vesicles import segment_vesicles +from tqdm import tqdm + +ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions" +RESOLUTIONS = { + "01data_withoutInvertedFiles_minusSVseg_corrected": {"x": 1.554, "y": 1.554, "z": 1.554}, + "04_hoi_stem_examples_fidi_and_sarah_corrected": {"x": 0.8681, "y": 0.8681, "z": 0.8681}, + "04_hoi_stem_examples_fidi_and_sarah_corrected_rescaled_tomograms": {"x": 1.554, "y": 1.554, "z": 1.554}, + "04_hoi_stem_examples_minusSVseg_cropped_corrected": {"x": 0.8681, "y": 0.8681, "z": 0.8681}, + "04_hoi_stem_examples_minusSVseg_cropped_corrected_rescaled_tomograms": {"x": 1.554, "y": 1.554, "z": 1.554}, + "06_hoi_wt_stem750_fm_minusSVseg_cropped_corrected": {"x": 0.8681, "y": 0.8681, "z": 0.8681}, + "06_hoi_wt_stem750_fm_minusSVseg_cropped_corrected_rescaled_tomograms": {"x": 1.554, "y": 1.554, "z": 1.554}, + "12_chemical_fix_cryopreparation_minusSVseg_corrected": {"x": 1.554, "y": 1.554, "z": 1.554}, + "wichmann_withAZ": {"x": 1.748, "y": 1.748, "z": 1.748}, + "wichmann_withAZ_rescaled_tomograms": {"x": 1.554, "y": 1.554, "z": 1.554}, +} + + +def predict_boundaries(model, path, output_path): + output_key = "predictions/boundaries" + if os.path.exists(output_path): + with h5py.File(output_path, "r") as f: + if output_key in f: + return + + dataset = os.path.basename(os.path.split(path)[0]) + + with h5py.File(path, "r") as f: + data = f["raw"][:] + scale = compute_scale_from_voxel_size(RESOLUTIONS[dataset], "compartments") + _, pred = segment_compartments(data, model=model, scale=scale, verbose=False, return_predictions=True) + with h5py.File(output_path, "a") as f: + 
f.create_dataset(output_key, data=pred, compression="lzf") + + +def predict_all_boundaries(): + model = get_model("compartments") + files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) + for path in tqdm(files): + folder_name = os.path.basename(os.path.split(path)[0]) + output_folder = os.path.join(OUTPUT_ROOT, folder_name) + os.makedirs(output_folder, exist_ok=True) + output_path = os.path.join(output_folder, os.path.basename(path)) + predict_boundaries(model, path, output_path) + + +def predict_vesicles(model, path, output_path): + output_key = "predictions/vesicle_seg" + if os.path.exists(output_path): + with h5py.File(output_path, "r") as f: + if output_key in f: + return + + dataset = os.path.basename(os.path.split(path)[0]) + if "rescaled" in dataset: + return + + with h5py.File(path, "r") as f: + data = f["raw"][:] + scale = compute_scale_from_voxel_size(RESOLUTIONS[dataset], "vesicles_3d") + seg = segment_vesicles(data, model=model, scale=scale, verbose=False) + with h5py.File(output_path, "a") as f: + f.create_dataset(output_key, data=seg, compression="lzf") + + +def predict_all_vesicles(): + model = get_model("vesicles_3d") + files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) + for path in tqdm(files): + folder_name = os.path.basename(os.path.split(path)[0]) + output_folder = os.path.join(OUTPUT_ROOT, folder_name) + os.makedirs(output_folder, exist_ok=True) + output_path = os.path.join(output_folder, os.path.basename(path)) + predict_vesicles(model, path, output_path) + + +def main(): + # predict_all_boundaries() + predict_all_vesicles() + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/run_az_evaluation.py b/scripts/cooper/revision/run_az_evaluation.py index ab95633..7727739 100644 --- a/scripts/cooper/revision/run_az_evaluation.py +++ b/scripts/cooper/revision/run_az_evaluation.py @@ -3,56 +3,79 @@ from glob import glob -def _get_paths(seg_root, gt_root, image_root=None): +def _get_paths(seg_root, gt_root, image_root=None, dataset=None): seg_paths = sorted(glob(os.path.join(seg_root, "**/*.h5"), recursive=True)) - gt_paths = sorted(glob(os.path.join(gt_root, "**/*.h5"), recursive=True)) - assert len(seg_paths) == len(gt_paths) + if dataset is not None: + seg_paths = [path for path in seg_paths if os.path.basename(os.path.split(path)[0]).startswith(dataset)] + + gt_paths = [] + for path in seg_paths: + gt_path = os.path.join(gt_root, os.path.relpath(path, seg_root)) + assert os.path.exists(gt_path), gt_path + gt_paths.append(gt_path) if image_root is None: image_paths = [None] * len(seg_paths) else: - image_paths = sorted(glob(os.path.join(image_root, "**/*.mrc"), recursive=True)) - assert len(image_paths) == len(seg_paths) + image_paths = [] + for path in seg_paths: + im_path = os.path.join(image_root, os.path.relpath(path, seg_root)) + assert os.path.exists(im_path), im_path + image_paths.append(im_path) return seg_paths, gt_paths, image_paths -# TODO extend this def run_az_evaluation(args): from synapse_net.ground_truth.az_evaluation import az_evaluation - seg_paths, gt_paths, _ = _get_paths(args.seg_root, args.gt_root) - result = az_evaluation(seg_paths, gt_paths, seg_key="seg", gt_key="gt") + seg_paths, gt_paths, _ = _get_paths(args.seg_root, args.gt_root, dataset=args.dataset) + dataset = [os.path.basename(os.path.split(path)[0]) for path in seg_paths] + result = az_evaluation( + seg_paths, gt_paths, seg_key=args.seg_key, gt_key="/labels/az", dataset=dataset, criterion=args.criterion + ) - print(result) + if 
args.output is None: + output_path = f"./results/{args.seg_key.replace('/', '_')}.xlsx" + else: + output_path = f"./results/{args.output}.xlsx" + result.to_excel(output_path, index=False) def visualize_az_evaluation(args): from elf.visualisation.metric_visualization import run_metric_visualization - from synapse_net.ground_truth.az_evaluation import _postprocess + from synapse_net.ground_truth.az_evaluation import _postprocess, _crop from elf.io import open_file - seg_paths, gt_paths, image_paths = _get_paths(args.seg_root, args.gt_root, args.image_root) + seg_paths, gt_paths, image_paths = _get_paths(args.seg_root, args.gt_root, args.image_root, dataset=args.dataset) for seg_path, gt_path, image_path in zip(seg_paths, gt_paths, image_paths): - image = None if image_path is None else open_file(image_path, "r")["data"][:] + image = None if image_path is None else open_file(image_path, "r")["raw"][:] with open_file(seg_path, "r") as f: - seg = f["seg"][:] + seg = f[args.seg_key][:] with open_file(gt_path, "r") as f: - gt = f["gt"][:] + gt = f["/labels/az"][:] + + seg, gt, bb = _crop(seg, gt, return_bb=True) + if image is not None: + image = image[bb] - seg = _postprocess(seg, apply_cc=True, min_component_size=100) - gt = _postprocess(gt, apply_cc=True, min_component_size=100) + seg = _postprocess(seg, apply_cc=True, min_component_size=10000, iterations=3) + gt = _postprocess(gt, apply_cc=True, min_component_size=500) - run_metric_visualization(image, seg, gt) + run_metric_visualization(image, seg, gt, title=os.path.basename(seg_path), criterion=args.criterion) def main(): parser = argparse.ArgumentParser() parser.add_argument("-s", "--seg_root", required=True) parser.add_argument("-g", "--gt_root", required=True) + parser.add_argument("--seg_key", required=True) parser.add_argument("-i", "--image_root") + parser.add_argument("-o", "--output") + parser.add_argument("-c", "--criterion", default="iou") parser.add_argument("--visualize", action="store_true") + parser.add_argument("--dataset") args = parser.parse_args() if args.visualize: diff --git a/scripts/cooper/revision/thin_az_gt.py b/scripts/cooper/revision/thin_az_gt.py new file mode 100644 index 0000000..158f1ea --- /dev/null +++ b/scripts/cooper/revision/thin_az_gt.py @@ -0,0 +1,76 @@ +import argparse +import os +from glob import glob +from tqdm import tqdm + +import h5py +import napari +from synapse_net.ground_truth.az_evaluation import thin_az + +ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions" + + +def run_az_thinning(): + files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) + for ff in tqdm(files): + ds_name = os.path.basename(os.path.split(ff)[0]) + if not ds_name.startswith(("04", "06")): + continue + if "rescaled" in ds_name: + continue + + # print(ff) + ff_out = os.path.join(OUTPUT_ROOT, os.path.relpath(ff, ROOT)) + with h5py.File(ff_out, "r") as f_out, h5py.File(ff, "r") as f_in: + # if "labels/az_thin2" in f_out: + # continue + + boundary_pred = f_out["predictions/boundaries"] + vesicles = f_out["predictions/vesicle_seg"] + + tomo = f_in["raw"] + az = f_in["labels/az"][:] + + az_thin = thin_az( + az, boundary_map=boundary_pred, vesicles=vesicles, tomo=tomo, presyn_dist=8, check=True, + min_thinning_size=2500, + ) + + with h5py.File(ff_out, "a") as f: + ds = f.require_dataset("labels/az_thin2", shape=az_thin.shape, dtype=az_thin.dtype, compression="gzip") + ds[:] = az_thin + + +def check_az_thinning(): + files = 
sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) + for ff in files: + + f_out = os.path.join(OUTPUT_ROOT, os.path.relpath(ff, ROOT)) + with h5py.File(f_out, "r") as f: + if "labels/az_thin" not in f: + continue + az_thin = f["labels/az_thin2"][:] + + with h5py.File(ff, "r") as f: + tomo = f["raw"][:] + + v = napari.Viewer() + v.add_image(tomo) + v.add_labels(az_thin) + napari.run() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--check", action="store_true") + args = parser.parse_args() + + if args.check: + check_az_thinning() + else: + run_az_thinning() + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/train_az.py b/scripts/cooper/revision/train_az.py new file mode 100644 index 0000000..1777e77 --- /dev/null +++ b/scripts/cooper/revision/train_az.py @@ -0,0 +1,141 @@ +import argparse +import os +import json +from glob import glob + +import torch_em + +from sklearn.model_selection import train_test_split + +from synapse_net.training import supervised_training + +TRAIN_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" +OUTPUT_ROOT = "./models_az_thin" + + +def _require_train_val_test_split(datasets): + train_ratio, val_ratio, test_ratio = 0.70, 0.1, 0.2 + + def _train_val_test_split(names): + train, test = train_test_split(names, test_size=1 - train_ratio, shuffle=True) + _ratio = test_ratio / (test_ratio + val_ratio) + if len(test) == 2: + val, test = test[:1], test[1:] + else: + val, test = train_test_split(test, test_size=_ratio) + return train, val, test + + for ds in datasets: + print(ds) + split_path = os.path.join(OUTPUT_ROOT, f"split-{ds}.json") + if os.path.exists(split_path): + continue + + ds_root = os.path.join(TRAIN_ROOT, ds) + assert os.path.exists(ds_root), ds_root + file_paths = sorted(glob(os.path.join(ds_root, "*.h5"))) + file_names = [os.path.basename(path) for path in file_paths] + + train, val, test = _train_val_test_split(file_names) + + with open(split_path, "w") as f: + json.dump({"train": train, "val": val, "test": test}, f) + + +def _require_train_val_split(datasets): + train_ratio = 0.8 + + def _train_val_split(names): + train, val = train_test_split(names, test_size=1 - train_ratio, shuffle=True) + return train, val + + for ds in datasets: + print(ds) + split_path = os.path.join(OUTPUT_ROOT, f"split-{ds}.json") + if os.path.exists(split_path): + continue + + file_paths = sorted(glob(os.path.join(TRAIN_ROOT, ds, "*.h5"))) + file_names = [os.path.basename(path) for path in file_paths] + + train, val = _train_val_split(file_names) + + with open(split_path, "w") as f: + json.dump({"train": train, "val": val}, f) + + +def get_paths(split, datasets, testset=True): + if testset: + _require_train_val_test_split(datasets) + else: + _require_train_val_split(datasets) + + paths = [] + for ds in datasets: + split_path = os.path.join(OUTPUT_ROOT, f"split-{ds}.json") + with open(split_path) as f: + names = json.load(f)[split] + ds_paths = [os.path.join(TRAIN_ROOT, ds, name) for name in names] + assert len(ds_paths) > 0 + assert all(os.path.exists(path) for path in ds_paths) + paths.extend(ds_paths) + + return paths + + +# TODO: commit, then train with distance. 
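# --- Editor's note (illustrative sketch, not part of the original patch) ---
# The split files written by _require_train_val_test_split above are plain JSON mapping split
# names to file names; a hypothetical ./models_az_thin/split-tem.json could look like
#     {"train": ["tomo_01.h5", "tomo_02.h5"], "val": ["tomo_03.h5"], "test": ["tomo_04.h5"]}
# (file names invented for illustration). get_paths() then joins those names back onto
# TRAIN_ROOT/<dataset>, e.g. train_paths = get_paths("train", datasets=["tem"], testset=True).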
+def train(key, ignore_label=None, training_2D=False, testset=True, check=False): + + os.makedirs(OUTPUT_ROOT, exist_ok=True) + + datasets = ["tem", "chemical_fixation", "stem", "stem_cropped", "endbulb_of_held", "endbulb_of_held_cropped"] + train_paths = get_paths("train", datasets=datasets, testset=testset) + val_paths = get_paths("val", datasets=datasets, testset=testset) + + print("Start training with:") + print(len(train_paths), "tomograms for training") + print(len(val_paths), "tomograms for validation") + + # patch_shape = [48, 256, 256] + patch_shape = [48, 384, 384] + model_name = "v4" + + # checking for 2D training + if training_2D: + patch_shape = [1, 256, 256] + model_name = "2D-AZ-model-v1" + + batch_size = 2 + supervised_training( + name=model_name, + train_paths=train_paths, + val_paths=val_paths, + label_key=f"/labels/{key}", + patch_shape=patch_shape, batch_size=batch_size, + sampler=torch_em.data.sampler.MinInstanceSampler(min_num_instances=1, p_reject=0.9), + n_samples_train=None, n_samples_val=64, + check=check, + save_root=OUTPUT_ROOT, + n_iterations=int(1.5e5), + ignore_label=ignore_label, + label_transform=torch_em.transform.label.labels_to_binary, + out_channels=1, + # BCE_loss=False, + # sigmoid_layer=True, + ) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-k", "--key", help="Key ID that will be used by model in training", default="az_thin") + parser.add_argument("-m", "--mask", type=int, default=None, + help="Mask ID that will be ignored by model in training") + parser.add_argument("-2D", "--training_2D", action='store_true', help="Set to True for 2D training") + parser.add_argument("-t", "--testset", action='store_false', help="Set to False if no testset should be created") + parser.add_argument("-c", "--check", action="store_true") + args = parser.parse_args() + train(args.key, args.mask, args.training_2D, args.testset, args.check) + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/visualize_AZ_eval.sh b/scripts/cooper/revision/visualize_AZ_eval.sh new file mode 100755 index 0000000..140f84a --- /dev/null +++ b/scripts/cooper/revision/visualize_AZ_eval.sh @@ -0,0 +1,7 @@ +python run_az_evaluation.py \ + -s /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/segmentations \ + -g /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data \ + -i /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data \ + --seg_key /AZ/segment_from_AZmodel_TEM_STEM_ChemFix_v1 \ + --dataset 04 \ + --visualize diff --git a/synapse_net/ground_truth/az_evaluation.py b/synapse_net/ground_truth/az_evaluation.py index fa5a804..ad190ff 100644 --- a/synapse_net/ground_truth/az_evaluation.py +++ b/synapse_net/ground_truth/az_evaluation.py @@ -1,33 +1,61 @@ import os -from typing import List +from typing import List, Optional import h5py import pandas as pd import numpy as np +import vigra from elf.evaluation.matching import _compute_scores, _compute_tps from elf.evaluation import dice_score -from skimage.measure import label +from elf.segmentation.workflows import simple_multicut_workflow +from scipy.ndimage import binary_dilation, binary_closing, distance_transform_edt, binary_opening +from skimage.measure import label, regionprops, regionprops_table +from skimage.segmentation import relabel_sequential, watershed from tqdm import tqdm -def _postprocess(data, apply_cc, min_component_size): +def _expand_seg(az, iterations): + return binary_closing(binary_dilation(az, iterations=iterations), iterations=iterations) + + +def 
_crop(seg, gt, return_bb=False): + bb_seg, bb_gt = np.where(seg), np.where(gt) + + # Handle empty segmentations. + if bb_seg[0].size == 0: + bb = tuple(slice(bgt.min(), bgt.max() + 1) for bseg, bgt in zip(bb_seg, bb_gt)) + else: + bb = tuple(slice( + min(bseg.min(), bgt.min()), max(bseg.max(), bgt.max()) + 1 + ) for bseg, bgt in zip(bb_seg, bb_gt)) + + if return_bb: + return seg[bb], gt[bb], bb + else: + return seg[bb], gt[bb] + + +def _postprocess(data, apply_cc, min_component_size, iterations=0): + if iterations > 0: + data = _expand_seg(data, iterations) if apply_cc: data = label(data) ids, sizes = np.unique(data, return_counts=True) filter_ids = ids[sizes < min_component_size] data[np.isin(data, filter_ids)] = 0 + data, _, _ = relabel_sequential(data) return data -def _single_az_evaluation(seg, gt, apply_cc, min_component_size): +def _single_az_evaluation(seg, gt, apply_cc, min_component_size, iterations, criterion): assert seg.shape == gt.shape, f"{seg.shape}, {gt.shape}" - seg = _postprocess(seg, apply_cc, min_component_size) - gt = _postprocess(gt, apply_cc, min_component_size) + seg = _postprocess(seg, apply_cc, min_component_size, iterations=iterations) + gt = _postprocess(gt, apply_cc, min_component_size=500) dice = dice_score(seg > 0, gt > 0) - n_true, n_matched, n_pred, scores = _compute_scores(seg, gt, criterion="iou", ignore_label=0) + n_true, n_matched, n_pred, scores = _compute_scores(seg, gt, criterion=criterion, ignore_label=0) tp = _compute_tps(scores, n_matched, threshold=0.5) fp = n_pred - tp fn = n_true - tp @@ -35,14 +63,17 @@ def _single_az_evaluation(seg, gt, apply_cc, min_component_size): return {"tp": tp, "fp": fp, "fn": fn, "dice": dice} -# TODO further post-processing? def az_evaluation( seg_paths: List[str], gt_paths: List[str], seg_key: str, gt_key: str, + crop: bool = True, apply_cc: bool = True, - min_component_size: int = 100, # TODO + min_component_size: int = 5000, + iterations: int = 3, + criterion: str = "iou", + **extra_cols ) -> pd.DataFrame: """Evaluate active zone segmentations against ground-truth annotations. @@ -51,29 +82,146 @@ def az_evaluation( gt_paths: The filepaths to the ground-truth annotatons, stored as hdf5 files. seg_key: The internal path to the data in the segmentation hdf5 file. gt_key: The internal path to the data in the ground-truth hdf5 file. + crop: Whether to crop the segmentation and ground-truth to the bounding box. apply_cc: Whether to apply connected components before evaluation. min_component_size: Minimum component size for filtering the segmentation and annotations before evaluation. + iterations: Post-processing iterations for expanding the AZ. + criterion: + extra_cols: Additional columns for the result table. Returns: A data frame with the evaluation results per tomogram. 
""" assert len(seg_paths) == len(gt_paths) - results = { + results = {key: [] for key in extra_cols.keys()} + results.update({ "tomo_name": [], "tp": [], "fp": [], "fn": [], "dice": [], - } + }) + + i = 0 for seg_path, gt_path in tqdm(zip(seg_paths, gt_paths), total=len(seg_paths), desc="Run AZ Eval"): with h5py.File(seg_path, "r") as f: + if seg_key not in f: + print("Segmentation", seg_key, "could not be found in", seg_path) + i += 1 + continue seg = f[seg_key][:] + with h5py.File(gt_path, "r") as f: gt = f[gt_key][:] - # TODO more post-processing params - result = _single_az_evaluation(seg, gt, apply_cc, min_component_size) + + if crop: + seg, gt = _crop(seg, gt) + + result = _single_az_evaluation(seg, gt, apply_cc, min_component_size, iterations, criterion=criterion) results["tomo_name"].append(os.path.basename(seg_path)) for res in ("tp", "fp", "fn", "dice"): results[res].append(result[res]) + for name, val in extra_cols.items(): + results[name].append(val[i]) + i += 1 + return pd.DataFrame(results) + + +def _get_presynaptic_mask(boundary_map, vesicles): + mask = np.zeros(vesicles.shape, dtype="bool") + + def _compute_mask_2d(z): + distances = distance_transform_edt(boundary_map[z] < 0.25).astype("float32") + seeds = vigra.analysis.localMaxima(distances, marker=np.nan, allowAtBorder=True, allowPlateaus=True) + seeds = label(np.isnan(seeds)) + overseg = watershed(boundary_map[z], markers=seeds) + seg = simple_multicut_workflow( + boundary_map[z], use_2dws=False, watershed=overseg, n_threads=1, beta=0.6 + ) + + def n_vesicles(mask, seg): + return len(np.unique(seg[mask])) - 1 + + props = pd.DataFrame(regionprops_table(seg, vesicles[z], properties=["label"], extra_properties=[n_vesicles])) + ids, n_ves = props.label.values, props.n_vesicles.values + presyn_id = ids[np.argmax(n_ves)] + + mask[z] = seg == presyn_id + + for z in range(mask.shape[0]): + _compute_mask_2d(z) + + mask = binary_opening(mask, iterations=5) + + return mask + + +def thin_az( + az_segmentation: np.ndarray, + boundary_map: np.typing.ArrayLike, + vesicles: np.typing.ArrayLike, + tomo: Optional[np.typing.ArrayLike] = None, + min_thinning_size: int = 2500, + post_closing: int = 2, + presyn_dist: int = 6, + check: bool = False, +) -> np.ndarray: + """ + + Args: + az_segmentation: + boundary_map: + vesicles: + min_thinning_size: + """ + az_segmentation = label(az_segmentation) + thinned_az = np.zeros(az_segmentation.shape, dtype="uint8") + props = regionprops(az_segmentation) + + min_bb_shape = (32, 384, 384) + + for prop in props: + az_id = prop.label + + bb = tuple(slice(start, stop) for start, stop in zip(prop.bbox[:3], prop.bbox[3:])) + pad_width = [max(sh - (b.stop - b.start), 0) // 2 for b, sh in zip(bb, min_bb_shape)] + bb = tuple( + slice(max(b.start - pw, 0), min(b.stop + pw, sh)) for b, pw, sh in zip(bb, pad_width, az_segmentation.shape) + ) + + # If this is a small component then we discard it. This is likely some artifact in the ground-truth. + if prop.area < min_thinning_size: + continue + + # First, get everything for this bounding box. + az_bb = (az_segmentation[bb] == az_id) + vesicles_bb = vesicles[bb] + # Skip if we don't have a vesicle. + if vesicles[bb].max() == 0: + continue + + mask_bb = _get_presynaptic_mask(boundary_map[bb], vesicles_bb) + + # Apply post-processing to filter out only the parts of the AZ close to the presynaptic mask. 
+ distances = np.stack([distance_transform_edt(mask_bb[z] == 0) for z in range(mask_bb.shape[0])]) + az_bb[distances > presyn_dist] = 0 + az_bb = np.logical_or(binary_closing(az_bb, iterations=post_closing), az_bb) + + if check: + import napari + tomo_bb = tomo[bb] + + v = napari.Viewer() + v.add_image(tomo_bb) + v.add_labels(az_bb.astype("uint8"), name="az-thinned") + v.add_labels(az_segmentation[bb], name="az", visible=False) + v.add_labels(mask_bb, visible=False) + v.title = f"{prop.label}: {prop.area}" + + napari.run() + + thinned_az[bb][az_bb] = 1 + + return thinned_az diff --git a/synapse_net/inference/compartments.py b/synapse_net/inference/compartments.py index 0826827..6252acf 100644 --- a/synapse_net/inference/compartments.py +++ b/synapse_net/inference/compartments.py @@ -128,7 +128,7 @@ def _segment_compartments_3d( continue seg_z = _segment_compartments_2d(prediction[z], distances=distances[z]) seg_z[seg_z != 0] += offset - offset = int(seg_z.max()) + offset = max(int(seg_z.max()), offset) seg_2d[z] = seg_z seg = _merge_segmentation_3d(seg_2d, min_z_extent) From f8d801f6af79b8c2ba8bd2f3932cb7da9a4d42bc Mon Sep 17 00:00:00 2001 From: Constantin Pape Date: Thu, 29 May 2025 21:13:15 +0200 Subject: [PATCH 05/29] Clean up AZ scripts and summarize current state --- scripts/cooper/revision/README.md | 19 +++ scripts/cooper/revision/az_prediction.py | 61 ++++++++++ scripts/cooper/revision/check_prediction.py | 44 +++++++ scripts/cooper/revision/common.py | 69 +++++++++++ scripts/cooper/revision/eval_AZ.sh | 8 -- scripts/cooper/revision/evaluate_result.py | 13 ++- scripts/cooper/revision/fix_az.py | 17 +++ .../cooper/revision/generate_az_eval_data.py | 31 ----- scripts/cooper/revision/merge_az.py | 108 ++++++++++++++++++ scripts/cooper/revision/run_az_evaluation.py | 94 +++++++-------- scripts/cooper/revision/train_az.py | 28 +++-- scripts/cooper/revision/visualize_AZ_eval.sh | 7 -- synapse_net/ground_truth/az_evaluation.py | 40 +++++-- synapse_net/inference/active_zone.py | 1 - synapse_net/training/__init__.py | 1 + synapse_net/training/transform.py | 18 +++ 16 files changed, 431 insertions(+), 128 deletions(-) create mode 100644 scripts/cooper/revision/README.md create mode 100644 scripts/cooper/revision/az_prediction.py create mode 100644 scripts/cooper/revision/check_prediction.py create mode 100644 scripts/cooper/revision/common.py delete mode 100755 scripts/cooper/revision/eval_AZ.sh create mode 100644 scripts/cooper/revision/fix_az.py delete mode 100644 scripts/cooper/revision/generate_az_eval_data.py create mode 100644 scripts/cooper/revision/merge_az.py delete mode 100755 scripts/cooper/revision/visualize_AZ_eval.sh create mode 100644 synapse_net/training/transform.py diff --git a/scripts/cooper/revision/README.md b/scripts/cooper/revision/README.md new file mode 100644 index 0000000..aec9a0b --- /dev/null +++ b/scripts/cooper/revision/README.md @@ -0,0 +1,19 @@ +# Improving the AZ model + +Scripts for improving the AZ annotations, training the AZ model, and evaluating it. + +The most important scripts are: +- For improving and updating the AZ annotations: + - `prediction.py`: Run prediction of vesicle and boundary model. + - `thin_az_gt.py`: Thin the AZ annotations, so that it aligns only with the presynaptic membrane. This is done by intersecting the annotations with the presynaptic compartment, using predictions from the network used for compartment segmentation. 
+ - `assort_new_az_data.py`: Create a new version of the annotation, renaming the dataset, and creating a cropped version of the endbulb of held data. + - `merge_az.py`: Merge AZ annotations with predictions from model v4, in order to remove some artifacts that resulted from AZ thinning. +- For evaluating the AZ predictions: + - `az_prediction.py`: Run prediction with the AZ model. + - `run_az_evaluation.py`: Evaluate the predictions of an AZ model. + - `evaluate_result.py`: Summarize the evaluation results. +- And for training: `train_az_gt.py`. So far, I have trained: + - v3: Trained on the initial annotations. + - v4: Trained on the thinned annotations. + - v5: Trained on the thinned annotations with an additional distance loss (did not help). + - v6: Trained on the merged annotations. diff --git a/scripts/cooper/revision/az_prediction.py b/scripts/cooper/revision/az_prediction.py new file mode 100644 index 0000000..747a7ea --- /dev/null +++ b/scripts/cooper/revision/az_prediction.py @@ -0,0 +1,61 @@ +import argparse +import os + +import h5py +from synapse_net.inference.active_zone import segment_active_zone +from torch_em.util import load_model +from tqdm import tqdm + +from common import get_file_names, get_split_folder, ALL_NAMES, INPUT_ROOT, OUTPUT_ROOT + + +def run_prediction(model, name, split_folder, version, split_names): + file_names = get_file_names(name, split_folder, split_names=split_names) + + output_folder = os.path.join(OUTPUT_ROOT, name) + os.makedirs(output_folder, exist_ok=True) + output_key = f"predictions/az/v{version}" + + for fname in tqdm(file_names): + output_path = os.path.join(output_folder, fname) + + if os.path.exists(output_path): + with h5py.File(output_path, "r") as f: + if output_key in f: + continue + + input_path = os.path.join(INPUT_ROOT, name, fname) + with h5py.File(input_path, "r") as f: + raw = f["raw"][:] + + _, pred = segment_active_zone(raw, model=model, verbose=False, return_predictions=True) + with h5py.File(output_path, "a") as f: + f.create_dataset(output_key, data=pred, compression="lzf") + + +def get_model(version): + assert version in (3, 4, 5) + split_folder = get_split_folder(version) + if version == 3: + model_path = os.path.join(split_folder, "checkpoints", "3D-AZ-model-TEM_STEM_ChemFix_wichmann-v3") + else: + model_path = os.path.join(split_folder, "checkpoints", f"v{version}") + model = load_model(model_path) + return model + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--version", "-v", type=int) + parser.add_argument("--names", nargs="+", default=ALL_NAMES) + parser.add_argument("--splits", nargs="+", default=["test"]) + args = parser.parse_args() + + model = get_model(args.version) + split_folder = get_split_folder(args.version) + for name in args.names: + run_prediction(model, name, split_folder, args.version, args.splits) + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/check_prediction.py b/scripts/cooper/revision/check_prediction.py new file mode 100644 index 0000000..e54e66a --- /dev/null +++ b/scripts/cooper/revision/check_prediction.py @@ -0,0 +1,44 @@ +import argparse +import os + +import h5py +import napari +from common import ALL_NAMES, get_file_names, get_split_folder, get_paths + + +def check_predictions(name, split, version): + split_folder = get_split_folder(version) + file_names = get_file_names(name, split_folder, split_names=[split]) + seg_paths, gt_paths = get_paths(name, file_names) + + for seg_path, gt_path in zip(seg_paths, gt_paths): + + with 
h5py.File(gt_path, "r") as f: + raw = f["raw"][:] + gt = f["labels/az"][:] if version == 3 else f["labels/az_thin"][:] + + with h5py.File(seg_path) as f: + seg_key = f"predictions/az/v{version}" + pred = f[seg_key][:] + + v = napari.Viewer() + v.add_image(raw) + v.add_image(pred, blending="additive") + v.add_labels(gt) + v.title = f"{name}/{os.path.basename(seg_path)}" + napari.run() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--version", "-v", type=int, required=True) + parser.add_argument("--split", default="test") + parser.add_argument("--names", nargs="+", default=ALL_NAMES) + args = parser.parse_args() + + for name in args.names: + check_predictions(name, args.split, args.version) + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/common.py b/scripts/cooper/revision/common.py new file mode 100644 index 0000000..98d8430 --- /dev/null +++ b/scripts/cooper/revision/common.py @@ -0,0 +1,69 @@ +import json +import os + + +# The root folder which contains the new AZ training data. +INPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" +# The output folder for AZ predictions. +OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions_new" + +# The names of all datasets for which to run prediction / evaluation. +# This excludes 'endbulb_of_held_cropped', which is a duplicate of 'endbulb_of_held', +# which we don't evaluate on because of this. +ALL_NAMES = [ + "chemical_fixation", "endbulb_of_held", "stem", "stem_cropped", "tem" +] + +# The translation of new dataset names to old dataset names. +NAME_TRANSLATION = { + "chemical_fixation": ["12_chemical_fix_cryopreparation_minusSVseg_corrected"], + "endbulb_of_held": ["wichmann_withAZ_rescaled_tomograms"], + "stem": ["04_hoi_stem_examples_fidi_and_sarah_corrected_rescaled_tomograms"], + "stem_cropped": ["04_hoi_stem_examples_minusSVseg_cropped_corrected_rescaled_tomograms", + "06_hoi_wt_stem750_fm_minusSVseg_cropped_corrected_rescaled_tomograms"], + "tem": ["01data_withoutInvertedFiles_minusSVseg_corrected"], +} + + +# Get the paths to the files with raw data / ground-truth and the segmentation. 
+def get_paths(name, file_names, skip_seg=False): + seg_paths, gt_paths = [], [] + for fname in file_names: + if not skip_seg: + seg_path = os.path.join(OUTPUT_ROOT, name, fname) + assert os.path.exists(seg_path), seg_path + seg_paths.append(seg_path) + + gt_path = os.path.join(INPUT_ROOT, name, fname) + assert os.path.exists(gt_path), gt_path + gt_paths.append(gt_path) + + return seg_paths, gt_paths + + +def get_file_names(name, split_folder, split_names): + split_path = os.path.join(split_folder, f"split-{name}.json") + if os.path.exists(split_path): + with open(split_path) as f: + splits = json.load(f) + file_names = [fname for split in split_names for fname in splits[split]] + + else: + old_names = NAME_TRANSLATION[name] + file_names = [] + for old_name in old_names: + split_path = os.path.join(split_folder, f"split-{old_name}.json") + with open(split_path) as f: + splits = json.load(f) + this_file_names = [fname for split in split_names for fname in splits[split]] + file_names.extend(this_file_names) + return file_names + + +def get_split_folder(version): + assert version in (3, 4, 5) + if version == 3: + split_folder = "splits" + else: + split_folder = "models_az_thin" + return split_folder diff --git a/scripts/cooper/revision/eval_AZ.sh b/scripts/cooper/revision/eval_AZ.sh deleted file mode 100755 index 520e7c2..0000000 --- a/scripts/cooper/revision/eval_AZ.sh +++ /dev/null @@ -1,8 +0,0 @@ -python run_az_evaluation.py \ - -s /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/segmentations \ - -g /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data \ - --seg_key /AZ/segment_from_AZmodel_TEM_STEM_ChemFix_v1 \ - --criterion iop \ - -o v1 - # --dataset 01 \ - # --seg_key AZ/segment_from_AZmodel_TEM_STEM_ChemFix_wichmann_v2 \ diff --git a/scripts/cooper/revision/evaluate_result.py b/scripts/cooper/revision/evaluate_result.py index b8b3fa0..a7627ba 100644 --- a/scripts/cooper/revision/evaluate_result.py +++ b/scripts/cooper/revision/evaluate_result.py @@ -6,6 +6,7 @@ args = parser.parse_args() results = pd.read_excel(args.result_path) +print(results) def summarize_results(res): @@ -20,29 +21,29 @@ def summarize_results(res): # # Compute the results for Chemical Fixation. -results_chem_fix = results[results.dataset.str.startswith("12")] +results_chem_fix = results[results.dataset == "chemical_fixation"] if results_chem_fix.size > 0: print("Chemical Fixation Results:") summarize_results(results_chem_fix) # # # Compute the results for STEM (=04). -results_stem = results[results.dataset.str.startswith(("04", "06"))] +results_stem = results[results.dataset.str.startswith("stem")] if results_stem.size > 0: print() print("STEM Results:") summarize_results(results_stem) # # # Compute the results for TEM (=01). -results_tem = results[results.dataset.str.startswith("01")] +results_tem = results[results.dataset == "tem"] if results_tem.size > 0: print() print("TEM Results:") summarize_results(results_tem) # -# Compute the results for Wichmann. -results_wichmann = results[results.dataset.str.startswith("wichmann")] +# Compute the results for Wichmann / endbulb of held. 
+results_wichmann = results[results.dataset.str.startswith("endbulb")] if results_wichmann.size > 0: print() - print("Wichmann Results:") + print("Endbulb of Held Results:") summarize_results(results_wichmann) diff --git a/scripts/cooper/revision/fix_az.py b/scripts/cooper/revision/fix_az.py new file mode 100644 index 0000000..72ca768 --- /dev/null +++ b/scripts/cooper/revision/fix_az.py @@ -0,0 +1,17 @@ +import os +from glob import glob +import h5py +from tqdm import tqdm + + +INPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" + +files = glob(os.path.join(INPUT_ROOT, "**/*.h5"), recursive=True) + +key = "labels/az_merged" +for ff in tqdm(files): + with h5py.File(ff, "a") as f: + az = f[key][:] + az = az.squeeze() + del f[key] + f.create_dataset(key, data=az, compression="lzf") diff --git a/scripts/cooper/revision/generate_az_eval_data.py b/scripts/cooper/revision/generate_az_eval_data.py deleted file mode 100644 index c0df1b7..0000000 --- a/scripts/cooper/revision/generate_az_eval_data.py +++ /dev/null @@ -1,31 +0,0 @@ -from synapse_net.sample_data import get_sample_data -from elf.io import open_file - - -sample_data = get_sample_data("tem_tomo") -tomo = open_file(sample_data, "r")["data"][:] - - -def run_prediction(): - from synapse_net.inference import run_segmentation, get_model - - model = get_model("active_zone") - seg = run_segmentation(tomo, model, "active_zone") - - with open_file("./pred.h5", "a") as f: - f.create_dataset("pred", data=seg, compression="gzip") - - -def check_prediction(): - import napari - - with open_file("./pred.h5", "r") as f: - pred = f["pred"][:] - - v = napari.Viewer() - v.add_image(tomo) - v.add_labels(pred) - napari.run() - - -check_prediction() diff --git a/scripts/cooper/revision/merge_az.py b/scripts/cooper/revision/merge_az.py new file mode 100644 index 0000000..b9a1a17 --- /dev/null +++ b/scripts/cooper/revision/merge_az.py @@ -0,0 +1,108 @@ +import argparse +import os + +import h5py +import napari +import numpy as np +from scipy.ndimage import binary_closing +from common import ALL_NAMES, get_file_names, get_split_folder, get_paths + + +SKIP_MERGE = [ + "36859_J1_66K_TS_CA3_PS_26_rec_2Kb1dawbp_crop.h5", + "36859_J1_66K_TS_CA3_PS_23_rec_2Kb1dawbp_crop.h5", + "36859_J1_66K_TS_CA3_PS_23_rec_2Kb1dawbp_crop.h5", + "36859_J1_STEM750_66K_SP_17_rec_2kb1dawbp_crop.h5", +] + + +# STEM CROPPED IS OFTEN TOO SMALL! 
+def merge_az(name, version, check): + split_folder = get_split_folder(version) + file_names = get_file_names(name, split_folder, split_names=["train", "val", "test"]) + seg_paths, gt_paths = get_paths(name, file_names) + + for seg_path, gt_path in zip(seg_paths, gt_paths): + + with h5py.File(gt_path, "r") as f: + if not check and ("labels/az_merged" in f): + continue + raw = f["raw"][:] + gt = f["labels/az"][:] + gt_thin = f["labels/az_thin"][:] + + with h5py.File(seg_path) as f: + seg_key = f"predictions/az/v{version}" + pred = f[seg_key][:] + + fname = os.path.basename(seg_path) + if fname in SKIP_MERGE: + az_merged = gt + else: + threshold = 0.4 + gt_ = np.logical_or(binary_closing(gt, iterations=4), gt) + seg = pred > threshold + az_merged = np.logical_and(seg, gt_) + az_merged = np.logical_or(az_merged, gt_thin) + az_merged = np.logical_or(binary_closing(az_merged, iterations=2), az_merged) + + if check: + v = napari.Viewer() + v.add_image(raw) + v.add_image(pred, blending="additive", visible=False) + v.add_labels(seg, colormap={1: "blue"}) + v.add_labels(gt, colormap={1: "yellow"}) + v.add_labels(az_merged) + v.title = f"{name}/{fname}" + napari.run() + + else: + with h5py.File(gt_path, "a") as f: + f.create_dataset("labels/az_merged", data=az_merged, compression="lzf") + + +def visualize_merge(args): + for name in args.names: + if "endbulb" in name: + continue + merge_az(name, args.version, check=True) + + +def copy_az(name, version): + split_folder = get_split_folder(version) + file_names = get_file_names(name, split_folder, split_names=["train", "val", "test"]) + _, gt_paths = get_paths(name, file_names, skip_seg=True) + + for gt_path in gt_paths: + with h5py.File(gt_path, "a") as f: + if "labels/az_merged" in f: + continue + az = f["labels/az"][:] + f.create_dataset("labels/az_merged", data=az, compression="lzf") + + +def run_merge(args): + for name in args.names: + print("Merging", name) + if "endbulb" in name: + copy_az(name, args.version) + else: + merge_az(name, args.version, check=False) + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument("--visualize", action="store_true") + parser.add_argument("--names", nargs="+", default=ALL_NAMES + ["endbulb_of_held_cropped"]) + parser.add_argument("--version", "-v", type=int, default=4) + + args = parser.parse_args() + if args.visualize: + visualize_merge(args) + else: + run_merge(args) + + +if __name__ == "__main__": + main() diff --git a/scripts/cooper/revision/run_az_evaluation.py b/scripts/cooper/revision/run_az_evaluation.py index 7727739..ba06e2b 100644 --- a/scripts/cooper/revision/run_az_evaluation.py +++ b/scripts/cooper/revision/run_az_evaluation.py @@ -1,45 +1,30 @@ import argparse import os -from glob import glob - -def _get_paths(seg_root, gt_root, image_root=None, dataset=None): - seg_paths = sorted(glob(os.path.join(seg_root, "**/*.h5"), recursive=True)) - if dataset is not None: - seg_paths = [path for path in seg_paths if os.path.basename(os.path.split(path)[0]).startswith(dataset)] - - gt_paths = [] - for path in seg_paths: - gt_path = os.path.join(gt_root, os.path.relpath(path, seg_root)) - assert os.path.exists(gt_path), gt_path - gt_paths.append(gt_path) - - if image_root is None: - image_paths = [None] * len(seg_paths) - else: - image_paths = [] - for path in seg_paths: - im_path = os.path.join(image_root, os.path.relpath(path, seg_root)) - assert os.path.exists(im_path), im_path - image_paths.append(im_path) - - return seg_paths, gt_paths, image_paths +import pandas as pd +from common 
import get_paths, get_file_names, ALL_NAMES def run_az_evaluation(args): from synapse_net.ground_truth.az_evaluation import az_evaluation - seg_paths, gt_paths, _ = _get_paths(args.seg_root, args.gt_root, dataset=args.dataset) - dataset = [os.path.basename(os.path.split(path)[0]) for path in seg_paths] - result = az_evaluation( - seg_paths, gt_paths, seg_key=args.seg_key, gt_key="/labels/az", dataset=dataset, criterion=args.criterion - ) + seg_key = f"predictions/az/v{args.version}" - if args.output is None: - output_path = f"./results/{args.seg_key.replace('/', '_')}.xlsx" - else: - output_path = f"./results/{args.output}.xlsx" - result.to_excel(output_path, index=False) + split_folder = "./models_az_thin" + results = [] + for dataset in args.datasets: + print(dataset, ":") + file_names = get_file_names(dataset, split_folder, split_names=["test"]) + seg_paths, gt_paths = get_paths(dataset, file_names) + result = az_evaluation( + seg_paths, gt_paths, seg_key=seg_key, gt_key="/labels/az_merged", + criterion=args.criterion, dataset=[dataset] * len(seg_paths), threshold=args.threshold, + ) + results.append(result) + + results = pd.concat(results) + output_path = f"./results/v{args.version}.xlsx" + results.to_excel(output_path, index=False) def visualize_az_evaluation(args): @@ -47,35 +32,40 @@ def visualize_az_evaluation(args): from synapse_net.ground_truth.az_evaluation import _postprocess, _crop from elf.io import open_file - seg_paths, gt_paths, image_paths = _get_paths(args.seg_root, args.gt_root, args.image_root, dataset=args.dataset) - for seg_path, gt_path, image_path in zip(seg_paths, gt_paths, image_paths): - image = None if image_path is None else open_file(image_path, "r")["raw"][:] + seg_key = f"predictions/az/v{args.version}" + + split_folder = "./models_az_thin" + for dataset in args.datasets: + file_names = get_file_names(dataset, split_folder, split_names=["test"]) + seg_paths, gt_paths = get_paths(dataset, file_names) + + for seg_path, gt_path in zip(seg_paths, gt_paths): + + with open_file(seg_path, "r") as f: + seg = f[seg_key][:].squeeze() + with open_file(gt_path, "r") as f: + gt = f["/labels/az_merged"][:] - with open_file(seg_path, "r") as f: - seg = f[args.seg_key][:] - with open_file(gt_path, "r") as f: - gt = f["/labels/az"][:] + seg = seg > args.threshold - seg, gt, bb = _crop(seg, gt, return_bb=True) - if image is not None: - image = image[bb] + seg, gt, bb = _crop(seg, gt, return_bb=True) + with open_file(gt_path, "r") as f: + image = f["raw"][bb] - seg = _postprocess(seg, apply_cc=True, min_component_size=10000, iterations=3) - gt = _postprocess(gt, apply_cc=True, min_component_size=500) + seg = _postprocess(seg, apply_cc=True, min_component_size=5000, iterations=3) + gt = _postprocess(gt, apply_cc=True, min_component_size=500) - run_metric_visualization(image, seg, gt, title=os.path.basename(seg_path), criterion=args.criterion) + run_metric_visualization(image, seg, gt, title=os.path.basename(seg_path), criterion=args.criterion) def main(): parser = argparse.ArgumentParser() - parser.add_argument("-s", "--seg_root", required=True) - parser.add_argument("-g", "--gt_root", required=True) - parser.add_argument("--seg_key", required=True) - parser.add_argument("-i", "--image_root") - parser.add_argument("-o", "--output") + parser.add_argument("--version", "-v", type=int, required=True) parser.add_argument("-c", "--criterion", default="iou") parser.add_argument("--visualize", action="store_true") - parser.add_argument("--dataset") + parser.add_argument("--datasets", 
nargs="+", default=ALL_NAMES) + # Set the threshold to None if the AZ prediction already a segmentation. + parser.add_argument("--threshold", type=float, default=0.5) args = parser.parse_args() if args.visualize: diff --git a/scripts/cooper/revision/train_az.py b/scripts/cooper/revision/train_az.py index 1777e77..dd24d5c 100644 --- a/scripts/cooper/revision/train_az.py +++ b/scripts/cooper/revision/train_az.py @@ -7,7 +7,7 @@ from sklearn.model_selection import train_test_split -from synapse_net.training import supervised_training +from synapse_net.training import supervised_training, AZDistanceLabelTransform TRAIN_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" OUTPUT_ROOT = "./models_az_thin" @@ -83,8 +83,7 @@ def get_paths(split, datasets, testset=True): return paths -# TODO: commit, then train with distance. -def train(key, ignore_label=None, training_2D=False, testset=True, check=False): +def train(key, ignore_label=None, use_distances=False, training_2D=False, testset=True, check=False): os.makedirs(OUTPUT_ROOT, exist_ok=True) @@ -98,13 +97,20 @@ def train(key, ignore_label=None, training_2D=False, testset=True, check=False): # patch_shape = [48, 256, 256] patch_shape = [48, 384, 384] - model_name = "v4" + model_name = "v6" # checking for 2D training if training_2D: patch_shape = [1, 256, 256] model_name = "2D-AZ-model-v1" + if use_distances: + out_channels = 2 + label_transform = AZDistanceLabelTransform() + else: + out_channels = 1 + label_transform = torch_em.transform.label.labels_to_binary + batch_size = 2 supervised_training( name=model_name, @@ -112,14 +118,14 @@ def train(key, ignore_label=None, training_2D=False, testset=True, check=False): val_paths=val_paths, label_key=f"/labels/{key}", patch_shape=patch_shape, batch_size=batch_size, - sampler=torch_em.data.sampler.MinInstanceSampler(min_num_instances=1, p_reject=0.9), - n_samples_train=None, n_samples_val=64, + sampler=torch_em.data.sampler.MinInstanceSampler(min_num_instances=1, p_reject=0.85), + n_samples_train=None, n_samples_val=100, check=check, save_root=OUTPUT_ROOT, - n_iterations=int(1.5e5), + n_iterations=int(2e5), ignore_label=ignore_label, - label_transform=torch_em.transform.label.labels_to_binary, - out_channels=1, + label_transform=label_transform, + out_channels=out_channels, # BCE_loss=False, # sigmoid_layer=True, ) @@ -127,14 +133,14 @@ def train(key, ignore_label=None, training_2D=False, testset=True, check=False): def main(): parser = argparse.ArgumentParser() - parser.add_argument("-k", "--key", help="Key ID that will be used by model in training", default="az_thin") + parser.add_argument("-k", "--key", help="Key ID that will be used by model in training", default="az_merged") parser.add_argument("-m", "--mask", type=int, default=None, help="Mask ID that will be ignored by model in training") parser.add_argument("-2D", "--training_2D", action='store_true', help="Set to True for 2D training") parser.add_argument("-t", "--testset", action='store_false', help="Set to False if no testset should be created") parser.add_argument("-c", "--check", action="store_true") args = parser.parse_args() - train(args.key, args.mask, args.training_2D, args.testset, args.check) + train(args.key, ignore_label=args.mask, training_2D=args.training_2D, testset=args.testset, check=args.check) if __name__ == "__main__": diff --git a/scripts/cooper/revision/visualize_AZ_eval.sh b/scripts/cooper/revision/visualize_AZ_eval.sh deleted file mode 100755 index 140f84a..0000000 --- 
a/scripts/cooper/revision/visualize_AZ_eval.sh +++ /dev/null @@ -1,7 +0,0 @@ -python run_az_evaluation.py \ - -s /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/segmentations \ - -g /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data \ - -i /mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data \ - --seg_key /AZ/segment_from_AZmodel_TEM_STEM_ChemFix_v1 \ - --dataset 04 \ - --visualize diff --git a/synapse_net/ground_truth/az_evaluation.py b/synapse_net/ground_truth/az_evaluation.py index ad190ff..9c95505 100644 --- a/synapse_net/ground_truth/az_evaluation.py +++ b/synapse_net/ground_truth/az_evaluation.py @@ -50,11 +50,11 @@ def _postprocess(data, apply_cc, min_component_size, iterations=0): def _single_az_evaluation(seg, gt, apply_cc, min_component_size, iterations, criterion): assert seg.shape == gt.shape, f"{seg.shape}, {gt.shape}" + dice = dice_score(seg > 0, gt > 0) + seg = _postprocess(seg, apply_cc, min_component_size, iterations=iterations) gt = _postprocess(gt, apply_cc, min_component_size=500) - dice = dice_score(seg > 0, gt > 0) - n_true, n_matched, n_pred, scores = _compute_scores(seg, gt, criterion=criterion, ignore_label=0) tp = _compute_tps(scores, n_matched, threshold=0.5) fp = n_pred - tp @@ -73,10 +73,14 @@ def az_evaluation( min_component_size: int = 5000, iterations: int = 3, criterion: str = "iou", + threshold: Optional[float] = None, **extra_cols ) -> pd.DataFrame: """Evaluate active zone segmentations against ground-truth annotations. + This computes the dice score as well as false positives, false negatives and true positives + for each segmented tomogram. + Args: seg_paths: The filepaths to the segmentations, stored as hd5 files. gt_paths: The filepaths to the ground-truth annotatons, stored as hdf5 files. @@ -84,9 +88,11 @@ def az_evaluation( gt_key: The internal path to the data in the ground-truth hdf5 file. crop: Whether to crop the segmentation and ground-truth to the bounding box. apply_cc: Whether to apply connected components before evaluation. - min_component_size: Minimum component size for filtering the segmentation and annotations before evaluation. - iterations: Post-processing iterations for expanding the AZ. - criterion: + min_component_size: Minimum component size for filtering the segmentation before evaluation. + iterations: Post-processing iterations for expanding the AZ annotations. + criterion: The criterion for matching annotations and segmentations + threshold: Threshold applied to the segmentation. This is required if the segmentation is passed as + probability prediction instead of a binary segmentation. Possible values: 'iou', 'iop', 'iot'. extra_cols: Additional columns for the result table. Returns: @@ -110,11 +116,14 @@ def az_evaluation( print("Segmentation", seg_key, "could not be found in", seg_path) i += 1 continue - seg = f[seg_key][:] + seg = f[seg_key][:].squeeze() with h5py.File(gt_path, "r") as f: gt = f[gt_key][:] + if threshold is not None: + seg = seg > threshold + if crop: seg, gt = _crop(seg, gt) @@ -163,18 +172,25 @@ def thin_az( boundary_map: np.typing.ArrayLike, vesicles: np.typing.ArrayLike, tomo: Optional[np.typing.ArrayLike] = None, + presyn_dist: int = 6, min_thinning_size: int = 2500, post_closing: int = 2, - presyn_dist: int = 6, check: bool = False, ) -> np.ndarray: - """ + """Thin the active zone annotations by restricting them to a certain distance from the presynaptic mask. 
Args: - az_segmentation: - boundary_map: - vesicles: - min_thinning_size: + az_segmentation: The active zone annotations. + boundary_map: The boundary / membrane predictions. + vesicles: The vesicle segmentation. + tomo: The tomogram data. Optional, will only be used for evaluation. + presyn_dist: The maximal distance to the presynaptic compartment, which is used for thinning. + min_thinning_size: The minimal size for a label component. + post_closing: Closing iterations to apply to the AZ annotations after thinning. + check: Whether to visually check the results. + + Returns: + The thinned AZ annotations. """ az_segmentation = label(az_segmentation) thinned_az = np.zeros(az_segmentation.shape, dtype="uint8") diff --git a/synapse_net/inference/active_zone.py b/synapse_net/inference/active_zone.py index e9040fe..2be654f 100644 --- a/synapse_net/inference/active_zone.py +++ b/synapse_net/inference/active_zone.py @@ -106,7 +106,6 @@ def segment_active_zone( # Run segmentation and rescale the result if necessary. foreground = pred[0] - print(f"shape {foreground.shape}") segmentation = _run_segmentation(foreground, verbose=verbose, min_size=min_size) segmentation = scaler.rescale_output(segmentation, is_segmentation=True) diff --git a/synapse_net/training/__init__.py b/synapse_net/training/__init__.py index 7e32f94..84204c9 100644 --- a/synapse_net/training/__init__.py +++ b/synapse_net/training/__init__.py @@ -3,3 +3,4 @@ from .supervised_training import supervised_training from .semisupervised_training import semisupervised_training from .domain_adaptation import mean_teacher_adaptation +from .transform import AZDistanceLabelTransform diff --git a/synapse_net/training/transform.py b/synapse_net/training/transform.py new file mode 100644 index 0000000..04bad88 --- /dev/null +++ b/synapse_net/training/transform.py @@ -0,0 +1,18 @@ +import numpy as np +from torch_em.transform.label import labels_to_binary +from scipy.ndimage import distance_transform_edt + + +class AZDistanceLabelTransform: + def __init__(self, max_distance: float = 50.0): + self.max_distance = max_distance + + def __call__(self, input_): + binary_target = labels_to_binary(input_).astype("float32") + if binary_target.sum() == 0: + distances = np.ones_like(binary_target, dtype="float32") + else: + distances = distance_transform_edt(binary_target == 0) + distances = np.clip(distances, 0.0, self.max_distance) + distances /= self.max_distance + return np.stack([binary_target, distances]) From 5f66328d29f59589e94f5783c382e160de92046c Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Thu, 5 Jun 2025 22:02:53 +0200 Subject: [PATCH 06/29] cropping stem data --- .gitignore | 1 + environment.yaml | 2 +- run_sbatch_revision.sbatch | 11 +++ scripts/cooper/revision/assort_new_az_data.py | 77 ++++++++++++++++++- scripts/cooper/revision/common.py | 8 +- scripts/cooper/revision/merge_az.py | 4 +- 6 files changed, 93 insertions(+), 10 deletions(-) create mode 100644 run_sbatch_revision.sbatch diff --git a/.gitignore b/.gitignore index e5a747b..4554ff1 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ models/*/ *.egg-info/ run_sbatch.sbatch slurm/ +slurm_revision/ scripts/cooper/evaluation_results/ scripts/cooper/training/copy_testset.py scripts/rizzoli/upsample_data.py diff --git a/environment.yaml b/environment.yaml index e85fc3c..c4fd63b 100644 --- a/environment.yaml +++ b/environment.yaml @@ -1,7 +1,7 @@ channels: - conda-forge name: - synapse-net + synapse-net-cpu dependencies: - bioimageio.core - kornia diff --git 
a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch new file mode 100644 index 0000000..5e532a0 --- /dev/null +++ b/run_sbatch_revision.sbatch @@ -0,0 +1,11 @@ +#! /bin/bash +#SBATCH -c 4 #4 #8 +#SBATCH --mem 256G #120G #32G #64G #256G +#SBATCH -p grete:shared #grete:shared #grete-h100:shared +#SBATCH -t 24:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 +#SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out +#SBATCH -A nim00007 #SBATCH --constraint 80gb + +source ~/.bashrc +conda activate synapse-net +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/merge_az.py -v 6 \ No newline at end of file diff --git a/scripts/cooper/revision/assort_new_az_data.py b/scripts/cooper/revision/assort_new_az_data.py index 13b7430..a7cd623 100644 --- a/scripts/cooper/revision/assort_new_az_data.py +++ b/scripts/cooper/revision/assort_new_az_data.py @@ -5,10 +5,13 @@ import numpy as np from tqdm import tqdm from skimage.transform import resize +from skimage.measure import label +from scipy.ndimage import binary_closing -ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_data/training_data" -INTER_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions" -OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" +ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_data/training_data" +INTER_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_predictions" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data" +STEM_INPUT="/mnt/lustre-emmy-hdd/usr/u12095/synaptic_reconstruction/for_revison/postprocessed_AZ" def _check_data(files, label_folder, check_thinned): @@ -195,6 +198,70 @@ def crop_wichmann(): f.create_dataset("labels/az", data=az, compression="lzf") f.create_dataset("labels/az_thin", data=az, compression="lzf") +def crop_stem(): + input_name = "04_hoi_stem_examples_minusSVseg" + output_name = "stem_cropped2" + + input_folder = os.path.join(STEM_INPUT, input_name) + output_folder = os.path.join(OUTPUT_ROOT, output_name) + os.makedirs(output_folder, exist_ok=True) + files = glob(os.path.join(input_folder, "*.h5")) + + min_shape = (32, 512, 512) + + for ff in tqdm(files): + with h5py.File(ff, "r") as f: + az = f["labels/az"][:] + raw_full = f["raw"][:] + + # Label connected components in the az volume + labeled = label(az) + num, sizes = np.unique(labeled, return_counts=True) + #print(f"num {num}, sizes {sizes}") + num, sizes = num[1:], sizes[1:] + + #exclude artifacts and background + keep_labels = num[(sizes > 2000) & (num != 0)] + #print(f"keep_labels {keep_labels}") + + #Clean up az annotations + az = np.isin(labeled, keep_labels).astype("uint8") + # Apply binary closing. 
+ az = np.logical_or(az, binary_closing(az, iterations=4)).astype("uint8") + + crop_id = 1 + for l in keep_labels: + + output_path = os.path.join(output_folder, os.path.basename(ff).replace(".h5", f"_crop{crop_id}.h5")) + if os.path.exists(output_path): + print(f"Skipping existing file: {output_path}") + crop_id += 1 + continue + + + mask = labeled == l + bb = np.where(mask) + if not bb[0].size: + continue + bb = tuple(slice(int(b.min()), int(b.max()) + 1) for b in bb) + pad_width = [max(sh - (b.stop - b.start), 0) // 2 for b, sh in zip(bb, min_shape)] + bb = tuple( + slice(max(b.start - pw, 0), min(b.stop + pw, sh)) for b, pw, sh in zip(bb, pad_width, az.shape) + ) + az_crop = az[bb] + raw_crop = raw_full[bb] + + + import napari + v = napari.Viewer() + v.add_image(raw_crop) + v.add_labels(az_crop) + napari.run() + + with h5py.File(output_path, "a") as f: + f.create_dataset("raw", data=raw_crop, compression="lzf") + f.create_dataset("labels/az", data=az_crop, compression="lzf") + crop_id += 1 def main(): # assort_tem() @@ -203,7 +270,9 @@ def main(): # assort_stem() # assort_wichmann() - crop_wichmann() + #crop_wichmann() + + crop_stem() if __name__ == "__main__": diff --git a/scripts/cooper/revision/common.py b/scripts/cooper/revision/common.py index 98d8430..ff5db24 100644 --- a/scripts/cooper/revision/common.py +++ b/scripts/cooper/revision/common.py @@ -3,9 +3,9 @@ # The root folder which contains the new AZ training data. -INPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" +INPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data" # The output folder for AZ predictions. -OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions_new" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_predictions_new" # The names of all datasets for which to run prediction / evaluation. 
# This excludes 'endbulb_of_held_cropped', which is a duplicate of 'endbulb_of_held', @@ -61,9 +61,11 @@ def get_file_names(name, split_folder, split_names): def get_split_folder(version): - assert version in (3, 4, 5) + assert version in (3, 4, 5, 6) if version == 3: split_folder = "splits" + elif version == 6: + split_folder= "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/splits" else: split_folder = "models_az_thin" return split_folder diff --git a/scripts/cooper/revision/merge_az.py b/scripts/cooper/revision/merge_az.py index b9a1a17..452df18 100644 --- a/scripts/cooper/revision/merge_az.py +++ b/scripts/cooper/revision/merge_az.py @@ -57,8 +57,8 @@ def merge_az(name, version, check): napari.run() else: - with h5py.File(gt_path, "a") as f: - f.create_dataset("labels/az_merged", data=az_merged, compression="lzf") + with h5py.File(seg_path, "a") as f: + f.create_dataset(f"labels/az_merged_v{version}", data=az_merged, compression="lzf") def visualize_merge(args): From c41628c435147c900c516e572db88f8ceccc43a8 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 6 Jun 2025 14:32:42 +0200 Subject: [PATCH 07/29] make prediction more flexible --- scripts/cooper/revision/assort_new_az_data.py | 3 +- scripts/cooper/revision/prediction.py | 59 ++++++++++++++----- 2 files changed, 47 insertions(+), 15 deletions(-) diff --git a/scripts/cooper/revision/assort_new_az_data.py b/scripts/cooper/revision/assort_new_az_data.py index a7cd623..cd3a061 100644 --- a/scripts/cooper/revision/assort_new_az_data.py +++ b/scripts/cooper/revision/assort_new_az_data.py @@ -199,7 +199,8 @@ def crop_wichmann(): f.create_dataset("labels/az_thin", data=az, compression="lzf") def crop_stem(): - input_name = "04_hoi_stem_examples_minusSVseg" + #forgot about 06, added later + input_name = "06_hoi_wt_stem750_fm_minusSVseg"#"04_hoi_stem_examples_minusSVseg" output_name = "stem_cropped2" input_folder = os.path.join(STEM_INPUT, input_name) diff --git a/scripts/cooper/revision/prediction.py b/scripts/cooper/revision/prediction.py index ff5c026..85bc09b 100644 --- a/scripts/cooper/revision/prediction.py +++ b/scripts/cooper/revision/prediction.py @@ -1,5 +1,6 @@ import os from glob import glob +import argparse import h5py from synapse_net.inference.inference import get_model, compute_scale_from_voxel_size @@ -20,10 +21,11 @@ "12_chemical_fix_cryopreparation_minusSVseg_corrected": {"x": 1.554, "y": 1.554, "z": 1.554}, "wichmann_withAZ": {"x": 1.748, "y": 1.748, "z": 1.748}, "wichmann_withAZ_rescaled_tomograms": {"x": 1.554, "y": 1.554, "z": 1.554}, + "stem_cropped2_rescaled": {"x": 1.554, "y": 1.554, "z": 1.554}, } -def predict_boundaries(model, path, output_path): +def predict_boundaries(model, path, output_path, visualize=False): output_key = "predictions/boundaries" if os.path.exists(output_path): with h5py.File(output_path, "r") as f: @@ -36,22 +38,30 @@ def predict_boundaries(model, path, output_path): data = f["raw"][:] scale = compute_scale_from_voxel_size(RESOLUTIONS[dataset], "compartments") _, pred = segment_compartments(data, model=model, scale=scale, verbose=False, return_predictions=True) + + if visualize: + import napari + v = napari.Viewer() + v.add_image(data) + v.add_labels(pred) + napari.run() + with h5py.File(output_path, "a") as f: f.create_dataset(output_key, data=pred, compression="lzf") -def predict_all_boundaries(): +def predict_all_boundaries(folder=ROOT, out_path=OUTPUT_ROOT, visualize=False): model = get_model("compartments") - files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) + files = 
sorted(glob(os.path.join(folder, "**/*.h5"), recursive=True)) for path in tqdm(files): folder_name = os.path.basename(os.path.split(path)[0]) - output_folder = os.path.join(OUTPUT_ROOT, folder_name) + output_folder = os.path.join(out_path, folder_name) os.makedirs(output_folder, exist_ok=True) output_path = os.path.join(output_folder, os.path.basename(path)) - predict_boundaries(model, path, output_path) + predict_boundaries(model, path, output_path, visualize) -def predict_vesicles(model, path, output_path): +def predict_vesicles(model, path, output_path, visualize=False): output_key = "predictions/vesicle_seg" if os.path.exists(output_path): with h5py.File(output_path, "r") as f: @@ -59,31 +69,52 @@ def predict_vesicles(model, path, output_path): return dataset = os.path.basename(os.path.split(path)[0]) - if "rescaled" in dataset: - return + #if "rescaled" in dataset: + # return with h5py.File(path, "r") as f: data = f["raw"][:] scale = compute_scale_from_voxel_size(RESOLUTIONS[dataset], "vesicles_3d") seg = segment_vesicles(data, model=model, scale=scale, verbose=False) + + if visualize: + import napari + v = napari.Viewer() + v.add_image(data) + v.add_labels(seg) + napari.run() + with h5py.File(output_path, "a") as f: f.create_dataset(output_key, data=seg, compression="lzf") -def predict_all_vesicles(): +def predict_all_vesicles(folder=ROOT, out_path=OUTPUT_ROOT, visualize=False): model = get_model("vesicles_3d") - files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) + files = sorted(glob(os.path.join(folder, "**/*.h5"), recursive=True)) for path in tqdm(files): folder_name = os.path.basename(os.path.split(path)[0]) - output_folder = os.path.join(OUTPUT_ROOT, folder_name) + output_folder = os.path.join(out_path, folder_name) os.makedirs(output_folder, exist_ok=True) output_path = os.path.join(output_folder, os.path.basename(path)) - predict_vesicles(model, path, output_path) + predict_vesicles(model, path, output_path, visualize) def main(): - # predict_all_boundaries() - predict_all_vesicles() + parser = argparse.ArgumentParser() + + parser.add_argument("-i","--input_folder", type=str) + parser.add_argument("-o","--out_path", type=str) + parser.add_argument("--vesicles", action="store_true") + parser.add_argument("--boundaries", action="store_true") + parser.add_argument("--visualize", action="store_true") + + args = parser.parse_args() + if args.boundaries: + predict_all_boundaries(args.input_folder, args.out_path, args.visualize) + elif args.vesicles: + predict_all_vesicles(args.input_folder, args.out_path, args.visualize) + else: + print("Choose which structure to predict: --vesicles or --boundaries") if __name__ == "__main__": From b769a8e77fb5cf8f6696ebf7d25ffbe9fb302c07 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 6 Jun 2025 15:57:59 +0200 Subject: [PATCH 08/29] make az_thin more flexible --- scripts/cooper/revision/thin_az_gt.py | 29 +++++++++++++++------------ 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/scripts/cooper/revision/thin_az_gt.py b/scripts/cooper/revision/thin_az_gt.py index 158f1ea..e1fd03a 100644 --- a/scripts/cooper/revision/thin_az_gt.py +++ b/scripts/cooper/revision/thin_az_gt.py @@ -11,17 +11,18 @@ OUTPUT_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/AZ_predictions" -def run_az_thinning(): - files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) +def run_az_thinning(folder=ROOT, out_path=OUTPUT_ROOT): + files = sorted(glob(os.path.join(folder, "**/*.h5"), recursive=True)) for ff in tqdm(files): ds_name 
= os.path.basename(os.path.split(ff)[0]) - if not ds_name.startswith(("04", "06")): + '''if not ds_name.startswith(("04", "06")): continue if "rescaled" in ds_name: - continue + continue''' - # print(ff) - ff_out = os.path.join(OUTPUT_ROOT, os.path.relpath(ff, ROOT)) + print(f"ff {ff}") + ff_out = os.path.join(out_path, os.path.relpath(ff, folder)) + print(f"ff_out {ff_out}") with h5py.File(ff_out, "r") as f_out, h5py.File(ff, "r") as f_in: # if "labels/az_thin2" in f_out: # continue @@ -38,19 +39,19 @@ def run_az_thinning(): ) with h5py.File(ff_out, "a") as f: - ds = f.require_dataset("labels/az_thin2", shape=az_thin.shape, dtype=az_thin.dtype, compression="gzip") + ds = f.require_dataset("labels/az_thin", shape=az_thin.shape, dtype=az_thin.dtype, compression="gzip") ds[:] = az_thin -def check_az_thinning(): - files = sorted(glob(os.path.join(ROOT, "**/*.h5"), recursive=True)) +def check_az_thinning(folder=ROOT, out_path=OUTPUT_ROOT): + files = sorted(glob(os.path.join(folder, "**/*.h5"), recursive=True)) for ff in files: - f_out = os.path.join(OUTPUT_ROOT, os.path.relpath(ff, ROOT)) + f_out = os.path.join(out_path, os.path.relpath(ff, folder)) with h5py.File(f_out, "r") as f: if "labels/az_thin" not in f: continue - az_thin = f["labels/az_thin2"][:] + az_thin = f["labels/az_thin"][:] with h5py.File(ff, "r") as f: tomo = f["raw"][:] @@ -63,13 +64,15 @@ def check_az_thinning(): def main(): parser = argparse.ArgumentParser() + parser.add_argument("-i","--input_folder", type=str) + parser.add_argument("-o","--out_path", type=str) parser.add_argument("--check", action="store_true") args = parser.parse_args() if args.check: - check_az_thinning() + check_az_thinning(args.input_folder, args.out_path) else: - run_az_thinning() + run_az_thinning(args.input_folder, args.out_path) if __name__ == "__main__": From d2aa6d8a68d36de5192c1a9cf09227c7a7f40e31 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 6 Jun 2025 21:48:33 +0200 Subject: [PATCH 09/29] empty crop without AZ annotation --- scripts/cooper/revision/assort_new_az_data.py | 81 ++++++++++++++++++- scripts/cooper/revision/remove_az_thin.py | 18 +++++ 2 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 scripts/cooper/revision/remove_az_thin.py diff --git a/scripts/cooper/revision/assort_new_az_data.py b/scripts/cooper/revision/assort_new_az_data.py index cd3a061..5edf06e 100644 --- a/scripts/cooper/revision/assort_new_az_data.py +++ b/scripts/cooper/revision/assort_new_az_data.py @@ -2,6 +2,7 @@ from glob import glob import h5py +import tifffile import numpy as np from tqdm import tqdm from skimage.transform import resize @@ -12,6 +13,7 @@ INTER_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_predictions" OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data" STEM_INPUT="/mnt/lustre-emmy-hdd/usr/u12095/synaptic_reconstruction/for_revison/postprocessed_AZ" +TIF_INPUT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem/" def _check_data(files, label_folder, check_thinned): @@ -264,6 +266,82 @@ def crop_stem(): f.create_dataset("labels/az", data=az_crop, compression="lzf") crop_id += 1 +def get_bounding_box_3d(file_path, raw_volume): + volume = tifffile.imread(file_path) + filename = os.path.basename(file_path) + print(f"filename {filename}") + + # Find the z index where the 2D rectangle is located (non-zero slice) + z_indices = np.where(np.any(volume, axis=(1, 2)))[0] + + if len(z_indices) == 0: + raise ValueError("No non-zero 2D rectangle found in the volume.") + + z_rect = z_indices[0] + + # Get the 2D mask from that slice + 
mask_2d = volume[z_rect] + y_indices, x_indices = np.where(mask_2d) + + if len(x_indices) == 0 or len(y_indices) == 0: + raise ValueError("Found slice has no non-zero pixels.") + + x_min, x_max = x_indices.min(), x_indices.max() + 1 + y_min, y_max = y_indices.min(), y_indices.max() + 1 + + # Determine z_start and z_end based on filename + if filename.endswith("_toend.tif"): + z_start, z_end = z_rect, raw_volume.shape[0] + elif filename.endswith("_tostart.tif"): + z_start, z_end = 0, z_rect + 1 + else: + print("here?") + z_start, z_end = z_rect, z_rect + 1 + + # Return bounding box as slices, usable directly for numpy indexing + return ( + slice(z_start, z_end), + slice(y_min, y_max), + slice(x_min, x_max) + ) + +def neg_crop_stem(): + input_name = "mask_for_neg_example"#"04_hoi_stem_examples_minusSVseg" + output_name = "stem_cropped2" + + input_folder = TIF_INPUT + tif_input_folder = os.path.join(TIF_INPUT, input_name) + output_folder = os.path.join(OUTPUT_ROOT, output_name) + os.makedirs(output_folder, exist_ok=True) + tif_files = glob(os.path.join(tif_input_folder, "*.tif")) + print(f"tif_files {tif_files}") + + for ff in tqdm(tif_files): + input_path = os.path.join(input_folder, os.path.basename(ff).replace('_tostart.tif', '.h5').replace('_toend.tif', '.h5')) + with h5py.File(input_path, "r") as f: + raw_full = f["raw"][:] + + + output_path = os.path.join(output_folder, os.path.basename(ff).replace('_tostart.tif', '_cropped_noAZ.h5').replace('_toend.tif', '_cropped_noAZ.h5')) + if os.path.exists(output_path): + print(f"Skipping existing file: {output_path}") + continue + + + bb = get_bounding_box_3d(ff, raw_full) + print(f"bb {bb}") + + raw_crop = raw_full[bb] + + + import napari + v = napari.Viewer() + v.add_image(raw_crop) + napari.run() + + with h5py.File(output_path, "a") as f: + f.create_dataset("raw", data=raw_crop, compression="lzf") + def main(): # assort_tem() # assort_chemical_fixation() @@ -273,7 +351,8 @@ def main(): # assort_wichmann() #crop_wichmann() - crop_stem() + #crop_stem() + neg_crop_stem() if __name__ == "__main__": diff --git a/scripts/cooper/revision/remove_az_thin.py b/scripts/cooper/revision/remove_az_thin.py new file mode 100644 index 0000000..378b835 --- /dev/null +++ b/scripts/cooper/revision/remove_az_thin.py @@ -0,0 +1,18 @@ +import h5py + +files = [ + "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped2_rescaled/36859_H2_SP_02_rec_2Kb1dawbp_crop_crop1.h5", + "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped2_rescaled/36859_H2_SP_07_rec_2Kb1dawbp_crop_crop1.h5" +] + +for file in files: + with h5py.File(file, "r+") as f: + # Load the replacement data + gt = f["labels/az"][:] + + # Delete the existing dataset if it exists + if "labels/az_thin" in f: + del f["labels/az_thin"] + + # Recreate the dataset with the new data + f.create_dataset("labels/az_thin", data=gt) \ No newline at end of file From 59c3534904948c5fa741c15a131686c06962e01c Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Mon, 9 Jun 2025 20:06:59 +0200 Subject: [PATCH 10/29] prepare for training AZ --- run_sbatch_revision.sbatch | 8 +++--- scripts/cooper/revision/common.py | 2 +- scripts/cooper/revision/merge_az.py | 20 +++++++++++--- scripts/cooper/revision/remove_az_thin.py | 32 +++++++++++++++++++++-- scripts/cooper/revision/train_az.py | 19 +++++++++----- 5 files changed, 65 insertions(+), 16 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 5e532a0..94844f0 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -2,10 
+2,12 @@ #SBATCH -c 4 #4 #8 #SBATCH --mem 256G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 24:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 +#SBATCH -t 48:00:00 #6:00:00 #48:00:00 +#SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out -#SBATCH -A nim00007 #SBATCH --constraint 80gb +#SBATCH -A nim00007 +#SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/merge_az.py -v 6 \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/train_az.py -k az_merged_v6 \ No newline at end of file diff --git a/scripts/cooper/revision/common.py b/scripts/cooper/revision/common.py index ff5db24..7eb2fad 100644 --- a/scripts/cooper/revision/common.py +++ b/scripts/cooper/revision/common.py @@ -5,7 +5,7 @@ # The root folder which contains the new AZ training data. INPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data" # The output folder for AZ predictions. -OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_predictions_new" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_prediction_new_copy" # The names of all datasets for which to run prediction / evaluation. # This excludes 'endbulb_of_held_cropped', which is a duplicate of 'endbulb_of_held', diff --git a/scripts/cooper/revision/merge_az.py b/scripts/cooper/revision/merge_az.py index 452df18..e113971 100644 --- a/scripts/cooper/revision/merge_az.py +++ b/scripts/cooper/revision/merge_az.py @@ -1,5 +1,6 @@ import argparse import os +from glob import glob import h5py import napari @@ -19,13 +20,19 @@ # STEM CROPPED IS OFTEN TOO SMALL! 
def merge_az(name, version, check): split_folder = get_split_folder(version) - file_names = get_file_names(name, split_folder, split_names=["train", "val", "test"]) + + if name == "stem_cropped": + file_paths = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped", "*.h5")) + file_names = [os.path.basename(path) for path in file_paths] + else: + file_names = get_file_names(name, split_folder, split_names=["train", "val", "test"]) seg_paths, gt_paths = get_paths(name, file_names) for seg_path, gt_path in zip(seg_paths, gt_paths): with h5py.File(gt_path, "r") as f: - if not check and ("labels/az_merged" in f): + #if not check and ("labels/az_merged" in f): + if f"labels/az_merged_v{version}" in f : continue raw = f["raw"][:] gt = f["labels/az"][:] @@ -56,9 +63,16 @@ def merge_az(name, version, check): v.title = f"{name}/{fname}" napari.run() + print(f"gt_path {gt_path}") + with h5py.File(gt_path, "a") as f: + f.create_dataset(f"labels/az_merged_v{version}", data=az_merged, compression="lzf") + else: - with h5py.File(seg_path, "a") as f: + print(f"gt_path {gt_path}") + with h5py.File(gt_path, "a") as f: f.create_dataset(f"labels/az_merged_v{version}", data=az_merged, compression="lzf") + '''with h5py.File(seg_path, "a") as f: + f.create_dataset(f"labels/az_merged_v{version}", data=az_merged, compression="lzf")''' def visualize_merge(args): diff --git a/scripts/cooper/revision/remove_az_thin.py b/scripts/cooper/revision/remove_az_thin.py index 378b835..0045835 100644 --- a/scripts/cooper/revision/remove_az_thin.py +++ b/scripts/cooper/revision/remove_az_thin.py @@ -1,4 +1,4 @@ -import h5py +'''import h5py files = [ "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped2_rescaled/36859_H2_SP_02_rec_2Kb1dawbp_crop_crop1.h5", @@ -15,4 +15,32 @@ del f["labels/az_thin"] # Recreate the dataset with the new data - f.create_dataset("labels/az_thin", data=gt) \ No newline at end of file + f.create_dataset("labels/az_thin", data=gt) +''' +import h5py +import numpy as np +import os +from glob import glob + +folder = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped/" + +# List of file names to process +file_names = [ + "36859_H2_SP_01_rec_2Kb1dawbp_crop_cropped_noAZ.h5", + "36859_H2_SP_02_rec_2Kb1dawbp_crop_cropped_noAZ.h5", + "36859_H2_SP_03_rec_2Kb1dawbp_crop_cropped_noAZ.h5", + "36859_H3_SP_05_rec_2kb1dawbp_crop_cropped_noAZ.h5", + "36859_H3_SP_07_rec_2kb1dawbp_crop_cropped_noAZ.h5", + "36859_H3_SP_10_rec_2kb1dawbp_crop_cropped_noAZ.h5" +] + +file_paths = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/endbulb_of_held_cropped", "*.h5")) + +for fname in file_paths: + #file_path = os.path.join(folder, fname) + + with h5py.File(fname, "a") as f: + az_merged = f["/labels/az_merged"][:] + f.create_dataset("/labels/az_merged_v6", data=az_merged, compression="lzf") + + print(f"Updated file: {fname}") diff --git a/scripts/cooper/revision/train_az.py b/scripts/cooper/revision/train_az.py index dd24d5c..d5be4ae 100644 --- a/scripts/cooper/revision/train_az.py +++ b/scripts/cooper/revision/train_az.py @@ -9,12 +9,12 @@ from synapse_net.training import supervised_training, AZDistanceLabelTransform -TRAIN_ROOT = "/mnt/ceph-hdd/cold_store/projects/nim00007/new_AZ_train_data" +TRAIN_ROOT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data" OUTPUT_ROOT = "./models_az_thin" def _require_train_val_test_split(datasets): - train_ratio, val_ratio, test_ratio = 0.70, 0.1, 0.2 + train_ratio, val_ratio, test_ratio = 0.60, 0.2, 0.2 def _train_val_test_split(names): train, 
test = train_test_split(names, test_size=1 - train_ratio, shuffle=True) @@ -87,9 +87,14 @@ def train(key, ignore_label=None, use_distances=False, training_2D=False, testse os.makedirs(OUTPUT_ROOT, exist_ok=True) - datasets = ["tem", "chemical_fixation", "stem", "stem_cropped", "endbulb_of_held", "endbulb_of_held_cropped"] - train_paths = get_paths("train", datasets=datasets, testset=testset) - val_paths = get_paths("val", datasets=datasets, testset=testset) + datasets_with_testset_true = ["tem", "chemical_fixation", "stem", "endbulb_of_held"] + datasets_with_testset_false = ["stem_cropped", "endbulb_of_held_cropped"] + + train_paths = get_paths("train", datasets=datasets_with_testset_true, testset=True) + val_paths = get_paths("val", datasets=datasets_with_testset_true, testset=True) + + train_paths += get_paths("train", datasets=datasets_with_testset_false, testset=False) + val_paths += get_paths("val", datasets=datasets_with_testset_false, testset=False) print("Start training with:") print(len(train_paths), "tomograms for training") @@ -97,7 +102,7 @@ def train(key, ignore_label=None, use_distances=False, training_2D=False, testse # patch_shape = [48, 256, 256] patch_shape = [48, 384, 384] - model_name = "v6" + model_name = "v7" # checking for 2D training if training_2D: @@ -121,7 +126,7 @@ def train(key, ignore_label=None, use_distances=False, training_2D=False, testse sampler=torch_em.data.sampler.MinInstanceSampler(min_num_instances=1, p_reject=0.85), n_samples_train=None, n_samples_val=100, check=check, - save_root=OUTPUT_ROOT, + save_root="/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/models/ConstantinAZ", n_iterations=int(2e5), ignore_label=ignore_label, label_transform=label_transform, From b1c8feb8c7c5e19927fce78f0dd046e33c7e6553 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Wed, 11 Jun 2025 13:06:06 +0200 Subject: [PATCH 11/29] make training more usable --- models_az_thin/split-chemical_fixation.json | 1 + models_az_thin/split-endbulb_of_held.json | 1 + .../split-endbulb_of_held_cropped.json | 1 + models_az_thin/split-stem.json | 1 + models_az_thin/split-stem_cropped.json | 1 + models_az_thin/split-tem.json | 1 + run_sbatch_revision.sbatch | 5 ++- scripts/cooper/revision/az_prediction.py | 31 ++++++++++++--- scripts/cooper/revision/common.py | 6 +-- scripts/cooper/revision/remove_az_thin.py | 38 +++++++++---------- 10 files changed, 55 insertions(+), 31 deletions(-) create mode 100644 models_az_thin/split-chemical_fixation.json create mode 100644 models_az_thin/split-endbulb_of_held.json create mode 100644 models_az_thin/split-endbulb_of_held_cropped.json create mode 100644 models_az_thin/split-stem.json create mode 100644 models_az_thin/split-stem_cropped.json create mode 100644 models_az_thin/split-tem.json diff --git a/models_az_thin/split-chemical_fixation.json b/models_az_thin/split-chemical_fixation.json new file mode 100644 index 0000000..c700fc7 --- /dev/null +++ b/models_az_thin/split-chemical_fixation.json @@ -0,0 +1 @@ +{"train": ["20180305_06_MS.h5", "20171113_01_MS.h5", "20171113_05_MS.h5", "20171113_04_MS.h5", "20180305_05_MS.h5", "20171113_3.2_MS.h5", "20180305_09_MS.h5", "20180305_07_MS.h5", "20180305_04_MS.h5", "20171006_2_2_MS.h5", "20171006_05_MS.h5", "20180305_10_MS.h5", "20171006_03_MS.h5", "20171006_2_3_MS.h5"], "val": ["20171013_01_MS.h5", "20171013_1.2_MS.h5", "20171006_3_2_MS.h5", "20180305_08_MS.h5", "20180305_02_MS.h5"], "test": ["20180305_03_MS.h5", "20171113_06_MS.h5", "20171113_07_MS.h5", "20171113_02_MS.h5", "20180305_01_MS.h5"]} \ No newline at end 
of file diff --git a/models_az_thin/split-endbulb_of_held.json b/models_az_thin/split-endbulb_of_held.json new file mode 100644 index 0000000..06e93b5 --- /dev/null +++ b/models_az_thin/split-endbulb_of_held.json @@ -0,0 +1 @@ +{"train": ["M2_eb7_model.h5", "Wt22_eb1_10K_model2.h5", "WT40_eb5_model.h5", "KO8_eb4_model.h5", "M3_eb4_model.h5", "WT20_eb11_model2.h5", "WT13_syn6_model2.h5", "WT20_eb7_AZ1_model2.h5", "M2_eb10_model.h5", "WT11_syn1_model2.h5", "M8_eb8_model.h5", "WT22_eb10_model2.h5", "WT13_eb3_model2.h5", "WT20_syn7_model2.h5", "M7_eb6_model.h5", "WT20_eb7_AZ2_model2.h5", "WT13_syn5_model2.h5", "WT13_syn9_model2.h5", "M2_eb3_model.h5", "WT20_syn3_model2.h5", "WT20_syn1_model2.h5", "WT21_syn4_model2.h5", "WT20_eb4_model2.h5", "M2_eb9_model.h5", "M7_eb3_model.h5", "WT21_eb5_model2.h5", "WT22_eb6_AZ1_model2.h5", "WT13_syn10_model2.h5", "WT19_syn6_model2.h5", "M2_eb2_AZ2_model.h5", "WT41_eb4_model.h5", "WT13_syn4_model2.h5", "WT40_eb10_model.h5", "M1_eb8_model.h5", "WT19_syn9_model2.h5", "WT22_eb5_model2.h5", "WT39_eb7_model.h5", "KO9_eb13_model.h5", "WT39_eb5_model.h5", "WT11_eb5_model2.h5", "M7_eb15_model.h5", "M7_eb2_model.h5", "M7_eb9_model.h5", "WT22_syn7_model2.h5", "M1_eb7_model.h5", "WT11_syn6_model2.h5", "M7_eb5_model.h5", "WT22_syn5_model2.h5", "WT21_eb3_model2.h5", "WT19_syn3_model2.h5", "WT22_syn9_model2.h5", "M5_eb3_model.h5", "WT22_syn6_model2.h5", "WT39_eb9_model.h5", "WT13_eb5_model2.h5", "WT20_eb9_model2.h5", "WT20_eb2_AZ2_12K_model2.h5", "M7_eb12_model.h5", "M1_eb1_model.h5", "WT40_eb1_model.h5", "M2_eb6_model.h5", "M8_eb14_model.h5", "KO9_eb1_model.h5", "WT20_eb1_AZ2_12K_model2.h5", "M7_eb7_model.h5", "M8_eb9_model.h5", "WT40_eb11_model.h5", "M1_eb3_model.h5", "M8_eb12_model.h5", "M7_eb11_model.h5", "KO9_eb10_model.h5", "KO9_eb4_model.h5", "KO8_eb2_model.h5", "WT39_eb4_model.h5", "M1_eb5_model.h5", "M10_eb9_model.h5"], "val": ["WT41_eb6_model.h5", "Wt22_syn2_10K_model2.h5", "M1_eb6_model.h5", "KO9_eb11_model.h5", "WT20_eb5_model2.h5", "WT16_syn2_model2.h5", "KO9_eb9_model.h5", "WT21_eb7_model2.h5", "M7_eb4_model.h5", "M1_eb9_model.h5", "WT40_eb8_model.h5", "M2_eb5_model.h5", "WT13_eb4_model2.h5", "WT39_eb10_model.h5", "WT39_eb8_model.h5", "WT20_syn6_model2.h5", "WT13_syn11_model2.h5", "WT41_eb2_model.h5", "WT39_eb2_model.h5", "M2_eb14_model.h5", "M2_eb8_model.h5", "WT20_syn5_model2.h5", "M10_eb12_model.h5", "M5_eb1_model.h5", "KO9_eb12_model.h5"], "test": ["WT11_eb2_model2.h5", "WT21_syn5_model2.h5", "WT20_eb8_AZ2_model2.h5", "M10_eb8_model.h5", "WT21_eb9_model2.h5", "M2_eb1_model.h5", "WT22_syn10_model2.h5", "WT11_syn3_model2.h5", "WT11_eb1_model2.h5", "WT13_syn7_model2.h5", "WT21_eb4_model2.h5", "WT40_eb9_model.h5", "M6_eb2_model.h5", "WT22_syn1_10K_model2.h5", "WT19_syn1_model2.h5", "M7_eb10_model.h5", "KO9_eb6_model.h5", "WT11_eb7_model2.h5", "WT40_eb3_model.h5", "KO9_eb14_model.h5", "WT20_syn2_model2.h5", "WT22_eb9_model2.h5", "WT13_syn1_model2.h5", "WT39_eb3_model.h5", "WT21_syn3_model2.h5", "M8_eb6_model.h5"]} \ No newline at end of file diff --git a/models_az_thin/split-endbulb_of_held_cropped.json b/models_az_thin/split-endbulb_of_held_cropped.json new file mode 100644 index 0000000..3aef9c5 --- /dev/null +++ b/models_az_thin/split-endbulb_of_held_cropped.json @@ -0,0 +1 @@ +{"train": ["Wt22_eb1_10K_model2_cropped.h5", "WT22_eb6_AZ1_model2_cropped.h5", "WT21_eb3_model2_cropped.h5", "M7_eb11_model_cropped.h5", "WT20_syn3_model2_cropped.h5", "WT13_syn7_model2_cropped.h5", "WT13_eb4_model2_cropped.h5", "WT39_eb2_model_cropped.h5", 
"WT20_syn1_model2_cropped.h5", "WT40_eb3_model_cropped.h5", "WT20_eb7_AZ1_model2_cropped.h5", "WT21_syn5_model2_cropped.h5", "WT13_syn4_model2_cropped.h5", "KO9_eb13_model_cropped.h5", "M10_eb9_model_cropped.h5", "WT20_syn5_model2_cropped.h5", "M10_eb8_model_cropped.h5", "M7_eb12_model_cropped.h5", "WT39_eb7_model_cropped.h5", "WT20_eb4_model2_cropped.h5", "M1_eb8_model_cropped.h5", "WT40_eb11_model_cropped.h5", "KO9_eb14_model_cropped.h5", "WT39_eb10_model_cropped.h5", "KO9_eb6_model_cropped.h5", "WT13_syn1_model2_cropped.h5", "WT13_syn9_model2_cropped.h5", "WT13_eb3_model2_cropped.h5", "WT41_eb4_model_cropped.h5", "WT40_eb5_model_cropped.h5", "WT11_eb5_model2_cropped.h5", "WT22_eb9_model2_cropped.h5", "M2_eb2_AZ2_model_cropped.h5", "WT41_eb6_model_cropped.h5", "WT13_eb5_model2_cropped.h5", "WT13_syn11_model2_cropped.h5", "WT22_syn5_model2_cropped.h5", "WT20_syn6_model2_cropped.h5", "WT22_syn9_model2_cropped.h5", "WT11_syn6_model2_cropped.h5", "M8_eb12_model_cropped.h5", "WT39_eb4_model_cropped.h5", "M8_eb8_model_cropped.h5", "WT21_eb9_model2_cropped.h5", "WT39_eb3_model_cropped.h5", "M2_eb1_model_cropped.h5", "M2_eb9_model_cropped.h5", "WT39_eb5_model_cropped.h5", "WT22_eb10_model2_cropped.h5", "M7_eb4_model_cropped.h5", "WT20_eb7_AZ2_model2_cropped.h5", "WT40_eb10_model_cropped.h5", "WT19_syn9_model2_cropped.h5", "WT22_syn6_model2_cropped.h5", "WT11_eb1_model2_cropped.h5", "M10_eb12_model_cropped.h5", "KO9_eb11_model_cropped.h5", "WT19_syn6_model2_cropped.h5", "M7_eb5_model_cropped.h5", "WT39_eb9_model_cropped.h5", "M2_eb14_model_cropped.h5", "Wt22_syn2_10K_model2_cropped.h5", "WT20_syn2_model2_cropped.h5", "M7_eb9_model_cropped.h5", "M5_eb3_model_cropped.h5", "WT22_syn1_10K_model2_cropped.h5", "M1_eb7_model_cropped.h5", "M1_eb6_model_cropped.h5", "M7_eb7_model_cropped.h5", "WT21_eb7_model2_cropped.h5", "M2_eb8_model_cropped.h5", "WT20_eb1_AZ2_12K_model2_cropped.h5", "WT20_eb9_model2_cropped.h5", "WT41_eb2_model_cropped.h5", "WT20_eb5_model2_cropped.h5", "KO9_eb12_model_cropped.h5", "M3_eb4_model_cropped.h5", "WT19_syn1_model2_cropped.h5", "M2_eb3_model_cropped.h5", "KO9_eb9_model_cropped.h5", "WT13_syn5_model2_cropped.h5", "M1_eb1_model_cropped.h5", "M2_eb5_model_cropped.h5", "WT20_eb11_model2_cropped.h5", "WT13_syn6_model2_cropped.h5", "KO9_eb10_model_cropped.h5", "M2_eb7_model_cropped.h5", "M1_eb5_model_cropped.h5", "WT13_syn10_model2_cropped.h5", "WT22_eb5_model2_cropped.h5", "KO8_eb2_model_cropped.h5", "M2_eb10_model_cropped.h5", "KO9_eb1_model_cropped.h5", "M7_eb10_model_cropped.h5", "WT21_syn3_model2_cropped.h5", "WT40_eb8_model_cropped.h5"], "val": ["WT20_eb2_AZ2_12K_model2_cropped.h5", "WT19_syn3_model2_cropped.h5", "M6_eb2_model_cropped.h5", "M8_eb9_model_cropped.h5", "KO8_eb4_model_cropped.h5", "WT21_eb4_model2_cropped.h5", "WT22_syn10_model2_cropped.h5", "WT20_syn7_model2_cropped.h5", "M7_eb3_model_cropped.h5", "M7_eb6_model_cropped.h5", "WT16_syn2_model2_cropped.h5", "WT39_eb8_model_cropped.h5", "KO9_eb4_model_cropped.h5", "M1_eb9_model_cropped.h5", "M2_eb6_model_cropped.h5", "WT11_syn1_model2_cropped.h5", "WT11_eb7_model2_cropped.h5", "M1_eb3_model_cropped.h5", "WT21_eb5_model2_cropped.h5", "WT11_syn3_model2_cropped.h5", "M8_eb6_model_cropped.h5", "WT21_syn4_model2_cropped.h5", "M7_eb15_model_cropped.h5", "WT22_syn7_model2_cropped.h5"]} \ No newline at end of file diff --git a/models_az_thin/split-stem.json b/models_az_thin/split-stem.json new file mode 100644 index 0000000..ecaedc3 --- /dev/null +++ b/models_az_thin/split-stem.json @@ -0,0 +1 @@ +{"train": 
["36859_H3_SP_10_rec_2kb1dawbp_crop.h5", "36859_H3_SP_01_rec_2kb1dawbp_crop.h5", "36859_H2_SP_02_rec_2Kb1dawbp_crop.h5", "36859_H2_SP_03_rec_2Kb1dawbp_crop.h5"], "val": ["36859_H3_SP_05_rec_2kb1dawbp_crop.h5", "36859_H2_SP_01_rec_2Kb1dawbp_crop.h5"], "test": ["36859_H3_SP_07_rec_2kb1dawbp_crop.h5", "36859_J1_STEM750_66K_SP_03_rec_2kb1dawbp_crop.h5"]} \ No newline at end of file diff --git a/models_az_thin/split-stem_cropped.json b/models_az_thin/split-stem_cropped.json new file mode 100644 index 0000000..a9031ec --- /dev/null +++ b/models_az_thin/split-stem_cropped.json @@ -0,0 +1 @@ +{"train": ["36859_J1_66K_TS_CA3_PS_23_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_15_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_66K_TS_CA3_PS_43_rec_2Kb1dawbp_crop_crop1.h5", "36859_H2_SP_01_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_10_rec_2kb1dawbp_crop_crop1.h5", "36859_H3_SP_05_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_07_rec_2kb1dawbp_crop_crop2.h5", "36859_H2_SP_01_rec_2Kb1dawbp_crop_crop2.h5", "36859_J1_STEM750_66K_SP_08_rec_2kb1dawbp_crop_crop3.h5", "36859_H3_SP_07_rec_2kb1dawbp_crop_crop2.h5", "36859_H3_SP_07_rec_2kb1dawbp_crop_cropped_noAZ.h5", "36859_J1_STEM750_66K_SP_03_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_06_rec_2kb1dawbp_crop_crop1.h5", "36859_H2_SP_03_rec_2Kb1dawbp_crop_crop1.h5", "36859_H3_SP_07_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_02_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_17_rec_2kb1dawbp_crop_crop1.h5", "36859_H2_SP_11_rec_2Kb1dawbp_crop_crop1.h5", "36859_H2_SP_04_rec_2Kb1dawbp_crop_crop1.h5", "36859_H2_SP_04_rec_2Kb1dawbp_crop_crop2.h5", "36859_H3_SP_10_rec_2kb1dawbp_crop_cropped_noAZ.h5", "36859_H2_SP_06_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_66K_TS_CA3_PS_26_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_14_rec_2kb1dawbp_crop_crop2.h5", "36859_H3_SP_01_rec_2kb1dawbp_crop_crop1.h5", "36859_H2_SP_01_rec_2Kb1dawbp_crop_crop3.h5", "36859_J1_STEM750_66K_SP_13_rec_2kb1dawbp_crop_crop2.h5", "36859_H2_SP_10_rec_crop_crop1.h5", "36859_H2_SP_02_rec_2Kb1dawbp_crop_cropped_noAZ.h5", "36859_J1_66K_TS_CA3_PS_32_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_01_rec_2kb1dawbp_crop_crop1.h5", "36859_H2_SP_03_rec_2Kb1dawbp_crop_cropped_noAZ.h5", "36859_H3_SP_10_rec_2kb1dawbp_crop_crop1.h5", "36859_H2_SP_02_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_08_rec_2kb1dawbp_crop_crop2.h5", "36859_J1_STEM750_66K_SP_03_rec_2kb1dawbp_crop_crop2.h5", "36859_H3_SP_05_rec_2kb1dawbp_crop_crop2.h5", "36859_H2_SP_07_rec_2Kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_13_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_14_rec_2kb1dawbp_crop_crop1.h5"], "val": ["36859_J1_STEM750_66K_SP_03_rec_2kb1dawbp_crop_crop3.h5", "36859_H2_SP_01_rec_2Kb1dawbp_crop_cropped_noAZ.h5", "36859_J1_STEM750_66K_SP_08_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_12_rec_2kb1dawbp_crop_crop1.h5", "36859_H3_SP_05_rec_2kb1dawbp_crop_cropped_noAZ.h5", "36859_H3_SP_09_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_11_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_07_rec_2kb1dawbp_crop_crop1.h5", "36859_J1_STEM750_66K_SP_12_rec_2kb1dawbp_crop_crop2.h5", "36859_J1_STEM750_66K_SP_10_rec_2kb1dawbp_crop_crop2.h5"]} \ No newline at end of file diff --git a/models_az_thin/split-tem.json b/models_az_thin/split-tem.json new file mode 100644 index 0000000..f5982cf --- /dev/null +++ b/models_az_thin/split-tem.json @@ -0,0 +1 @@ +{"train": ["20190805_09002_B4_SC_08_SP.h5", "WT_MF_DIV28_3.2_MS_09204_K1.h5", 
"20190524_09204_F4_SC_09_SP.h5", "20190524_09204_F4_SC_06_SP.h5", "WT_MF_DIV14_6.2_MS_E2_09175_CA3_2.h5", "WT_MF_DIV14_07_MS_C2_09175_CA3.h5", "WT_Unt_SC_09175_C4_04_DIV15_mtk_04.h5", "M13_CTRL_22723_O3_06_DIV29_06_MS.h5", "WT_Unt_SC_09175_E2_04_DIV14_mtk_03.h5", "WT_MF_DIV14_01_MS_E2_09175_CA3.h5", "M13_CTRL_22723_J1_03_DIV29_03_MS.h5", "20190807_23032_D4_SC_03_SP.h5", "M13_DKO_09201_U1_05_DIV31_05_MS.h5", "WT_Unt_SC_09175_E4_02_DIV14_mtk_02.h5", "20190805_09002_B4_SC_01_SP.h5", "WT_Unt_SC_09175_C4_08_DIV15_mtk_08.h5", "WT_MF_DIV14_01_MS_B2_09175_CA3.h5", "20190805_09002_B4_SC_10_SP.h5", "WT_Unt_SC_09175_D4_01_DIV14_mtk_01.h5", "WT_Unt_SC_09175_E2_03_DIV14_mtk_03.h5", "WT_Unt_SC_09175_C4_02_DIV15_mtk_02.h5", "20190805_09002_B4_SC_12_SP.h5", "20190805_09002_B4_SC_02_SP.h5", "M13_DKO_09201_U1_04_DIV31_04_MS.h5", "WT_MF_DIV14_02_MS_D2_09175_CA3.h5", "WT_Unt_SC_09175_E2_05_DIV14_mtk_05.h5", "20190807_23032_D4_SC_07_SP.h5", "M13_CTRL_09201_S2_03_DIV31_03_MS.h5", "WT_MF_DIV14_01.2_MS_D1_09175_CA3.h5", "20190807_23032_D4_SC_01_SP.h5", "WT_Unt_SC_09175_E4_03_DIV14_mtk_03.h5", "WT_MF_DIV28_3.3_MS_09204_K1.h5", "WT_Unt_SC_09175_D5_04_DIV14_mtk_04.h5", "20190524_09204_F4_SC_05_SP.h5", "M13_DKO_22723_A1_03_DIV29_03_MS.h5", "20190805_09002_B4_SC_7.2_SP.h5", "M13_DKO_22723_A4_08_DIV29_08_MS.h5", "WT_Unt_SC_09175_E4_05_DIV14_mtk_05.h5", "M13_CTRL_09201_S1_01_DIV31_01.h5", "WT_Unt_SC_09175_C4_03_DIV15_mtk_03.h5", "WT_MF_DIV28_03_MS_09204_M1.h5", "M13_DKO_09201_U1_5.2_DIV31_5.2_MS.h5", "WT_Unt_SC_09175_E2_01_DIV14_mtk_01.h5", "WT_MF_DIV28_2.3_MS_09002_B1.h5", "WT_MF_DIV28_01_MS_09204_F1.h5", "M13_CTRL_09201_S2_05_DIV31_05_MS.h5", "20190807_23032_D4_SC_04_SP.h5", "M13_DKO_09201_Q1_04_DIV31_04_MS.h5", "WT_MF_DIV28_04_MS_09204_M1.h5", "WT_Unt_SC_09175_D4_04_DIV14_mtk_04.h5", "WT_MF_DIV14_02_MS_B2_09175_CA3.h5", "20190807_23032_D4_SC_10_SP.h5", "M13_DKO_09201_U1_03_DIV31_03_MS.h5", "WT_MF_DIV14_01_MS_D1_09175_CA3.h5", "WT_MF_DIV28_05_MS_09204_F1.h5", "WT_Unt_SC_09175_D4_05_DIV14_mtk_05.h5", "WT_MF_DIV14_01_MS_D2_09175_CA3.h5", "WT_MF_DIV28_03_MS_09204_K1.h5", "M13_DKO_09201_O1_01_DIV31_01_MS.h5", "WT_MF_DIV14_04_MS_C2_09175_CA3.h5", "20190524_09204_F4_SC_04_SP.h5", "WT_MF_DIV14_02_MS_E1_09175_CA3.h5", "20190807_23032_D4_SC_09_SP.h5", "M13_CTRL_22723_O2_04_DIV29_04_MS.h5", "WT_MF_DIV28_03_MS_09204_F1.h5", "WT_MF_DIV28_01_MS_09002_B1.h5", "20190524_09204_F4_SC_03_SP.h5", "M13_DKO_22723_A1_4.2_DIV29_4.2_MS.h5", "M13_DKO_23037_K1_01_DIV29_01_MS.h5", "M13_DKO_23037_K1_1.2_DIV29_1.2_MS.h5", "20190524_09204_F4_SC_11_SP.h5", "WT_MF_DIV28_1.2_MS_09204_F1.h5", "M13_DKO_09201_O3_06_DIV31_06_MS.h5", "WT_Unt_SC_09175_C4_01_DIV15_mtk_01.h5", "WT_MF_DIV14_3.2_MS_D2_09175_CA3.h5", "WT_MF_DIV28_07_MS_09002_B2.h5", "WT_MF_DIV28_06_MS_09204_F1.h5", "M13_CTRL_09201_S2_04_DIV31_04_MS.h5", "WT_MF_DIV28_1.2_MS_09002_B1.h5", "WT_MF_DIV28_3.4_MS_09204_K1.h5", "20190524_09204_F4_SC_10_SP.h5", "20190805_09002_B4_SC_11_SP.h5", "20190524_09204_F4_SC_01_SP.h5", "WT_Unt_SC_09175_C4_05_DIV15_mtk_05.h5", "WT_MF_DIV28_04_MS_09002_B2.h5"], "val": ["WT_MF_DIV28_02_MS_09002_B1.h5", "M13_CTRL_09201_S2_06_DIV31_06_MS.h5", "20190805_09002_B4_SC_05_SP.h5", "WT_MF_DIV14_04_MS_B2_09175_CA3.h5", "M13_CTRL_22723_O3_07_DIV29_07_MS.h5", "WT_MF_DIV14_03.2_MS_D1_09175_CA3.h5", "WT_MF_DIV14_04_MS_E1_09175_CA3.h5", "20190524_09204_F4_SC_07_SP.h5", "WT_MF_DIV28_08_MS_09204_F2.h5", "WT_MF_DIV14_06_MS_C2_09175_CA3.h5", "WT_Unt_SC_09175_B5_03_DIV16_mtk_05.h5", "WT_MF_DIV28_2.2_MS_09002_B1.h5", "WT_Unt_SC_09175_D4_02_DIV14_mtk_02.h5", 
"WT_MF_DIV28_04_MS_09204_F1.h5", "20190805_09002_B4_SC_04_SP.h5", "M13_DKO_09201_Q1_01_DIV31_01_MS.h5", "WT_MF_DIV28_02_MS_09204_M1.h5", "M13_CTRL_22723_O2_05_DIV29_05_MS_.h5", "20190807_23032_D4_SC_08_SP.h5", "WT_Unt_SC_09175_D5_05_DIV14_mtk_05.h5", "WT_Unt_SC_09175_D5_02_DIV14_mtk_02.h5", "WT_MF_DIV14_05_MS_B2_09175_CA3.h5", "WT_Unt_SC_09175_D5_01_DIV14_mtk_01.h5", "20190524_09204_F4_SC_02_SP.h5", "WT_Unt_SC_09175_E4_04_DIV14_mtk_04.h5", "M13_DKO_09201_O3_6.2_DIV31_6.2_MS.h5", "M13_DKO_22723_A1_05_DIV29_05_MS.h5", "WT_MF_DIV14_04_MS_E2_09175_CA3_2.h5", "WT_MF_DIV14_03_MS_C2_09175_CA3.h5"], "test": ["WT_MF_DIV28_4.2_MS_09204_M1.h5", "M13_DKO_09201_Q1_03_DIV31_03_MS.h5", "WT_MF_DIV14_3.1_MS_D2_09175_CA3.h5", "WT_Unt_SC_09175_B5_03_DIV16_mtk_04.h5", "WT_MF_DIV28_10_MS_09002_B3.h5", "WT_MF_DIV14_06_MS_E2_09175_CA3_2.h5", "WT_MF_DIV14_03.3_MS_D1_09175_CA3.h5", "20190805_09002_B4_SC_09_SP.h5", "20190805_09002_B4_SC_7.1_SP.h5", "WT_MF_DIV14_03.1_MS_D1_09175_CA3.h5", "WT_Unt_SC_09175_E4_01_DIV14_mtk_01.h5", "WT_MF_DIV28_09_MS_09002_B3.h5", "WT_Unt_SC_09175_B5_01_DIV16_mtk_01.h5", "M13_DKO_22723_A1_06_DIV29_06_MS.h5", "WT_MF_DIV14_01_MS_orig_C2_09175_CA3.h5", "20190807_23032_D4_SC_05_SP.h5", "M13_DKO_22723_A4_10_DIV29_10_MS.h5", "WT_MF_DIV14_05_MS_C2_09175_CA3.h5", "WT_MF_DIV28_07_MS_09204_F2.h5", "WT_Unt_SC_09175_D5_03_DIV14_mtk_03.h5", "M13_CTRL_22723_O2_05_DIV29_5.2.h5", "WT_MF_DIV14_02_MS_C2_09175_CA3.h5", "WT_Unt_SC_09175_B5_03_DIV16_mtk_03.h5", "WT_MF_DIV14_03_MS_B2_09175_CA3.h5", "WT_Unt_SC_09175_B5_02_DIV16_mtk_02.h5", "WT_Unt_SC_09175_C4_06_DIV15_mtk_06.h5", "WT_MF_DIV14_05_MS_E2_09175_CA3_2.h5", "M13_CTRL_09201_S2_02_DIV31_02_MS.h5", "WT_MF_DIV28_08_MS_09002_B3.h5"]} \ No newline at end of file diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 94844f0..1e12f90 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -2,7 +2,7 @@ #SBATCH -c 4 #4 #8 #SBATCH --mem 256G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 48:00:00 #6:00:00 #48:00:00 +#SBATCH -t 12:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out #SBATCH -A nim00007 @@ -10,4 +10,5 @@ source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/train_az.py -k az_merged_v6 \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/az_prediction.py -v 7 --names 20241019_Tomo-eval_MF_Synapse 20241019_Tomo-eval_PS_Synapse \ + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/ground_truth/04Dataset_for_vesicle_eval \ No newline at end of file diff --git a/scripts/cooper/revision/az_prediction.py b/scripts/cooper/revision/az_prediction.py index 747a7ea..2ad8c89 100644 --- a/scripts/cooper/revision/az_prediction.py +++ b/scripts/cooper/revision/az_prediction.py @@ -1,5 +1,6 @@ import argparse import os +from glob import glob import h5py from synapse_net.inference.active_zone import segment_active_zone @@ -9,22 +10,28 @@ from common import get_file_names, get_split_folder, ALL_NAMES, INPUT_ROOT, OUTPUT_ROOT -def run_prediction(model, name, split_folder, version, split_names): - file_names = get_file_names(name, split_folder, split_names=split_names) +def run_prediction(model, name, split_folder, version, split_names, input_path): + if input_path: + file_names =glob(os.path.join(input_path, name, "*.h5")) + else: + file_names = 
get_file_names(name, split_folder, split_names=split_names) output_folder = os.path.join(OUTPUT_ROOT, name) os.makedirs(output_folder, exist_ok=True) output_key = f"predictions/az/v{version}" for fname in tqdm(file_names): + input_path = os.path.join(INPUT_ROOT, name, fname) + print(f"segmenting {input_path}") + output_path = os.path.join(output_folder, fname) if os.path.exists(output_path): with h5py.File(output_path, "r") as f: if output_key in f: + print(f"skipping, because {output_key} already exists in {output_path}") continue - input_path = os.path.join(INPUT_ROOT, name, fname) with h5py.File(input_path, "r") as f: raw = f["raw"][:] @@ -34,10 +41,12 @@ def run_prediction(model, name, split_folder, version, split_names): def get_model(version): - assert version in (3, 4, 5) + assert version in (3, 4, 5, 7) split_folder = get_split_folder(version) if version == 3: model_path = os.path.join(split_folder, "checkpoints", "3D-AZ-model-TEM_STEM_ChemFix_wichmann-v3") + elif version == 7: + model_path = "/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/models/ConstantinAZ/checkpoints/v7/" else: model_path = os.path.join(split_folder, "checkpoints", f"v{version}") model = load_model(model_path) @@ -49,12 +58,22 @@ def main(): parser.add_argument("--version", "-v", type=int) parser.add_argument("--names", nargs="+", default=ALL_NAMES) parser.add_argument("--splits", nargs="+", default=["test"]) + parser.add_argument("--model_path", default=None) + parser.add_argument("--input", "-i", default=None) + args = parser.parse_args() - model = get_model(args.version) + if args.model_path: + model = load_model(args.model_path) + else: + model = get_model(args.version) + split_folder = get_split_folder(args.version) + for name in args.names: - run_prediction(model, name, split_folder, args.version, args.splits, args.input) + run_prediction(model, name, split_folder, args.version, args.splits, args.input) + + print("Finished segmenting!") if __name__ == "__main__": diff --git a/scripts/cooper/revision/common.py b/scripts/cooper/revision/common.py index 7eb2fad..603a73d 100644 --- a/scripts/cooper/revision/common.py +++ b/scripts/cooper/revision/common.py @@ -5,13 +5,13 @@ # The root folder which contains the new AZ training data. INPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data" # The output folder for AZ predictions. -OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_prediction_new_copy" +OUTPUT_ROOT = "/mnt/ceph-hdd/cold/nim00007/AZ_prediction_new" # The names of all datasets for which to run prediction / evaluation. # This excludes 'endbulb_of_held_cropped', which is a duplicate of 'endbulb_of_held', # which we don't evaluate on because of this. ALL_NAMES = [ - "chemical_fixation", "endbulb_of_held", "stem", "stem_cropped", "tem" + "chemical_fixation", "endbulb_of_held", "stem", "tem" ] # The translation of new dataset names to old dataset names. 
@@ -61,7 +61,7 @@ def get_file_names(name, split_folder, split_names): def get_split_folder(version): - assert version in (3, 4, 5, 6) + assert version in (3, 4, 5, 6, 7) if version == 3: split_folder = "splits" elif version == 6: diff --git a/scripts/cooper/revision/remove_az_thin.py b/scripts/cooper/revision/remove_az_thin.py index 0045835..e285a30 100644 --- a/scripts/cooper/revision/remove_az_thin.py +++ b/scripts/cooper/revision/remove_az_thin.py @@ -17,30 +17,28 @@ # Recreate the dataset with the new data f.create_dataset("labels/az_thin", data=gt) ''' -import h5py -import numpy as np import os +import h5py from glob import glob +import numpy as np -folder = "/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped/" - -# List of file names to process -file_names = [ - "36859_H2_SP_01_rec_2Kb1dawbp_crop_cropped_noAZ.h5", - "36859_H2_SP_02_rec_2Kb1dawbp_crop_cropped_noAZ.h5", - "36859_H2_SP_03_rec_2Kb1dawbp_crop_cropped_noAZ.h5", - "36859_H3_SP_05_rec_2kb1dawbp_crop_cropped_noAZ.h5", - "36859_H3_SP_07_rec_2kb1dawbp_crop_cropped_noAZ.h5", - "36859_H3_SP_10_rec_2kb1dawbp_crop_cropped_noAZ.h5" -] +# Collect all file paths +file_paths1 = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/chemical_fixation", "*.h5")) +file_paths2 = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem", "*.h5")) +file_paths3 = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped", "*.h5")) +file_paths4 = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/tem", "*.h5")) -file_paths = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/endbulb_of_held_cropped", "*.h5")) +all_file_paths = file_paths1 + file_paths2 + file_paths3 + file_paths4 -for fname in file_paths: - #file_path = os.path.join(folder, fname) - +for fname in all_file_paths: with h5py.File(fname, "a") as f: - az_merged = f["/labels/az_merged"][:] - f.create_dataset("/labels/az_merged_v6", data=az_merged, compression="lzf") + if "/labels/az_merged_v6" in f: + az_merged = f["/labels/az_merged_v6"][:] # shape (1, 46, 446, 446) + az_merged = np.squeeze(az_merged) # shape (46, 446, 446) + + del f["/labels/az_merged_v6"] # delete old dataset - print(f"Updated file: {fname}") + f.create_dataset("/labels/az_merged_v6", data=az_merged, compression="lzf") + print(f"Updated file: {fname}") + else: + print(f"Dataset not found in: {fname}") From 6525651dacdfad0b3094fa83b84da416d7d7bc5a Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 13 Jun 2025 16:20:18 +0200 Subject: [PATCH 12/29] fix a few things --- run_sbatch_revision.sbatch | 2 +- scripts/cooper/revision/az_prediction.py | 29 +++++++++++----- scripts/cooper/revision/check_prediction.py | 7 ++-- .../revision/evaluation_results/v7.xlsx | Bin 0 -> 7667 bytes scripts/cooper/revision/remove_az_thin.py | 32 ++++++++++++++++-- scripts/cooper/revision/run_az_evaluation.py | 2 +- 6 files changed, 58 insertions(+), 14 deletions(-) create mode 100644 scripts/cooper/revision/evaluation_results/v7.xlsx diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 1e12f90..a8ad364 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -10,5 +10,5 @@ source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/az_prediction.py -v 7 --names 20241019_Tomo-eval_MF_Synapse 20241019_Tomo-eval_PS_Synapse \ +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/az_prediction.py -v 7 --names 20241019_Tomo-eval_MF_Synapse 20241019_Tomo-eval_PS_Synapse 
20241019_Tomo-eval_SC_Synapse \ -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/ground_truth/04Dataset_for_vesicle_eval \ No newline at end of file diff --git a/scripts/cooper/revision/az_prediction.py b/scripts/cooper/revision/az_prediction.py index 2ad8c89..cc9bca4 100644 --- a/scripts/cooper/revision/az_prediction.py +++ b/scripts/cooper/revision/az_prediction.py @@ -10,34 +10,47 @@ from common import get_file_names, get_split_folder, ALL_NAMES, INPUT_ROOT, OUTPUT_ROOT -def run_prediction(model, name, split_folder, version, split_names, input_path): - if input_path: - file_names =glob(os.path.join(input_path, name, "*.h5")) +def run_prediction(model, name, split_folder, version, split_names, in_path): + if in_path: + file_paths = glob(os.path.join(in_path, name, "*.h5")) + file_names = [os.path.basename(path) for path in file_paths] else: file_names = get_file_names(name, split_folder, split_names=split_names) output_folder = os.path.join(OUTPUT_ROOT, name) os.makedirs(output_folder, exist_ok=True) output_key = f"predictions/az/v{version}" + output_key_seg = f"predictions/az/seg_v{version}" for fname in tqdm(file_names): - input_path = os.path.join(INPUT_ROOT, name, fname) + if in_path: + input_path=os.path.join(in_path, name, fname) + else: + input_path = os.path.join(INPUT_ROOT, name, fname) print(f"segmenting {input_path}") output_path = os.path.join(output_folder, fname) if os.path.exists(output_path): with h5py.File(output_path, "r") as f: - if output_key in f: - print(f"skipping, because {output_key} already exists in {output_path}") + if output_key in f and output_key_seg in f: + print(f"skipping, because {output_key} and {output_key_seg} already exists in {output_path}") continue with h5py.File(input_path, "r") as f: raw = f["raw"][:] - _, pred = segment_active_zone(raw, model=model, verbose=False, return_predictions=True) + seg, pred = segment_active_zone(raw, model=model, verbose=False, return_predictions=True) with h5py.File(output_path, "a") as f: - f.create_dataset(output_key, data=pred, compression="lzf") + if output_key in f: + print(f"{output_key} already saved") + else: + f.create_dataset(output_key, data=pred, compression="lzf") + if output_key_seg in f: + print(f"{output_key_seg} already saved") + else: + f.create_dataset(output_key_seg, data=seg, compression="lzf") + def get_model(version): diff --git a/scripts/cooper/revision/check_prediction.py b/scripts/cooper/revision/check_prediction.py index e54e66a..04c1175 100644 --- a/scripts/cooper/revision/check_prediction.py +++ b/scripts/cooper/revision/check_prediction.py @@ -18,13 +18,16 @@ def check_predictions(name, split, version): gt = f["labels/az"][:] if version == 3 else f["labels/az_thin"][:] with h5py.File(seg_path) as f: - seg_key = f"predictions/az/v{version}" - pred = f[seg_key][:] + pred_key = f"predictions/az/v{version}" + seg_key = f"predictions/az/seg_v{version}" + pred = f[pred_key][:] + seg = f[seg_key][:] v = napari.Viewer() v.add_image(raw) v.add_image(pred, blending="additive") v.add_labels(gt) + v.add_labels(seg) v.title = f"{name}/{os.path.basename(seg_path)}" napari.run() diff --git a/scripts/cooper/revision/evaluation_results/v7.xlsx b/scripts/cooper/revision/evaluation_results/v7.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..8a8e52e390f2b631c0d3c6ea6a973c53e755f050 GIT binary patch literal 7667 zcmZ{J1ymecv-P0CT?e=5d_TvpYUwE2nvki$}KWo%S=v`#FuQpsN-J?OKb$z(2~Q_UpB8wWZ|TB*Wk2@w`%;c4ylgZj(phRHK&H~#a85eX@f1mpn1@<0gxmi8dAy> 
zX1C@rq{Z6*TPa*fW{WUfmfND|M#@Vy?%Z z3fxqO`U-0jc;mH?*Nv8BrlKZXZ-CiT(Cz( z!k{h0UXH#iExD10nsiZwhDpu=HmQ*zy~-j4lUcZjX&UeIrBJ!{Za#pOcfW}Roc-Xj z`W79Vaq4fm#JjTC*~a^M!$5$UP@a$SP5p5nR)xBGtFa?sP%Lw* zq!F;~H&Q#}kK1$dBxbNKZlrp~d*2bndROOE zzDMSuptQ9gNJQGYBR5YEFm(4_oSH+)GH1W`$p9|Vbgs#p9vl7;k0ZPv8Ae@noTJPV zQt~WJLBj)x4$Z1-1=Y(#<2MG<=1fx-VeGA1K#aVxu%F7|48yDScR#Z`$IAr+3xg#- zIAw%5RwKPR?97Ly-+F!9E`E~^*HHN2-o=G^a>HcuEQZxplGsy%+Fl_Dc5#tf!6E=V zy29Gml(MJO&uTD(!M2MMJ()V!E)0V&o+!IA!Qb^YGr}{Jn8C$k-0!yu#adNx1&C!a z3ZrwLYf&SYWAbN}L|?Xy)DUhE2e%mADzt%B6=tOQUgMK&snj2 zUobCsoWaX{t(yAEv6<hcUiU!%9s#%F4(y{ZH zww^gTuXcctQ$faxQUvJPN)`t7co%7DOf)j8xt1HN!@u{(RiXF1uECQr$1FQ;XCymZ0%Ae+k5pT z-ZEUUD=CZJmT7E3#Wqj1@6e2%R*;zLdE=D3Cha7Sq`g!c4b<;7jquHdneR4J1>e(b z#de@V4~onC!c_^eqMXzYta+Ujx52BuVSnJJ_!4SIaE^h??x)$=o85lxhO#56ESk~S z(^7^XCDA#sLB*JW7Zy48nIl?4BreW1%v?%smhJEpE|Lu!OYYZ?BI>n~Pj&Px`AtSp zI8#c4)Dywq+;_80Ew*OUwpJ=_TV3nf{U}`+b;Ji8bBOJ$CqCOY_UqBW^h{10dY((S zkZQv2jcUh=aR@cZu!$hn8RV6!rDv2+dv-ZhhV;aG!GchtY>KX}ucIECPuq!nq4>xG z9i8Iu0&V&2D^J(CaWJ_|VvUG4HOvkdcJmAB0vUa;v;F!lPw(8vmGO5ZcP-A%6zg7I ztZiMprb|N93j&4-k-WjkOqtZ^WaI+dT0hcK(20?zGsYhuBZu2mQitYTV|Sn8bg+|t zsFe{90dqw}Ynsoo5Rt?;;Gi*(du|;ar9byegkN)oxkTy;*esAwD8}_m3wkPk(q%cT zTLxiAt!(N}cLIArmTH56u7%vJ4=d&$rCZ3QmCnj?H)xwxloa^ZA~nx)%zpdhJm5vFNX=eNdI0yeBt&gC^!JX1{eUK_fr%4XGt^@cm)H<=xZ8@17#I`_jGZ2j|g+=GRN( zCyBDEFZV73_b$!-3N&b0*XA!9*Ea*9gFgcjU>!faI;ZVh26r73o^&d{y4mj+m9PUF zG`-KP(Zonezq${+omu<*ApD`DLBn`_iINJJ;yaEvdG<7o3$fpq0@nWGH($3eVLpU# z7brA|#KdISImk1Pi*g9S&(a1obwuLi-XP#+wqIm4bSe=vUqmM1mYY>*Y}K2JmKupA z3ud-vcg{vO(Ai+nfqVeR)2oHWle~qLd|KwPBbKr}rDdShXL8LB*Vmc(YXBCjntVLT zzKCc_I0^gPA_hn#>Ii-NcENgGl!fCL{8FFRC;umkcp9T}l^04a(-WBzY&jJNmPHaz!3R^q z*y&J%tF$R&Y|HRgv@Cn*soI}YkcVq29sM;)yAclya@NnUGWAG)ND5GFoF`v;%=L2%PHSe$52WnQMO zrs8uFddwrCKpy54Pp|R}L0EHqj~DY&hsU;%Lv12eyu)uxsu*$99>G4Q>dX+%sqc7} z`m?`At=!0y999uRL@#mN_>R7jCt8eBx+Abjgn@0zwt@91wEEl46>)iCE2)r8ZQ|e$ z6UvzRO1w!SA{tgz^Yv|2MzaO(F|0@Qh+$i&L&-@@GbyAWaSditT9YRqZ$>0_UE7*j zWnkc9H4Y#RAzt-RVJJGVL4V{+0TQm^enS%f$Qy`E?v;=Jxsu_hzUASNI7th;yDfc6 z=10e?H=V^Lowqsl-cN4!KLmF-iEbSkz}9|xpK>{t6(IvO@_m?aTHk#UM#U%SzghKa zN5LE*2}FyN13zPqiQ;=`THARu3#OkBczDJ`bij~?-jeA7;e3tcLIZu)&<47iY9bjT zdg@Qc49a9qql9)kbe>t4ecE+UM4ZEQGFYpc==*_F?rGe{EJ$&*orPkLp6f=q3ILSB!_3L7Xa1Wl;&<8_B6-~KEzQSaPY^*>Q5H3#*T5#6j>d<~6Prk0-0ep4lvt<4j(UcJ-xM8&aRmArr1#K(Ldu zc)4BB8aAF~Fg?ISO;$N9ovTm%V12h`>ORxmOljcxnfYfl@_%fK(B*;zMt}_@@~D}} zE$yr`@mtk_lJ(H^y}Rrm&cA>d!cE{LXKhKQ1eB)MzHfHpfdE!o&>ID43+qz+R(7|z zH=e)8av`r})b^q}a6YYxUy?D&alb*M!Ss3ID+Z%26ij%?TR!f?@s`cYe$;_3fU)}P zD^-XNSkZVcMPCr=^C?=7g!#`fg3!4%D$ubWDzqnaR8 zsNDsd`>{8fS&xy51gc7uljD;*(oq95@r>yT@m}Exx?)B1_I>7)5-Wze}U$9SLBb74{C?Oq0NgVx9*X)yvCmq+$%QW_*Z0fTUWge z_WRiD*V0$|)Vf!j|=Z#2!BOzY(i5ZZnz*FKxpXllTS!sGlgbE}mOeY`iwlfUR?M@V46+042p zK^a+; z@e1<|G63L$4FKT&y>amJ4sf>h`dxEc(_cx>=Oefy9eR|Zp{4y+p&O%8;tok0>GrbiS2Vjk;%x6?I1UT;Wo?%l_S>xn zAI*E=LY!)>@Unc1al3bFC{xO9pk1Au0>hD-qE6CPFLt7B?3E6y&UrMNrr!$(>Tu7$ z4((Py+0Gs3SQB7>&N*OL87YN+H|ON51k4wUjW*aDsE*EVct)5_@w}xXQe@Goc>@## zPj!#G90f>5X24IvteSu1daZtcVThUr?-?n*^*&(e>9!pmKD7?)KgM@|O10wtVu$Um zh|{=93=Zc-A$h^xi8j@4qQW~uXE!12)B*PhjJdRMmWrZv0D1@*f^MGMAh*u)YK1lu$Z zv`{p#&?H(KRdDZjak7%Njh8$^1=Sv=S7BHm^9t*04sPEFA6HJD{iIJRRR+6rou`(o zC-`R6%M`8kl%G{tTebMFe|tEac}pUF)mz9{5jO`1V}jfouF(GN=!Fkb6(6FK&W>;t z|L@TKz2~=bb94F~yh$muzia8>kcx>%Gp^w(>igz7p(+>ziZ5Wseyv$UnOm+6^{guF z>YGulqwvM?Zu5dze92P9i)Tq^UOal7bJJ(52Hb{+^>0zg?Qk0xh>Sn)eiSn7kFWY1 zo*raO(p=E*An(~T{GmGRf-dWb!u!w!&}b)Tl%&Z`Ue;{z;dC+?wd(SzZdnsp>}oyg zlb!N;yJEztQPsJ~Yk>qj?xGSNy%K@-xxG{h*!JUmjbR)0cc4K_Y%asQRMFY5ou9AD 
zV?TEfPgUfRB7LHNp+Xf`A2H4pxl^(q^iuP7&np!FLQ)$pNp$7_ny?)`paqmmQj>xgzmR~E zewJ<02V-mQ$+u!SQm^;ZQ8tYwt7~)PJuoa+?g`7(zI-Kig3lLJL-$tEhMi-_WZQJW z`9h@MIE^i(f^FaQx=FJA#!cqQiJD4cCxq92WdRyoQxZ8S}{Z>(JI_$TKMl346`#cvEbC=uaN#s1}!;Ty> zEBjh(<>u}7YNbi_bt7)3TG`0OFRxpc>NacCDG7UN{QyN0x+uAY&~(FSj3OoQh%P6F-UI<-W(L!wpk(XMV-3cP799{F1w)7mM{3=_5~YW4}n(%OWap zNXR58|9689p_;#bp$LZm|90J<=s$a7e`5iFP^7{CM*qJ~*`M$~J4pY)R}qi@U;0UZ z8u+t<_#XpdG#Gyw_+RUZe_HwTbK*Z%kn|BmA*S)q$Hkw}KlAoKP&V>^W${0Of9An| zfP{!rDB_v_9kPFd|BUm0z^;^kf&Y)F|I^N&q4bZPr-=0NzXX<+8XBVE2LNCq{xXOR KUQG4-?tcI`lG~vG literal 0 HcmV?d00001 diff --git a/scripts/cooper/revision/remove_az_thin.py b/scripts/cooper/revision/remove_az_thin.py index e285a30..d383049 100644 --- a/scripts/cooper/revision/remove_az_thin.py +++ b/scripts/cooper/revision/remove_az_thin.py @@ -17,7 +17,7 @@ # Recreate the dataset with the new data f.create_dataset("labels/az_thin", data=gt) ''' -import os +'''import os import h5py from glob import glob import numpy as np @@ -41,4 +41,32 @@ f.create_dataset("/labels/az_merged_v6", data=az_merged, compression="lzf") print(f"Updated file: {fname}") else: - print(f"Dataset not found in: {fname}") + print(f"Dataset not found in: {fname}")''' + +import os +import h5py + +# List of target folders +base_path = "/mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/ground_truth/04Dataset_for_vesicle_eval" +folders = [ + "20241019_Tomo-eval_MF_Synapse", + "20241019_Tomo-eval_PS_Synapse", + "20241019_Tomo-eval_SC_Synapse" +] + +# Keys to delete +keys_to_delete = ["/predictions/az/seg_v7", "/predictions/az/v7", "/predictions/az", "/predictions"] + +for folder in folders: + folder_path = os.path.join(base_path, folder) + for filename in os.listdir(folder_path): + if filename.endswith(".h5"): + file_path = os.path.join(folder_path, filename) + print(f"Processing: {file_path}") + with h5py.File(file_path, 'a') as h5file: + for key in keys_to_delete: + if key in h5file: + print(f" Deleting key: {key}") + del h5file[key] + else: + print(f" Key not found: {key}") diff --git a/scripts/cooper/revision/run_az_evaluation.py b/scripts/cooper/revision/run_az_evaluation.py index ba06e2b..50ea0d6 100644 --- a/scripts/cooper/revision/run_az_evaluation.py +++ b/scripts/cooper/revision/run_az_evaluation.py @@ -23,7 +23,7 @@ def run_az_evaluation(args): results.append(result) results = pd.concat(results) - output_path = f"./results/v{args.version}.xlsx" + output_path = f"/user/muth9/u12095/synapse-net/scripts/cooper/revision/evaluation_results/v{args.version}.xlsx" results.to_excel(output_path, index=False) From b842bca06b90446c7dce564b1a1f20f45aa9bbe2 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Thu, 19 Jun 2025 17:34:09 +0200 Subject: [PATCH 13/29] new data analysis, not tested --- .../analysis_segmentations.py | 259 ++++++++++++++++++ .../updated_data_analysis/data_analysis.py | 92 +++++++ .../run_data_analysis.py | 79 ++++++ .../updated_data_analysis/store_results.py | 108 ++++++++ 4 files changed, 538 insertions(+) create mode 100644 scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py create mode 100644 scripts/cooper/revision/updated_data_analysis/data_analysis.py create mode 100644 scripts/cooper/revision/updated_data_analysis/run_data_analysis.py create mode 100644 scripts/cooper/revision/updated_data_analysis/store_results.py diff --git a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py new file mode 100644 index 
0000000..fe07e55 --- /dev/null +++ b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py @@ -0,0 +1,259 @@ +import os +import numpy as np +import h5py + +from skimage.measure import regionprops +from skimage.morphology import remove_small_holes +from skimage.segmentation import relabel_sequential + +from synapse_net.inference.vesicles import segment_vesicles +from synapse_net.inference.compartments import segment_compartments +from synapse_net.inference.active_zone import segment_active_zone +from synapse_net.inference.inference import get_model_path + + +def fill_and_filter_vesicles(vesicles: np.ndarray) -> np.ndarray: + """ + Apply a size filter and fill small holes in vesicle segments. + + Args: + vesicles (np.ndarray): 3D volume with vesicle segment labels. + + Returns: + np.ndarray: Processed vesicle segmentation volume. + """ + ids, sizes = np.unique(vesicles, return_counts=True) + ids, sizes = ids[1:], sizes[1:] # remove background + + min_size = 2500 + vesicles_pp = vesicles.copy() + filter_ids = ids[sizes < min_size] + vesicles_pp[np.isin(vesicles, filter_ids)] = 0 + + props = regionprops(vesicles_pp) + for prop in props: + bb = prop.bbox + bb = np.s_[ + bb[0]:bb[3], bb[1]:bb[4], bb[2]:bb[5] + ] + mask = vesicles_pp[bb] == prop.label + mask = remove_small_holes(mask, area_threshold=1000) + vesicles_pp[bb][mask] = prop.label + + return vesicles_pp + + +def SV_pred(raw: np.ndarray, SV_model: str, output_path: str = None, store: bool = False) -> np.ndarray: + """ + Run synaptic vesicle segmentation and optionally store the output. + + Args: + raw (np.ndarray): Raw EM image volume. + SV_model (str): Path to vesicle model. + output_path (str): HDF5 file to store predictions. + store (bool): Whether to store predictions. + + Returns: + np.ndarray: Segmentation result. + """ + seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, verbose=False, return_predictions=True) + + if store and output_path: + pred_key = f"predictions/SV/pred" + seg_key = f"predictions/SV/seg" + + with h5py.File(output_path, "a") as f: + if pred_key in f: + print(f"{pred_key} already saved") + else: + f.create_dataset(pred_key, data=pred, compression="lzf") + if seg_key in f: + print(f"{seg_key} already saved") + else: + f.create_dataset(seg_key, data=seg, compression="lzf") + elif store and not output_path: + print("Output path is missing, not storing SV predictions") + else: + print("Not storing SV predictions") + + return seg + + +def compartment_pred(raw: np.ndarray, compartment_model: str, output_path: str = None, store: bool = False) -> np.ndarray: + """ + Run compartment segmentation and optionally store the output. + + Args: + raw (np.ndarray): Raw EM image volume. + compartment_model (str): Path to compartment model. + output_path (str): HDF5 file to store predictions. + store (bool): Whether to store predictions. + + Returns: + np.ndarray: Segmentation result. 
+ """ + seg, pred = segment_compartments(input_volume=raw, model_path=compartment_model, verbose=False, return_predictions=True) + + if store and output_path: + pred_key = f"predictions/compartment/pred" + seg_key = f"predictions/compartment/seg" + + with h5py.File(output_path, "a") as f: + if pred_key in f: + print(f"{pred_key} already saved") + else: + f.create_dataset(pred_key, data=pred, compression="lzf") + if seg_key in f: + print(f"{seg_key} already saved") + else: + f.create_dataset(seg_key, data=seg, compression="lzf") + elif store and not output_path: + print("Output path is missing, not storing compartment predictions") + else: + print("Not storing compartment predictions") + + return seg + + +def AZ_pred(raw: np.ndarray, AZ_model: str, output_path: str = None, store: bool = False) -> np.ndarray: + """ + Run active zone segmentation and optionally store the output. + + Args: + raw (np.ndarray): Raw EM image volume. + AZ_model (str): Path to AZ model. + output_path (str): HDF5 file to store predictions. + store (bool): Whether to store predictions. + + Returns: + np.ndarray: Segmentation result. + """ + seg, pred = segment_active_zone(raw, model_path=AZ_model, verbose=False, return_predictions=True) + + if store and output_path: + pred_key = f"predictions/az/pred" + seg_key = f"predictions/az/seg" + + with h5py.File(output_path, "a") as f: + if pred_key in f: + print(f"{pred_key} already saved") + else: + f.create_dataset(pred_key, data=pred, compression="lzf") + if seg_key in f: + print(f"{seg_key} already saved") + else: + f.create_dataset(seg_key, data=seg, compression="lzf") + elif store and not output_path: + print("Output path is missing, not storing AZ predictions") + else: + print("Not storing AZ predictions") + + return seg + + +def filter_presynaptic_SV(sv_seg: np.ndarray, compartment_seg: np.ndarray, output_path: str = None, + store: bool = False, input_path: str = None) -> np.ndarray: + """ + Filters synaptic vesicle segmentation to retain only vesicles in the presynaptic region. + + Args: + sv_seg (np.ndarray): Vesicle segmentation. + compartment_seg (np.ndarray): Compartment segmentation. + output_path (str): Optional HDF5 file to store outputs. + store (bool): Whether to store outputs. + input_path (str): Path to input file (for filename-based filtering). + + Returns: + np.ndarray: Filtered presynaptic vesicle segmentation. + """ + # Fill out small holes in vesicles and then apply a size filter. + vesicles_pp = fill_and_filter_vesicles(sv_seg) + + def n_vesicles(mask, ves): + return len(np.unique(ves[mask])) - 1 + + # Find the segment with most vesicles. + props = regionprops(compartment_seg, intensity_image=vesicles_pp, extra_properties=[n_vesicles]) + compartment_ids = [prop.label for prop in props] + vesicle_counts = [prop.n_vesicles for prop in props] + if len(compartment_ids) == 0: + mask = np.ones(compartment_seg.shape, dtype="bool") + else: + mask = (compartment_seg == compartment_ids[np.argmax(vesicle_counts)]).astype("uint8") + + # Filter all vesicles that are not in the mask. 
+ props = regionprops(vesicles_pp, mask) + filter_ids = [prop.label for prop in props if prop.max_intensity == 0] + + name = os.path.basename(input_path) if input_path else "unknown" + print(name) + + no_filter = ["C_M13DKO_080212_CTRL6.7B_crop.h5", "E_M13DKO_080212_DKO1.2_crop.h5", + "G_M13DKO_080212_CTRL6.7B_crop.h5", "A_SNAP25_120812_CTRL2.3_14_crop.h5", + "A_SNAP25_12082_KO2.1_6_crop.h5", "B_SNAP25_120812_CTRL2.3_14_crop.h5", + "B_SNAP25_12082_CTRL2.3_5_crop.h5", "D_SNAP25_120812_CTRL2.3_14_crop.h5", + "G_SNAP25_12.08.12_KO1.1_3_crop.h5"] + # Don't filter for wrong masks (visual inspection) + if name not in no_filter: + vesicles_pp[np.isin(vesicles_pp, filter_ids)] = 0 + + if store and output_path: + seg_presynapse = f"predictions/compartment/presynapse" + seg_presynaptic_SV = f"predictions/SV/presynaptic" + + with h5py.File(output_path, "a") as f: + if seg_presynapse in f: + print(f"{seg_presynapse} already saved") + else: + f.create_dataset(seg_presynapse, data=mask, compression="lzf") + if seg_presynaptic_SV in f: + print(f"{seg_presynaptic_SV} already saved") + else: + f.create_dataset(seg_presynaptic_SV, data=vesicles_pp, compression="lzf") + elif store and not output_path: + print("Output path is missing, not storing presynapse seg and presynaptic SV seg") + else: + print("Not storing presynapse seg and presynaptic SV seg") + + #All non-zero labels are relabeled starting from 1.Labels are sequential (1, 2, 3, ..., n). + #We do this to make the analysis part easier -> can match distances and diameters better + vesicles_pp, _, _ = relabel_sequential(vesicles_pp) + + return vesicles_pp + + +def run_predictions(input_path: str, output_path: str = None, store: bool = False): + """ + Run full inference pipeline: vesicles, compartments, active zone, and presynaptic SV filtering. + + Args: + input_path (str): Path to input HDF5 file with 'raw' dataset. + output_path (str): Path to output HDF5 file to store predictions. + store (bool): Whether to store intermediate and final results. + + Returns: + Tuple[np.ndarray, np.ndarray]: (Filtered vesicle segmentation, AZ segmentation) + """ + with h5py.File(input_path, "r") as f: + raw = f["raw"][:] + + SV_model = get_model_path("vesicles_3d") + compartment_model = get_model_path("compartments") + # TODO upload better AZ model + AZ_model = "/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/models/ConstantinAZ/checkpoints/v7/" + + print("Running SV prediction") + sv_seg = SV_pred(raw, SV_model, output_path, store) + + print("Running compartment prediction") + comp_seg = compartment_pred(raw, compartment_model, output_path, store) + + print("Running AZ prediction") + az_seg = AZ_pred(raw, AZ_model, output_path, store) + + print("Filtering the presynaptic SV") + presyn_SV_seg = filter_presynaptic_SV(sv_seg, comp_seg, output_path, store, input_path) + + print("Done with predictions") + + return presyn_SV_seg, az_seg diff --git a/scripts/cooper/revision/updated_data_analysis/data_analysis.py b/scripts/cooper/revision/updated_data_analysis/data_analysis.py new file mode 100644 index 0000000..eb6e01a --- /dev/null +++ b/scripts/cooper/revision/updated_data_analysis/data_analysis.py @@ -0,0 +1,92 @@ +from synapse_net.distance_measurements import measure_segmentation_to_object_distances +from synapse_net.imod.to_imod import convert_segmentation_to_spheres + + +def calc_AZ_SV_distance(vesicles, az, resolution): + """ + Calculate the distance between synaptic vesicles (SVs) and the active zone (AZ). 
+ + Args: + vesicles (np.ndarray): Segmentation of synaptic vesicles. + az (np.ndarray): Segmentation of the active zone. + resolution (tuple): Voxel resolution in nanometers (z, y, x). + + Returns: + list of dict: Each dict contains 'seg_id' and 'distance', sorted by seg_id. + """ + distances, _, _, seg_ids = measure_segmentation_to_object_distances(vesicles, az, resolution=resolution) + + dist_list = [{"seg_id": sid, "distance": dist} for sid, dist in zip(seg_ids, distances)] + dist_list.sort(key=lambda x: x["seg_id"]) + + return dist_list + + +def sort_by_distances(input_list): + """ + Sort a list of dictionaries by the 'distance' key from smallest to largest. + + Args: + input_list (list of dict): List containing 'distance' as a key in each dictionary. + + Returns: + list of dict: Sorted list by ascending distance. + """ + sorted_list = sorted(input_list, key=lambda x: x["distance"]) + return sorted_list + + +def combine_lists(list1, list2): + """ + Combine two lists of dictionaries based on the shared 'seg_id' key. + + Args: + list1 (list of dict): First list with 'seg_id' key. + list2 (list of dict): Second list with 'seg_id' key. + + Returns: + list of dict: Combined dictionaries matching by 'seg_id'. Overlapping keys are merged. + """ + combined_dict = {} + + for item in list1: + seg_id = item["seg_id"] + combined_dict[seg_id] = item.copy() + + for item in list2: + seg_id = item["seg_id"] + if seg_id in combined_dict: + for key, value in item.items(): + if key != "seg_id": + combined_dict[seg_id][key] = value + else: + combined_dict[seg_id] = item.copy() + + combined_list = list(combined_dict.values()) + return combined_list + + +def calc_SV_diameters(vesicles, resolution): + """ + Calculate diameters of synaptic vesicles from segmentation data. + + Args: + vesicles (np.ndarray): Segmentation of synaptic vesicles. + resolution (tuple): Voxel resolution in nanometers (z, y, x). + + Returns: + list of dict: Each dict contains 'seg_id' and 'diameter', sorted by seg_id. 
+ """ + coordinates, radii = convert_segmentation_to_spheres( + vesicles, resolution=resolution, radius_factor=0.7, estimate_radius_2d=True + ) + + # Assuming the segment ID is the index of the vesicle (same order as radii) + seg_ids = list(range(len(radii))) + radii_nm = radii * resolution[0] + diameters = radii_nm * 2 + + diam_list = [{"seg_id": sid, "diameter": diam} for sid, diam in zip(seg_ids, diameters)] + diam_list.sort(key=lambda x: x["seg_id"]) + + return diam_list diff --git a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py new file mode 100644 index 0000000..eb592cc --- /dev/null +++ b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py @@ -0,0 +1,79 @@ +import argparse +import os + +from .analysis_segmentations import run_predictions +from .data_analysis import calc_AZ_SV_distance, calc_SV_diameters, combine_lists, sort_by_distances +from .store_results import run_store_results + +def run_data_analysis(input_path, output_path, store, resolution, analysis_output): + print("Starting SV, compartment, and AZ predictions") + SV_seg, az_seg = run_predictions(input_path, output_path, store) + + print("Performing automatic data analysis") + print("Calculating per SV distance to AZ") + dist_list = calc_AZ_SV_distance(SV_seg, az_seg, resolution) + + print("Calculating per SV diameters") + diam_list = calc_SV_diameters(SV_seg, resolution) + + print("Combining lists") + combined_list = combine_lists(dist_list, diam_list) + + print("Sorting the combined list by distances") + sorted_list = sort_by_distances(combined_list) + + print(f"Storing lists under {analysis_output}") + run_store_results(input_path, analysis_output, sorted_list) + + +def main(): + parser = argparse.ArgumentParser(description="Run data analysis on HDF5 data.") + parser.add_argument( + "--input_path", "-i", type=str, required=True, + help="Path to an HDF5 file or directory of files." + ) + parser.add_argument( + "--analysis_output", "-s", type=str, required=True, + help="Path to the folder where the analysis results get saved." + ) + parser.add_argument( + "--output_folder", "-o", type=str, default=None, + help="Optional output folder for storing results." + ) + parser.add_argument( + "--store", action="store_true", + help="Store predictions in output files." + ) + parser.add_argument( + "--resolution", type=float, nargs=3, default=(1.554, 1.554, 1.554), + help="Resolution of input image." 
+ ) + + args = parser.parse_args() + + input_path = args.input_path + output_folder = args.output_folder + store = args.store + resolution = args.resolution + analysis_output = args.analysis_output + + if os.path.isfile(input_path): + filename = os.path.basename(input_path) + output_path = os.path.join(output_folder, filename) if output_folder else None + run_data_analysis(input_path, output_path, store, resolution, analysis_output) + + elif os.path.isdir(input_path): + for file in os.listdir(input_path): + if file.endswith(".h5"): + full_input_path = os.path.join(input_path, file) + output_path = os.path.join(output_folder, file) if output_folder else None + run_data_analysis(full_input_path, output_path, store, resolution, analysis_output) + + else: + raise ValueError(f"Invalid input path: {input_path}") + + print("Finished!") + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/store_results.py b/scripts/cooper/revision/updated_data_analysis/store_results.py new file mode 100644 index 0000000..d0f0e3b --- /dev/null +++ b/scripts/cooper/revision/updated_data_analysis/store_results.py @@ -0,0 +1,108 @@ +import os +import pandas as pd + +def get_group(input_path): + """ + Determines whether a tomogram belongs to 'CTRL' or 'KO' group. + + Parameters: + input_path (str): Path to the input .h5 file. + + Returns: + str: 'CTRL' if input_path contains 'CTRL', else 'KO'. + """ + return 'CTRL' if 'CTRL' in input_path else 'KO' + + +def get_tomogram_name(input_path): + """ + Extracts the tomogram name from the input path (without extension). + + Parameters: + input_path (str): Path to the input .h5 file. + + Returns: + str: Tomogram base name without extension. + """ + return os.path.splitext(os.path.basename(input_path))[0] + + +def prepare_output_directory(base_output, group): + """ + Ensures that the group-specific output directory exists. + + Parameters: + base_output (str): Base output directory. + group (str): Group name ('CTRL' or 'KO'). + + Returns: + str: Full path to the group-specific directory. + """ + group_dir = os.path.join(base_output, group) + os.makedirs(group_dir, exist_ok=True) + return group_dir + +def write_or_append_csv(file_path, new_data): + """ + Writes a new DataFrame to CSV, or appends a new column(s) to an existing one. + + Parameters: + file_path (str): Path to the target CSV file. + new_data (pd.DataFrame): DataFrame to write or append. + """ + print(f"saving {file_path}") + if os.path.exists(file_path): + existing = pd.read_csv(file_path, index_col=0) + combined = pd.concat([existing, new_data], axis=1) + else: + combined = new_data + + combined.to_csv(file_path) + +def save_filtered_dataframes(output_dir, tomogram_name, df): + """ + Saves the sorted segment data into multiple filtered CSV files. + + Parameters: + output_dir (str): Directory where CSVs will be saved. + tomogram_name (str): Name of the tomogram (used as column header). + df (pd.DataFrame): DataFrame containing 'seg_id', 'distance', and 'diameter'. 
+ """ + thresholds = { + 'AZ_distances': None, + 'AZ_distances_within_200': 200, + 'AZ_distances_within_100': 100, + 'AZ_distances_within_40': 40, + 'AZ_distances_within_40_with_diameters': 40, + } + + for filename, max_dist in thresholds.items(): + file_path = os.path.join(output_dir, f"{filename}.csv") + filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] + + if filename == 'AZ_distances_within_40_with_diameters': + data = pd.DataFrame({ + f"{tomogram_name}_distance": filtered_df['distance'].values, + f"{tomogram_name}_diameter": filtered_df['diameter'].values + }) + else: + data = pd.DataFrame({tomogram_name: filtered_df['distance'].values}) + + write_or_append_csv(file_path, data) + +def run_store_results(input_path, analysis_output, sorted_list): + """ + Processes a single tomogram's sorted segment data and stores results into categorized CSV files. + + Parameters: + input_path (str): Path to the input .h5 file. + analysis_output (str): Directory where results should be saved. + sorted_list (list of dict): List of dicts with 'seg_id', 'distance', and 'diameter', + sorted by distance ascendingly. + """ + group = get_group(input_path) + tomogram_name = get_tomogram_name(input_path) + group_dir = prepare_output_directory(analysis_output, group) + df = pd.DataFrame(sorted_list) + + save_filtered_dataframes(group_dir, tomogram_name, df) From 0785657a8624dbefbe2ba2fb0626cf2caf60e5cf Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Mon, 23 Jun 2025 13:05:41 +0200 Subject: [PATCH 14/29] fix import --- run_sbatch_revision.sbatch | 14 +++++++------- .../updated_data_analysis/run_data_analysis.py | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index a8ad364..bbfc645 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,14 +1,14 @@ #! 
/bin/bash #SBATCH -c 4 #4 #8 -#SBATCH --mem 256G #120G #32G #64G #256G -#SBATCH -p grete:shared #grete:shared #grete-h100:shared +#SBATCH --mem 120G #120G #32G #64G #256G +#SBATCH -p grete-h100:shared #grete:shared #grete-h100:shared #SBATCH -t 12:00:00 #6:00:00 #48:00:00 -#SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 +#SBATCH -G H100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out -#SBATCH -A nim00007 -#SBATCH --constraint 80gb +#SBATCH -A nim00007 #SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/az_prediction.py -v 7 --names 20241019_Tomo-eval_MF_Synapse 20241019_Tomo-eval_PS_Synapse 20241019_Tomo-eval_SC_Synapse \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/ground_truth/04Dataset_for_vesicle_eval \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/Munc13DKO/ \ + -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis --store \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py index eb592cc..1aabc3e 100644 --- a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py +++ b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py @@ -1,9 +1,9 @@ import argparse import os -from .analysis_segmentations import run_predictions -from .data_analysis import calc_AZ_SV_distance, calc_SV_diameters, combine_lists, sort_by_distances -from .store_results import run_store_results +from analysis_segmentations import run_predictions +from data_analysis import calc_AZ_SV_distance, calc_SV_diameters, combine_lists, sort_by_distances +from store_results import run_store_results def run_data_analysis(input_path, output_path, store, resolution, analysis_output): print("Starting SV, compartment, and AZ predictions") @@ -33,7 +33,7 @@ def main(): help="Path to an HDF5 file or directory of files." ) parser.add_argument( - "--analysis_output", "-s", type=str, required=True, + "--analysis_output", "-s", type=str, default = "./analysis_results/", help="Path to the folder where the analysis results get saved." ) parser.add_argument( From d7b9a1adf3c02626377f21d66fcb4dad81c51a04 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Mon, 23 Jun 2025 14:35:54 +0200 Subject: [PATCH 15/29] exclude SV at boundary for automatic analysis --- .../revision/updated_data_analysis/analysis_segmentations.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py index fe07e55..43c58df 100644 --- a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py +++ b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py @@ -56,7 +56,8 @@ def SV_pred(raw: np.ndarray, SV_model: str, output_path: str = None, store: bool Returns: np.ndarray: Segmentation result. 
""" - seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, verbose=False, return_predictions=True) + #Excluding boundary SV, because they would also not be used in the manual annotation + seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, exclude_boundary=True, verbose=False, return_predictions=True) if store and output_path: pred_key = f"predictions/SV/pred" From 17b9368faf919971a44bc5785002d121699e4328 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Thu, 26 Jun 2025 13:58:20 +0200 Subject: [PATCH 16/29] remove background from analysis --- .../Munc13DKO/CTRL/AZ_distances.csv | 132 ++++++++++++++++++ .../CTRL/AZ_distances_within_100.csv | 44 ++++++ .../CTRL/AZ_distances_within_200.csv | 74 ++++++++++ .../Munc13DKO/CTRL/AZ_distances_within_40.csv | 18 +++ .../AZ_distances_within_40_with_diameters.csv | 18 +++ .../Munc13DKO/KO/AZ_distances.csv | 80 +++++++++++ .../Munc13DKO/KO/AZ_distances_within_100.csv | 21 +++ .../Munc13DKO/KO/AZ_distances_within_200.csv | 42 ++++++ .../Munc13DKO/KO/AZ_distances_within_40.csv | 9 ++ .../AZ_distances_within_40_with_diameters.csv | 9 ++ analysis_results/SNAP25/CTRL/AZ_distances.csv | 94 +++++++++++++ .../SNAP25/CTRL/AZ_distances_within_100.csv | 23 +++ .../SNAP25/CTRL/AZ_distances_within_200.csv | 57 ++++++++ .../SNAP25/CTRL/AZ_distances_within_40.csv | 11 ++ .../AZ_distances_within_40_with_diameters.csv | 11 ++ analysis_results/SNAP25/KO/AZ_distances.csv | 43 ++++++ .../SNAP25/KO/AZ_distances_within_100.csv | 14 ++ .../SNAP25/KO/AZ_distances_within_200.csv | 22 +++ .../SNAP25/KO/AZ_distances_within_40.csv | 6 + .../AZ_distances_within_40_with_diameters.csv | 6 + run_sbatch_revision.sbatch | 6 +- .../updated_data_analysis/data_analysis.py | 16 ++- .../run_data_analysis.py | 21 ++- 23 files changed, 769 insertions(+), 8 deletions(-) create mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances.csv create mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv create mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv create mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv create mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv create mode 100644 analysis_results/Munc13DKO/KO/AZ_distances.csv create mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv create mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv create mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv create mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv create mode 100644 analysis_results/SNAP25/CTRL/AZ_distances.csv create mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv create mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv create mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv create mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv create mode 100644 analysis_results/SNAP25/KO/AZ_distances.csv create mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_100.csv create mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_200.csv create mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_40.csv create mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances.csv new file mode 100644 index 0000000..3ab77e6 --- /dev/null +++ 
b/analysis_results/Munc13DKO/CTRL/AZ_distances.csv @@ -0,0 +1,132 @@ +,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop +0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 +1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 +2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 +3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 +4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 +5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 +6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 +7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 +8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,40.37410432443053,11.419521180855178 +9,45.30649622294799,32.634,37.6824973694685,40.553147793975256,43.788620713605496,11.834911406512512 +10,50.33137077410072,35.53872631369897,41.524089682978,43.65052948132474,46.67177125415319,11.936500492187816 +11,51.44655417032321,37.4575252252468,43.09373973096325,49.46018507850532,48.37410054977767,12.137127996358942 +12,51.96033186960992,43.87126681553657,43.59517051234001,49.97022257304844,64.16721733097049,12.528748540855945 +13,56.03026682071039,46.30815703523517,43.73343654459367,54.412195471236046,71.43330811323244,13.36801346498424 +14,64.09190366965238,54.94219689819474,49.14179483901662,59.560990623057975,75.17249124513567,28.144137009331093 +15,65.36043454567908,56.22388321701019,50.99867186505939,76.0031517241226,75.89186161901684,29.48507690340997 +16,67.55879839073518,59.98520517594318,53.51719701180173,76.54140197304986,76.60447689267254,39.61938162061595 +17,68.55236346035052,61.88745249240754,55.29270888643457,80.86774714309779,79.92154165179748,46.51628463237364 +18,72.65677102651894,66.24124373832363,55.987150025697865,81.32929929613313,80.88267703284802,51.680722672965786 +19,77.77766118880152,66.822,63.23844476266,83.98791363047424,85.2436639522258,56.90696228055053 +20,80.17796699842171,66.84006732492121,64.80384960787438,94.70479409195714,86.0332350897024,59.50014164689022 +21,86.92682074020654,67.11049351628999,65.60015481689048,98.7004110629738,87.2595561987339,63.123778404021415 +22,89.12159516076898,67.34398449750357,67.77293141070409,100.3383516109369,90.10520291303938,63.903231185911096 +23,89.45966478810436,69.27075928557446,74.08848809363032,100.35038475262564,92.34212687609052,64.09190366965238 +24,90.10520291303938,71.77057773210412,79.20828093072089,100.6507457498453,106.87634293893107,65.6553505816548 +25,91.43543728773872,73.12061285301156,83.46875259640579,101.04585517476708,109.3777576292365,66.822 +26,91.54102096874384,73.8763166380133,85.04512578625538,101.59404994388206,115.77039258808792,70.63438579049159 +27,94.01378920137196,74.23502079207628,85.14445273768574,103.71131020288963,117.78663939513685,73.8763166380133 +28,99.12766114460686,75.70069793601641,87.71500656102126,107.39478020835092,119.73870005975512,74.36502968465756 +29,100.42255332344423,76.19355657796794,91.2239033148659,117.48897759364492,120.42246708982506,74.60818574392492 +30,104.69624503295236,76.5887130326656,95.9083679560861,124.1353253348941,122.223853547497,76.77766573685345 
+31,109.76345440992644,78.53467682495422,98.29587435899842,124.32971212063512,122.63807258759412,81.72917261296607 +32,111.82323125361742,83.82962221076748,100.06118995894462,124.42679163267049,122.65776241233164,84.6038475720815 +33,113.86695747230624,85.58294355769729,100.3985028972046,129.55178964414193,129.14104651891282,89.90397016817444 +34,117.12872318948928,86.82953271784895,107.39478020835092,133.0009763122061,129.77528340943817,96.97258841548988 +35,117.75588168749788,90.34609056290152,108.53552528089594,133.79750486462743,130.2860106227833,100.87842199400227 +36,118.2776749348752,92.99362449114456,111.7584249710061,140.83218775549858,134.27495245204892,103.32639370460967 +37,122.61837960110222,93.40819829115644,112.20052187044408,145.5211270022329,137.31592816567203,108.42421818025713 +38,129.07558051002522,94.5899771646024,113.86695747230624,147.49907879034365,144.5888232609976,117.21116467299521 +39,139.69587036129593,98.33271919356244,119.45601133471683,149.94288232523743,146.290757028597,119.64790866538371 +40,139.7045135706073,98.60249396440236,119.45601133471683,156.59200830182874,151.0740583819737,125.02700215553439 +41,141.40546128067334,98.89595441675051,122.2040938103139,160.33334282051254,156.8847470979891,135.6349147085661 +42,144.08689340810983,99.96460577624462,122.49029801580204,163.1107892568729,163.42879477007716,139.42766511707782 +43,150.26464853717255,103.29133016860612,130.2860106227833,165.73247896534946,170.6501451625518,139.55750621159723 +44,150.61779743443336,105.92308405630946,137.50924434378948,173.10890399976543,175.05104719481116,144.59717399728115 +45,152.292,106.15082692094302,140.08428682760962,175.16826966091776,176.0483795551666,144.67230893298137 +46,152.58507514170577,107.98902427561794,140.21351987593778,180.77903149425268,179.53912879369778,161.55621971313886 +47,160.19019138511572,108.08960915832752,148.23400650323126,183.45758172395057,180.57185780735605,163.40662842124857 +48,161.69069427768562,110.11490923576152,153.1143511497208,186.0521592027354,181.37250550179868,164.8265906945842 +49,163.93779358037,112.71589540078187,154.37877446073992,191.4386722060096,183.81921736314732,165.63773928667345 +50,165.47728703359869,118.11422324174173,156.42999657354724,202.5274012572126,185.4671493930933,166.71313158836648 +51,166.36511736539003,118.16532618327597,160.78456325157586,206.55343369694924,186.8228598111056,174.2490713547708 +52,168.38506096444544,118.28788316645117,166.5754628989516,217.8317802433796,194.8024384446971,176.05523809304853 +53,172.494,119.06114781909336,167.9039290308598,220.503783967532,198.92414025452013,178.73702404370505 +54,176.92411139242722,120.3121211848582,170.033450685446,223.19248922846845,204.49128454777724,182.010487851662 +55,185.83786943462303,126.6867464891257,171.34920113032334,224.25033062182985,205.9387161074867,183.33248726835077 +56,191.59628941083383,127.8159095731044,173.07402481019503,228.5172719599112,211.549576487404,196.2290386767463 +57,192.8838922668246,128.616385425808,173.91613754910728,229.8764709838743,224.9921536231875,198.82699746261827 +58,195.2976786958821,128.95391260446502,174.47067426934532,238.14764802533745,227.5907144063659,202.0319534925107 +59,196.87407599783168,132.93741116781237,174.4983548461131,246.2048060375752,237.62484279216264,204.32588628952524 +60,197.8468434016576,139.600759740053,175.91115264246326,247.5790740591781,239.83511878788727,212.733461965907 +61,199.11221954465776,141.83176752758885,180.35105656469,250.3628560629552,250.45929412980465,215.04799789814365 
+62,199.6270117794684,142.54509189726596,183.7009422512579,254.66641729132644,269.1068579728135,216.24623331748467 +63,200.5141802566592,143.89402275285795,184.93252929649773,263.1266647833321,284.08463141113424,218.46277693923057 +64,201.45139210241263,148.09546621014434,188.9372602532385,273.4156902886153,284.81051321887685,234.90600375469336 +65,202.1514495421688,154.59761851982068,194.6536206701535,277.3269636512108,287.4311939856216,244.23521473366614 +66,205.8448836770057,161.89965492242408,195.98891268640685,283.3142742750531,289.44058277304515,244.64520870844785 +67,216.7203296509121,168.57856176868992,199.11828366074272,290.63954783201825,299.76897650690944,247.75945956511933 +68,220.8211581166986,173.77027842528193,204.1899232087617,309.76486621952466,304.7187559045226,253.2638621596062 +69,221.83043824507044,176.53467359133728,204.686046627512,310.407366993762,318.8162706638417,259.871364216991 +70,228.1576851565601,182.61318420092235,204.7155398693514,311.44424229065464,347.4919133332458,263.4385241531694 +71,232.3424891491008,185.00433634917857,208.00973528178915,312.9255964027232,,271.83897733768794 +72,233.2708773679218,198.9423492472128,210.16377813505355,313.3112198501675,,280.69817301151073 +73,238.046222276263,200.24904539098307,211.12678543472404,320.0825070196745,,290.7807660558036 +74,238.17299771384663,203.6214448529428,213.23235788219387,332.3962045992704,,295.1291078290991 +75,239.56813990178244,204.21357546451213,214.55332388942384,360.3772572790908,,315.2590726624692 +76,246.3812969606257,207.98070918236627,218.86036383959524,380.568222772212,, +77,247.76920639175484,219.4774007773921,219.47189919440711,,, +78,252.3037569280331,229.20314715116808,223.86771037378304,,, +79,253.36396174673305,230.1232125710051,230.3329972105604,,, +80,253.8733801011836,230.799083897662,237.7873908011104,,, +81,255.23474296419755,249.71092494322312,238.4769837950824,,, +82,255.31515346332264,253.3067668263128,244.5168508058289,,, +83,255.40026639766845,257.5470787487212,244.69455920391857,,, +84,260.6368874123538,270.74408629552744,245.20721557083104,,, +85,261.1875994070163,,249.5125938304517,,, +86,269.814858256546,,251.0082836083304,,, +87,270.2843390135655,,255.1968939622895,,, +88,270.4986873979244,,256.0377115504667,,, +89,274.6582349175062,,256.2074290726169,,, +90,276.9871500990614,,257.1294814835514,,, +91,282.23395306022275,,261.70947172771565,,, +92,284.22910644056145,,268.468960842776,,, +93,289.0481773891681,,268.95874921630644,,, +94,293.4921443650579,,275.5010161142786,,, +95,294.0962962296534,,276.0045746360012,,, +96,302.8426651712074,,278.44367146695936,,, +97,303.5873336290564,,281.3941732232564,,, +98,304.1595256506033,,282.563183716492,,, +99,306.8904104073635,,294.4491708801368,,, +100,308.69888285512144,,294.4696738409577,,, +101,308.9022109147165,,296.2969055930217,,, +102,309.0975925302558,,296.48837635900674,,, +103,310.1232732575871,,298.2060810647563,,, +104,313.28038744230383,,305.4390854360326,,, +105,314.52668239753524,,305.66828432141926,,, +106,315.30885938076653,,309.1366540350723,,, +107,316.7950112107197,,311.0524224949872,,, +108,328.736202752298,,316.9436240910992,,, +109,333.143677004382,,317.1797371270744,,, +110,336.13131359038834,,325.6177006552316,,, +111,341.3215165675906,,330.18751904940325,,, +112,357.2916030247563,,334.92575761801305,,, +113,,,346.48624663614,,, +114,,,346.61516268045744,,, +115,,,347.3772266628888,,, +116,,,347.8461603410336,,, +117,,,360.7790979034124,,, +118,,,361.1871775631024,,, +119,,,376.9307005644407,,, 
+120,,,379.1441544004074,,, +121,,,384.0392751633614,,, +122,,,385.3857338200261,,, +123,,,386.4745199880583,,, +124,,,392.8547653319226,,, +125,,,413.027942342888,,, +126,,,414.0323806902064,,, +127,,,417.13250608409794,,, +128,,,425.2227893516527,,, +129,,,431.7715661874923,,, +130,,,461.3888546898376,,, diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv new file mode 100644 index 0000000..efca971 --- /dev/null +++ b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv @@ -0,0 +1,44 @@ +,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop +0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 +1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 +2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 +3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 +4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 +5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 +6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 +7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 +8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,40.37410432443053,11.419521180855178 +9,45.30649622294799,32.634,37.6824973694685,40.553147793975256,43.788620713605496,11.834911406512512 +10,50.33137077410072,35.53872631369897,41.524089682978,43.65052948132474,46.67177125415319,11.936500492187816 +11,51.44655417032321,37.4575252252468,43.09373973096325,49.46018507850532,48.37410054977767,12.137127996358942 +12,51.96033186960992,43.87126681553657,43.59517051234001,49.97022257304844,64.16721733097049,12.528748540855945 +13,56.03026682071039,46.30815703523517,43.73343654459367,54.412195471236046,71.43330811323244,13.36801346498424 +14,64.09190366965238,54.94219689819474,49.14179483901662,59.560990623057975,75.17249124513567,28.144137009331093 +15,65.36043454567908,56.22388321701019,50.99867186505939,76.0031517241226,75.89186161901684,29.48507690340997 +16,67.55879839073518,59.98520517594318,53.51719701180173,76.54140197304986,76.60447689267254,39.61938162061595 +17,68.55236346035052,61.88745249240754,55.29270888643457,80.86774714309779,79.92154165179748,46.51628463237364 +18,72.65677102651894,66.24124373832363,55.987150025697865,81.32929929613313,80.88267703284802,51.680722672965786 +19,77.77766118880152,66.822,63.23844476266,83.98791363047424,85.2436639522258,56.90696228055053 +20,80.17796699842171,66.84006732492121,64.80384960787438,94.70479409195714,86.0332350897024,59.50014164689022 +21,86.92682074020654,67.11049351628999,65.60015481689048,98.7004110629738,87.2595561987339,63.123778404021415 +22,89.12159516076898,67.34398449750357,67.77293141070409,,90.10520291303938,63.903231185911096 +23,89.45966478810436,69.27075928557446,74.08848809363032,,92.34212687609052,64.09190366965238 +24,90.10520291303938,71.77057773210412,79.20828093072089,,,65.6553505816548 +25,91.43543728773872,73.12061285301156,83.46875259640579,,,66.822 +26,91.54102096874384,73.8763166380133,85.04512578625538,,,70.63438579049159 +27,94.01378920137196,74.23502079207628,85.14445273768574,,,73.8763166380133 
+28,99.12766114460686,75.70069793601641,87.71500656102126,,,74.36502968465756 +29,,76.19355657796794,91.2239033148659,,,74.60818574392492 +30,,76.5887130326656,95.9083679560861,,,76.77766573685345 +31,,78.53467682495422,98.29587435899842,,,81.72917261296607 +32,,83.82962221076748,,,,84.6038475720815 +33,,85.58294355769729,,,,89.90397016817444 +34,,86.82953271784895,,,,96.97258841548988 +35,,90.34609056290152,,,, +36,,92.99362449114456,,,, +37,,93.40819829115644,,,, +38,,94.5899771646024,,,, +39,,98.33271919356244,,,, +40,,98.60249396440236,,,, +41,,98.89595441675051,,,, +42,,99.96460577624462,,,, diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv new file mode 100644 index 0000000..f068626 --- /dev/null +++ b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv @@ -0,0 +1,74 @@ +,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop +0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 +1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 +2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 +3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 +4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 +5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 +6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 +7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 +8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,40.37410432443053,11.419521180855178 +9,45.30649622294799,32.634,37.6824973694685,40.553147793975256,43.788620713605496,11.834911406512512 +10,50.33137077410072,35.53872631369897,41.524089682978,43.65052948132474,46.67177125415319,11.936500492187816 +11,51.44655417032321,37.4575252252468,43.09373973096325,49.46018507850532,48.37410054977767,12.137127996358942 +12,51.96033186960992,43.87126681553657,43.59517051234001,49.97022257304844,64.16721733097049,12.528748540855945 +13,56.03026682071039,46.30815703523517,43.73343654459367,54.412195471236046,71.43330811323244,13.36801346498424 +14,64.09190366965238,54.94219689819474,49.14179483901662,59.560990623057975,75.17249124513567,28.144137009331093 +15,65.36043454567908,56.22388321701019,50.99867186505939,76.0031517241226,75.89186161901684,29.48507690340997 +16,67.55879839073518,59.98520517594318,53.51719701180173,76.54140197304986,76.60447689267254,39.61938162061595 +17,68.55236346035052,61.88745249240754,55.29270888643457,80.86774714309779,79.92154165179748,46.51628463237364 +18,72.65677102651894,66.24124373832363,55.987150025697865,81.32929929613313,80.88267703284802,51.680722672965786 +19,77.77766118880152,66.822,63.23844476266,83.98791363047424,85.2436639522258,56.90696228055053 +20,80.17796699842171,66.84006732492121,64.80384960787438,94.70479409195714,86.0332350897024,59.50014164689022 +21,86.92682074020654,67.11049351628999,65.60015481689048,98.7004110629738,87.2595561987339,63.123778404021415 +22,89.12159516076898,67.34398449750357,67.77293141070409,100.3383516109369,90.10520291303938,63.903231185911096 +23,89.45966478810436,69.27075928557446,74.08848809363032,100.35038475262564,92.34212687609052,64.09190366965238 
+24,90.10520291303938,71.77057773210412,79.20828093072089,100.6507457498453,106.87634293893107,65.6553505816548 +25,91.43543728773872,73.12061285301156,83.46875259640579,101.04585517476708,109.3777576292365,66.822 +26,91.54102096874384,73.8763166380133,85.04512578625538,101.59404994388206,115.77039258808792,70.63438579049159 +27,94.01378920137196,74.23502079207628,85.14445273768574,103.71131020288963,117.78663939513685,73.8763166380133 +28,99.12766114460686,75.70069793601641,87.71500656102126,107.39478020835092,119.73870005975512,74.36502968465756 +29,100.42255332344423,76.19355657796794,91.2239033148659,117.48897759364492,120.42246708982506,74.60818574392492 +30,104.69624503295236,76.5887130326656,95.9083679560861,124.1353253348941,122.223853547497,76.77766573685345 +31,109.76345440992644,78.53467682495422,98.29587435899842,124.32971212063512,122.63807258759412,81.72917261296607 +32,111.82323125361742,83.82962221076748,100.06118995894462,124.42679163267049,122.65776241233164,84.6038475720815 +33,113.86695747230624,85.58294355769729,100.3985028972046,129.55178964414193,129.14104651891282,89.90397016817444 +34,117.12872318948928,86.82953271784895,107.39478020835092,133.0009763122061,129.77528340943817,96.97258841548988 +35,117.75588168749788,90.34609056290152,108.53552528089594,133.79750486462743,130.2860106227833,100.87842199400227 +36,118.2776749348752,92.99362449114456,111.7584249710061,140.83218775549858,134.27495245204892,103.32639370460967 +37,122.61837960110222,93.40819829115644,112.20052187044408,145.5211270022329,137.31592816567203,108.42421818025713 +38,129.07558051002522,94.5899771646024,113.86695747230624,147.49907879034365,144.5888232609976,117.21116467299521 +39,139.69587036129593,98.33271919356244,119.45601133471683,149.94288232523743,146.290757028597,119.64790866538371 +40,139.7045135706073,98.60249396440236,119.45601133471683,156.59200830182874,151.0740583819737,125.02700215553439 +41,141.40546128067334,98.89595441675051,122.2040938103139,160.33334282051254,156.8847470979891,135.6349147085661 +42,144.08689340810983,99.96460577624462,122.49029801580204,163.1107892568729,163.42879477007716,139.42766511707782 +43,150.26464853717255,103.29133016860612,130.2860106227833,165.73247896534946,170.6501451625518,139.55750621159723 +44,150.61779743443336,105.92308405630946,137.50924434378948,173.10890399976543,175.05104719481116,144.59717399728115 +45,152.292,106.15082692094302,140.08428682760962,175.16826966091776,176.0483795551666,144.67230893298137 +46,152.58507514170577,107.98902427561794,140.21351987593778,180.77903149425268,179.53912879369778,161.55621971313886 +47,160.19019138511572,108.08960915832752,148.23400650323126,183.45758172395057,180.57185780735605,163.40662842124857 +48,161.69069427768562,110.11490923576152,153.1143511497208,186.0521592027354,181.37250550179868,164.8265906945842 +49,163.93779358037,112.71589540078187,154.37877446073992,191.4386722060096,183.81921736314732,165.63773928667345 +50,165.47728703359869,118.11422324174173,156.42999657354724,,185.4671493930933,166.71313158836648 +51,166.36511736539003,118.16532618327597,160.78456325157586,,186.8228598111056,174.2490713547708 +52,168.38506096444544,118.28788316645117,166.5754628989516,,194.8024384446971,176.05523809304853 +53,172.494,119.06114781909336,167.9039290308598,,198.92414025452013,178.73702404370505 +54,176.92411139242722,120.3121211848582,170.033450685446,,,182.010487851662 +55,185.83786943462303,126.6867464891257,171.34920113032334,,,183.33248726835077 
+56,191.59628941083383,127.8159095731044,173.07402481019503,,,196.2290386767463 +57,192.8838922668246,128.616385425808,173.91613754910728,,,198.82699746261827 +58,195.2976786958821,128.95391260446502,174.47067426934532,,, +59,196.87407599783168,132.93741116781237,174.4983548461131,,, +60,197.8468434016576,139.600759740053,175.91115264246326,,, +61,199.11221954465776,141.83176752758885,180.35105656469,,, +62,199.6270117794684,142.54509189726596,183.7009422512579,,, +63,,143.89402275285795,184.93252929649773,,, +64,,148.09546621014434,188.9372602532385,,, +65,,154.59761851982068,194.6536206701535,,, +66,,161.89965492242408,195.98891268640685,,, +67,,168.57856176868992,199.11828366074272,,, +68,,173.77027842528193,,,, +69,,176.53467359133728,,,, +70,,182.61318420092235,,,, +71,,185.00433634917857,,,, +72,,198.9423492472128,,,, diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv new file mode 100644 index 0000000..4c5fd24 --- /dev/null +++ b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv @@ -0,0 +1,18 @@ +,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop +0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 +1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 +2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 +3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 +4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 +5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 +6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 +7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 +8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,,11.419521180855178 +9,,32.634,37.6824973694685,,,11.834911406512512 +10,,35.53872631369897,,,,11.936500492187816 +11,,37.4575252252468,,,,12.137127996358942 +12,,,,,,12.528748540855945 +13,,,,,,13.36801346498424 +14,,,,,,28.144137009331093 +15,,,,,,29.48507690340997 +16,,,,,,39.61938162061595 diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv new file mode 100644 index 0000000..b9a80c0 --- /dev/null +++ b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv @@ -0,0 +1,18 @@ +,A_M13DKO_080212_CTRL4.8_crop_distance,A_M13DKO_080212_CTRL4.8_crop_diameter,B_M13DKO_080212_CTRL4.8_crop_distance,B_M13DKO_080212_CTRL4.8_crop_diameter,F_M13DKO_060212_CTRL7.2_crop_distance,F_M13DKO_060212_CTRL7.2_crop_diameter,F_M13DKO_080212_CTRL6.7B_crop_distance,F_M13DKO_080212_CTRL6.7B_crop_diameter,C_M13DKO_060212_CTRL7.2_crop_distance,C_M13DKO_060212_CTRL7.2_crop_diameter,A_M13DKO_080212_CTRL6.7B_crop_distance,A_M13DKO_080212_CTRL6.7B_crop_diameter +0,0.0,45.10659722111719,0.0,43.031607685816496,1.554,59.141287909587376,2.6916069549620354,45.48512075696828,0.0,47.09024278669811,6.216,47.81289742868499 +1,1.554,50.26043018499918,12.624755680804284,48.75977939805501,1.554,49.91813042345726,3.4748496370346733,39.42752527305825,0.0,46.96871937691663,6.407306142209845,45.48512075696828 
+2,2.19768787592779,47.81289742868499,18.32138979444518,47.81289742868499,1.554,56.26909744547573,4.39537575185558,49.91813042345726,0.0,41.819205728975206,7.7700000000000005,52.48540900511086 +3,2.6916069549620354,43.95147120000001,21.64471538274412,45.10659722111719,1.554,47.81289742868499,7.77,49.91813042345726,0.0,46.96871937691663,7.923876324123188,49.91813042345726 +4,3.4748496370346733,48.75977939805501,21.756,48.75977939805501,3.108,50.26043018499918,8.927050352720096,48.40686923918031,16.519246592989646,41.13020956412944,9.57949936061379,50.8258071396247 +5,19.47156418986416,43.29642007870028,25.67629389144781,45.10659722111719,4.914179483901662,47.81289742868499,18.648,49.91813042345726,26.04978687052929,44.46856512026162,9.950455064970647,45.48512075696828 +6,26.463666412649623,54.51509065097238,27.885533023415565,44.08131334757461,6.59306362778337,52.81107133775702,31.504451685436457,52.5941871715535,26.234539447072443,51.60693007739111,11.313290767941925,49.91813042345726 +7,31.15760311705636,48.75977939805501,29.526,46.96871937691663,12.528748540855943,48.75977939805501,31.58101182672905,46.96871937691663,27.005640744111226,48.40686923918032,11.419521180855176,50.26043018499918 +8,34.748496370346736,45.98497225710271,29.93215388173728,48.40686923918031,28.94781255984638,46.96871937691663,32.074198602615155,56.67391631552351,,,11.419521180855178,57.37547691442199 +9,,,32.634,47.81289742868499,37.6824973694685,49.91813042345726,,,,,11.834911406512512,47.45294577914065 +10,,,35.53872631369897,50.26043018499918,,,,,,,11.936500492187816,49.91813042345726 +11,,,37.4575252252468,48.75977939805501,,,,,,,12.137127996358942,49.91813042345726 +12,,,,,,,,,,,12.528748540855945,46.96871937691663 +13,,,,,,,,,,,13.36801346498424,45.35929722199581 +14,,,,,,,,,,,28.144137009331093,52.48540900511086 +15,,,,,,,,,,,29.48507690340997,50.26043018499918 +16,,,,,,,,,,,39.61938162061595,50.26043018499918 diff --git a/analysis_results/Munc13DKO/KO/AZ_distances.csv b/analysis_results/Munc13DKO/KO/AZ_distances.csv new file mode 100644 index 0000000..3faf66e --- /dev/null +++ b/analysis_results/Munc13DKO/KO/AZ_distances.csv @@ -0,0 +1,80 @@ +,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop +0,9.061299244589597,58.57982277200914,8.368546110287019,14.660406679215964,6.216 +1,9.57949936061379,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 +2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 +3,13.277381820223445,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.847752648246376,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 +5,16.519246592989646,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 +6,27.885533023415565,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 +7,31.42770503870749,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 +8,46.98120075093867,180.99263946359807,59.15414849357567,52.261553708247135,113.0047601121298 +9,70.2744848433626,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 +10,73.71269225852492,,76.73047118322681,68.51712708513107,127.74976449293362 +11,77.73107378648515,,76.90337628479,71.6527139472051,128.95391260446502 +12,79.42141142034684,,83.90160987728423,72.92218477253682,156.39911816886948 +13,81.31445140932823,,85.6534576535005,82.00939616897567,169.4430179263814 
+14,90.1989578875499,,86.73213556692814,84.84612304637143,173.3040977357431 +15,95.02300207844416,,86.746056140899,85.3569069964464,176.50731235844026 +16,106.9779740133454,,93.69215367361346,87.57724142721099,178.7032433505335 +17,107.89953677379714,,93.92384227660196,106.6388270940749,178.89908258009598 +18,109.1124918604648,,97.76623920352056,108.4019430453163,192.01806552509584 +19,116.46709051058156,,99.31020563869556,109.77445440538524,193.80816149997398 +20,118.14488765917888,,101.29648603974375,116.55,219.35633407768285 +21,131.89789560110503,,107.1696808617064,122.60853192172232,220.23530112132343 +22,132.06257362326392,,110.20259780967054,135.7061141437629,221.63984585809473 +23,134.55342898640674,,113.0047601121298,139.92042028238768,235.84989960566026 +24,139.59211010655295,,116.85005376121998,144.00306834231,237.52319379799525 +25,140.95216895103104,,121.54028620996417,149.5477752291889,243.76014477350475 +26,141.58466624603102,,123.3547150132495,150.03143682575327,250.86874232554362 +27,145.18884862137313,,134.27495245204892,150.48947553898913,258.74916653778814 +28,172.9344377040039,,138.48049666288753,151.15396215779458,262.0230178438528 +29,179.2766494666832,,139.60075974005298,157.6295363312346,276.1445319610729 +30,180.6988634053906,,143.26329287015568,163.43618288494136,278.58673767428337 +31,181.2326474893528,,146.6123027443468,164.78995849262176, +32,187.0618422875173,,147.63,173.67992329569933, +33,195.2976786958821,,148.77876211341456,181.5322112904484, +34,202.58701197263363,,150.34498249027138,187.5388936300948, +35,209.62302243789924,,160.7019343505236,189.2118645857072, +36,212.23908251780583,,167.1976902232803,192.47656053660145, +37,213.3116201054223,,174.95445207253232,200.91723633377003, +38,214.29992433969733,,186.5512114031962,201.73290369198577, +39,219.1635901330328,,189.8553033654841,204.6919456158449, +40,222.98681674036249,,195.0440251532972,204.9277659078925, +41,227.38902013070023,,202.31862544017048,205.0396855342887, +42,234.4738314951159,,205.65709038105155,208.5025607324764, +43,238.674367128102,,227.4155690712489,211.22398798431965, +44,254.3343075088377,,228.4009970030779,211.469653822954, +45,254.73278728895505,,233.0015592136671,218.0810759878078, +46,258.85180960541885,,233.44162965503816,223.9324244945336, +47,265.20183555925854,,233.99958415347663,225.63523464210996, +48,270.91796173011494,,240.4736545570013,230.50068745233716, +49,276.5465147203993,,249.27051305760176,233.46231841562783, +50,282.9645824056432,,255.0265039402768,235.08070474626368, +51,298.5945389989576,,256.4476699601695,240.4887175815115, +52,301.11532032761136,,274.8340274565724,240.5991509461328, +53,305.5537063692732,,281.5657602834549,245.60083845133752, +54,324.1161225795471,,284.75115384489663,247.9104923152709, +55,,,288.31202478564785,251.36880701471296, +56,,,300.0065321288855,252.5046776438013, +57,,,316.1120199612789,253.09217075998225, +58,,,331.3702317951931,256.41470904766754, +59,,,342.64202348223426,258.5857867787787, +60,,,350.75676833954327,259.2200565156948, +61,,,354.018799738093,260.3355863880311, +62,,,,260.92858560916625, +63,,,,275.9170652569355, +64,,,,276.4897482656455, +65,,,,279.1150124231945, +66,,,,281.63865312843694, +67,,,,284.891052586774, +68,,,,285.49648830064444, +69,,,,288.43763817504816, +70,,,,289.2026983691543, +71,,,,295.5256962431524, +72,,,,297.0092028271178, +73,,,,298.55005383352386, +74,,,,298.6956170083518, +75,,,,303.7940828456012, +76,,,,306.2957258663595, +77,,,,313.4036988741518, +78,,,,314.13101978633057, diff --git 
a/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv new file mode 100644 index 0000000..2e84d58 --- /dev/null +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv @@ -0,0 +1,21 @@ +,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop +0,9.061299244589597,58.57982277200914,8.368546110287019,14.660406679215964,6.216 +1,9.57949936061379,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 +2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 +3,13.277381820223445,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.847752648246376,,30.885139144902684,27.13944325147441,63.78975983651295 +5,16.519246592989646,,41.26155329116926,31.46610176046597,78.41158164455044 +6,27.885533023415565,,47.543257944739125,35.742000000000004,82.08298020905431 +7,31.42770503870749,,49.16635959678121,41.26155329116925,93.48572620459232 +8,46.98120075093867,,59.15414849357567,52.261553708247135, +9,70.2744848433626,,75.63686934293354,53.607369269532334, +10,73.71269225852492,,76.73047118322681,68.51712708513107, +11,77.73107378648515,,76.90337628479,71.6527139472051, +12,79.42141142034684,,83.90160987728423,72.92218477253682, +13,81.31445140932823,,85.6534576535005,82.00939616897567, +14,90.1989578875499,,86.73213556692814,84.84612304637143, +15,95.02300207844416,,86.746056140899,85.3569069964464, +16,,,93.69215367361346,87.57724142721099, +17,,,93.92384227660196,, +18,,,97.76623920352056,, +19,,,99.31020563869556,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv new file mode 100644 index 0000000..c4f1d87 --- /dev/null +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv @@ -0,0 +1,42 @@ +,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop +0,9.061299244589597,58.57982277200914,8.368546110287019,14.660406679215964,6.216 +1,9.57949936061379,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 +2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 +3,13.277381820223445,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.847752648246376,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 +5,16.519246592989646,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 +6,27.885533023415565,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 +7,31.42770503870749,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 +8,46.98120075093867,180.99263946359807,59.15414849357567,52.261553708247135,113.0047601121298 +9,70.2744848433626,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 +10,73.71269225852492,,76.73047118322681,68.51712708513107,127.74976449293362 +11,77.73107378648515,,76.90337628479,71.6527139472051,128.95391260446502 +12,79.42141142034684,,83.90160987728423,72.92218477253682,156.39911816886948 +13,81.31445140932823,,85.6534576535005,82.00939616897567,169.4430179263814 +14,90.1989578875499,,86.73213556692814,84.84612304637143,173.3040977357431 +15,95.02300207844416,,86.746056140899,85.3569069964464,176.50731235844026 +16,106.9779740133454,,93.69215367361346,87.57724142721099,178.7032433505335 
+17,107.89953677379714,,93.92384227660196,106.6388270940749,178.89908258009598 +18,109.1124918604648,,97.76623920352056,108.4019430453163,192.01806552509584 +19,116.46709051058156,,99.31020563869556,109.77445440538524,193.80816149997398 +20,118.14488765917888,,101.29648603974375,116.55, +21,131.89789560110503,,107.1696808617064,122.60853192172232, +22,132.06257362326392,,110.20259780967054,135.7061141437629, +23,134.55342898640674,,113.0047601121298,139.92042028238768, +24,139.59211010655295,,116.85005376121998,144.00306834231, +25,140.95216895103104,,121.54028620996417,149.5477752291889, +26,141.58466624603102,,123.3547150132495,150.03143682575327, +27,145.18884862137313,,134.27495245204892,150.48947553898913, +28,172.9344377040039,,138.48049666288753,151.15396215779458, +29,179.2766494666832,,139.60075974005298,157.6295363312346, +30,180.6988634053906,,143.26329287015568,163.43618288494136, +31,181.2326474893528,,146.6123027443468,164.78995849262176, +32,187.0618422875173,,147.63,173.67992329569933, +33,195.2976786958821,,148.77876211341456,181.5322112904484, +34,,,150.34498249027138,187.5388936300948, +35,,,160.7019343505236,189.2118645857072, +36,,,167.1976902232803,192.47656053660145, +37,,,174.95445207253232,, +38,,,186.5512114031962,, +39,,,189.8553033654841,, +40,,,195.0440251532972,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv new file mode 100644 index 0000000..c414590 --- /dev/null +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv @@ -0,0 +1,9 @@ +,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop +0,9.061299244589597,,8.368546110287019,14.660406679215964,6.216 +1,9.57949936061379,,9.828358967803323,14.660406679215964,7.121322629961376 +2,10.19025946676531,,17.65004713874725,20.14214209065163,8.368546110287019 +3,13.277381820223445,,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.847752648246376,,30.885139144902684,27.13944325147441, +5,16.519246592989646,,,31.46610176046597, +6,27.885533023415565,,,35.742000000000004, +7,31.42770503870749,,,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv new file mode 100644 index 0000000..0d837b5 --- /dev/null +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv @@ -0,0 +1,9 @@ +,A_M13DKO_080212_DKO1.2_crop_distance,A_M13DKO_080212_DKO1.2_crop_diameter,G_M13DKO_060212_DKO1.1_crop_distance,G_M13DKO_060212_DKO1.1_crop_diameter,C_M13DKO_080212_DKO1.2_crop_distance,C_M13DKO_080212_DKO1.2_crop_diameter,E_M13DKO_080212_DKO1.2_crop_distance,E_M13DKO_080212_DKO1.2_crop_diameter,H_M13DKO_080212_DKO1.2_crop_distance,H_M13DKO_080212_DKO1.2_crop_diameter +0,9.061299244589597,59.04457273790773,,,8.368546110287019,47.81289742868499,14.660406679215964,54.5150906509724,6.216,47.81289742868499 +1,9.57949936061379,55.036778046111,,,9.828358967803323,,14.660406679215964,43.95147120000001,7.121322629961376,52.5941871715535 +2,10.19025946676531,51.71755599228722,,,17.65004713874725,50.71323599999999,20.14214209065163,57.37547691442199,8.368546110287019,44.081313347574614 +3,13.277381820223445,53.45644442588228,,,22.57315387800296,48.75977939805501,24.57089741950831,50.26043018499918,11.419521180855176,47.81289742868499 
+4,15.847752648246376,46.96871937691663,,,30.885139144902684,40.71121517937428,27.13944325147441,59.04457273790773,, +5,16.519246592989646,52.81107133775702,,,,,31.46610176046597,46.96871937691663,, +6,27.885533023415565,55.45059423403328,,,,,35.742000000000004,54.19966799968288,, +7,31.42770503870749,,,,,,,,, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances.csv b/analysis_results/SNAP25/CTRL/AZ_distances.csv new file mode 100644 index 0000000..73b6a8b --- /dev/null +++ b/analysis_results/SNAP25/CTRL/AZ_distances.csv @@ -0,0 +1,94 @@ +,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop +0,0.0,0.0,0.0,0.0,1.554 +1,0.0,0.0,1.554,0.0,2.19768787592779 +2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 +3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 +4,60.62591980333164,0.0,5.154034924212291,0.0,4.914179483901662 +5,63.7329484019059,1.554,32.14940198510697,0.0,5.154034924212291 +6,69.4274609646644,18.648000000000003,50.13908212163442,0.0,5.814535579046705 +7,71.77057773210413,22.679883333033263,50.73756454541349,2.19768787592779,6.407306142209845 +8,77.8397343520647,41.29080643436261,53.47205385993696,23.5162872069551,14.411203142000323 +9,92.11956808409384,53.13225765954238,81.43315870086337,39.558381766700215,45.519203947345126 +10,98.38182417499688,61.965445532167365,91.4750454495651,48.24913495597615,47.46700567762833 +11,100.48265420459394,63.6191733363457,92.38134629891468,49.97022257304844,55.29270888643457 +12,102.3518715412669,66.09525734876898,93.04554723359952,52.23844446382377,57.35081160716037 +13,110.07103875225307,68.05739407294405,94.57721112403348,56.073350461694375,58.57982277200914 +14,112.16823240115714,86.38339215381623,105.43177105597724,58.53858389131053,60.92393529640054 +15,121.83796063624835,87.54966238655635,112.27582785265938,63.10464708719953,68.53474753728943 +16,128.01413975026352,96.36053144311732,113.86695747230624,67.2542762060525,81.92100807485221 +17,129.32790965603675,102.86963551991424,113.90936602404564,69.25332610062856,94.51335505630936 +18,134.44570004280538,106.3667309829535,116.73633105421808,72.25682245988956,95.66886452759853 +19,138.3409169407229,115.10095210726972,125.9123645239021,84.46100795041463,100.09738500080809 +20,140.69494110308304,115.4779899028382,133.39983509734938,85.73799801721522,105.34011116379173 +21,149.41853439249095,115.96838881350384,137.68475078962086,93.89812732956926,105.37449296675169 +22,157.3305088023299,126.5246148541856,143.28014836675737,103.5131986366956,107.59696698327514 +23,168.60004814945933,128.86961368763392,145.81953017343045,105.672,115.32104063006022 +24,170.98237656553962,145.33015976045715,149.0139343148821,106.13945138354542,116.14525722559661 +25,180.02940769774253,156.99246063426105,158.3631985658284,125.25856957509934,125.48970968171056 +26,180.97262442701103,157.66017375355136,160.49141427503218,130.66543582753627,125.61473298940695 +27,191.72228987783348,161.2045628262426,167.32763102368958,130.88702802035044,136.4160646551571 +28,195.1739864428659,163.05896221919235,168.88627201759178,138.17498288764142,137.83375556082044 +29,200.0439277658785,172.86460188251382,169.69224609274283,142.6297738342174,141.65287517025556 +30,205.9621675356909,199.57256722305297,178.50042494067068,142.7905311566562,148.80310750787433 +31,206.21995723013816,203.4375344129004,184.43564399540563,147.65453480337135,150.30482088076883 
+32,206.3955383238698,215.81026383376675,186.92624107920216,147.95679619402418,153.9401530075893 +33,211.1496606485552,242.4638428797168,187.96335036384087,151.696191659514,158.06555898107595 +34,224.77201257274,252.47120196172872,191.84820759131424,151.89505411302898,158.60699849628324 +35,225.9507449777495,271.10063361047315,192.7649150960828,153.1695431213399,161.33933049321857 +36,229.76088387713,272.81885205388573,195.84716190948487,161.23452093146804,162.8514891304344 +37,247.57419695113623,378.595990818709,203.29503787353,162.05619858555244,175.78068784710112 +38,247.5985815306704,,204.3022470361009,163.2883570742262,186.6482740772065 +39,249.92842426582857,,208.7514292645681,172.6269487652493,188.16238592237292 +40,,,218.5677659491445,173.36679190663938,190.50924535045536 +41,,,223.48983786293283,173.8119649506328,196.1305610148505 +42,,,223.60866660306348,176.54151323697212,196.16134059492967 +43,,,225.0994613765213,177.71401590195413,196.63473771437233 +44,,,225.9774628939797,178.62890333873742,205.7568769105908 +45,,,232.9393646509752,179.17559349420333,210.09482299190526 +46,,,237.4825220179372,181.73829021975527,215.91654262700672 +47,,,241.41578810011572,184.46182922219975,224.17493606556465 +48,,,244.50203594244363,186.38284969384927,231.31121653737418 +49,,,255.9245040085064,187.73194747831283,232.68004169674717 +50,,,269.07544777626964,187.92480300375468,243.55201027295996 +51,,,269.1741531945443,192.04950421180476,243.7452839174535 +52,,,269.38490269501,192.558095815263,243.89385173062485 +53,,,273.975973450228,193.1528857770445,248.74196034445012 +54,,,275.7069294232556,198.182223784072,251.18139942280757 +55,,,282.06277248158784,198.79663060524945,265.5612771998207 +56,,,291.9411419926969,202.8669477071117,268.1719569231652 +57,,,292.17266416282,202.90265638477976,275.8514150045274 +58,,,301.66418353526825,205.5278829550872,285.13676888819515 +59,,,306.18532629112065,212.8015620995297,292.6269065619223 +60,,,311.43261118257993,222.7484323985244,297.8211707787074 +61,,,313.7310095097391,226.76688758282148,304.27859688778636 +62,,,319.6597264404761,227.50581229498292,319.2174737322505 +63,,,323.6239978802561,229.47692323194505,322.85074630856906 +64,,,334.99064405442726,234.9111438735932,330.90349238410886 +65,,,340.9498665375894,236.5196168185633,339.72942231723175 +66,,,348.79944547547666,238.5579813294873,356.235646447685 +67,,,369.16250436359326,239.97101269945088,363.9347234546327 +68,,,,245.79250104915732,384.1933051889374 +69,,,,247.20325519701396, +70,,,,248.294966851928, +71,,,,256.31580087852564, +72,,,,259.20142367664573, +73,,,,269.6850481061195, +74,,,,270.93578879874843, +75,,,,271.6301318852531, +76,,,,276.6338249816895, +77,,,,281.4456603253992, +78,,,,281.5014273924735, +79,,,,293.41808109930787, +80,,,,297.90629902705984, +81,,,,298.2749073522612, +82,,,,301.0230773013923, +83,,,,301.6721887413555, +84,,,,323.832869394075, +85,,,,325.50643542025404, +86,,,,329.1986792440092, +87,,,,339.4734252279551, +88,,,,339.9426059616535, +89,,,,345.2189227142684, +90,,,,357.9769790642968, +91,,,,362.4519697835839, +92,,,,377.6635656136292, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv new file mode 100644 index 0000000..970efec --- /dev/null +++ b/analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv @@ -0,0 +1,23 @@ +,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop 
+0,0.0,0.0,0.0,0.0,1.554 +1,0.0,0.0,1.554,0.0,2.19768787592779 +2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 +3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 +4,60.62591980333164,0.0,5.154034924212291,0.0,4.914179483901662 +5,63.7329484019059,1.554,32.14940198510697,0.0,5.154034924212291 +6,69.4274609646644,18.648000000000003,50.13908212163442,0.0,5.814535579046705 +7,71.77057773210413,22.679883333033263,50.73756454541349,2.19768787592779,6.407306142209845 +8,77.8397343520647,41.29080643436261,53.47205385993696,23.5162872069551,14.411203142000323 +9,92.11956808409384,53.13225765954238,81.43315870086337,39.558381766700215,45.519203947345126 +10,98.38182417499688,61.965445532167365,91.4750454495651,48.24913495597615,47.46700567762833 +11,,63.6191733363457,92.38134629891468,49.97022257304844,55.29270888643457 +12,,66.09525734876898,93.04554723359952,52.23844446382377,57.35081160716037 +13,,68.05739407294405,94.57721112403348,56.073350461694375,58.57982277200914 +14,,86.38339215381623,,58.53858389131053,60.92393529640054 +15,,87.54966238655635,,63.10464708719953,68.53474753728943 +16,,96.36053144311732,,67.2542762060525,81.92100807485221 +17,,,,69.25332610062856,94.51335505630936 +18,,,,72.25682245988956,95.66886452759853 +19,,,,84.46100795041463, +20,,,,85.73799801721522, +21,,,,93.89812732956926, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv new file mode 100644 index 0000000..c7cbe00 --- /dev/null +++ b/analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv @@ -0,0 +1,57 @@ +,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop +0,0.0,0.0,0.0,0.0,1.554 +1,0.0,0.0,1.554,0.0,2.19768787592779 +2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 +3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 +4,60.62591980333164,0.0,5.154034924212291,0.0,4.914179483901662 +5,63.7329484019059,1.554,32.14940198510697,0.0,5.154034924212291 +6,69.4274609646644,18.648000000000003,50.13908212163442,0.0,5.814535579046705 +7,71.77057773210413,22.679883333033263,50.73756454541349,2.19768787592779,6.407306142209845 +8,77.8397343520647,41.29080643436261,53.47205385993696,23.5162872069551,14.411203142000323 +9,92.11956808409384,53.13225765954238,81.43315870086337,39.558381766700215,45.519203947345126 +10,98.38182417499688,61.965445532167365,91.4750454495651,48.24913495597615,47.46700567762833 +11,100.48265420459394,63.6191733363457,92.38134629891468,49.97022257304844,55.29270888643457 +12,102.3518715412669,66.09525734876898,93.04554723359952,52.23844446382377,57.35081160716037 +13,110.07103875225307,68.05739407294405,94.57721112403348,56.073350461694375,58.57982277200914 +14,112.16823240115714,86.38339215381623,105.43177105597724,58.53858389131053,60.92393529640054 +15,121.83796063624835,87.54966238655635,112.27582785265938,63.10464708719953,68.53474753728943 +16,128.01413975026352,96.36053144311732,113.86695747230624,67.2542762060525,81.92100807485221 +17,129.32790965603675,102.86963551991424,113.90936602404564,69.25332610062856,94.51335505630936 +18,134.44570004280538,106.3667309829535,116.73633105421808,72.25682245988956,95.66886452759853 +19,138.3409169407229,115.10095210726972,125.9123645239021,84.46100795041463,100.09738500080809 +20,140.69494110308304,115.4779899028382,133.39983509734938,85.73799801721522,105.34011116379173 
+21,149.41853439249095,115.96838881350384,137.68475078962086,93.89812732956926,105.37449296675169 +22,157.3305088023299,126.5246148541856,143.28014836675737,103.5131986366956,107.59696698327514 +23,168.60004814945933,128.86961368763392,145.81953017343045,105.672,115.32104063006022 +24,170.98237656553962,145.33015976045715,149.0139343148821,106.13945138354542,116.14525722559661 +25,180.02940769774253,156.99246063426105,158.3631985658284,125.25856957509934,125.48970968171056 +26,180.97262442701103,157.66017375355136,160.49141427503218,130.66543582753627,125.61473298940695 +27,191.72228987783348,161.2045628262426,167.32763102368958,130.88702802035044,136.4160646551571 +28,195.1739864428659,163.05896221919235,168.88627201759178,138.17498288764142,137.83375556082044 +29,,172.86460188251382,169.69224609274283,142.6297738342174,141.65287517025556 +30,,199.57256722305297,178.50042494067068,142.7905311566562,148.80310750787433 +31,,,184.43564399540563,147.65453480337135,150.30482088076883 +32,,,186.92624107920216,147.95679619402418,153.9401530075893 +33,,,187.96335036384087,151.696191659514,158.06555898107595 +34,,,191.84820759131424,151.89505411302898,158.60699849628324 +35,,,192.7649150960828,153.1695431213399,161.33933049321857 +36,,,195.84716190948487,161.23452093146804,162.8514891304344 +37,,,,162.05619858555244,175.78068784710112 +38,,,,163.2883570742262,186.6482740772065 +39,,,,172.6269487652493,188.16238592237292 +40,,,,173.36679190663938,190.50924535045536 +41,,,,173.8119649506328,196.1305610148505 +42,,,,176.54151323697212,196.16134059492967 +43,,,,177.71401590195413,196.63473771437233 +44,,,,178.62890333873742, +45,,,,179.17559349420333, +46,,,,181.73829021975527, +47,,,,184.46182922219975, +48,,,,186.38284969384927, +49,,,,187.73194747831283, +50,,,,187.92480300375468, +51,,,,192.04950421180476, +52,,,,192.558095815263, +53,,,,193.1528857770445, +54,,,,198.182223784072, +55,,,,198.79663060524945, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv new file mode 100644 index 0000000..db28ca3 --- /dev/null +++ b/analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv @@ -0,0 +1,11 @@ +,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop +0,0.0,0.0,0.0,0.0,1.554 +1,0.0,0.0,1.554,0.0,2.19768787592779 +2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 +3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 +4,,0.0,5.154034924212291,0.0,4.914179483901662 +5,,1.554,32.14940198510697,0.0,5.154034924212291 +6,,18.648000000000003,,0.0,5.814535579046705 +7,,22.679883333033263,,2.19768787592779,6.407306142209845 +8,,,,23.5162872069551,14.411203142000323 +9,,,,39.558381766700215, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv new file mode 100644 index 0000000..920cb73 --- /dev/null +++ b/analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv @@ -0,0 +1,11 @@ +,E_SNAP25_120812_CTRL2.3_14_crop_distance,E_SNAP25_120812_CTRL2.3_14_crop_diameter,C_SNAP25_12082_CTRL2.3_5_crop_distance,C_SNAP25_12082_CTRL2.3_5_crop_diameter,D_SNAP25_12082_CTRL2.3_5_crop_distance,D_SNAP25_12082_CTRL2.3_5_crop_diameter,B_SNAP25_120812_CTRL1.4_4_crop_distance,B_SNAP25_120812_CTRL1.4_4_crop_diameter,B_SNAP25_120812_CTRL1.3_13_crop_distance,B_SNAP25_120812_CTRL1.3_13_crop_diameter 
+0,0.0,56.67391631552351,0.0,27.0470592,0.0,44.08131334757461,0.0,49.91813042345726,1.554,48.05136722436951 +1,0.0,54.51509065097238,0.0,40.71121517937428,1.554,59.8139155734108,0.0,45.98497225710271,2.19768787592779,48.75977939805501 +2,32.3366452805482,45.48512075696828,0.0,48.75977939805501,3.108,42.36233505146845,0.0,43.95147120000001,2.19768787592779,47.45294577914065 +3,38.91211035140603,52.81107133775702,0.0,48.40686923918031,3.4748496370346733,54.5150906509724,0.0,48.40686923918031,4.662000000000001,50.26043018499918 +4,,,0.0,43.29642007870028,5.154034924212291,47.81289742868499,0.0,44.46856512026162,4.914179483901662,49.22639078946891 +5,,,1.554,47.81289742868499,32.14940198510697,42.36233505146845,0.0,49.22639078946891,5.154034924212291,46.96871937691663 +6,,,18.648000000000003,50.26043018499918,,,0.0,50.71323599999999,5.814535579046705,47.81289742868499 +7,,,22.679883333033263,46.96871937691663,,,2.19768787592779,49.91813042345726,6.407306142209845,48.05136722436951 +8,,,,,,,23.5162872069551,45.98497225710271,14.411203142000323,46.96871937691663 +9,,,,,,,39.558381766700215,42.76515554070582,, diff --git a/analysis_results/SNAP25/KO/AZ_distances.csv b/analysis_results/SNAP25/KO/AZ_distances.csv new file mode 100644 index 0000000..0222287 --- /dev/null +++ b/analysis_results/SNAP25/KO/AZ_distances.csv @@ -0,0 +1,43 @@ +,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop +0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 +1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 +2,12.90849348297469,4.662,6.216,9.452612972083434,40.582911576179455 +3,14.660406679215962,6.216,6.407306142209845,11.419521180855176,60.04556260041203 +4,22.46591738612069,13.636294658007358,6.407306142209845,41.14433273246754,75.15642703056074 +5,59.642026021925176,60.76517558602131,77.0602260832396,50.13908212163442,89.87710509356651 +6,63.581203071348064,86.46721885200193,80.34345317448087,51.11691610416262,90.78605789437054 +7,76.22524447976537,118.81750265007256,90.4796399197079,56.35259124476886,92.88969193618848 +8,97.60555117410075,144.34644175732217,90.51966630517371,65.1012870840508,103.85092612008812 +9,102.0801770178716,198.70550218854032,113.29288830284098,76.60447689267252,123.66755042451517 +10,104.9726712435194,201.667053223872,133.30023228786965,91.686,130.47123393300149 +11,107.27103402130513,203.9710098224745,137.6145750711021,96.67328958921384,130.66543582753627 +12,115.18484494064312,255.41444912925343,166.3941463513666,99.94044514609688,152.21269363624046 +13,115.92673350008616,,167.8463882721341,112.46924027484135,156.91552993888146 +14,116.82938513918492,,214.6602249975528,136.35409155577256,161.81013340331936 +15,117.55062463466538,,,165.7251932205843,169.13632057012472 +16,149.57199545369448,,,167.86077531097013,174.35990801786974 +17,170.03345068544604,,,197.68199389929276,192.21918426629537 +18,170.26763221469898,,,199.64515666551995,193.06534764167287 +19,180.93926111267285,,,210.15803273727133,193.95762797064725 +20,,,,215.83823701096156,198.12128776080576 +21,,,,217.7541633585912,205.73927103982848 +22,,,,223.82455723177475,219.64788150127922 +23,,,,240.9652263377436,226.74558791738374 +24,,,,247.78869889484469,227.2456022544771 +25,,,,250.931304910328,244.34889642476392 +26,,,,252.1936610860789,250.5797895202245 +27,,,,258.6044639753924,254.04929642886242 +28,,,,266.5098673445319,267.56793480535 +29,,,,268.03684634766165,271.812325151013 
+30,,,,280.03925114883447,287.284126286156 +31,,,,289.24027207150806,305.54580285777126 +32,,,,299.587663390868,314.7108989342441 +33,,,,300.9669155305945,316.49757610446244 +34,,,,314.46909259893886,320.2710681594577 +35,,,,318.0009551872447,338.03697325588513 +36,,,,320.66292015760104,345.3028563218092 +37,,,,331.09683148589625,360.6920704257303 +38,,,,334.8933096853384,375.15825913339563 +39,,,,343.8907550894615, +40,,,,363.57622585642207, +41,,,,388.3818780427326, diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_100.csv b/analysis_results/SNAP25/KO/AZ_distances_within_100.csv new file mode 100644 index 0000000..3693a70 --- /dev/null +++ b/analysis_results/SNAP25/KO/AZ_distances_within_100.csv @@ -0,0 +1,14 @@ +,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop +0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 +1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 +2,12.90849348297469,4.662,6.216,9.452612972083434,40.582911576179455 +3,14.660406679215962,6.216,6.407306142209845,11.419521180855176,60.04556260041203 +4,22.46591738612069,13.636294658007358,6.407306142209845,41.14433273246754,75.15642703056074 +5,59.642026021925176,60.76517558602131,77.0602260832396,50.13908212163442,89.87710509356651 +6,63.581203071348064,86.46721885200193,80.34345317448087,51.11691610416262,90.78605789437054 +7,76.22524447976537,,90.4796399197079,56.35259124476886,92.88969193618848 +8,97.60555117410075,,90.51966630517371,65.1012870840508, +9,,,,76.60447689267252, +10,,,,91.686, +11,,,,96.67328958921384, +12,,,,99.94044514609688, diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_200.csv b/analysis_results/SNAP25/KO/AZ_distances_within_200.csv new file mode 100644 index 0000000..c7da11c --- /dev/null +++ b/analysis_results/SNAP25/KO/AZ_distances_within_200.csv @@ -0,0 +1,22 @@ +,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop +0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 +1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 +2,12.90849348297469,4.662,6.216,9.452612972083434,40.582911576179455 +3,14.660406679215962,6.216,6.407306142209845,11.419521180855176,60.04556260041203 +4,22.46591738612069,13.636294658007358,6.407306142209845,41.14433273246754,75.15642703056074 +5,59.642026021925176,60.76517558602131,77.0602260832396,50.13908212163442,89.87710509356651 +6,63.581203071348064,86.46721885200193,80.34345317448087,51.11691610416262,90.78605789437054 +7,76.22524447976537,118.81750265007256,90.4796399197079,56.35259124476886,92.88969193618848 +8,97.60555117410075,144.34644175732217,90.51966630517371,65.1012870840508,103.85092612008812 +9,102.0801770178716,198.70550218854032,113.29288830284098,76.60447689267252,123.66755042451517 +10,104.9726712435194,,133.30023228786965,91.686,130.47123393300149 +11,107.27103402130513,,137.6145750711021,96.67328958921384,130.66543582753627 +12,115.18484494064312,,166.3941463513666,99.94044514609688,152.21269363624046 +13,115.92673350008616,,167.8463882721341,112.46924027484135,156.91552993888146 +14,116.82938513918492,,,136.35409155577256,161.81013340331936 +15,117.55062463466538,,,165.7251932205843,169.13632057012472 +16,149.57199545369448,,,167.86077531097013,174.35990801786974 +17,170.03345068544604,,,197.68199389929276,192.21918426629537 +18,170.26763221469898,,,199.64515666551995,193.06534764167287 
+19,180.93926111267285,,,,193.95762797064725 +20,,,,,198.12128776080576 diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_40.csv b/analysis_results/SNAP25/KO/AZ_distances_within_40.csv new file mode 100644 index 0000000..44898fe --- /dev/null +++ b/analysis_results/SNAP25/KO/AZ_distances_within_40.csv @@ -0,0 +1,6 @@ +,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop +0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 +1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 +2,12.90849348297469,4.662,6.216,9.452612972083434, +3,14.660406679215962,6.216,6.407306142209845,11.419521180855176, +4,22.46591738612069,13.636294658007358,6.407306142209845,, diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv b/analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv new file mode 100644 index 0000000..33cb48a --- /dev/null +++ b/analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv @@ -0,0 +1,6 @@ +,D_SNAP25_12.08.12_KO1.1_3_crop_distance,D_SNAP25_12.08.12_KO1.1_3_crop_diameter,C_SNAP25_12082_KO1.2_6_crop_distance,C_SNAP25_12082_KO1.2_6_crop_diameter,E_SNAP25_12082_KO2.1_6_crop_distance,E_SNAP25_12082_KO2.1_6_crop_diameter,A_SNAP25_12082_KO1.2_6_crop_distance,A_SNAP25_12082_KO1.2_6_crop_diameter,B_SNAP25_12082_KO1.2_6_crop_distance,B_SNAP25_12082_KO1.2_6_crop_diameter +0,9.193587982936805,56.26909744547573,1.554,47.81289742868499,3.4748496370346733,54.5150906509724,4.662,49.91813042345726,6.216,62.88804789234515 +1,10.878,59.8139155734108,1.554,50.26043018499918,5.154034924212291,50.26043018499918,7.77,47.81289742868499,10.308069848424582,57.37547691442199 +2,12.90849348297469,62.34039118335815,4.662,46.96871937691663,6.216,58.36307916126143,9.452612972083434,46.96871937691663,, +3,14.660406679215962,49.91813042345726,6.216,57.37547691442199,6.407306142209845,56.26909744547573,11.419521180855176,51.49606651201725,, +4,22.46591738612069,64.59166755828387,13.636294658007358,51.71755599228722,6.407306142209845,57.37547691442199,,,, diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index bbfc645..f9f1615 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,14 +1,14 @@ #! 
/bin/bash #SBATCH -c 4 #4 #8 #SBATCH --mem 120G #120G #32G #64G #256G -#SBATCH -p grete-h100:shared #grete:shared #grete-h100:shared +#SBATCH -p grete:shared #grete:shared #grete-h100:shared #SBATCH -t 12:00:00 #6:00:00 #48:00:00 -#SBATCH -G H100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 +#SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out #SBATCH -A nim00007 #SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/Munc13DKO/ \ + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis --store \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/data_analysis.py b/scripts/cooper/revision/updated_data_analysis/data_analysis.py index eb6e01a..32238fc 100644 --- a/scripts/cooper/revision/updated_data_analysis/data_analysis.py +++ b/scripts/cooper/revision/updated_data_analysis/data_analysis.py @@ -16,7 +16,12 @@ def calc_AZ_SV_distance(vesicles, az, resolution): """ distances, _, _, seg_ids = measure_segmentation_to_object_distances(vesicles, az, resolution=resolution) - dist_list = [{"seg_id": sid, "distance": dist} for sid, dist in zip(seg_ids, distances)] + # Exclude seg_id == 0 + dist_list = [ + {"seg_id": sid, "distance": dist} + for sid, dist in zip(seg_ids, distances) + if sid != 0 + ] dist_list.sort(key=lambda x: x["seg_id"]) return dist_list @@ -86,7 +91,12 @@ def calc_SV_diameters(vesicles, resolution): radii_nm = radii * resolution[0] diameters = radii_nm * 2 - diam_list = [{"seg_id": sid, "diameter": diam} for sid, diam in zip(seg_ids, diameters)] + # Exclude seg_id == 0 + diam_list = [ + {"seg_id": sid, "diameter": diam} + for sid, diam in zip(seg_ids, diameters) + if sid != 0 + ] diam_list.sort(key=lambda x: x["seg_id"]) - return diam_list + return diam_list \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py index 1aabc3e..97a40f5 100644 --- a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py +++ b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py @@ -1,5 +1,6 @@ import argparse import os +from tqdm import tqdm from analysis_segmentations import run_predictions from data_analysis import calc_AZ_SV_distance, calc_SV_diameters, combine_lists, sort_by_distances @@ -18,6 +19,7 @@ def run_data_analysis(input_path, output_path, store, resolution, analysis_outpu print("Combining lists") combined_list = combine_lists(dist_list, diam_list) + print(combined_list) print("Sorting the combined list by distances") sorted_list = sort_by_distances(combined_list) @@ -52,10 +54,25 @@ def main(): args = parser.parse_args() input_path = args.input_path + # Get the last directory name of the input_path + if os.path.isdir(input_path): + input_name = os.path.basename(os.path.normpath(input_path)) + else: + input_name = os.path.basename(os.path.dirname(input_path)) + + #get complete output_folder if there was an input output_folder = args.output_folder + if output_folder: + output_folder = 
os.path.join(output_folder, input_name) + os.makedirs(output_folder, exist_ok=True) + store = args.store resolution = args.resolution + + #get complete output path for the analysis analysis_output = args.analysis_output + analysis_output = os.path.join(analysis_output, input_name) + os.makedirs(analysis_output, exist_ok=True) if os.path.isfile(input_path): filename = os.path.basename(input_path) @@ -63,8 +80,8 @@ def main(): run_data_analysis(input_path, output_path, store, resolution, analysis_output) elif os.path.isdir(input_path): - for file in os.listdir(input_path): - if file.endswith(".h5"): + h5_files = [file for file in os.listdir(input_path) if file.endswith(".h5")] + for file in tqdm(h5_files, desc="Processing files"): full_input_path = os.path.join(input_path, file) output_path = os.path.join(output_folder, file) if output_folder else None run_data_analysis(full_input_path, output_path, store, resolution, analysis_output) From ac4b9d717c0653b278928900360d167ff41c0999 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Thu, 26 Jun 2025 15:14:55 +0200 Subject: [PATCH 17/29] store segID, use existing seg --- .gitignore | 1 + .../Munc13DKO/KO/AZ_distances.csv | 72 +++++------ .../Munc13DKO/KO/AZ_distances_within_100.csv | 16 +-- .../Munc13DKO/KO/AZ_distances_within_200.csv | 32 ++--- .../Munc13DKO/KO/AZ_distances_within_40.csv | 14 +-- .../AZ_distances_within_40_with_diameters.csv | 16 +-- run_sbatch_revision.sbatch | 2 +- .../analysis_segmentations.py | 119 +++++++++++------- .../updated_data_analysis/store_results.py | 39 ++++++ 9 files changed, 189 insertions(+), 122 deletions(-) diff --git a/.gitignore b/.gitignore index 4554ff1..f18dcce 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ scripts/rizzoli/upsample_data.py scripts/cooper/training/find_rec_testset.py synapse-net-models/ scripts/portal/upscale_tomo.py +analysis_results/ diff --git a/analysis_results/Munc13DKO/KO/AZ_distances.csv b/analysis_results/Munc13DKO/KO/AZ_distances.csv index 3faf66e..e022a58 100644 --- a/analysis_results/Munc13DKO/KO/AZ_distances.csv +++ b/analysis_results/Munc13DKO/KO/AZ_distances.csv @@ -1,14 +1,14 @@ ,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,9.061299244589597,58.57982277200914,8.368546110287019,14.660406679215964,6.216 -1,9.57949936061379,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 +0,8.511608543630283,58.57982277200914,8.368546110287019,14.660406679215964,6.216 +1,9.061299244589597,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.277381820223445,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.847752648246376,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 -5,16.519246592989646,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 -6,27.885533023415565,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 -7,31.42770503870749,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 +3,13.186127255566738,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.38381513149453,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 +5,15.847752648246376,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 
+6,27.842198835580497,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 +7,31.31223237011376,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 8,46.98120075093867,180.99263946359807,59.15414849357567,52.261553708247135,113.0047601121298 -9,70.2744848433626,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 +9,70.73687830262232,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 10,73.71269225852492,,76.73047118322681,68.51712708513107,127.74976449293362 11,77.73107378648515,,76.90337628479,71.6527139472051,128.95391260446502 12,79.42141142034684,,83.90160987728423,72.92218477253682,156.39911816886948 @@ -16,44 +16,44 @@ 14,90.1989578875499,,86.73213556692814,84.84612304637143,173.3040977357431 15,95.02300207844416,,86.746056140899,85.3569069964464,176.50731235844026 16,106.9779740133454,,93.69215367361346,87.57724142721099,178.7032433505335 -17,107.89953677379714,,93.92384227660196,106.6388270940749,178.89908258009598 +17,107.5857443530508,,93.92384227660196,106.6388270940749,178.89908258009598 18,109.1124918604648,,97.76623920352056,108.4019430453163,192.01806552509584 -19,116.46709051058156,,99.31020563869556,109.77445440538524,193.80816149997398 +19,116.51891585489456,,99.31020563869556,109.77445440538524,193.80816149997398 20,118.14488765917888,,101.29648603974375,116.55,219.35633407768285 21,131.89789560110503,,107.1696808617064,122.60853192172232,220.23530112132343 -22,132.06257362326392,,110.20259780967054,135.7061141437629,221.63984585809473 +22,131.89789560110503,,110.20259780967054,135.7061141437629,221.63984585809473 23,134.55342898640674,,113.0047601121298,139.92042028238768,235.84989960566026 24,139.59211010655295,,116.85005376121998,144.00306834231,237.52319379799525 25,140.95216895103104,,121.54028620996417,149.5477752291889,243.76014477350475 26,141.58466624603102,,123.3547150132495,150.03143682575327,250.86874232554362 27,145.18884862137313,,134.27495245204892,150.48947553898913,258.74916653778814 -28,172.9344377040039,,138.48049666288753,151.15396215779458,262.0230178438528 -29,179.2766494666832,,139.60075974005298,157.6295363312346,276.1445319610729 +28,172.8855555909747,,138.48049666288753,151.15396215779458,262.0230178438528 +29,179.00704009619287,,139.60075974005298,157.6295363312346,276.1445319610729 30,180.6988634053906,,143.26329287015568,163.43618288494136,278.58673767428337 -31,181.2326474893528,,146.6123027443468,164.78995849262176, -32,187.0618422875173,,147.63,173.67992329569933, -33,195.2976786958821,,148.77876211341456,181.5322112904484, +31,180.97262442701103,,146.6123027443468,164.78995849262176, +32,186.79700555415764,,147.63,173.67992329569933, +33,195.0749761707021,,148.77876211341456,181.5322112904484, 34,202.58701197263363,,150.34498249027138,187.5388936300948, -35,209.62302243789924,,160.7019343505236,189.2118645857072, -36,212.23908251780583,,167.1976902232803,192.47656053660145, -37,213.3116201054223,,174.95445207253232,200.91723633377003, -38,214.29992433969733,,186.5512114031962,201.73290369198577, -39,219.1635901330328,,189.8553033654841,204.6919456158449, -40,222.98681674036249,,195.0440251532972,204.9277659078925, -41,227.38902013070023,,202.31862544017048,205.0396855342887, -42,234.4738314951159,,205.65709038105155,208.5025607324764, -43,238.674367128102,,227.4155690712489,211.22398798431965, -44,254.3343075088377,,228.4009970030779,211.469653822954, -45,254.73278728895505,,233.0015592136671,218.0810759878078, 
-46,258.85180960541885,,233.44162965503816,223.9324244945336, -47,265.20183555925854,,233.99958415347663,225.63523464210996, -48,270.91796173011494,,240.4736545570013,230.50068745233716, -49,276.5465147203993,,249.27051305760176,233.46231841562783, -50,282.9645824056432,,255.0265039402768,235.08070474626368, -51,298.5945389989576,,256.4476699601695,240.4887175815115, -52,301.11532032761136,,274.8340274565724,240.5991509461328, -53,305.5537063692732,,281.5657602834549,245.60083845133752, -54,324.1161225795471,,284.75115384489663,247.9104923152709, +35,212.23908251780583,,160.7019343505236,189.2118645857072, +36,212.5744768122457,,167.1976902232803,192.47656053660145, +37,214.18156861877728,,174.95445207253232,200.91723633377003, +38,219.1635901330328,,186.5512114031962,201.73290369198577, +39,222.53692370480903,,189.8553033654841,204.6919456158449, +40,227.38902013070023,,195.0440251532972,204.9277659078925, +41,238.674367128102,,202.31862544017048,205.0396855342887, +42,254.3343075088377,,205.65709038105155,208.5025607324764, +43,258.10906162318287,,227.4155690712489,211.22398798431965, +44,267.2789649635751,,228.4009970030779,211.469653822954, +45,270.91796173011494,,233.0015592136671,218.0810759878078, +46,274.62306290623155,,233.44162965503816,223.9324244945336, +47,282.65290734043407,,233.99958415347663,225.63523464210996, +48,298.19393360026623,,240.4736545570013,230.50068745233716, +49,355.98812751551145,,249.27051305760176,233.46231841562783, +50,,,255.0265039402768,235.08070474626368, +51,,,256.4476699601695,240.4887175815115, +52,,,274.8340274565724,240.5991509461328, +53,,,281.5657602834549,245.60083845133752, +54,,,284.75115384489663,247.9104923152709, 55,,,288.31202478564785,251.36880701471296, 56,,,300.0065321288855,252.5046776438013, 57,,,316.1120199612789,253.09217075998225, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv index 2e84d58..9e7d061 100644 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv @@ -1,14 +1,14 @@ ,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,9.061299244589597,58.57982277200914,8.368546110287019,14.660406679215964,6.216 -1,9.57949936061379,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 +0,8.511608543630283,58.57982277200914,8.368546110287019,14.660406679215964,6.216 +1,9.061299244589597,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.277381820223445,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.847752648246376,,30.885139144902684,27.13944325147441,63.78975983651295 -5,16.519246592989646,,41.26155329116926,31.46610176046597,78.41158164455044 -6,27.885533023415565,,47.543257944739125,35.742000000000004,82.08298020905431 -7,31.42770503870749,,49.16635959678121,41.26155329116925,93.48572620459232 +3,13.186127255566738,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.38381513149453,,30.885139144902684,27.13944325147441,63.78975983651295 +5,15.847752648246376,,41.26155329116926,31.46610176046597,78.41158164455044 +6,27.842198835580497,,47.543257944739125,35.742000000000004,82.08298020905431 +7,31.31223237011376,,49.16635959678121,41.26155329116925,93.48572620459232 
8,46.98120075093867,,59.15414849357567,52.261553708247135, -9,70.2744848433626,,75.63686934293354,53.607369269532334, +9,70.73687830262232,,75.63686934293354,53.607369269532334, 10,73.71269225852492,,76.73047118322681,68.51712708513107, 11,77.73107378648515,,76.90337628479,71.6527139472051, 12,79.42141142034684,,83.90160987728423,72.92218477253682, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv index c4f1d87..b203236 100644 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv @@ -1,14 +1,14 @@ ,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,9.061299244589597,58.57982277200914,8.368546110287019,14.660406679215964,6.216 -1,9.57949936061379,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 +0,8.511608543630283,58.57982277200914,8.368546110287019,14.660406679215964,6.216 +1,9.061299244589597,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.277381820223445,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.847752648246376,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 -5,16.519246592989646,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 -6,27.885533023415565,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 -7,31.42770503870749,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 +3,13.186127255566738,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.38381513149453,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 +5,15.847752648246376,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 +6,27.842198835580497,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 +7,31.31223237011376,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 8,46.98120075093867,180.99263946359807,59.15414849357567,52.261553708247135,113.0047601121298 -9,70.2744848433626,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 +9,70.73687830262232,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 10,73.71269225852492,,76.73047118322681,68.51712708513107,127.74976449293362 11,77.73107378648515,,76.90337628479,71.6527139472051,128.95391260446502 12,79.42141142034684,,83.90160987728423,72.92218477253682,156.39911816886948 @@ -16,23 +16,23 @@ 14,90.1989578875499,,86.73213556692814,84.84612304637143,173.3040977357431 15,95.02300207844416,,86.746056140899,85.3569069964464,176.50731235844026 16,106.9779740133454,,93.69215367361346,87.57724142721099,178.7032433505335 -17,107.89953677379714,,93.92384227660196,106.6388270940749,178.89908258009598 +17,107.5857443530508,,93.92384227660196,106.6388270940749,178.89908258009598 18,109.1124918604648,,97.76623920352056,108.4019430453163,192.01806552509584 -19,116.46709051058156,,99.31020563869556,109.77445440538524,193.80816149997398 +19,116.51891585489456,,99.31020563869556,109.77445440538524,193.80816149997398 20,118.14488765917888,,101.29648603974375,116.55, 21,131.89789560110503,,107.1696808617064,122.60853192172232, -22,132.06257362326392,,110.20259780967054,135.7061141437629, 
+22,131.89789560110503,,110.20259780967054,135.7061141437629, 23,134.55342898640674,,113.0047601121298,139.92042028238768, 24,139.59211010655295,,116.85005376121998,144.00306834231, 25,140.95216895103104,,121.54028620996417,149.5477752291889, 26,141.58466624603102,,123.3547150132495,150.03143682575327, 27,145.18884862137313,,134.27495245204892,150.48947553898913, -28,172.9344377040039,,138.48049666288753,151.15396215779458, -29,179.2766494666832,,139.60075974005298,157.6295363312346, +28,172.8855555909747,,138.48049666288753,151.15396215779458, +29,179.00704009619287,,139.60075974005298,157.6295363312346, 30,180.6988634053906,,143.26329287015568,163.43618288494136, -31,181.2326474893528,,146.6123027443468,164.78995849262176, -32,187.0618422875173,,147.63,173.67992329569933, -33,195.2976786958821,,148.77876211341456,181.5322112904484, +31,180.97262442701103,,146.6123027443468,164.78995849262176, +32,186.79700555415764,,147.63,173.67992329569933, +33,195.0749761707021,,148.77876211341456,181.5322112904484, 34,,,150.34498249027138,187.5388936300948, 35,,,160.7019343505236,189.2118645857072, 36,,,167.1976902232803,192.47656053660145, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv index c414590..f8837e1 100644 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv @@ -1,9 +1,9 @@ ,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,9.061299244589597,,8.368546110287019,14.660406679215964,6.216 -1,9.57949936061379,,9.828358967803323,14.660406679215964,7.121322629961376 +0,8.511608543630283,,8.368546110287019,14.660406679215964,6.216 +1,9.061299244589597,,9.828358967803323,14.660406679215964,7.121322629961376 2,10.19025946676531,,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.277381820223445,,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.847752648246376,,30.885139144902684,27.13944325147441, -5,16.519246592989646,,,31.46610176046597, -6,27.885533023415565,,,35.742000000000004, -7,31.42770503870749,,,, +3,13.186127255566738,,22.57315387800296,24.57089741950831,11.419521180855176 +4,15.38381513149453,,30.885139144902684,27.13944325147441, +5,15.847752648246376,,,31.46610176046597, +6,27.842198835580497,,,35.742000000000004, +7,31.31223237011376,,,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv index 0d837b5..243623f 100644 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv +++ b/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv @@ -1,9 +1,9 @@ ,A_M13DKO_080212_DKO1.2_crop_distance,A_M13DKO_080212_DKO1.2_crop_diameter,G_M13DKO_060212_DKO1.1_crop_distance,G_M13DKO_060212_DKO1.1_crop_diameter,C_M13DKO_080212_DKO1.2_crop_distance,C_M13DKO_080212_DKO1.2_crop_diameter,E_M13DKO_080212_DKO1.2_crop_distance,E_M13DKO_080212_DKO1.2_crop_diameter,H_M13DKO_080212_DKO1.2_crop_distance,H_M13DKO_080212_DKO1.2_crop_diameter -0,9.061299244589597,59.04457273790773,,,8.368546110287019,47.81289742868499,14.660406679215964,54.5150906509724,6.216,47.81289742868499 -1,9.57949936061379,55.036778046111,,,9.828358967803323,,14.660406679215964,43.95147120000001,7.121322629961376,52.5941871715535 
-2,10.19025946676531,51.71755599228722,,,17.65004713874725,50.71323599999999,20.14214209065163,57.37547691442199,8.368546110287019,44.081313347574614 -3,13.277381820223445,53.45644442588228,,,22.57315387800296,48.75977939805501,24.57089741950831,50.26043018499918,11.419521180855176,47.81289742868499 -4,15.847752648246376,46.96871937691663,,,30.885139144902684,40.71121517937428,27.13944325147441,59.04457273790773,, -5,16.519246592989646,52.81107133775702,,,,,31.46610176046597,46.96871937691663,, -6,27.885533023415565,55.45059423403328,,,,,35.742000000000004,54.19966799968288,, -7,31.42770503870749,,,,,,,,, +0,8.511608543630283,59.04457273790773,,,8.368546110287019,47.81289742868499,14.660406679215964,54.5150906509724,6.216,47.81289742868499 +1,9.061299244589597,55.036778046111,,,9.828358967803323,,14.660406679215964,43.95147120000001,7.121322629961376,52.5941871715535 +2,10.19025946676531,52.81107133775702,,,17.65004713874725,50.71323599999999,20.14214209065163,57.37547691442199,8.368546110287019,44.081313347574614 +3,13.186127255566738,53.45644442588228,,,22.57315387800296,48.75977939805501,24.57089741950831,50.26043018499918,11.419521180855176,47.81289742868499 +4,15.38381513149453,46.96871937691663,,,30.885139144902684,40.71121517937428,27.13944325147441,59.04457273790773,, +5,15.847752648246376,53.45644442588228,,,,,31.46610176046597,46.96871937691663,, +6,27.842198835580497,55.45059423403328,,,,,35.742000000000004,54.19966799968288,, +7,31.31223237011376,,,,,,,,, diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index f9f1615..12a3c9a 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -10,5 +10,5 @@ source ~/.bashrc conda activate synapse-net python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/Munc13DKO/ \ -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis --store \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py index 43c58df..49d21b9 100644 --- a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py +++ b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py @@ -56,26 +56,34 @@ def SV_pred(raw: np.ndarray, SV_model: str, output_path: str = None, store: bool Returns: np.ndarray: Segmentation result. 
""" - #Excluding boundary SV, because they would also not be used in the manual annotation - seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, exclude_boundary=True, verbose=False, return_predictions=True) - - if store and output_path: - pred_key = f"predictions/SV/pred" - seg_key = f"predictions/SV/seg" + pred_key = f"predictions/SV/pred" + seg_key = f"predictions/SV/seg" + use_existing_seg = False + #checking if segmentation is already in output path and if so, use it + if output_path: with h5py.File(output_path, "a") as f: - if pred_key in f: - print(f"{pred_key} already saved") - else: - f.create_dataset(pred_key, data=pred, compression="lzf") if seg_key in f: - print(f"{seg_key} already saved") - else: + seg = f[seg_key][:] + use_existing_seg = True + print(f"Using existing SV seg in {output_path}") + + if not use_existing_seg: + #Excluding boundary SV, because they would also not be used in the manual annotation + seg, pred = segment_vesicles(input_volume=raw, model_path=SV_model, exclude_boundary=True, verbose=False, return_predictions=True) + + if store and output_path: + with h5py.File(output_path, "a") as f: + if pred_key in f: + print(f"{pred_key} already saved") + else: + f.create_dataset(pred_key, data=pred, compression="lzf") + f.create_dataset(seg_key, data=seg, compression="lzf") - elif store and not output_path: - print("Output path is missing, not storing SV predictions") - else: - print("Not storing SV predictions") + elif store and not output_path: + print("Output path is missing, not storing SV predictions") + else: + print("Not storing SV predictions") return seg @@ -93,26 +101,35 @@ def compartment_pred(raw: np.ndarray, compartment_model: str, output_path: str = Returns: np.ndarray: Segmentation result. """ - seg, pred = segment_compartments(input_volume=raw, model_path=compartment_model, verbose=False, return_predictions=True) - if store and output_path: - pred_key = f"predictions/compartment/pred" - seg_key = f"predictions/compartment/seg" + pred_key = f"predictions/compartment/pred" + seg_key = f"predictions/compartment/seg" + use_existing_seg = False + #checking if segmentation is already in output path and if so, use it + if output_path: with h5py.File(output_path, "a") as f: - if pred_key in f: - print(f"{pred_key} already saved") - else: - f.create_dataset(pred_key, data=pred, compression="lzf") if seg_key in f: - print(f"{seg_key} already saved") - else: + seg = f[seg_key][:] + use_existing_seg = True + print(f"Using existing compartment seg in {output_path}") + + if not use_existing_seg: + seg, pred = segment_compartments(input_volume=raw, model_path=compartment_model, verbose=False, return_predictions=True) + + if store and output_path: + with h5py.File(output_path, "a") as f: + if pred_key in f: + print(f"{pred_key} already saved") + else: + f.create_dataset(pred_key, data=pred, compression="lzf") + f.create_dataset(seg_key, data=seg, compression="lzf") - elif store and not output_path: - print("Output path is missing, not storing compartment predictions") - else: - print("Not storing compartment predictions") - + elif store and not output_path: + print("Output path is missing, not storing compartment predictions") + else: + print("Not storing compartment predictions") + return seg @@ -129,25 +146,35 @@ def AZ_pred(raw: np.ndarray, AZ_model: str, output_path: str = None, store: bool Returns: np.ndarray: Segmentation result. 
""" - seg, pred = segment_active_zone(raw, model_path=AZ_model, verbose=False, return_predictions=True) - - if store and output_path: - pred_key = f"predictions/az/pred" - seg_key = f"predictions/az/seg" + pred_key = f"predictions/az/pred" + seg_key = f"predictions/az/seg" + use_existing_seg = False + #checking if segmentation is already in output path and if so, use it + if output_path: with h5py.File(output_path, "a") as f: - if pred_key in f: - print(f"{pred_key} already saved") - else: - f.create_dataset(pred_key, data=pred, compression="lzf") if seg_key in f: - print(f"{seg_key} already saved") - else: + seg = f[seg_key][:] + use_existing_seg = True + print(f"Using existing AZ seg in {output_path}") + + if not use_existing_seg: + + seg, pred = segment_active_zone(raw, model_path=AZ_model, verbose=False, return_predictions=True) + + if store and output_path: + + with h5py.File(output_path, "a") as f: + if pred_key in f: + print(f"{pred_key} already saved") + else: + f.create_dataset(pred_key, data=pred, compression="lzf") + f.create_dataset(seg_key, data=seg, compression="lzf") - elif store and not output_path: - print("Output path is missing, not storing AZ predictions") - else: - print("Not storing AZ predictions") + elif store and not output_path: + print("Output path is missing, not storing AZ predictions") + else: + print("Not storing AZ predictions") return seg diff --git a/scripts/cooper/revision/updated_data_analysis/store_results.py b/scripts/cooper/revision/updated_data_analysis/store_results.py index d0f0e3b..404f765 100644 --- a/scripts/cooper/revision/updated_data_analysis/store_results.py +++ b/scripts/cooper/revision/updated_data_analysis/store_results.py @@ -90,6 +90,41 @@ def save_filtered_dataframes(output_dir, tomogram_name, df): write_or_append_csv(file_path, data) +def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): + """ + Saves segment data including seg_id into separate CSV files. + + Parameters: + output_dir (str): Directory to save files. + tomogram_name (str): Base name of the tomogram. + df (pd.DataFrame): DataFrame with 'seg_id', 'distance', 'diameter'. + """ + thresholds = { + 'AZ_distances_with_seg_id': None, + 'AZ_distances_within_200_with_seg_id': 200, + 'AZ_distances_within_100_with_seg_id': 100, + 'AZ_distances_within_40_with_seg_id': 40, + 'AZ_distances_within_40_with_diameters_and_seg_id': 40, + } + + for filename, max_dist in thresholds.items(): + file_path = os.path.join(output_dir, f"{filename}.csv") + filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] + + if filename == 'AZ_distances_within_40_with_diameters_and_seg_id': + data = pd.DataFrame({ + f"{tomogram_name}_seg_id": filtered_df['seg_id'].values, + f"{tomogram_name}_distance": filtered_df['distance'].values, + f"{tomogram_name}_diameter": filtered_df['diameter'].values + }) + else: + data = pd.DataFrame({ + f"{tomogram_name}_seg_id": filtered_df['seg_id'].values, + f"{tomogram_name}_distance": filtered_df['distance'].values + }) + + write_or_append_csv(file_path, data) + def run_store_results(input_path, analysis_output, sorted_list): """ Processes a single tomogram's sorted segment data and stores results into categorized CSV files. 
@@ -105,4 +140,8 @@ def run_store_results(input_path, analysis_output, sorted_list): group_dir = prepare_output_directory(analysis_output, group) df = pd.DataFrame(sorted_list) + # First run: distances only save_filtered_dataframes(group_dir, tomogram_name, df) + + # Second run: include seg_id in the filenames and output + save_filtered_dataframes_with_seg_id(group_dir, tomogram_name, df) From a9b714fed9326d88bddb153a7c0fad73e1abf1a0 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Thu, 26 Jun 2025 15:22:03 +0200 Subject: [PATCH 18/29] minor addition --- .../cooper/revision/updated_data_analysis/store_results.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/cooper/revision/updated_data_analysis/store_results.py b/scripts/cooper/revision/updated_data_analysis/store_results.py index 404f765..65d0d0e 100644 --- a/scripts/cooper/revision/updated_data_analysis/store_results.py +++ b/scripts/cooper/revision/updated_data_analysis/store_results.py @@ -108,7 +108,11 @@ def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): } for filename, max_dist in thresholds.items(): - file_path = os.path.join(output_dir, f"{filename}.csv") + #storing with seg ID data in subfolder + with_segID_dir = os.path.join(output_dir, "with_segID") + os.makedirs(with_segID_dir, exist_ok=True) + file_path = os.path.join(with_segID_dir, f"{filename}.csv") + filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] if filename == 'AZ_distances_within_40_with_diameters_and_seg_id': From d6eaa667345aa402e65cb4e832abb8c5460757e5 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Mon, 30 Jun 2025 17:04:42 +0200 Subject: [PATCH 19/29] add boundary threshold for compartment seg --- .../Munc13DKO/CTRL/AZ_distances.csv | 132 ------------------ .../CTRL/AZ_distances_within_100.csv | 44 ------ .../CTRL/AZ_distances_within_200.csv | 74 ---------- .../Munc13DKO/CTRL/AZ_distances_within_40.csv | 18 --- .../AZ_distances_within_40_with_diameters.csv | 18 --- .../Munc13DKO/KO/AZ_distances.csv | 80 ----------- .../Munc13DKO/KO/AZ_distances_within_100.csv | 21 --- .../Munc13DKO/KO/AZ_distances_within_200.csv | 42 ------ .../Munc13DKO/KO/AZ_distances_within_40.csv | 9 -- .../AZ_distances_within_40_with_diameters.csv | 9 -- analysis_results/SNAP25/CTRL/AZ_distances.csv | 94 ------------- .../SNAP25/CTRL/AZ_distances_within_100.csv | 23 --- .../SNAP25/CTRL/AZ_distances_within_200.csv | 57 -------- .../SNAP25/CTRL/AZ_distances_within_40.csv | 11 -- .../AZ_distances_within_40_with_diameters.csv | 11 -- analysis_results/SNAP25/KO/AZ_distances.csv | 43 ------ .../SNAP25/KO/AZ_distances_within_100.csv | 14 -- .../SNAP25/KO/AZ_distances_within_200.csv | 22 --- .../SNAP25/KO/AZ_distances_within_40.csv | 6 - .../AZ_distances_within_40_with_diameters.csv | 6 - run_sbatch_revision.sbatch | 11 +- .../analysis_segmentations.py | 2 +- .../updated_data_analysis/store_results.py | 46 +++--- 23 files changed, 36 insertions(+), 757 deletions(-) delete mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances.csv delete mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv delete mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv delete mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv delete mode 100644 analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv delete mode 100644 analysis_results/Munc13DKO/KO/AZ_distances.csv delete mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv delete mode 100644 
analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv delete mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv delete mode 100644 analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv delete mode 100644 analysis_results/SNAP25/CTRL/AZ_distances.csv delete mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv delete mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv delete mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv delete mode 100644 analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv delete mode 100644 analysis_results/SNAP25/KO/AZ_distances.csv delete mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_100.csv delete mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_200.csv delete mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_40.csv delete mode 100644 analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances.csv deleted file mode 100644 index 3ab77e6..0000000 --- a/analysis_results/Munc13DKO/CTRL/AZ_distances.csv +++ /dev/null @@ -1,132 +0,0 @@ -,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop -0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 -1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 -2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 -3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 -4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 -5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 -6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 -7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 -8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,40.37410432443053,11.419521180855178 -9,45.30649622294799,32.634,37.6824973694685,40.553147793975256,43.788620713605496,11.834911406512512 -10,50.33137077410072,35.53872631369897,41.524089682978,43.65052948132474,46.67177125415319,11.936500492187816 -11,51.44655417032321,37.4575252252468,43.09373973096325,49.46018507850532,48.37410054977767,12.137127996358942 -12,51.96033186960992,43.87126681553657,43.59517051234001,49.97022257304844,64.16721733097049,12.528748540855945 -13,56.03026682071039,46.30815703523517,43.73343654459367,54.412195471236046,71.43330811323244,13.36801346498424 -14,64.09190366965238,54.94219689819474,49.14179483901662,59.560990623057975,75.17249124513567,28.144137009331093 -15,65.36043454567908,56.22388321701019,50.99867186505939,76.0031517241226,75.89186161901684,29.48507690340997 -16,67.55879839073518,59.98520517594318,53.51719701180173,76.54140197304986,76.60447689267254,39.61938162061595 -17,68.55236346035052,61.88745249240754,55.29270888643457,80.86774714309779,79.92154165179748,46.51628463237364 -18,72.65677102651894,66.24124373832363,55.987150025697865,81.32929929613313,80.88267703284802,51.680722672965786 -19,77.77766118880152,66.822,63.23844476266,83.98791363047424,85.2436639522258,56.90696228055053 -20,80.17796699842171,66.84006732492121,64.80384960787438,94.70479409195714,86.0332350897024,59.50014164689022 
-21,86.92682074020654,67.11049351628999,65.60015481689048,98.7004110629738,87.2595561987339,63.123778404021415 -22,89.12159516076898,67.34398449750357,67.77293141070409,100.3383516109369,90.10520291303938,63.903231185911096 -23,89.45966478810436,69.27075928557446,74.08848809363032,100.35038475262564,92.34212687609052,64.09190366965238 -24,90.10520291303938,71.77057773210412,79.20828093072089,100.6507457498453,106.87634293893107,65.6553505816548 -25,91.43543728773872,73.12061285301156,83.46875259640579,101.04585517476708,109.3777576292365,66.822 -26,91.54102096874384,73.8763166380133,85.04512578625538,101.59404994388206,115.77039258808792,70.63438579049159 -27,94.01378920137196,74.23502079207628,85.14445273768574,103.71131020288963,117.78663939513685,73.8763166380133 -28,99.12766114460686,75.70069793601641,87.71500656102126,107.39478020835092,119.73870005975512,74.36502968465756 -29,100.42255332344423,76.19355657796794,91.2239033148659,117.48897759364492,120.42246708982506,74.60818574392492 -30,104.69624503295236,76.5887130326656,95.9083679560861,124.1353253348941,122.223853547497,76.77766573685345 -31,109.76345440992644,78.53467682495422,98.29587435899842,124.32971212063512,122.63807258759412,81.72917261296607 -32,111.82323125361742,83.82962221076748,100.06118995894462,124.42679163267049,122.65776241233164,84.6038475720815 -33,113.86695747230624,85.58294355769729,100.3985028972046,129.55178964414193,129.14104651891282,89.90397016817444 -34,117.12872318948928,86.82953271784895,107.39478020835092,133.0009763122061,129.77528340943817,96.97258841548988 -35,117.75588168749788,90.34609056290152,108.53552528089594,133.79750486462743,130.2860106227833,100.87842199400227 -36,118.2776749348752,92.99362449114456,111.7584249710061,140.83218775549858,134.27495245204892,103.32639370460967 -37,122.61837960110222,93.40819829115644,112.20052187044408,145.5211270022329,137.31592816567203,108.42421818025713 -38,129.07558051002522,94.5899771646024,113.86695747230624,147.49907879034365,144.5888232609976,117.21116467299521 -39,139.69587036129593,98.33271919356244,119.45601133471683,149.94288232523743,146.290757028597,119.64790866538371 -40,139.7045135706073,98.60249396440236,119.45601133471683,156.59200830182874,151.0740583819737,125.02700215553439 -41,141.40546128067334,98.89595441675051,122.2040938103139,160.33334282051254,156.8847470979891,135.6349147085661 -42,144.08689340810983,99.96460577624462,122.49029801580204,163.1107892568729,163.42879477007716,139.42766511707782 -43,150.26464853717255,103.29133016860612,130.2860106227833,165.73247896534946,170.6501451625518,139.55750621159723 -44,150.61779743443336,105.92308405630946,137.50924434378948,173.10890399976543,175.05104719481116,144.59717399728115 -45,152.292,106.15082692094302,140.08428682760962,175.16826966091776,176.0483795551666,144.67230893298137 -46,152.58507514170577,107.98902427561794,140.21351987593778,180.77903149425268,179.53912879369778,161.55621971313886 -47,160.19019138511572,108.08960915832752,148.23400650323126,183.45758172395057,180.57185780735605,163.40662842124857 -48,161.69069427768562,110.11490923576152,153.1143511497208,186.0521592027354,181.37250550179868,164.8265906945842 -49,163.93779358037,112.71589540078187,154.37877446073992,191.4386722060096,183.81921736314732,165.63773928667345 -50,165.47728703359869,118.11422324174173,156.42999657354724,202.5274012572126,185.4671493930933,166.71313158836648 -51,166.36511736539003,118.16532618327597,160.78456325157586,206.55343369694924,186.8228598111056,174.2490713547708 
-52,168.38506096444544,118.28788316645117,166.5754628989516,217.8317802433796,194.8024384446971,176.05523809304853 -53,172.494,119.06114781909336,167.9039290308598,220.503783967532,198.92414025452013,178.73702404370505 -54,176.92411139242722,120.3121211848582,170.033450685446,223.19248922846845,204.49128454777724,182.010487851662 -55,185.83786943462303,126.6867464891257,171.34920113032334,224.25033062182985,205.9387161074867,183.33248726835077 -56,191.59628941083383,127.8159095731044,173.07402481019503,228.5172719599112,211.549576487404,196.2290386767463 -57,192.8838922668246,128.616385425808,173.91613754910728,229.8764709838743,224.9921536231875,198.82699746261827 -58,195.2976786958821,128.95391260446502,174.47067426934532,238.14764802533745,227.5907144063659,202.0319534925107 -59,196.87407599783168,132.93741116781237,174.4983548461131,246.2048060375752,237.62484279216264,204.32588628952524 -60,197.8468434016576,139.600759740053,175.91115264246326,247.5790740591781,239.83511878788727,212.733461965907 -61,199.11221954465776,141.83176752758885,180.35105656469,250.3628560629552,250.45929412980465,215.04799789814365 -62,199.6270117794684,142.54509189726596,183.7009422512579,254.66641729132644,269.1068579728135,216.24623331748467 -63,200.5141802566592,143.89402275285795,184.93252929649773,263.1266647833321,284.08463141113424,218.46277693923057 -64,201.45139210241263,148.09546621014434,188.9372602532385,273.4156902886153,284.81051321887685,234.90600375469336 -65,202.1514495421688,154.59761851982068,194.6536206701535,277.3269636512108,287.4311939856216,244.23521473366614 -66,205.8448836770057,161.89965492242408,195.98891268640685,283.3142742750531,289.44058277304515,244.64520870844785 -67,216.7203296509121,168.57856176868992,199.11828366074272,290.63954783201825,299.76897650690944,247.75945956511933 -68,220.8211581166986,173.77027842528193,204.1899232087617,309.76486621952466,304.7187559045226,253.2638621596062 -69,221.83043824507044,176.53467359133728,204.686046627512,310.407366993762,318.8162706638417,259.871364216991 -70,228.1576851565601,182.61318420092235,204.7155398693514,311.44424229065464,347.4919133332458,263.4385241531694 -71,232.3424891491008,185.00433634917857,208.00973528178915,312.9255964027232,,271.83897733768794 -72,233.2708773679218,198.9423492472128,210.16377813505355,313.3112198501675,,280.69817301151073 -73,238.046222276263,200.24904539098307,211.12678543472404,320.0825070196745,,290.7807660558036 -74,238.17299771384663,203.6214448529428,213.23235788219387,332.3962045992704,,295.1291078290991 -75,239.56813990178244,204.21357546451213,214.55332388942384,360.3772572790908,,315.2590726624692 -76,246.3812969606257,207.98070918236627,218.86036383959524,380.568222772212,, -77,247.76920639175484,219.4774007773921,219.47189919440711,,, -78,252.3037569280331,229.20314715116808,223.86771037378304,,, -79,253.36396174673305,230.1232125710051,230.3329972105604,,, -80,253.8733801011836,230.799083897662,237.7873908011104,,, -81,255.23474296419755,249.71092494322312,238.4769837950824,,, -82,255.31515346332264,253.3067668263128,244.5168508058289,,, -83,255.40026639766845,257.5470787487212,244.69455920391857,,, -84,260.6368874123538,270.74408629552744,245.20721557083104,,, -85,261.1875994070163,,249.5125938304517,,, -86,269.814858256546,,251.0082836083304,,, -87,270.2843390135655,,255.1968939622895,,, -88,270.4986873979244,,256.0377115504667,,, -89,274.6582349175062,,256.2074290726169,,, -90,276.9871500990614,,257.1294814835514,,, -91,282.23395306022275,,261.70947172771565,,, 
-92,284.22910644056145,,268.468960842776,,, -93,289.0481773891681,,268.95874921630644,,, -94,293.4921443650579,,275.5010161142786,,, -95,294.0962962296534,,276.0045746360012,,, -96,302.8426651712074,,278.44367146695936,,, -97,303.5873336290564,,281.3941732232564,,, -98,304.1595256506033,,282.563183716492,,, -99,306.8904104073635,,294.4491708801368,,, -100,308.69888285512144,,294.4696738409577,,, -101,308.9022109147165,,296.2969055930217,,, -102,309.0975925302558,,296.48837635900674,,, -103,310.1232732575871,,298.2060810647563,,, -104,313.28038744230383,,305.4390854360326,,, -105,314.52668239753524,,305.66828432141926,,, -106,315.30885938076653,,309.1366540350723,,, -107,316.7950112107197,,311.0524224949872,,, -108,328.736202752298,,316.9436240910992,,, -109,333.143677004382,,317.1797371270744,,, -110,336.13131359038834,,325.6177006552316,,, -111,341.3215165675906,,330.18751904940325,,, -112,357.2916030247563,,334.92575761801305,,, -113,,,346.48624663614,,, -114,,,346.61516268045744,,, -115,,,347.3772266628888,,, -116,,,347.8461603410336,,, -117,,,360.7790979034124,,, -118,,,361.1871775631024,,, -119,,,376.9307005644407,,, -120,,,379.1441544004074,,, -121,,,384.0392751633614,,, -122,,,385.3857338200261,,, -123,,,386.4745199880583,,, -124,,,392.8547653319226,,, -125,,,413.027942342888,,, -126,,,414.0323806902064,,, -127,,,417.13250608409794,,, -128,,,425.2227893516527,,, -129,,,431.7715661874923,,, -130,,,461.3888546898376,,, diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv deleted file mode 100644 index efca971..0000000 --- a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_100.csv +++ /dev/null @@ -1,44 +0,0 @@ -,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop -0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 -1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 -2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 -3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 -4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 -5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 -6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 -7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 -8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,40.37410432443053,11.419521180855178 -9,45.30649622294799,32.634,37.6824973694685,40.553147793975256,43.788620713605496,11.834911406512512 -10,50.33137077410072,35.53872631369897,41.524089682978,43.65052948132474,46.67177125415319,11.936500492187816 -11,51.44655417032321,37.4575252252468,43.09373973096325,49.46018507850532,48.37410054977767,12.137127996358942 -12,51.96033186960992,43.87126681553657,43.59517051234001,49.97022257304844,64.16721733097049,12.528748540855945 -13,56.03026682071039,46.30815703523517,43.73343654459367,54.412195471236046,71.43330811323244,13.36801346498424 -14,64.09190366965238,54.94219689819474,49.14179483901662,59.560990623057975,75.17249124513567,28.144137009331093 -15,65.36043454567908,56.22388321701019,50.99867186505939,76.0031517241226,75.89186161901684,29.48507690340997 
-16,67.55879839073518,59.98520517594318,53.51719701180173,76.54140197304986,76.60447689267254,39.61938162061595 -17,68.55236346035052,61.88745249240754,55.29270888643457,80.86774714309779,79.92154165179748,46.51628463237364 -18,72.65677102651894,66.24124373832363,55.987150025697865,81.32929929613313,80.88267703284802,51.680722672965786 -19,77.77766118880152,66.822,63.23844476266,83.98791363047424,85.2436639522258,56.90696228055053 -20,80.17796699842171,66.84006732492121,64.80384960787438,94.70479409195714,86.0332350897024,59.50014164689022 -21,86.92682074020654,67.11049351628999,65.60015481689048,98.7004110629738,87.2595561987339,63.123778404021415 -22,89.12159516076898,67.34398449750357,67.77293141070409,,90.10520291303938,63.903231185911096 -23,89.45966478810436,69.27075928557446,74.08848809363032,,92.34212687609052,64.09190366965238 -24,90.10520291303938,71.77057773210412,79.20828093072089,,,65.6553505816548 -25,91.43543728773872,73.12061285301156,83.46875259640579,,,66.822 -26,91.54102096874384,73.8763166380133,85.04512578625538,,,70.63438579049159 -27,94.01378920137196,74.23502079207628,85.14445273768574,,,73.8763166380133 -28,99.12766114460686,75.70069793601641,87.71500656102126,,,74.36502968465756 -29,,76.19355657796794,91.2239033148659,,,74.60818574392492 -30,,76.5887130326656,95.9083679560861,,,76.77766573685345 -31,,78.53467682495422,98.29587435899842,,,81.72917261296607 -32,,83.82962221076748,,,,84.6038475720815 -33,,85.58294355769729,,,,89.90397016817444 -34,,86.82953271784895,,,,96.97258841548988 -35,,90.34609056290152,,,, -36,,92.99362449114456,,,, -37,,93.40819829115644,,,, -38,,94.5899771646024,,,, -39,,98.33271919356244,,,, -40,,98.60249396440236,,,, -41,,98.89595441675051,,,, -42,,99.96460577624462,,,, diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv deleted file mode 100644 index f068626..0000000 --- a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_200.csv +++ /dev/null @@ -1,74 +0,0 @@ -,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop -0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 -1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 -2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 -3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 -4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 -5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 -6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 -7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 -8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,40.37410432443053,11.419521180855178 -9,45.30649622294799,32.634,37.6824973694685,40.553147793975256,43.788620713605496,11.834911406512512 -10,50.33137077410072,35.53872631369897,41.524089682978,43.65052948132474,46.67177125415319,11.936500492187816 -11,51.44655417032321,37.4575252252468,43.09373973096325,49.46018507850532,48.37410054977767,12.137127996358942 -12,51.96033186960992,43.87126681553657,43.59517051234001,49.97022257304844,64.16721733097049,12.528748540855945 
-13,56.03026682071039,46.30815703523517,43.73343654459367,54.412195471236046,71.43330811323244,13.36801346498424 -14,64.09190366965238,54.94219689819474,49.14179483901662,59.560990623057975,75.17249124513567,28.144137009331093 -15,65.36043454567908,56.22388321701019,50.99867186505939,76.0031517241226,75.89186161901684,29.48507690340997 -16,67.55879839073518,59.98520517594318,53.51719701180173,76.54140197304986,76.60447689267254,39.61938162061595 -17,68.55236346035052,61.88745249240754,55.29270888643457,80.86774714309779,79.92154165179748,46.51628463237364 -18,72.65677102651894,66.24124373832363,55.987150025697865,81.32929929613313,80.88267703284802,51.680722672965786 -19,77.77766118880152,66.822,63.23844476266,83.98791363047424,85.2436639522258,56.90696228055053 -20,80.17796699842171,66.84006732492121,64.80384960787438,94.70479409195714,86.0332350897024,59.50014164689022 -21,86.92682074020654,67.11049351628999,65.60015481689048,98.7004110629738,87.2595561987339,63.123778404021415 -22,89.12159516076898,67.34398449750357,67.77293141070409,100.3383516109369,90.10520291303938,63.903231185911096 -23,89.45966478810436,69.27075928557446,74.08848809363032,100.35038475262564,92.34212687609052,64.09190366965238 -24,90.10520291303938,71.77057773210412,79.20828093072089,100.6507457498453,106.87634293893107,65.6553505816548 -25,91.43543728773872,73.12061285301156,83.46875259640579,101.04585517476708,109.3777576292365,66.822 -26,91.54102096874384,73.8763166380133,85.04512578625538,101.59404994388206,115.77039258808792,70.63438579049159 -27,94.01378920137196,74.23502079207628,85.14445273768574,103.71131020288963,117.78663939513685,73.8763166380133 -28,99.12766114460686,75.70069793601641,87.71500656102126,107.39478020835092,119.73870005975512,74.36502968465756 -29,100.42255332344423,76.19355657796794,91.2239033148659,117.48897759364492,120.42246708982506,74.60818574392492 -30,104.69624503295236,76.5887130326656,95.9083679560861,124.1353253348941,122.223853547497,76.77766573685345 -31,109.76345440992644,78.53467682495422,98.29587435899842,124.32971212063512,122.63807258759412,81.72917261296607 -32,111.82323125361742,83.82962221076748,100.06118995894462,124.42679163267049,122.65776241233164,84.6038475720815 -33,113.86695747230624,85.58294355769729,100.3985028972046,129.55178964414193,129.14104651891282,89.90397016817444 -34,117.12872318948928,86.82953271784895,107.39478020835092,133.0009763122061,129.77528340943817,96.97258841548988 -35,117.75588168749788,90.34609056290152,108.53552528089594,133.79750486462743,130.2860106227833,100.87842199400227 -36,118.2776749348752,92.99362449114456,111.7584249710061,140.83218775549858,134.27495245204892,103.32639370460967 -37,122.61837960110222,93.40819829115644,112.20052187044408,145.5211270022329,137.31592816567203,108.42421818025713 -38,129.07558051002522,94.5899771646024,113.86695747230624,147.49907879034365,144.5888232609976,117.21116467299521 -39,139.69587036129593,98.33271919356244,119.45601133471683,149.94288232523743,146.290757028597,119.64790866538371 -40,139.7045135706073,98.60249396440236,119.45601133471683,156.59200830182874,151.0740583819737,125.02700215553439 -41,141.40546128067334,98.89595441675051,122.2040938103139,160.33334282051254,156.8847470979891,135.6349147085661 -42,144.08689340810983,99.96460577624462,122.49029801580204,163.1107892568729,163.42879477007716,139.42766511707782 -43,150.26464853717255,103.29133016860612,130.2860106227833,165.73247896534946,170.6501451625518,139.55750621159723 
-44,150.61779743443336,105.92308405630946,137.50924434378948,173.10890399976543,175.05104719481116,144.59717399728115 -45,152.292,106.15082692094302,140.08428682760962,175.16826966091776,176.0483795551666,144.67230893298137 -46,152.58507514170577,107.98902427561794,140.21351987593778,180.77903149425268,179.53912879369778,161.55621971313886 -47,160.19019138511572,108.08960915832752,148.23400650323126,183.45758172395057,180.57185780735605,163.40662842124857 -48,161.69069427768562,110.11490923576152,153.1143511497208,186.0521592027354,181.37250550179868,164.8265906945842 -49,163.93779358037,112.71589540078187,154.37877446073992,191.4386722060096,183.81921736314732,165.63773928667345 -50,165.47728703359869,118.11422324174173,156.42999657354724,,185.4671493930933,166.71313158836648 -51,166.36511736539003,118.16532618327597,160.78456325157586,,186.8228598111056,174.2490713547708 -52,168.38506096444544,118.28788316645117,166.5754628989516,,194.8024384446971,176.05523809304853 -53,172.494,119.06114781909336,167.9039290308598,,198.92414025452013,178.73702404370505 -54,176.92411139242722,120.3121211848582,170.033450685446,,,182.010487851662 -55,185.83786943462303,126.6867464891257,171.34920113032334,,,183.33248726835077 -56,191.59628941083383,127.8159095731044,173.07402481019503,,,196.2290386767463 -57,192.8838922668246,128.616385425808,173.91613754910728,,,198.82699746261827 -58,195.2976786958821,128.95391260446502,174.47067426934532,,, -59,196.87407599783168,132.93741116781237,174.4983548461131,,, -60,197.8468434016576,139.600759740053,175.91115264246326,,, -61,199.11221954465776,141.83176752758885,180.35105656469,,, -62,199.6270117794684,142.54509189726596,183.7009422512579,,, -63,,143.89402275285795,184.93252929649773,,, -64,,148.09546621014434,188.9372602532385,,, -65,,154.59761851982068,194.6536206701535,,, -66,,161.89965492242408,195.98891268640685,,, -67,,168.57856176868992,199.11828366074272,,, -68,,173.77027842528193,,,, -69,,176.53467359133728,,,, -70,,182.61318420092235,,,, -71,,185.00433634917857,,,, -72,,198.9423492472128,,,, diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv deleted file mode 100644 index 4c5fd24..0000000 --- a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40.csv +++ /dev/null @@ -1,18 +0,0 @@ -,A_M13DKO_080212_CTRL4.8_crop,B_M13DKO_080212_CTRL4.8_crop,F_M13DKO_060212_CTRL7.2_crop,F_M13DKO_080212_CTRL6.7B_crop,C_M13DKO_060212_CTRL7.2_crop,A_M13DKO_080212_CTRL6.7B_crop -0,0.0,0.0,1.554,2.6916069549620354,0.0,6.216 -1,1.554,12.624755680804284,1.554,3.4748496370346733,0.0,6.407306142209845 -2,2.19768787592779,18.32138979444518,1.554,4.39537575185558,0.0,7.7700000000000005 -3,2.6916069549620354,21.64471538274412,1.554,7.77,0.0,7.923876324123188 -4,3.4748496370346733,21.756,3.108,8.927050352720096,16.519246592989646,9.57949936061379 -5,19.47156418986416,25.67629389144781,4.914179483901662,18.648,26.04978687052929,9.950455064970647 -6,26.463666412649623,27.885533023415565,6.59306362778337,31.504451685436457,26.234539447072443,11.313290767941925 -7,31.15760311705636,29.526,12.528748540855943,31.58101182672905,27.005640744111226,11.419521180855176 -8,34.748496370346736,29.93215388173728,28.94781255984638,32.074198602615155,,11.419521180855178 -9,,32.634,37.6824973694685,,,11.834911406512512 -10,,35.53872631369897,,,,11.936500492187816 -11,,37.4575252252468,,,,12.137127996358942 -12,,,,,,12.528748540855945 -13,,,,,,13.36801346498424 -14,,,,,,28.144137009331093 -15,,,,,,29.48507690340997 
-16,,,,,,39.61938162061595 diff --git a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv b/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv deleted file mode 100644 index b9a80c0..0000000 --- a/analysis_results/Munc13DKO/CTRL/AZ_distances_within_40_with_diameters.csv +++ /dev/null @@ -1,18 +0,0 @@ -,A_M13DKO_080212_CTRL4.8_crop_distance,A_M13DKO_080212_CTRL4.8_crop_diameter,B_M13DKO_080212_CTRL4.8_crop_distance,B_M13DKO_080212_CTRL4.8_crop_diameter,F_M13DKO_060212_CTRL7.2_crop_distance,F_M13DKO_060212_CTRL7.2_crop_diameter,F_M13DKO_080212_CTRL6.7B_crop_distance,F_M13DKO_080212_CTRL6.7B_crop_diameter,C_M13DKO_060212_CTRL7.2_crop_distance,C_M13DKO_060212_CTRL7.2_crop_diameter,A_M13DKO_080212_CTRL6.7B_crop_distance,A_M13DKO_080212_CTRL6.7B_crop_diameter -0,0.0,45.10659722111719,0.0,43.031607685816496,1.554,59.141287909587376,2.6916069549620354,45.48512075696828,0.0,47.09024278669811,6.216,47.81289742868499 -1,1.554,50.26043018499918,12.624755680804284,48.75977939805501,1.554,49.91813042345726,3.4748496370346733,39.42752527305825,0.0,46.96871937691663,6.407306142209845,45.48512075696828 -2,2.19768787592779,47.81289742868499,18.32138979444518,47.81289742868499,1.554,56.26909744547573,4.39537575185558,49.91813042345726,0.0,41.819205728975206,7.7700000000000005,52.48540900511086 -3,2.6916069549620354,43.95147120000001,21.64471538274412,45.10659722111719,1.554,47.81289742868499,7.77,49.91813042345726,0.0,46.96871937691663,7.923876324123188,49.91813042345726 -4,3.4748496370346733,48.75977939805501,21.756,48.75977939805501,3.108,50.26043018499918,8.927050352720096,48.40686923918031,16.519246592989646,41.13020956412944,9.57949936061379,50.8258071396247 -5,19.47156418986416,43.29642007870028,25.67629389144781,45.10659722111719,4.914179483901662,47.81289742868499,18.648,49.91813042345726,26.04978687052929,44.46856512026162,9.950455064970647,45.48512075696828 -6,26.463666412649623,54.51509065097238,27.885533023415565,44.08131334757461,6.59306362778337,52.81107133775702,31.504451685436457,52.5941871715535,26.234539447072443,51.60693007739111,11.313290767941925,49.91813042345726 -7,31.15760311705636,48.75977939805501,29.526,46.96871937691663,12.528748540855943,48.75977939805501,31.58101182672905,46.96871937691663,27.005640744111226,48.40686923918032,11.419521180855176,50.26043018499918 -8,34.748496370346736,45.98497225710271,29.93215388173728,48.40686923918031,28.94781255984638,46.96871937691663,32.074198602615155,56.67391631552351,,,11.419521180855178,57.37547691442199 -9,,,32.634,47.81289742868499,37.6824973694685,49.91813042345726,,,,,11.834911406512512,47.45294577914065 -10,,,35.53872631369897,50.26043018499918,,,,,,,11.936500492187816,49.91813042345726 -11,,,37.4575252252468,48.75977939805501,,,,,,,12.137127996358942,49.91813042345726 -12,,,,,,,,,,,12.528748540855945,46.96871937691663 -13,,,,,,,,,,,13.36801346498424,45.35929722199581 -14,,,,,,,,,,,28.144137009331093,52.48540900511086 -15,,,,,,,,,,,29.48507690340997,50.26043018499918 -16,,,,,,,,,,,39.61938162061595,50.26043018499918 diff --git a/analysis_results/Munc13DKO/KO/AZ_distances.csv b/analysis_results/Munc13DKO/KO/AZ_distances.csv deleted file mode 100644 index e022a58..0000000 --- a/analysis_results/Munc13DKO/KO/AZ_distances.csv +++ /dev/null @@ -1,80 +0,0 @@ -,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,8.511608543630283,58.57982277200914,8.368546110287019,14.660406679215964,6.216 
-1,9.061299244589597,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 -2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.186127255566738,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.38381513149453,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 -5,15.847752648246376,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 -6,27.842198835580497,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 -7,31.31223237011376,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 -8,46.98120075093867,180.99263946359807,59.15414849357567,52.261553708247135,113.0047601121298 -9,70.73687830262232,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 -10,73.71269225852492,,76.73047118322681,68.51712708513107,127.74976449293362 -11,77.73107378648515,,76.90337628479,71.6527139472051,128.95391260446502 -12,79.42141142034684,,83.90160987728423,72.92218477253682,156.39911816886948 -13,81.31445140932823,,85.6534576535005,82.00939616897567,169.4430179263814 -14,90.1989578875499,,86.73213556692814,84.84612304637143,173.3040977357431 -15,95.02300207844416,,86.746056140899,85.3569069964464,176.50731235844026 -16,106.9779740133454,,93.69215367361346,87.57724142721099,178.7032433505335 -17,107.5857443530508,,93.92384227660196,106.6388270940749,178.89908258009598 -18,109.1124918604648,,97.76623920352056,108.4019430453163,192.01806552509584 -19,116.51891585489456,,99.31020563869556,109.77445440538524,193.80816149997398 -20,118.14488765917888,,101.29648603974375,116.55,219.35633407768285 -21,131.89789560110503,,107.1696808617064,122.60853192172232,220.23530112132343 -22,131.89789560110503,,110.20259780967054,135.7061141437629,221.63984585809473 -23,134.55342898640674,,113.0047601121298,139.92042028238768,235.84989960566026 -24,139.59211010655295,,116.85005376121998,144.00306834231,237.52319379799525 -25,140.95216895103104,,121.54028620996417,149.5477752291889,243.76014477350475 -26,141.58466624603102,,123.3547150132495,150.03143682575327,250.86874232554362 -27,145.18884862137313,,134.27495245204892,150.48947553898913,258.74916653778814 -28,172.8855555909747,,138.48049666288753,151.15396215779458,262.0230178438528 -29,179.00704009619287,,139.60075974005298,157.6295363312346,276.1445319610729 -30,180.6988634053906,,143.26329287015568,163.43618288494136,278.58673767428337 -31,180.97262442701103,,146.6123027443468,164.78995849262176, -32,186.79700555415764,,147.63,173.67992329569933, -33,195.0749761707021,,148.77876211341456,181.5322112904484, -34,202.58701197263363,,150.34498249027138,187.5388936300948, -35,212.23908251780583,,160.7019343505236,189.2118645857072, -36,212.5744768122457,,167.1976902232803,192.47656053660145, -37,214.18156861877728,,174.95445207253232,200.91723633377003, -38,219.1635901330328,,186.5512114031962,201.73290369198577, -39,222.53692370480903,,189.8553033654841,204.6919456158449, -40,227.38902013070023,,195.0440251532972,204.9277659078925, -41,238.674367128102,,202.31862544017048,205.0396855342887, -42,254.3343075088377,,205.65709038105155,208.5025607324764, -43,258.10906162318287,,227.4155690712489,211.22398798431965, -44,267.2789649635751,,228.4009970030779,211.469653822954, -45,270.91796173011494,,233.0015592136671,218.0810759878078, -46,274.62306290623155,,233.44162965503816,223.9324244945336, -47,282.65290734043407,,233.99958415347663,225.63523464210996, 
-48,298.19393360026623,,240.4736545570013,230.50068745233716, -49,355.98812751551145,,249.27051305760176,233.46231841562783, -50,,,255.0265039402768,235.08070474626368, -51,,,256.4476699601695,240.4887175815115, -52,,,274.8340274565724,240.5991509461328, -53,,,281.5657602834549,245.60083845133752, -54,,,284.75115384489663,247.9104923152709, -55,,,288.31202478564785,251.36880701471296, -56,,,300.0065321288855,252.5046776438013, -57,,,316.1120199612789,253.09217075998225, -58,,,331.3702317951931,256.41470904766754, -59,,,342.64202348223426,258.5857867787787, -60,,,350.75676833954327,259.2200565156948, -61,,,354.018799738093,260.3355863880311, -62,,,,260.92858560916625, -63,,,,275.9170652569355, -64,,,,276.4897482656455, -65,,,,279.1150124231945, -66,,,,281.63865312843694, -67,,,,284.891052586774, -68,,,,285.49648830064444, -69,,,,288.43763817504816, -70,,,,289.2026983691543, -71,,,,295.5256962431524, -72,,,,297.0092028271178, -73,,,,298.55005383352386, -74,,,,298.6956170083518, -75,,,,303.7940828456012, -76,,,,306.2957258663595, -77,,,,313.4036988741518, -78,,,,314.13101978633057, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv deleted file mode 100644 index 9e7d061..0000000 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_100.csv +++ /dev/null @@ -1,21 +0,0 @@ -,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,8.511608543630283,58.57982277200914,8.368546110287019,14.660406679215964,6.216 -1,9.061299244589597,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 -2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.186127255566738,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.38381513149453,,30.885139144902684,27.13944325147441,63.78975983651295 -5,15.847752648246376,,41.26155329116926,31.46610176046597,78.41158164455044 -6,27.842198835580497,,47.543257944739125,35.742000000000004,82.08298020905431 -7,31.31223237011376,,49.16635959678121,41.26155329116925,93.48572620459232 -8,46.98120075093867,,59.15414849357567,52.261553708247135, -9,70.73687830262232,,75.63686934293354,53.607369269532334, -10,73.71269225852492,,76.73047118322681,68.51712708513107, -11,77.73107378648515,,76.90337628479,71.6527139472051, -12,79.42141142034684,,83.90160987728423,72.92218477253682, -13,81.31445140932823,,85.6534576535005,82.00939616897567, -14,90.1989578875499,,86.73213556692814,84.84612304637143, -15,95.02300207844416,,86.746056140899,85.3569069964464, -16,,,93.69215367361346,87.57724142721099, -17,,,93.92384227660196,, -18,,,97.76623920352056,, -19,,,99.31020563869556,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv deleted file mode 100644 index b203236..0000000 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_200.csv +++ /dev/null @@ -1,42 +0,0 @@ -,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,8.511608543630283,58.57982277200914,8.368546110287019,14.660406679215964,6.216 -1,9.061299244589597,86.71821275833584,9.828358967803323,14.660406679215964,7.121322629961376 -2,10.19025946676531,88.16810423276662,17.65004713874725,20.14214209065163,8.368546110287019 
-3,13.186127255566738,98.52899228145998,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.38381513149453,135.13546829755688,30.885139144902684,27.13944325147441,63.78975983651295 -5,15.847752648246376,146.05946714951415,41.26155329116926,31.46610176046597,78.41158164455044 -6,27.842198835580497,156.99246063426102,47.543257944739125,35.742000000000004,82.08298020905431 -7,31.31223237011376,175.51946733054996,49.16635959678121,41.26155329116925,93.48572620459232 -8,46.98120075093867,180.99263946359807,59.15414849357567,52.261553708247135,113.0047601121298 -9,70.73687830262232,190.9144499193291,75.63686934293354,53.607369269532334,122.64791789508699 -10,73.71269225852492,,76.73047118322681,68.51712708513107,127.74976449293362 -11,77.73107378648515,,76.90337628479,71.6527139472051,128.95391260446502 -12,79.42141142034684,,83.90160987728423,72.92218477253682,156.39911816886948 -13,81.31445140932823,,85.6534576535005,82.00939616897567,169.4430179263814 -14,90.1989578875499,,86.73213556692814,84.84612304637143,173.3040977357431 -15,95.02300207844416,,86.746056140899,85.3569069964464,176.50731235844026 -16,106.9779740133454,,93.69215367361346,87.57724142721099,178.7032433505335 -17,107.5857443530508,,93.92384227660196,106.6388270940749,178.89908258009598 -18,109.1124918604648,,97.76623920352056,108.4019430453163,192.01806552509584 -19,116.51891585489456,,99.31020563869556,109.77445440538524,193.80816149997398 -20,118.14488765917888,,101.29648603974375,116.55, -21,131.89789560110503,,107.1696808617064,122.60853192172232, -22,131.89789560110503,,110.20259780967054,135.7061141437629, -23,134.55342898640674,,113.0047601121298,139.92042028238768, -24,139.59211010655295,,116.85005376121998,144.00306834231, -25,140.95216895103104,,121.54028620996417,149.5477752291889, -26,141.58466624603102,,123.3547150132495,150.03143682575327, -27,145.18884862137313,,134.27495245204892,150.48947553898913, -28,172.8855555909747,,138.48049666288753,151.15396215779458, -29,179.00704009619287,,139.60075974005298,157.6295363312346, -30,180.6988634053906,,143.26329287015568,163.43618288494136, -31,180.97262442701103,,146.6123027443468,164.78995849262176, -32,186.79700555415764,,147.63,173.67992329569933, -33,195.0749761707021,,148.77876211341456,181.5322112904484, -34,,,150.34498249027138,187.5388936300948, -35,,,160.7019343505236,189.2118645857072, -36,,,167.1976902232803,192.47656053660145, -37,,,174.95445207253232,, -38,,,186.5512114031962,, -39,,,189.8553033654841,, -40,,,195.0440251532972,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv deleted file mode 100644 index f8837e1..0000000 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_40.csv +++ /dev/null @@ -1,9 +0,0 @@ -,A_M13DKO_080212_DKO1.2_crop,G_M13DKO_060212_DKO1.1_crop,C_M13DKO_080212_DKO1.2_crop,E_M13DKO_080212_DKO1.2_crop,H_M13DKO_080212_DKO1.2_crop -0,8.511608543630283,,8.368546110287019,14.660406679215964,6.216 -1,9.061299244589597,,9.828358967803323,14.660406679215964,7.121322629961376 -2,10.19025946676531,,17.65004713874725,20.14214209065163,8.368546110287019 -3,13.186127255566738,,22.57315387800296,24.57089741950831,11.419521180855176 -4,15.38381513149453,,30.885139144902684,27.13944325147441, -5,15.847752648246376,,,31.46610176046597, -6,27.842198835580497,,,35.742000000000004, -7,31.31223237011376,,,, diff --git a/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv b/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv 
deleted file mode 100644 index 243623f..0000000 --- a/analysis_results/Munc13DKO/KO/AZ_distances_within_40_with_diameters.csv +++ /dev/null @@ -1,9 +0,0 @@ -,A_M13DKO_080212_DKO1.2_crop_distance,A_M13DKO_080212_DKO1.2_crop_diameter,G_M13DKO_060212_DKO1.1_crop_distance,G_M13DKO_060212_DKO1.1_crop_diameter,C_M13DKO_080212_DKO1.2_crop_distance,C_M13DKO_080212_DKO1.2_crop_diameter,E_M13DKO_080212_DKO1.2_crop_distance,E_M13DKO_080212_DKO1.2_crop_diameter,H_M13DKO_080212_DKO1.2_crop_distance,H_M13DKO_080212_DKO1.2_crop_diameter -0,8.511608543630283,59.04457273790773,,,8.368546110287019,47.81289742868499,14.660406679215964,54.5150906509724,6.216,47.81289742868499 -1,9.061299244589597,55.036778046111,,,9.828358967803323,,14.660406679215964,43.95147120000001,7.121322629961376,52.5941871715535 -2,10.19025946676531,52.81107133775702,,,17.65004713874725,50.71323599999999,20.14214209065163,57.37547691442199,8.368546110287019,44.081313347574614 -3,13.186127255566738,53.45644442588228,,,22.57315387800296,48.75977939805501,24.57089741950831,50.26043018499918,11.419521180855176,47.81289742868499 -4,15.38381513149453,46.96871937691663,,,30.885139144902684,40.71121517937428,27.13944325147441,59.04457273790773,, -5,15.847752648246376,53.45644442588228,,,,,31.46610176046597,46.96871937691663,, -6,27.842198835580497,55.45059423403328,,,,,35.742000000000004,54.19966799968288,, -7,31.31223237011376,,,,,,,,, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances.csv b/analysis_results/SNAP25/CTRL/AZ_distances.csv deleted file mode 100644 index 73b6a8b..0000000 --- a/analysis_results/SNAP25/CTRL/AZ_distances.csv +++ /dev/null @@ -1,94 +0,0 @@ -,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop -0,0.0,0.0,0.0,0.0,1.554 -1,0.0,0.0,1.554,0.0,2.19768787592779 -2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 -3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 -4,60.62591980333164,0.0,5.154034924212291,0.0,4.914179483901662 -5,63.7329484019059,1.554,32.14940198510697,0.0,5.154034924212291 -6,69.4274609646644,18.648000000000003,50.13908212163442,0.0,5.814535579046705 -7,71.77057773210413,22.679883333033263,50.73756454541349,2.19768787592779,6.407306142209845 -8,77.8397343520647,41.29080643436261,53.47205385993696,23.5162872069551,14.411203142000323 -9,92.11956808409384,53.13225765954238,81.43315870086337,39.558381766700215,45.519203947345126 -10,98.38182417499688,61.965445532167365,91.4750454495651,48.24913495597615,47.46700567762833 -11,100.48265420459394,63.6191733363457,92.38134629891468,49.97022257304844,55.29270888643457 -12,102.3518715412669,66.09525734876898,93.04554723359952,52.23844446382377,57.35081160716037 -13,110.07103875225307,68.05739407294405,94.57721112403348,56.073350461694375,58.57982277200914 -14,112.16823240115714,86.38339215381623,105.43177105597724,58.53858389131053,60.92393529640054 -15,121.83796063624835,87.54966238655635,112.27582785265938,63.10464708719953,68.53474753728943 -16,128.01413975026352,96.36053144311732,113.86695747230624,67.2542762060525,81.92100807485221 -17,129.32790965603675,102.86963551991424,113.90936602404564,69.25332610062856,94.51335505630936 -18,134.44570004280538,106.3667309829535,116.73633105421808,72.25682245988956,95.66886452759853 -19,138.3409169407229,115.10095210726972,125.9123645239021,84.46100795041463,100.09738500080809 -20,140.69494110308304,115.4779899028382,133.39983509734938,85.73799801721522,105.34011116379173 
-21,149.41853439249095,115.96838881350384,137.68475078962086,93.89812732956926,105.37449296675169 -22,157.3305088023299,126.5246148541856,143.28014836675737,103.5131986366956,107.59696698327514 -23,168.60004814945933,128.86961368763392,145.81953017343045,105.672,115.32104063006022 -24,170.98237656553962,145.33015976045715,149.0139343148821,106.13945138354542,116.14525722559661 -25,180.02940769774253,156.99246063426105,158.3631985658284,125.25856957509934,125.48970968171056 -26,180.97262442701103,157.66017375355136,160.49141427503218,130.66543582753627,125.61473298940695 -27,191.72228987783348,161.2045628262426,167.32763102368958,130.88702802035044,136.4160646551571 -28,195.1739864428659,163.05896221919235,168.88627201759178,138.17498288764142,137.83375556082044 -29,200.0439277658785,172.86460188251382,169.69224609274283,142.6297738342174,141.65287517025556 -30,205.9621675356909,199.57256722305297,178.50042494067068,142.7905311566562,148.80310750787433 -31,206.21995723013816,203.4375344129004,184.43564399540563,147.65453480337135,150.30482088076883 -32,206.3955383238698,215.81026383376675,186.92624107920216,147.95679619402418,153.9401530075893 -33,211.1496606485552,242.4638428797168,187.96335036384087,151.696191659514,158.06555898107595 -34,224.77201257274,252.47120196172872,191.84820759131424,151.89505411302898,158.60699849628324 -35,225.9507449777495,271.10063361047315,192.7649150960828,153.1695431213399,161.33933049321857 -36,229.76088387713,272.81885205388573,195.84716190948487,161.23452093146804,162.8514891304344 -37,247.57419695113623,378.595990818709,203.29503787353,162.05619858555244,175.78068784710112 -38,247.5985815306704,,204.3022470361009,163.2883570742262,186.6482740772065 -39,249.92842426582857,,208.7514292645681,172.6269487652493,188.16238592237292 -40,,,218.5677659491445,173.36679190663938,190.50924535045536 -41,,,223.48983786293283,173.8119649506328,196.1305610148505 -42,,,223.60866660306348,176.54151323697212,196.16134059492967 -43,,,225.0994613765213,177.71401590195413,196.63473771437233 -44,,,225.9774628939797,178.62890333873742,205.7568769105908 -45,,,232.9393646509752,179.17559349420333,210.09482299190526 -46,,,237.4825220179372,181.73829021975527,215.91654262700672 -47,,,241.41578810011572,184.46182922219975,224.17493606556465 -48,,,244.50203594244363,186.38284969384927,231.31121653737418 -49,,,255.9245040085064,187.73194747831283,232.68004169674717 -50,,,269.07544777626964,187.92480300375468,243.55201027295996 -51,,,269.1741531945443,192.04950421180476,243.7452839174535 -52,,,269.38490269501,192.558095815263,243.89385173062485 -53,,,273.975973450228,193.1528857770445,248.74196034445012 -54,,,275.7069294232556,198.182223784072,251.18139942280757 -55,,,282.06277248158784,198.79663060524945,265.5612771998207 -56,,,291.9411419926969,202.8669477071117,268.1719569231652 -57,,,292.17266416282,202.90265638477976,275.8514150045274 -58,,,301.66418353526825,205.5278829550872,285.13676888819515 -59,,,306.18532629112065,212.8015620995297,292.6269065619223 -60,,,311.43261118257993,222.7484323985244,297.8211707787074 -61,,,313.7310095097391,226.76688758282148,304.27859688778636 -62,,,319.6597264404761,227.50581229498292,319.2174737322505 -63,,,323.6239978802561,229.47692323194505,322.85074630856906 -64,,,334.99064405442726,234.9111438735932,330.90349238410886 -65,,,340.9498665375894,236.5196168185633,339.72942231723175 -66,,,348.79944547547666,238.5579813294873,356.235646447685 -67,,,369.16250436359326,239.97101269945088,363.9347234546327 -68,,,,245.79250104915732,384.1933051889374 
-69,,,,247.20325519701396, -70,,,,248.294966851928, -71,,,,256.31580087852564, -72,,,,259.20142367664573, -73,,,,269.6850481061195, -74,,,,270.93578879874843, -75,,,,271.6301318852531, -76,,,,276.6338249816895, -77,,,,281.4456603253992, -78,,,,281.5014273924735, -79,,,,293.41808109930787, -80,,,,297.90629902705984, -81,,,,298.2749073522612, -82,,,,301.0230773013923, -83,,,,301.6721887413555, -84,,,,323.832869394075, -85,,,,325.50643542025404, -86,,,,329.1986792440092, -87,,,,339.4734252279551, -88,,,,339.9426059616535, -89,,,,345.2189227142684, -90,,,,357.9769790642968, -91,,,,362.4519697835839, -92,,,,377.6635656136292, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv deleted file mode 100644 index 970efec..0000000 --- a/analysis_results/SNAP25/CTRL/AZ_distances_within_100.csv +++ /dev/null @@ -1,23 +0,0 @@ -,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop -0,0.0,0.0,0.0,0.0,1.554 -1,0.0,0.0,1.554,0.0,2.19768787592779 -2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 -3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 -4,60.62591980333164,0.0,5.154034924212291,0.0,4.914179483901662 -5,63.7329484019059,1.554,32.14940198510697,0.0,5.154034924212291 -6,69.4274609646644,18.648000000000003,50.13908212163442,0.0,5.814535579046705 -7,71.77057773210413,22.679883333033263,50.73756454541349,2.19768787592779,6.407306142209845 -8,77.8397343520647,41.29080643436261,53.47205385993696,23.5162872069551,14.411203142000323 -9,92.11956808409384,53.13225765954238,81.43315870086337,39.558381766700215,45.519203947345126 -10,98.38182417499688,61.965445532167365,91.4750454495651,48.24913495597615,47.46700567762833 -11,,63.6191733363457,92.38134629891468,49.97022257304844,55.29270888643457 -12,,66.09525734876898,93.04554723359952,52.23844446382377,57.35081160716037 -13,,68.05739407294405,94.57721112403348,56.073350461694375,58.57982277200914 -14,,86.38339215381623,,58.53858389131053,60.92393529640054 -15,,87.54966238655635,,63.10464708719953,68.53474753728943 -16,,96.36053144311732,,67.2542762060525,81.92100807485221 -17,,,,69.25332610062856,94.51335505630936 -18,,,,72.25682245988956,95.66886452759853 -19,,,,84.46100795041463, -20,,,,85.73799801721522, -21,,,,93.89812732956926, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv deleted file mode 100644 index c7cbe00..0000000 --- a/analysis_results/SNAP25/CTRL/AZ_distances_within_200.csv +++ /dev/null @@ -1,57 +0,0 @@ -,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop -0,0.0,0.0,0.0,0.0,1.554 -1,0.0,0.0,1.554,0.0,2.19768787592779 -2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 -3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 -4,60.62591980333164,0.0,5.154034924212291,0.0,4.914179483901662 -5,63.7329484019059,1.554,32.14940198510697,0.0,5.154034924212291 -6,69.4274609646644,18.648000000000003,50.13908212163442,0.0,5.814535579046705 -7,71.77057773210413,22.679883333033263,50.73756454541349,2.19768787592779,6.407306142209845 -8,77.8397343520647,41.29080643436261,53.47205385993696,23.5162872069551,14.411203142000323 -9,92.11956808409384,53.13225765954238,81.43315870086337,39.558381766700215,45.519203947345126 
-10,98.38182417499688,61.965445532167365,91.4750454495651,48.24913495597615,47.46700567762833 -11,100.48265420459394,63.6191733363457,92.38134629891468,49.97022257304844,55.29270888643457 -12,102.3518715412669,66.09525734876898,93.04554723359952,52.23844446382377,57.35081160716037 -13,110.07103875225307,68.05739407294405,94.57721112403348,56.073350461694375,58.57982277200914 -14,112.16823240115714,86.38339215381623,105.43177105597724,58.53858389131053,60.92393529640054 -15,121.83796063624835,87.54966238655635,112.27582785265938,63.10464708719953,68.53474753728943 -16,128.01413975026352,96.36053144311732,113.86695747230624,67.2542762060525,81.92100807485221 -17,129.32790965603675,102.86963551991424,113.90936602404564,69.25332610062856,94.51335505630936 -18,134.44570004280538,106.3667309829535,116.73633105421808,72.25682245988956,95.66886452759853 -19,138.3409169407229,115.10095210726972,125.9123645239021,84.46100795041463,100.09738500080809 -20,140.69494110308304,115.4779899028382,133.39983509734938,85.73799801721522,105.34011116379173 -21,149.41853439249095,115.96838881350384,137.68475078962086,93.89812732956926,105.37449296675169 -22,157.3305088023299,126.5246148541856,143.28014836675737,103.5131986366956,107.59696698327514 -23,168.60004814945933,128.86961368763392,145.81953017343045,105.672,115.32104063006022 -24,170.98237656553962,145.33015976045715,149.0139343148821,106.13945138354542,116.14525722559661 -25,180.02940769774253,156.99246063426105,158.3631985658284,125.25856957509934,125.48970968171056 -26,180.97262442701103,157.66017375355136,160.49141427503218,130.66543582753627,125.61473298940695 -27,191.72228987783348,161.2045628262426,167.32763102368958,130.88702802035044,136.4160646551571 -28,195.1739864428659,163.05896221919235,168.88627201759178,138.17498288764142,137.83375556082044 -29,,172.86460188251382,169.69224609274283,142.6297738342174,141.65287517025556 -30,,199.57256722305297,178.50042494067068,142.7905311566562,148.80310750787433 -31,,,184.43564399540563,147.65453480337135,150.30482088076883 -32,,,186.92624107920216,147.95679619402418,153.9401530075893 -33,,,187.96335036384087,151.696191659514,158.06555898107595 -34,,,191.84820759131424,151.89505411302898,158.60699849628324 -35,,,192.7649150960828,153.1695431213399,161.33933049321857 -36,,,195.84716190948487,161.23452093146804,162.8514891304344 -37,,,,162.05619858555244,175.78068784710112 -38,,,,163.2883570742262,186.6482740772065 -39,,,,172.6269487652493,188.16238592237292 -40,,,,173.36679190663938,190.50924535045536 -41,,,,173.8119649506328,196.1305610148505 -42,,,,176.54151323697212,196.16134059492967 -43,,,,177.71401590195413,196.63473771437233 -44,,,,178.62890333873742, -45,,,,179.17559349420333, -46,,,,181.73829021975527, -47,,,,184.46182922219975, -48,,,,186.38284969384927, -49,,,,187.73194747831283, -50,,,,187.92480300375468, -51,,,,192.04950421180476, -52,,,,192.558095815263, -53,,,,193.1528857770445, -54,,,,198.182223784072, -55,,,,198.79663060524945, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv deleted file mode 100644 index db28ca3..0000000 --- a/analysis_results/SNAP25/CTRL/AZ_distances_within_40.csv +++ /dev/null @@ -1,11 +0,0 @@ -,E_SNAP25_120812_CTRL2.3_14_crop,C_SNAP25_12082_CTRL2.3_5_crop,D_SNAP25_12082_CTRL2.3_5_crop,B_SNAP25_120812_CTRL1.4_4_crop,B_SNAP25_120812_CTRL1.3_13_crop -0,0.0,0.0,0.0,0.0,1.554 -1,0.0,0.0,1.554,0.0,2.19768787592779 -2,32.3366452805482,0.0,3.108,0.0,2.19768787592779 
-3,38.91211035140603,0.0,3.4748496370346733,0.0,4.662000000000001 -4,,0.0,5.154034924212291,0.0,4.914179483901662 -5,,1.554,32.14940198510697,0.0,5.154034924212291 -6,,18.648000000000003,,0.0,5.814535579046705 -7,,22.679883333033263,,2.19768787592779,6.407306142209845 -8,,,,23.5162872069551,14.411203142000323 -9,,,,39.558381766700215, diff --git a/analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv b/analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv deleted file mode 100644 index 920cb73..0000000 --- a/analysis_results/SNAP25/CTRL/AZ_distances_within_40_with_diameters.csv +++ /dev/null @@ -1,11 +0,0 @@ -,E_SNAP25_120812_CTRL2.3_14_crop_distance,E_SNAP25_120812_CTRL2.3_14_crop_diameter,C_SNAP25_12082_CTRL2.3_5_crop_distance,C_SNAP25_12082_CTRL2.3_5_crop_diameter,D_SNAP25_12082_CTRL2.3_5_crop_distance,D_SNAP25_12082_CTRL2.3_5_crop_diameter,B_SNAP25_120812_CTRL1.4_4_crop_distance,B_SNAP25_120812_CTRL1.4_4_crop_diameter,B_SNAP25_120812_CTRL1.3_13_crop_distance,B_SNAP25_120812_CTRL1.3_13_crop_diameter -0,0.0,56.67391631552351,0.0,27.0470592,0.0,44.08131334757461,0.0,49.91813042345726,1.554,48.05136722436951 -1,0.0,54.51509065097238,0.0,40.71121517937428,1.554,59.8139155734108,0.0,45.98497225710271,2.19768787592779,48.75977939805501 -2,32.3366452805482,45.48512075696828,0.0,48.75977939805501,3.108,42.36233505146845,0.0,43.95147120000001,2.19768787592779,47.45294577914065 -3,38.91211035140603,52.81107133775702,0.0,48.40686923918031,3.4748496370346733,54.5150906509724,0.0,48.40686923918031,4.662000000000001,50.26043018499918 -4,,,0.0,43.29642007870028,5.154034924212291,47.81289742868499,0.0,44.46856512026162,4.914179483901662,49.22639078946891 -5,,,1.554,47.81289742868499,32.14940198510697,42.36233505146845,0.0,49.22639078946891,5.154034924212291,46.96871937691663 -6,,,18.648000000000003,50.26043018499918,,,0.0,50.71323599999999,5.814535579046705,47.81289742868499 -7,,,22.679883333033263,46.96871937691663,,,2.19768787592779,49.91813042345726,6.407306142209845,48.05136722436951 -8,,,,,,,23.5162872069551,45.98497225710271,14.411203142000323,46.96871937691663 -9,,,,,,,39.558381766700215,42.76515554070582,, diff --git a/analysis_results/SNAP25/KO/AZ_distances.csv b/analysis_results/SNAP25/KO/AZ_distances.csv deleted file mode 100644 index 0222287..0000000 --- a/analysis_results/SNAP25/KO/AZ_distances.csv +++ /dev/null @@ -1,43 +0,0 @@ -,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop -0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 -1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 -2,12.90849348297469,4.662,6.216,9.452612972083434,40.582911576179455 -3,14.660406679215962,6.216,6.407306142209845,11.419521180855176,60.04556260041203 -4,22.46591738612069,13.636294658007358,6.407306142209845,41.14433273246754,75.15642703056074 -5,59.642026021925176,60.76517558602131,77.0602260832396,50.13908212163442,89.87710509356651 -6,63.581203071348064,86.46721885200193,80.34345317448087,51.11691610416262,90.78605789437054 -7,76.22524447976537,118.81750265007256,90.4796399197079,56.35259124476886,92.88969193618848 -8,97.60555117410075,144.34644175732217,90.51966630517371,65.1012870840508,103.85092612008812 -9,102.0801770178716,198.70550218854032,113.29288830284098,76.60447689267252,123.66755042451517 -10,104.9726712435194,201.667053223872,133.30023228786965,91.686,130.47123393300149 
-11,107.27103402130513,203.9710098224745,137.6145750711021,96.67328958921384,130.66543582753627 -12,115.18484494064312,255.41444912925343,166.3941463513666,99.94044514609688,152.21269363624046 -13,115.92673350008616,,167.8463882721341,112.46924027484135,156.91552993888146 -14,116.82938513918492,,214.6602249975528,136.35409155577256,161.81013340331936 -15,117.55062463466538,,,165.7251932205843,169.13632057012472 -16,149.57199545369448,,,167.86077531097013,174.35990801786974 -17,170.03345068544604,,,197.68199389929276,192.21918426629537 -18,170.26763221469898,,,199.64515666551995,193.06534764167287 -19,180.93926111267285,,,210.15803273727133,193.95762797064725 -20,,,,215.83823701096156,198.12128776080576 -21,,,,217.7541633585912,205.73927103982848 -22,,,,223.82455723177475,219.64788150127922 -23,,,,240.9652263377436,226.74558791738374 -24,,,,247.78869889484469,227.2456022544771 -25,,,,250.931304910328,244.34889642476392 -26,,,,252.1936610860789,250.5797895202245 -27,,,,258.6044639753924,254.04929642886242 -28,,,,266.5098673445319,267.56793480535 -29,,,,268.03684634766165,271.812325151013 -30,,,,280.03925114883447,287.284126286156 -31,,,,289.24027207150806,305.54580285777126 -32,,,,299.587663390868,314.7108989342441 -33,,,,300.9669155305945,316.49757610446244 -34,,,,314.46909259893886,320.2710681594577 -35,,,,318.0009551872447,338.03697325588513 -36,,,,320.66292015760104,345.3028563218092 -37,,,,331.09683148589625,360.6920704257303 -38,,,,334.8933096853384,375.15825913339563 -39,,,,343.8907550894615, -40,,,,363.57622585642207, -41,,,,388.3818780427326, diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_100.csv b/analysis_results/SNAP25/KO/AZ_distances_within_100.csv deleted file mode 100644 index 3693a70..0000000 --- a/analysis_results/SNAP25/KO/AZ_distances_within_100.csv +++ /dev/null @@ -1,14 +0,0 @@ -,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop -0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 -1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 -2,12.90849348297469,4.662,6.216,9.452612972083434,40.582911576179455 -3,14.660406679215962,6.216,6.407306142209845,11.419521180855176,60.04556260041203 -4,22.46591738612069,13.636294658007358,6.407306142209845,41.14433273246754,75.15642703056074 -5,59.642026021925176,60.76517558602131,77.0602260832396,50.13908212163442,89.87710509356651 -6,63.581203071348064,86.46721885200193,80.34345317448087,51.11691610416262,90.78605789437054 -7,76.22524447976537,,90.4796399197079,56.35259124476886,92.88969193618848 -8,97.60555117410075,,90.51966630517371,65.1012870840508, -9,,,,76.60447689267252, -10,,,,91.686, -11,,,,96.67328958921384, -12,,,,99.94044514609688, diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_200.csv b/analysis_results/SNAP25/KO/AZ_distances_within_200.csv deleted file mode 100644 index c7da11c..0000000 --- a/analysis_results/SNAP25/KO/AZ_distances_within_200.csv +++ /dev/null @@ -1,22 +0,0 @@ -,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop -0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 -1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 -2,12.90849348297469,4.662,6.216,9.452612972083434,40.582911576179455 -3,14.660406679215962,6.216,6.407306142209845,11.419521180855176,60.04556260041203 -4,22.46591738612069,13.636294658007358,6.407306142209845,41.14433273246754,75.15642703056074 
-5,59.642026021925176,60.76517558602131,77.0602260832396,50.13908212163442,89.87710509356651 -6,63.581203071348064,86.46721885200193,80.34345317448087,51.11691610416262,90.78605789437054 -7,76.22524447976537,118.81750265007256,90.4796399197079,56.35259124476886,92.88969193618848 -8,97.60555117410075,144.34644175732217,90.51966630517371,65.1012870840508,103.85092612008812 -9,102.0801770178716,198.70550218854032,113.29288830284098,76.60447689267252,123.66755042451517 -10,104.9726712435194,,133.30023228786965,91.686,130.47123393300149 -11,107.27103402130513,,137.6145750711021,96.67328958921384,130.66543582753627 -12,115.18484494064312,,166.3941463513666,99.94044514609688,152.21269363624046 -13,115.92673350008616,,167.8463882721341,112.46924027484135,156.91552993888146 -14,116.82938513918492,,,136.35409155577256,161.81013340331936 -15,117.55062463466538,,,165.7251932205843,169.13632057012472 -16,149.57199545369448,,,167.86077531097013,174.35990801786974 -17,170.03345068544604,,,197.68199389929276,192.21918426629537 -18,170.26763221469898,,,199.64515666551995,193.06534764167287 -19,180.93926111267285,,,,193.95762797064725 -20,,,,,198.12128776080576 diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_40.csv b/analysis_results/SNAP25/KO/AZ_distances_within_40.csv deleted file mode 100644 index 44898fe..0000000 --- a/analysis_results/SNAP25/KO/AZ_distances_within_40.csv +++ /dev/null @@ -1,6 +0,0 @@ -,D_SNAP25_12.08.12_KO1.1_3_crop,C_SNAP25_12082_KO1.2_6_crop,E_SNAP25_12082_KO2.1_6_crop,A_SNAP25_12082_KO1.2_6_crop,B_SNAP25_12082_KO1.2_6_crop -0,9.193587982936805,1.554,3.4748496370346733,4.662,6.216 -1,10.878,1.554,5.154034924212291,7.77,10.308069848424582 -2,12.90849348297469,4.662,6.216,9.452612972083434, -3,14.660406679215962,6.216,6.407306142209845,11.419521180855176, -4,22.46591738612069,13.636294658007358,6.407306142209845,, diff --git a/analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv b/analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv deleted file mode 100644 index 33cb48a..0000000 --- a/analysis_results/SNAP25/KO/AZ_distances_within_40_with_diameters.csv +++ /dev/null @@ -1,6 +0,0 @@ -,D_SNAP25_12.08.12_KO1.1_3_crop_distance,D_SNAP25_12.08.12_KO1.1_3_crop_diameter,C_SNAP25_12082_KO1.2_6_crop_distance,C_SNAP25_12082_KO1.2_6_crop_diameter,E_SNAP25_12082_KO2.1_6_crop_distance,E_SNAP25_12082_KO2.1_6_crop_diameter,A_SNAP25_12082_KO1.2_6_crop_distance,A_SNAP25_12082_KO1.2_6_crop_diameter,B_SNAP25_12082_KO1.2_6_crop_distance,B_SNAP25_12082_KO1.2_6_crop_diameter -0,9.193587982936805,56.26909744547573,1.554,47.81289742868499,3.4748496370346733,54.5150906509724,4.662,49.91813042345726,6.216,62.88804789234515 -1,10.878,59.8139155734108,1.554,50.26043018499918,5.154034924212291,50.26043018499918,7.77,47.81289742868499,10.308069848424582,57.37547691442199 -2,12.90849348297469,62.34039118335815,4.662,46.96871937691663,6.216,58.36307916126143,9.452612972083434,46.96871937691663,, -3,14.660406679215962,49.91813042345726,6.216,57.37547691442199,6.407306142209845,56.26909744547573,11.419521180855176,51.49606651201725,, -4,22.46591738612069,64.59166755828387,13.636294658007358,51.71755599228722,6.407306142209845,57.37547691442199,,,, diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 12a3c9a..1245cac 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,14 +1,15 @@ #! 
/bin/bash #SBATCH -c 4 #4 #8 -#SBATCH --mem 120G #120G #32G #64G #256G +#SBATCH --mem 256G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 12:00:00 #6:00:00 #48:00:00 +#SBATCH -t 4:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out -#SBATCH -A nim00007 #SBATCH --constraint 80gb +#SBATCH -A nim00007 +#SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/Munc13DKO/ \ - -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis --store \ No newline at end of file + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ + -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryThreshold0_9 --store \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py index 49d21b9..741d34b 100644 --- a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py +++ b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py @@ -115,7 +115,7 @@ def compartment_pred(raw: np.ndarray, compartment_model: str, output_path: str = print(f"Using existing compartment seg in {output_path}") if not use_existing_seg: - seg, pred = segment_compartments(input_volume=raw, model_path=compartment_model, verbose=False, return_predictions=True) + seg, pred = segment_compartments(input_volume=raw, model_path=compartment_model, verbose=False, return_predictions=True, boundary_threshold=0.9) if store and output_path: with h5py.File(output_path, "a") as f: diff --git a/scripts/cooper/revision/updated_data_analysis/store_results.py b/scripts/cooper/revision/updated_data_analysis/store_results.py index 65d0d0e..55f0e2c 100644 --- a/scripts/cooper/revision/updated_data_analysis/store_results.py +++ b/scripts/cooper/revision/updated_data_analysis/store_results.py @@ -42,29 +42,29 @@ def prepare_output_directory(base_output, group): os.makedirs(group_dir, exist_ok=True) return group_dir -def write_or_append_csv(file_path, new_data): +def write_or_append_excel(file_path, new_data): """ - Writes a new DataFrame to CSV, or appends a new column(s) to an existing one. + Writes a new DataFrame to Excel, or appends a new column(s) to an existing one. Parameters: - file_path (str): Path to the target CSV file. + file_path (str): Path to the target Excel file. new_data (pd.DataFrame): DataFrame to write or append. """ print(f"saving {file_path}") if os.path.exists(file_path): - existing = pd.read_csv(file_path, index_col=0) + existing = pd.read_excel(file_path, index_col=0) combined = pd.concat([existing, new_data], axis=1) else: combined = new_data - combined.to_csv(file_path) + combined.to_excel(file_path, index=True) def save_filtered_dataframes(output_dir, tomogram_name, df): """ - Saves the sorted segment data into multiple filtered CSV files. + Saves the sorted segment data into multiple filtered Excel files. Parameters: - output_dir (str): Directory where CSVs will be saved. 
+ output_dir (str): Directory where Excel files will be saved. tomogram_name (str): Name of the tomogram (used as column header). df (pd.DataFrame): DataFrame containing 'seg_id', 'distance', and 'diameter'. """ @@ -74,10 +74,11 @@ def save_filtered_dataframes(output_dir, tomogram_name, df): 'AZ_distances_within_100': 100, 'AZ_distances_within_40': 40, 'AZ_distances_within_40_with_diameters': 40, + 'AZ_distances_within_40_only_diameters': 40, } for filename, max_dist in thresholds.items(): - file_path = os.path.join(output_dir, f"{filename}.csv") + file_path = os.path.join(output_dir, f"{filename}.xlsx") filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] if filename == 'AZ_distances_within_40_with_diameters': @@ -85,14 +86,19 @@ def save_filtered_dataframes(output_dir, tomogram_name, df): f"{tomogram_name}_distance": filtered_df['distance'].values, f"{tomogram_name}_diameter": filtered_df['diameter'].values }) + elif filename == 'AZ_distances_within_40_only_diameters': + data = pd.DataFrame({ + f"{tomogram_name}_diameter": filtered_df['diameter'].values + }) else: data = pd.DataFrame({tomogram_name: filtered_df['distance'].values}) - write_or_append_csv(file_path, data) + write_or_append_excel(file_path, data) + def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): """ - Saves segment data including seg_id into separate CSV files. + Saves segment data including seg_id into separate Excel files. Parameters: output_dir (str): Directory to save files. @@ -105,14 +111,14 @@ def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): 'AZ_distances_within_100_with_seg_id': 100, 'AZ_distances_within_40_with_seg_id': 40, 'AZ_distances_within_40_with_diameters_and_seg_id': 40, + 'AZ_distances_within_40_only_diameters_and_seg_id': 40, } + with_segID_dir = os.path.join(output_dir, "with_segID") + os.makedirs(with_segID_dir, exist_ok=True) + for filename, max_dist in thresholds.items(): - #storing with seg ID data in subfolder - with_segID_dir = os.path.join(output_dir, "with_segID") - os.makedirs(with_segID_dir, exist_ok=True) - file_path = os.path.join(with_segID_dir, f"{filename}.csv") - + file_path = os.path.join(with_segID_dir, f"{filename}.xlsx") filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] if filename == 'AZ_distances_within_40_with_diameters_and_seg_id': @@ -121,17 +127,23 @@ def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): f"{tomogram_name}_distance": filtered_df['distance'].values, f"{tomogram_name}_diameter": filtered_df['diameter'].values }) + elif filename == 'AZ_distances_within_40_only_diameters_and_seg_id': + data = pd.DataFrame({ + f"{tomogram_name}_seg_id": filtered_df['seg_id'].values, + f"{tomogram_name}_diameter": filtered_df['diameter'].values + }) else: data = pd.DataFrame({ f"{tomogram_name}_seg_id": filtered_df['seg_id'].values, f"{tomogram_name}_distance": filtered_df['distance'].values }) - write_or_append_csv(file_path, data) + write_or_append_excel(file_path, data) + def run_store_results(input_path, analysis_output, sorted_list): """ - Processes a single tomogram's sorted segment data and stores results into categorized CSV files. + Processes a single tomogram's sorted segment data and stores results into categorized Excel files. Parameters: input_path (str): Path to the input .h5 file. 
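The CSV-to-Excel switch in the commit above relies on pandas' column-wise concat to grow one column per tomogram in a shared workbook. As a minimal sketch of that behaviour (not part of the patch itself): the file name and tomogram column names below are invented for illustration, and an xlsx engine such as openpyxl is assumed to be available to pandas.

import os
import pandas as pd


def write_or_append_excel(file_path, new_data):
    # Same pattern as in store_results.py: load the existing sheet (first column is the
    # row index), place the new column(s) alongside it, and rewrite the workbook.
    if os.path.exists(file_path):
        existing = pd.read_excel(file_path, index_col=0)
        combined = pd.concat([existing, new_data], axis=1)
    else:
        combined = new_data
    combined.to_excel(file_path, index=True)


# First tomogram creates the workbook with a single distance column (values are made up).
write_or_append_excel("AZ_distances_within_40.xlsx",
                      pd.DataFrame({"A_example_crop": [1.554, 6.216, 14.411]}))
# A second call appends another column; pd.concat aligns rows on the integer index,
# so the shorter column is padded with NaN, i.e. empty cells as in the deleted CSVs above.
write_or_append_excel("AZ_distances_within_40.xlsx",
                      pd.DataFrame({"B_example_crop": [2.198, 9.453]}))

Note that calling the writer twice for the same tomogram name simply adds a duplicate column; nothing in this helper deduplicates, so reruns over already-processed tomograms should either start from a fresh output directory or filter beforehand.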
From 7b831390df2ae19b366f5d265c181a3374ed089c Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Tue, 1 Jul 2025 12:33:50 +0200 Subject: [PATCH 20/29] change presynaptic filtering --- run_sbatch_revision.sbatch | 2 +- .../analysis_segmentations.py | 15 +++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 1245cac..11b8296 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -12,4 +12,4 @@ source ~/.bashrc conda activate synapse-net python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ - -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryThreshold0_9 --store \ No newline at end of file + -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering --store \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py index 741d34b..55c8f59 100644 --- a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py +++ b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py @@ -10,6 +10,7 @@ from synapse_net.inference.compartments import segment_compartments from synapse_net.inference.active_zone import segment_active_zone from synapse_net.inference.inference import get_model_path +from synapse_net.ground_truth.az_evaluation import _get_presynaptic_mask def fill_and_filter_vesicles(vesicles: np.ndarray) -> np.ndarray: @@ -130,7 +131,7 @@ def compartment_pred(raw: np.ndarray, compartment_model: str, output_path: str = else: print("Not storing compartment predictions") - return seg + return seg, pred def AZ_pred(raw: np.ndarray, AZ_model: str, output_path: str = None, store: bool = False) -> np.ndarray: @@ -179,7 +180,7 @@ def AZ_pred(raw: np.ndarray, AZ_model: str, output_path: str = None, store: bool return seg -def filter_presynaptic_SV(sv_seg: np.ndarray, compartment_seg: np.ndarray, output_path: str = None, +def filter_presynaptic_SV(sv_seg: np.ndarray, compartment_seg: np.ndarray, compartment_pred: np.ndarray, output_path: str = None, store: bool = False, input_path: str = None) -> np.ndarray: """ Filters synaptic vesicle segmentation to retain only vesicles in the presynaptic region. @@ -200,14 +201,16 @@ def filter_presynaptic_SV(sv_seg: np.ndarray, compartment_seg: np.ndarray, outpu def n_vesicles(mask, ves): return len(np.unique(ves[mask])) - 1 - # Find the segment with most vesicles. + '''# Find the segment with most vesicles. props = regionprops(compartment_seg, intensity_image=vesicles_pp, extra_properties=[n_vesicles]) compartment_ids = [prop.label for prop in props] vesicle_counts = [prop.n_vesicles for prop in props] if len(compartment_ids) == 0: mask = np.ones(compartment_seg.shape, dtype="bool") else: - mask = (compartment_seg == compartment_ids[np.argmax(vesicle_counts)]).astype("uint8") + mask = (compartment_seg == compartment_ids[np.argmax(vesicle_counts)]).astype("uint8")''' + + mask = _get_presynaptic_mask(compartment_pred, vesicles_pp) # Filter all vesicles that are not in the mask. 
props = regionprops(vesicles_pp, mask) @@ -274,13 +277,13 @@ def run_predictions(input_path: str, output_path: str = None, store: bool = Fals sv_seg = SV_pred(raw, SV_model, output_path, store) print("Running compartment prediction") - comp_seg = compartment_pred(raw, compartment_model, output_path, store) + comp_seg, comp_pred = compartment_pred(raw, compartment_model, output_path, store) print("Running AZ prediction") az_seg = AZ_pred(raw, AZ_model, output_path, store) print("Filtering the presynaptic SV") - presyn_SV_seg = filter_presynaptic_SV(sv_seg, comp_seg, output_path, store, input_path) + presyn_SV_seg = filter_presynaptic_SV(sv_seg, comp_seg, comp_pred, output_path, store, input_path) print("Done with predictions") From d966ea35a0216c92d8d70ca1cc182359ada0b036 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Wed, 2 Jul 2025 15:56:30 +0200 Subject: [PATCH 21/29] minor things for analysis; 1st implementation of surface dice for eval --- .gitignore | 1 + run_sbatch_revision.sbatch | 10 +- scripts/cooper/revision/surface_dice.py | 114 ++++++++++++++++++ .../analysis_segmentations.py | 15 +-- .../run_data_analysis.py | 2 +- 5 files changed, 129 insertions(+), 13 deletions(-) create mode 100644 scripts/cooper/revision/surface_dice.py diff --git a/.gitignore b/.gitignore index f18dcce..438fbdf 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ scripts/cooper/training/find_rec_testset.py synapse-net-models/ scripts/portal/upscale_tomo.py analysis_results/ +scripts/cooper/revision/evaluation_results/ \ No newline at end of file diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 11b8296..121d8e0 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,15 +1,15 @@ #! /bin/bash #SBATCH -c 4 #4 #8 -#SBATCH --mem 256G #120G #32G #64G #256G +#SBATCH --mem 120G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared #SBATCH -t 4:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out -#SBATCH -A nim00007 -#SBATCH --constraint 80gb +#SBATCH -A nim00007 #SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ - -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering --store \ No newline at end of file + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/final_Imig2014_seg_autoComp/SNAP25/ \ + -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering/full_dataset --store \ + -s ./analysis_results/full_dataset \ No newline at end of file diff --git a/scripts/cooper/revision/surface_dice.py b/scripts/cooper/revision/surface_dice.py new file mode 100644 index 0000000..feca763 --- /dev/null +++ b/scripts/cooper/revision/surface_dice.py @@ -0,0 +1,114 @@ +import sys +import os + +# Add membrain-seg to Python path +MEMBRAIN_SEG_PATH = "/user/muth9/u12095/membrain-seg/src" +if MEMBRAIN_SEG_PATH not in sys.path: + sys.path.insert(0, MEMBRAIN_SEG_PATH) + +import argparse +import h5py +import pandas 
as pd +from tqdm import tqdm +import numpy as np + +from membrain_seg.segmentation.skeletonize import skeletonization +from membrain_seg.benchmark.metrics import masked_surface_dice + + +def load_segmentation(file_path, key): + """Load a dataset from an HDF5 file.""" + with h5py.File(file_path, "r") as f: + data = f[key][:] + return data + + +def evaluate_surface_dice(pred, gt, raw, check): + """Skeletonize predictions and GT, compute surface dice.""" + gt_skeleton = skeletonization(gt == 1, batch_size=100000) + pred_skeleton = skeletonization(pred, batch_size=100000) + mask = gt != 2 + + if check: + import napari + v = napari.Viewer() + v.add_image(raw) + v.add_labels(gt, name= f"gt") + v.add_labels(gt_skeleton.astype(np.uint16), name= f"gt_skeleton") + v.add_labels(pred, name= f"pred") + v.add_labels(pred_skeleton.astype(np.uint16), name= f"pred_skeleton") + napari.run() + + surf_dice, confusion_dict = masked_surface_dice( + pred_skeleton, gt_skeleton, pred, gt, mask + ) + return surf_dice, confusion_dict + + +def process_file(pred_path, gt_path, seg_key, gt_key, check): + """Process a single prediction/GT file pair.""" + try: + pred = load_segmentation(pred_path, seg_key) + gt = load_segmentation(gt_path, gt_key) + raw = load_segmentation(gt_path, "raw") + surf_dice, confusion = evaluate_surface_dice(pred, gt, raw, check) + + result = { + "tomo_name": os.path.basename(pred_path), + "surface_dice": surf_dice, + **confusion, + } + return result + + except Exception as e: + print(f"Error processing {pred_path}: {e}") + return None + + +def collect_results(input_folder, gt_folder, version, check=False): + """Loop through prediction files and compute metrics.""" + results = [] + seg_key = f"predictions/az/seg_v{version}" + gt_key = "/labels/az_merged" + + for fname in tqdm(os.listdir(input_folder), desc="Processing segmentations"): + if not fname.endswith(".h5"): + continue + + pred_path = os.path.join(input_folder, fname) + gt_path = os.path.join(gt_folder, fname) + + if not os.path.exists(gt_path): + print(f"Warning: Ground truth file not found for {fname}") + continue + + result = process_file(pred_path, gt_path, seg_key, gt_key, check) + if result: + results.append(result) + + return results + + +def save_results(results, output_file): + """Save results as an Excel file.""" + df = pd.DataFrame(results) + df.to_excel(output_file, index=False) + print(f"Results saved to {output_file}") + + +def main(): + parser = argparse.ArgumentParser(description="Compute surface dice for AZ segmentations.") + parser.add_argument("--input_folder", "-i", required=True, help="Folder with predicted segmentations (.h5)") + parser.add_argument("--gt_folder", "-gt", required=True, help="Folder with ground truth segmentations (.h5)") + parser.add_argument("--version", "-v", required=True, help="Version string used in prediction key") + parser.add_argument("--check", action="store_true", help="Version string used in prediction key") + + args = parser.parse_args() + + output_file = f"/user/muth9/u12095/synapse-net/scripts/cooper/revision/evaluation_results/v{args.version}_surface_dice.xlsx" + results = collect_results(args.input_folder, args.gt_folder, args.version, args.check) + save_results(results, output_file) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py index 55c8f59..2247862 100644 --- 
a/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py +++ b/scripts/cooper/revision/updated_data_analysis/analysis_segmentations.py @@ -62,8 +62,8 @@ def SV_pred(raw: np.ndarray, SV_model: str, output_path: str = None, store: bool use_existing_seg = False #checking if segmentation is already in output path and if so, use it - if output_path: - with h5py.File(output_path, "a") as f: + if output_path and os.path.exists(output_path): + with h5py.File(output_path, "r") as f: if seg_key in f: seg = f[seg_key][:] use_existing_seg = True @@ -108,10 +108,11 @@ def compartment_pred(raw: np.ndarray, compartment_model: str, output_path: str = use_existing_seg = False #checking if segmentation is already in output path and if so, use it - if output_path: - with h5py.File(output_path, "a") as f: - if seg_key in f: + if output_path and os.path.exists(output_path): + with h5py.File(output_path, "r") as f: + if seg_key in f and pred_key in f: seg = f[seg_key][:] + pred = f[pred_key][:] use_existing_seg = True print(f"Using existing compartment seg in {output_path}") @@ -152,8 +153,8 @@ def AZ_pred(raw: np.ndarray, AZ_model: str, output_path: str = None, store: bool use_existing_seg = False #checking if segmentation is already in output path and if so, use it - if output_path: - with h5py.File(output_path, "a") as f: + if output_path and os.path.exists(output_path): + with h5py.File(output_path, "r") as f: if seg_key in f: seg = f[seg_key][:] use_existing_seg = True diff --git a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py index 97a40f5..cee93b3 100644 --- a/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py +++ b/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py @@ -80,7 +80,7 @@ def main(): run_data_analysis(input_path, output_path, store, resolution, analysis_output) elif os.path.isdir(input_path): - h5_files = [file for file in os.listdir(input_path) if file.endswith(".h5")] + h5_files = sorted([file for file in os.listdir(input_path) if file.endswith(".h5")]) for file in tqdm(h5_files, desc="Processing files"): full_input_path = os.path.join(input_path, file) output_path = os.path.join(output_folder, file) if output_folder else None From ad03103b645dd6a6b1bfeb47f6ef76504b786366 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Wed, 2 Jul 2025 18:23:08 +0200 Subject: [PATCH 22/29] add dataset info --- run_sbatch_revision.sbatch | 5 +---- scripts/cooper/revision/surface_dice.py | 25 ++++++++++++++++++++++--- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 121d8e0..2e38fa0 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -9,7 +9,4 @@ source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/final_Imig2014_seg_autoComp/SNAP25/ \ - -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering/full_dataset --store \ - -s ./analysis_results/full_dataset \ No newline at end of file +python scripts/cooper/revision/surface_dice.py -i /mnt/ceph-hdd/cold/nim00007/AZ_prediction_new/endbulb_of_held/ -gt /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/endbulb_of_held/ 
-v 7 \ No newline at end of file diff --git a/scripts/cooper/revision/surface_dice.py b/scripts/cooper/revision/surface_dice.py index feca763..edb9d62 100644 --- a/scripts/cooper/revision/surface_dice.py +++ b/scripts/cooper/revision/surface_dice.py @@ -70,6 +70,7 @@ def collect_results(input_folder, gt_folder, version, check=False): results = [] seg_key = f"predictions/az/seg_v{version}" gt_key = "/labels/az_merged" + input_folder_name = os.path.basename(os.path.normpath(input_folder)) for fname in tqdm(os.listdir(input_folder), desc="Processing segmentations"): if not fname.endswith(".h5"): @@ -84,18 +85,36 @@ def collect_results(input_folder, gt_folder, version, check=False): result = process_file(pred_path, gt_path, seg_key, gt_key, check) if result: + result["input_folder"] = input_folder_name results.append(result) return results def save_results(results, output_file): - """Save results as an Excel file.""" - df = pd.DataFrame(results) - df.to_excel(output_file, index=False) + """Append results to an Excel file, updating rows with matching tomo_name and input_folder.""" + new_df = pd.DataFrame(results) + + if os.path.exists(output_file): + existing_df = pd.read_excel(output_file) + + # Drop rows where tomo_name and input_folder match any in new_df + combined_df = existing_df[ + ~existing_df.set_index(["tomo_name", "input_folder"]).index.isin( + new_df.set_index(["tomo_name", "input_folder"]).index + ) + ] + + # Append new data and reset index + final_df = pd.concat([combined_df, new_df], ignore_index=True) + else: + final_df = new_df + + final_df.to_excel(output_file, index=False) print(f"Results saved to {output_file}") + def main(): parser = argparse.ArgumentParser(description="Compute surface dice for AZ segmentations.") parser.add_argument("--input_folder", "-i", required=True, help="Folder with predicted segmentations (.h5)") From f17c349cff4bbd1faaaffc18820ff01ed371662a Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Thu, 3 Jul 2025 19:29:41 +0200 Subject: [PATCH 23/29] option to calc surface dice per component --- .gitignore | 3 +- run_sbatch_revision.sbatch | 12 +- scripts/cooper/revision/surface_dice.py | 135 +++++++++++++----- .../updated_data_analysis/store_results.py | 16 +-- 4 files changed, 120 insertions(+), 46 deletions(-) diff --git a/.gitignore b/.gitignore index 438fbdf..093d490 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,5 @@ scripts/cooper/training/find_rec_testset.py synapse-net-models/ scripts/portal/upscale_tomo.py analysis_results/ -scripts/cooper/revision/evaluation_results/ \ No newline at end of file +scripts/cooper/revision/evaluation_results/ +scripts/cooper/revision/export_tif_to_h5.py \ No newline at end of file diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 2e38fa0..56254eb 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,12 +1,16 @@ #! 
/bin/bash #SBATCH -c 4 #4 #8 -#SBATCH --mem 120G #120G #32G #64G #256G +#SBATCH --mem 256G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 4:00:00 #6:00:00 #48:00:00 +#SBATCH -t 6:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out -#SBATCH -A nim00007 #SBATCH --constraint 80gb +#SBATCH -A nim00007 +#SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net -python scripts/cooper/revision/surface_dice.py -i /mnt/ceph-hdd/cold/nim00007/AZ_prediction_new/endbulb_of_held/ -gt /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/endbulb_of_held/ -v 7 \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ + -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ + -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering --store \ + -s ./analysis_results/man_subset \ No newline at end of file diff --git a/scripts/cooper/revision/surface_dice.py b/scripts/cooper/revision/surface_dice.py index edb9d62..b22ddbf 100644 --- a/scripts/cooper/revision/surface_dice.py +++ b/scripts/cooper/revision/surface_dice.py @@ -11,20 +11,20 @@ import pandas as pd from tqdm import tqdm import numpy as np +from scipy.ndimage import label +from skimage.measure import regionprops from membrain_seg.segmentation.skeletonize import skeletonization from membrain_seg.benchmark.metrics import masked_surface_dice def load_segmentation(file_path, key): - """Load a dataset from an HDF5 file.""" with h5py.File(file_path, "r") as f: data = f[key][:] return data def evaluate_surface_dice(pred, gt, raw, check): - """Skeletonize predictions and GT, compute surface dice.""" gt_skeleton = skeletonization(gt == 1, batch_size=100000) pred_skeleton = skeletonization(pred, batch_size=100000) mask = gt != 2 @@ -33,10 +33,10 @@ def evaluate_surface_dice(pred, gt, raw, check): import napari v = napari.Viewer() v.add_image(raw) - v.add_labels(gt, name= f"gt") - v.add_labels(gt_skeleton.astype(np.uint16), name= f"gt_skeleton") - v.add_labels(pred, name= f"pred") - v.add_labels(pred_skeleton.astype(np.uint16), name= f"pred_skeleton") + v.add_labels(gt, name="gt") + v.add_labels(gt_skeleton.astype(np.uint16), name="gt_skeleton") + v.add_labels(pred, name="pred") + v.add_labels(pred_skeleton.astype(np.uint16), name="pred_skeleton") napari.run() surf_dice, confusion_dict = masked_surface_dice( @@ -45,28 +45,80 @@ def evaluate_surface_dice(pred, gt, raw, check): return surf_dice, confusion_dict -def process_file(pred_path, gt_path, seg_key, gt_key, check): - """Process a single prediction/GT file pair.""" +def process_file(pred_path, gt_path, seg_key, gt_key, check, + min_bb_shape=(32, 384, 384), min_thinning_size=2500, + global_eval=False): try: pred = load_segmentation(pred_path, seg_key) gt = load_segmentation(gt_path, gt_key) raw = load_segmentation(gt_path, "raw") - surf_dice, confusion = evaluate_surface_dice(pred, gt, raw, check) - result = { - "tomo_name": os.path.basename(pred_path), - "surface_dice": surf_dice, - **confusion, - } - return result + if global_eval: + gt_bin = (gt == 1).astype(np.uint8) + pred_bin = pred.astype(np.uint8) + + dice, confusion = evaluate_surface_dice(pred_bin, gt_bin, raw, check) + return 
[{ + "tomo_name": os.path.basename(pred_path), + "gt_component_id": -1, # -1 indicates global eval + "surface_dice": dice, + **confusion + }] + + labeled_gt, _ = label(gt == 1) + props = regionprops(labeled_gt) + results = [] + + for prop in props: + if prop.area < min_thinning_size: + continue + + comp_id = prop.label + bbox_start = prop.bbox[:3] + bbox_end = prop.bbox[3:] + bbox = tuple(slice(start, stop) for start, stop in zip(bbox_start, bbox_end)) + + pad_width = [ + max(min_shape - (sl.stop - sl.start), 0) // 2 + for sl, min_shape in zip(bbox, min_bb_shape) + ] + + expanded_bbox = tuple( + slice( + max(sl.start - pw, 0), + min(sl.stop + pw, dim) + ) + for sl, pw, dim in zip(bbox, pad_width, gt.shape) + ) + + gt_crop = (labeled_gt[expanded_bbox] == comp_id).astype(np.uint8) + pred_crop = pred[expanded_bbox].astype(np.uint8) + raw_crop = raw[expanded_bbox] + + try: + dice, confusion = evaluate_surface_dice(pred_crop, gt_crop, raw_crop, check) + except Exception as e: + print(f"Error computing Dice for GT component {comp_id} in {pred_path}: {e}") + continue + + result = { + "tomo_name": os.path.basename(pred_path), + "gt_component_id": comp_id, + "surface_dice": dice, + **confusion + } + results.append(result) + + return results except Exception as e: print(f"Error processing {pred_path}: {e}") - return None + return [] -def collect_results(input_folder, gt_folder, version, check=False): - """Loop through prediction files and compute metrics.""" +def collect_results(input_folder, gt_folder, version, check=False, + min_bb_shape=(32, 384, 384), min_thinning_size=2500, + global_eval=False): results = [] seg_key = f"predictions/az/seg_v{version}" gt_key = "/labels/az_merged" @@ -83,29 +135,32 @@ def collect_results(input_folder, gt_folder, version, check=False): print(f"Warning: Ground truth file not found for {fname}") continue - result = process_file(pred_path, gt_path, seg_key, gt_key, check) - if result: - result["input_folder"] = input_folder_name - results.append(result) + file_results = process_file( + pred_path, gt_path, seg_key, gt_key, check, + min_bb_shape=min_bb_shape, + min_thinning_size=min_thinning_size, + global_eval=global_eval + ) + + for res in file_results: + res["input_folder"] = input_folder_name + results.append(res) return results def save_results(results, output_file): - """Append results to an Excel file, updating rows with matching tomo_name and input_folder.""" new_df = pd.DataFrame(results) if os.path.exists(output_file): existing_df = pd.read_excel(output_file) - # Drop rows where tomo_name and input_folder match any in new_df combined_df = existing_df[ - ~existing_df.set_index(["tomo_name", "input_folder"]).index.isin( - new_df.set_index(["tomo_name", "input_folder"]).index + ~existing_df.set_index(["tomo_name", "input_folder", "gt_component_id"]).index.isin( + new_df.set_index(["tomo_name", "input_folder", "gt_component_id"]).index ) ] - # Append new data and reset index final_df = pd.concat([combined_df, new_df], ignore_index=True) else: final_df = new_df @@ -114,20 +169,34 @@ def save_results(results, output_file): print(f"Results saved to {output_file}") - def main(): - parser = argparse.ArgumentParser(description="Compute surface dice for AZ segmentations.") + parser = argparse.ArgumentParser(description="Compute surface dice per GT component or globally for AZ segmentations.") parser.add_argument("--input_folder", "-i", required=True, help="Folder with predicted segmentations (.h5)") parser.add_argument("--gt_folder", "-gt", required=True, help="Folder 
with ground truth segmentations (.h5)") parser.add_argument("--version", "-v", required=True, help="Version string used in prediction key") - parser.add_argument("--check", action="store_true", help="Version string used in prediction key") + parser.add_argument("--check", action="store_true", help="Visualize intermediate outputs in Napari") + parser.add_argument("--global_eval", action="store_true", help="If set, compute global surface dice instead of per-component") args = parser.parse_args() - output_file = f"/user/muth9/u12095/synapse-net/scripts/cooper/revision/evaluation_results/v{args.version}_surface_dice.xlsx" - results = collect_results(args.input_folder, args.gt_folder, args.version, args.check) + min_bb_shape = (32, 384, 384) + min_thinning_size = 2500 + + suffix = "global" if args.global_eval else "per_gt_component" + output_file = f"/user/muth9/u12095/synapse-net/scripts/cooper/revision/evaluation_results/v{args.version}_surface_dice_{suffix}.xlsx" + + results = collect_results( + args.input_folder, + args.gt_folder, + args.version, + args.check, + min_bb_shape=min_bb_shape, + min_thinning_size=min_thinning_size, + global_eval=args.global_eval + ) + save_results(results, output_file) if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/scripts/cooper/revision/updated_data_analysis/store_results.py b/scripts/cooper/revision/updated_data_analysis/store_results.py index 55f0e2c..d044278 100644 --- a/scripts/cooper/revision/updated_data_analysis/store_results.py +++ b/scripts/cooper/revision/updated_data_analysis/store_results.py @@ -73,20 +73,20 @@ def save_filtered_dataframes(output_dir, tomogram_name, df): 'AZ_distances_within_200': 200, 'AZ_distances_within_100': 100, 'AZ_distances_within_40': 40, - 'AZ_distances_within_40_with_diameters': 40, - 'AZ_distances_within_40_only_diameters': 40, + 'AZ_distances_within_100_with_diameters': 100, + 'AZ_distances_within_100_only_diameters': 100, } for filename, max_dist in thresholds.items(): file_path = os.path.join(output_dir, f"{filename}.xlsx") filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] - if filename == 'AZ_distances_within_40_with_diameters': + if filename == 'AZ_distances_within_100_with_diameters': data = pd.DataFrame({ f"{tomogram_name}_distance": filtered_df['distance'].values, f"{tomogram_name}_diameter": filtered_df['diameter'].values }) - elif filename == 'AZ_distances_within_40_only_diameters': + elif filename == 'AZ_distances_within_100_only_diameters': data = pd.DataFrame({ f"{tomogram_name}_diameter": filtered_df['diameter'].values }) @@ -110,8 +110,8 @@ def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): 'AZ_distances_within_200_with_seg_id': 200, 'AZ_distances_within_100_with_seg_id': 100, 'AZ_distances_within_40_with_seg_id': 40, - 'AZ_distances_within_40_with_diameters_and_seg_id': 40, - 'AZ_distances_within_40_only_diameters_and_seg_id': 40, + 'AZ_distances_within_100_with_diameters_and_seg_id': 100, + 'AZ_distances_within_100_only_diameters_and_seg_id': 100, } with_segID_dir = os.path.join(output_dir, "with_segID") @@ -121,13 +121,13 @@ def save_filtered_dataframes_with_seg_id(output_dir, tomogram_name, df): file_path = os.path.join(with_segID_dir, f"{filename}.xlsx") filtered_df = df if max_dist is None else df[df['distance'] <= max_dist] - if filename == 'AZ_distances_within_40_with_diameters_and_seg_id': + if filename == 'AZ_distances_within_100_with_diameters_and_seg_id': data = pd.DataFrame({ f"{tomogram_name}_seg_id": 
filtered_df['seg_id'].values, f"{tomogram_name}_distance": filtered_df['distance'].values, f"{tomogram_name}_diameter": filtered_df['diameter'].values }) - elif filename == 'AZ_distances_within_40_only_diameters_and_seg_id': + elif filename == 'AZ_distances_within_100_only_diameters_and_seg_id': data = pd.DataFrame({ f"{tomogram_name}_seg_id": filtered_df['seg_id'].values, f"{tomogram_name}_diameter": filtered_df['diameter'].values From 48618d0d33a4a3bca3e844c4fbe1da49b5082904 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 4 Jul 2025 10:05:04 +0200 Subject: [PATCH 24/29] added compartment seg saved in h5 --- run_sbatch_revision.sbatch | 9 +- .../cooper/run_compartment_segmentation_h5.py | 105 ++++++++++++++++++ 2 files changed, 109 insertions(+), 5 deletions(-) create mode 100644 scripts/cooper/run_compartment_segmentation_h5.py diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 56254eb..8be14a2 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -2,7 +2,7 @@ #SBATCH -c 4 #4 #8 #SBATCH --mem 256G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 6:00:00 #6:00:00 #48:00:00 +#SBATCH -t 3:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out #SBATCH -A nim00007 @@ -10,7 +10,6 @@ source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/updated_data_analysis/run_data_analysis.py \ - -i /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/exported/SNAP25/ \ - -o /mnt/lustre-emmy-hdd/projects/nim00007/data/synaptic-reconstruction/cooper/20241102_TOMO_DATA_Imig2014/afterRevision_analysis/boundaryT0_9_constantins_presynapticFiltering --store \ - -s ./analysis_results/man_subset \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/run_compartment_segmentation_h5.py \ + -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/20241019_Tomo-eval_PS_Synapse_36859_J1_66K_TS_CA3_PS_46_rec_2Kb1dawbp_crop.h5 \ + --data_ext .h5 \ No newline at end of file diff --git a/scripts/cooper/run_compartment_segmentation_h5.py b/scripts/cooper/run_compartment_segmentation_h5.py new file mode 100644 index 0000000..4376493 --- /dev/null +++ b/scripts/cooper/run_compartment_segmentation_h5.py @@ -0,0 +1,105 @@ +import argparse +from functools import partial + +from synapse_net.inference.compartments import segment_compartments +from synapse_net.inference.inference import get_model_path +from synapse_net.inference.util import inference_helper, parse_tiling + +import h5py +import numpy as np +from elf.io import open_file + +def get_volume(input_path): + ''' + with h5py.File(input_path) as seg_file: + input_volume = seg_file["raw"][:] + ''' + with open_file(input_path, "r") as f: + + # Try to automatically derive the key with the raw data. 
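A note on the key lookup that follows: if the input file holds several datasets and none of them is called "data" or "raw", key is never assigned and the later f[key] access fails with an UnboundLocalError. Below is a minimal stand-alone sketch of the same lookup with an explicit failure; the helper name find_raw_key and the direct h5py access are assumptions for illustration, not part of this patch:

    import h5py

    def find_raw_key(path):
        # Mirror the lookup in the patch: single dataset, then "data", then "raw"; fail loudly otherwise.
        with h5py.File(path, "r") as f:
            keys = list(f.keys())
        if len(keys) == 1:
            return keys[0]
        if "data" in keys:
            return "data"
        if "raw" in keys:
            return "raw"
        raise KeyError(f"Could not derive the raw data key from {keys} in {path}")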
+ keys = list(f.keys()) + if len(keys) == 1: + key = keys[0] + elif "data" in keys: + key = "data" + elif "raw" in keys: + key = "raw" + + input_volume = f[key][:] + return input_volume + +def run_compartment_segmentation(args): + tiling = parse_tiling(args.tile_shape, args.halo) + + if args.model is None: + model_path = get_model_path("compartments") + else: + model_path = args.model + + # Call segment_compartments directly, since we need its outputs + segmentation, predictions = segment_compartments( + get_volume(args.input_path), + model_path=model_path, + verbose=True, + tiling=tiling, + scale=None, + boundary_threshold=args.boundary_threshold, + return_predictions=True + ) + + # Save outputs into input HDF5 file + with h5py.File(args.input_path, "a") as f: + pred_grp = f.require_group("predictions") + + if "comp_seg" in pred_grp: + if args.force: + del pred_grp["comp_seg"] + else: + raise RuntimeError("comp_seg already exists. Use --force to overwrite.") + pred_grp.create_dataset("comp_seg", data=segmentation.astype(np.uint8), compression="gzip") + + if "boundaries" in pred_grp: + if args.force: + del pred_grp["boundaries"] + else: + raise RuntimeError("boundaries already exist. Use --force to overwrite.") + pred_grp.create_dataset("boundaries", data=predictions.astype(np.float32), compression="gzip") + + print(f"Saved segmentation to: predictions/comp_seg") + print(f"Saved boundaries to: predictions/boundaries") + + +def main(): + parser = argparse.ArgumentParser(description="Segment synaptic compartments in EM tomograms.") + parser.add_argument( + "--input_path", "-i", required=True, + help="The filepath to mrc file or directory containing the tomogram data." + ) + parser.add_argument( + "--model", "-m", help="The filepath to the compartment model." + ) + parser.add_argument( + "--force", action="store_true", + help="Whether to over-write already present segmentation results." + ) + parser.add_argument( + "--tile_shape", type=int, nargs=3, + help="The tile shape for prediction. Lower the tile shape if GPU memory is insufficient." + ) + parser.add_argument( + "--halo", type=int, nargs=3, + help="The halo for prediction. Increase the halo to minimize boundary artifacts." + ) + parser.add_argument( + "--data_ext", default=".mrc", help="The extension of the tomogram data. By default .mrc." + ) + parser.add_argument( + "--boundary_threshold", type=float, default=0.4, help="Threshold that determines when the prediction of the network is foreground for the segmentation. Need higher threshold than default for TEM." 
+ ) + + args = parser.parse_args() + run_compartment_segmentation(args) + + +if __name__ == "__main__": + main() From 0c30cf051872943d029b1c74a4ed5faf41512a2e Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 4 Jul 2025 11:57:50 +0200 Subject: [PATCH 25/29] updated SV seg h5 --- run_sbatch_revision.sbatch | 6 ++--- scripts/cooper/revision/surface_dice.py | 1 + scripts/cooper/vesicle_segmentation_h5.py | 29 +++++++++++++++-------- 3 files changed, 23 insertions(+), 13 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 8be14a2..804ec7b 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -10,6 +10,6 @@ source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/run_compartment_segmentation_h5.py \ - -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/20241019_Tomo-eval_PS_Synapse_36859_J1_66K_TS_CA3_PS_46_rec_2Kb1dawbp_crop.h5 \ - --data_ext .h5 \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/vesicle_segmentation_h5.py \ + -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ \ + -o /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ \ No newline at end of file diff --git a/scripts/cooper/revision/surface_dice.py b/scripts/cooper/revision/surface_dice.py index b22ddbf..abebeb7 100644 --- a/scripts/cooper/revision/surface_dice.py +++ b/scripts/cooper/revision/surface_dice.py @@ -129,6 +129,7 @@ def collect_results(input_folder, gt_folder, version, check=False, continue pred_path = os.path.join(input_folder, fname) + print(pred_path) gt_path = os.path.join(gt_folder, fname) if not os.path.exists(gt_path): diff --git a/scripts/cooper/vesicle_segmentation_h5.py b/scripts/cooper/vesicle_segmentation_h5.py index 0237973..3bda54b 100644 --- a/scripts/cooper/vesicle_segmentation_h5.py +++ b/scripts/cooper/vesicle_segmentation_h5.py @@ -8,6 +8,7 @@ from synapse_net.inference.vesicles import segment_vesicles from synapse_net.inference.util import parse_tiling +from synapse_net.inference.inference import get_model_path def _require_output_folders(output_folder): #seg_output = os.path.join(output_folder, "segmentations") @@ -34,7 +35,7 @@ def get_volume(input_path): input_volume = f[key][:] return input_volume -def run_vesicle_segmentation(input_path, output_path, model_path, mask_path, mask_key,tile_shape, halo, include_boundary, key_label): +def run_vesicle_segmentation(input_path, output_path, mask_path, mask_key,tile_shape, halo, include_boundary, key_label, model_path=None, save_pred=False): tiling = parse_tiling(tile_shape, halo) print(f"using tiling {tiling}") input = get_volume(input_path) @@ -45,7 +46,10 @@ def run_vesicle_segmentation(input_path, output_path, model_path, mask_path, mas mask = f[mask_key][:] else: mask = None - + + if model_path is None: + model_path = get_model_path("vesicles_3d") + segmentation, prediction = segment_vesicles(input_volume=input, model_path=model_path, verbose=False, tiling=tiling, return_predictions=True, exclude_boundary=not include_boundary, mask = mask) foreground, boundaries = prediction[:2] @@ -63,14 +67,15 @@ def run_vesicle_segmentation(input_path, output_path, model_path, mask_path, mas else: f.create_dataset("raw", data=input, compression="gzip") - key=f"vesicles/segment_from_{key_label}" + key=f"predictions/{key_label}" if key in f: print("Skipping", input_path, "because", key, "exists") else: f.create_dataset(key, data=segmentation, compression="gzip") - 
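For orientation: with this change the output file holds the tomogram under "raw" and the segmentation under predictions/<key_label> ("predictions/vesicle_seg" with the new default), while the foreground and boundary predictions are only written when --save_pred is passed. A small sketch of reading such a file back; the file name output.h5 is a placeholder and the keys assume the defaults:

    import h5py

    # Read the tomogram and vesicle segmentation written by vesicle_segmentation_h5.py.
    with h5py.File("output.h5", "r") as f:
        raw = f["raw"][:]
        vesicles = f["predictions/vesicle_seg"][:]
        # Present only if the script was run with --save_pred.
        if "prediction_vesicle_seg/foreground" in f:
            foreground = f["prediction_vesicle_seg/foreground"][:]
    print(raw.shape, vesicles.shape)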
f.create_dataset(f"prediction_{key_label}/foreground", data = foreground, compression="gzip") - f.create_dataset(f"prediction_{key_label}/boundaries", data = boundaries, compression="gzip") - + if save_pred: + f.create_dataset(f"prediction_{key_label}/foreground", data = foreground, compression="gzip") + f.create_dataset(f"prediction_{key_label}/boundaries", data = boundaries, compression="gzip") + if mask is not None: if mask_key in f: print("mask image already saved") @@ -97,7 +102,7 @@ def segment_folder(args): print(f"Mask file not found for {input_path}") mask_path = None - run_vesicle_segmentation(input_path, args.output_path, args.model_path, mask_path, args.mask_key, args.tile_shape, args.halo, args.include_boundary, args.key_label) + run_vesicle_segmentation(input_path, args.output_path, mask_path, args.mask_key, args.tile_shape, args.halo, args.include_boundary, args.key_label, args.model_path, args.save_pred) def main(): parser = argparse.ArgumentParser(description="Segment vesicles in EM tomograms.") @@ -110,7 +115,7 @@ def main(): help="The filepath to directory where the segmentations will be saved." ) parser.add_argument( - "--model_path", "-m", required=True, help="The filepath to the vesicle model." + "--model_path", "-m", help="The filepath to the vesicle model." ) parser.add_argument( "--mask_path", help="The filepath to a h5 file with a mask that will be used to restrict the segmentation. Needs to be in combination with mask_key." @@ -131,9 +136,13 @@ def main(): help="Include vesicles that touch the top / bottom of the tomogram. By default these are excluded." ) parser.add_argument( - "--key_label", "-k", default = "combined_vesicles", + "--key_label", "-k", default = "vesicle_seg", help="Give the key name for saving the segmentation in h5." ) + parser.add_argument( + "--save_pred", action="store_true", + help="If set to true the prediction is also saved." + ) args = parser.parse_args() input_ = args.input_path @@ -141,7 +150,7 @@ def main(): if os.path.isdir(input_): segment_folder(args) else: - run_vesicle_segmentation(input_, args.output_path, args.model_path, args.mask_path, args.mask_key, args.tile_shape, args.halo, args.include_boundary, args.key_label) + run_vesicle_segmentation(input_, args.output_path, args.mask_path, args.mask_key, args.tile_shape, args.halo, args.include_boundary, args.key_label, args.model_path, args.save_pred) print("Finished segmenting!") From 8763e4962d57527fba587dc7e9c86a497208b25b Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 4 Jul 2025 14:37:02 +0200 Subject: [PATCH 26/29] make usage of surface dice more general --- run_sbatch_revision.sbatch | 7 +++---- scripts/cooper/revision/surface_dice.py | 17 +++++++++++++---- scripts/cooper/revision/thin_az_gt.py | 2 +- 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 804ec7b..5608573 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,15 +1,14 @@ #! 
/bin/bash #SBATCH -c 4 #4 #8 -#SBATCH --mem 256G #120G #32G #64G #256G +#SBATCH --mem 120G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared #SBATCH -t 3:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out -#SBATCH -A nim00007 -#SBATCH --constraint 80gb +#SBATCH -A nim00007 #SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/vesicle_segmentation_h5.py \ +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/thin_az_gt.py \ -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ \ -o /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ \ No newline at end of file diff --git a/scripts/cooper/revision/surface_dice.py b/scripts/cooper/revision/surface_dice.py index abebeb7..b1e74e8 100644 --- a/scripts/cooper/revision/surface_dice.py +++ b/scripts/cooper/revision/surface_dice.py @@ -1,7 +1,8 @@ import sys import os -# Add membrain-seg to Python path +# Add membrain-seg to Python path +#Delete before last commit MEMBRAIN_SEG_PATH = "/user/muth9/u12095/membrain-seg/src" if MEMBRAIN_SEG_PATH not in sys.path: sys.path.insert(0, MEMBRAIN_SEG_PATH) @@ -12,10 +13,14 @@ from tqdm import tqdm import numpy as np from scipy.ndimage import label -from skimage.measure import regionprops +from skima -from membrain_seg.segmentation.skeletonize import skeletonization -from membrain_seg.benchmark.metrics import masked_surface_dice +try: + from membrain_seg.segmentation.skeletonize import skeletonization + from membrain_seg.benchmark.metrics import masked_surface_dice +except ImportError: + skeletonization=None + masked_surface_dice=None def load_segmentation(file_path, key): @@ -25,6 +30,10 @@ def load_segmentation(file_path, key): def evaluate_surface_dice(pred, gt, raw, check): + if skeletonization is None: + print("Error! Install membrain_seg. 
For more information check out https://teamtomo.org/membrain-seg/installation/ ") + raise RuntimeError + gt_skeleton = skeletonization(gt == 1, batch_size=100000) pred_skeleton = skeletonization(pred, batch_size=100000) mask = gt != 2 diff --git a/scripts/cooper/revision/thin_az_gt.py b/scripts/cooper/revision/thin_az_gt.py index e1fd03a..1008588 100644 --- a/scripts/cooper/revision/thin_az_gt.py +++ b/scripts/cooper/revision/thin_az_gt.py @@ -34,7 +34,7 @@ def run_az_thinning(folder=ROOT, out_path=OUTPUT_ROOT): az = f_in["labels/az"][:] az_thin = thin_az( - az, boundary_map=boundary_pred, vesicles=vesicles, tomo=tomo, presyn_dist=8, check=True, + az, boundary_map=boundary_pred, vesicles=vesicles, tomo=tomo, presyn_dist=8, check=False, min_thinning_size=2500, ) From 6e956606d30aadb82c50cea6491a57434deedbbc Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 4 Jul 2025 15:56:10 +0200 Subject: [PATCH 27/29] make code more flexible --- run_sbatch_revision.sbatch | 8 ++++---- scripts/cooper/revision/az_prediction.py | 4 +++- scripts/cooper/revision/merge_az.py | 13 +++++++++---- 3 files changed, 16 insertions(+), 9 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 5608573..ce82018 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -2,13 +2,13 @@ #SBATCH -c 4 #4 #8 #SBATCH --mem 120G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 3:00:00 #6:00:00 #48:00:00 +#SBATCH -t 2:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out #SBATCH -A nim00007 #SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/thin_az_gt.py \ - -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ \ - -o /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ \ No newline at end of file +python /user/muth9/u12095/synapse-net/scripts/cooper/revision/merge_az.py -v 6 \ + --names stem_for_eval \ + -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/ \ No newline at end of file diff --git a/scripts/cooper/revision/az_prediction.py b/scripts/cooper/revision/az_prediction.py index cc9bca4..4fca971 100644 --- a/scripts/cooper/revision/az_prediction.py +++ b/scripts/cooper/revision/az_prediction.py @@ -54,10 +54,12 @@ def run_prediction(model, name, split_folder, version, split_names, in_path): def get_model(version): - assert version in (3, 4, 5, 7) + assert version in (3, 4, 5, 6, 7) split_folder = get_split_folder(version) if version == 3: model_path = os.path.join(split_folder, "checkpoints", "3D-AZ-model-TEM_STEM_ChemFix_wichmann-v3") + elif version ==6: + model_path = "/mnt/ceph-hdd/cold/nim00007/models/AZ/v6/" elif version == 7: model_path = "/mnt/lustre-emmy-hdd/usr/u12095/synapse_net/models/ConstantinAZ/checkpoints/v7/" else: diff --git a/scripts/cooper/revision/merge_az.py b/scripts/cooper/revision/merge_az.py index e113971..cb1a776 100644 --- a/scripts/cooper/revision/merge_az.py +++ b/scripts/cooper/revision/merge_az.py @@ -18,14 +18,18 @@ # STEM CROPPED IS OFTEN TOO SMALL! 
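Stepping back to the surface_dice.py changes above for a moment: the try/except that now wraps the membrain_seg imports makes that package an optional dependency, so the module still imports without it and the hard failure is deferred to the first call of evaluate_surface_dice. A compact sketch of the same pattern with the installation hint attached to the exception; the helper name require_membrain_seg is illustrative only:

    try:
        from membrain_seg.segmentation.skeletonize import skeletonization
    except ImportError:
        skeletonization = None

    def require_membrain_seg():
        # Fail only when the optional dependency is actually needed.
        if skeletonization is None:
            raise RuntimeError(
                "membrain-seg is required for the surface dice evaluation, "
                "see https://teamtomo.org/membrain-seg/installation/"
            )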
-def merge_az(name, version, check): +def merge_az(name, version, check, in_path): split_folder = get_split_folder(version) if name == "stem_cropped": file_paths = glob(os.path.join("/mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_cropped", "*.h5")) file_names = [os.path.basename(path) for path in file_paths] else: - file_names = get_file_names(name, split_folder, split_names=["train", "val", "test"]) + if in_path: + file_paths = glob(os.path.join(in_path, name, "*.h5")) + file_names = [os.path.basename(path) for path in file_paths] + else: + file_names = get_file_names(name, split_folder, split_names=["train", "val", "test"]) seg_paths, gt_paths = get_paths(name, file_names) for seg_path, gt_path in zip(seg_paths, gt_paths): @@ -79,7 +83,7 @@ def visualize_merge(args): for name in args.names: if "endbulb" in name: continue - merge_az(name, args.version, check=True) + merge_az(name, args.version, check=True, in_path=args.in_path) def copy_az(name, version): @@ -101,7 +105,7 @@ def run_merge(args): if "endbulb" in name: copy_az(name, args.version) else: - merge_az(name, args.version, check=False) + merge_az(name, args.version, check=False, in_path= args.in_path) def main(): @@ -110,6 +114,7 @@ def main(): parser.add_argument("--visualize", action="store_true") parser.add_argument("--names", nargs="+", default=ALL_NAMES + ["endbulb_of_held_cropped"]) parser.add_argument("--version", "-v", type=int, default=4) + parser.add_argument("--in_path", "-i", default=None) args = parser.parse_args() if args.visualize: From 34886d77e919962a909260685778499020391100 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Fri, 4 Jul 2025 17:11:15 +0200 Subject: [PATCH 28/29] fixed a few mistakes --- .gitignore | 3 ++- run_sbatch_revision.sbatch | 5 ++--- scripts/cooper/revision/surface_dice.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 093d490..203ef2a 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,5 @@ synapse-net-models/ scripts/portal/upscale_tomo.py analysis_results/ scripts/cooper/revision/evaluation_results/ -scripts/cooper/revision/export_tif_to_h5.py \ No newline at end of file +scripts/cooper/revision/export_tif_to_h5.py +scripts/cooper/revision/copy_path.py \ No newline at end of file diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index ce82018..4fc05fb 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -9,6 +9,5 @@ source ~/.bashrc conda activate synapse-net -python /user/muth9/u12095/synapse-net/scripts/cooper/revision/merge_az.py -v 6 \ - --names stem_for_eval \ - -i /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/ \ No newline at end of file +python scripts/cooper/revision/surface_dice.py -i /mnt/ceph-hdd/cold/nim00007/AZ_prediction_new/endbulb_of_held/ \ + -gt /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/endbulb_of_held/ -v 7 --global_eval \ No newline at end of file diff --git a/scripts/cooper/revision/surface_dice.py b/scripts/cooper/revision/surface_dice.py index b1e74e8..1b9553c 100644 --- a/scripts/cooper/revision/surface_dice.py +++ b/scripts/cooper/revision/surface_dice.py @@ -13,7 +13,7 @@ from tqdm import tqdm import numpy as np from scipy.ndimage import label -from skima +from skimage.measure import regionprops try: from membrain_seg.segmentation.skeletonize import skeletonization @@ -130,7 +130,7 @@ def collect_results(input_folder, gt_folder, version, check=False, global_eval=False): results = [] seg_key = f"predictions/az/seg_v{version}" - gt_key = "/labels/az_merged" + 
gt_key = "/labels/az_merged_v6" input_folder_name = os.path.basename(os.path.normpath(input_folder)) for fname in tqdm(os.listdir(input_folder), desc="Processing segmentations"): From f516ae40b03233a35d176f9a04d188658c4fc282 Mon Sep 17 00:00:00 2001 From: SarahMuth Date: Mon, 7 Jul 2025 13:45:57 +0200 Subject: [PATCH 29/29] minor adjustment --- run_sbatch_revision.sbatch | 7 ++-- .../revision/evaluation_results/v7.xlsx | Bin 7667 -> 7899 bytes scripts/cooper/revision/run_az_evaluation.py | 30 +++++++++++++++--- 3 files changed, 29 insertions(+), 8 deletions(-) diff --git a/run_sbatch_revision.sbatch b/run_sbatch_revision.sbatch index 4fc05fb..65fc41b 100644 --- a/run_sbatch_revision.sbatch +++ b/run_sbatch_revision.sbatch @@ -1,13 +1,12 @@ #! /bin/bash #SBATCH -c 4 #4 #8 -#SBATCH --mem 120G #120G #32G #64G #256G +#SBATCH --mem 256G #120G #32G #64G #256G #SBATCH -p grete:shared #grete:shared #grete-h100:shared -#SBATCH -t 2:00:00 #6:00:00 #48:00:00 +#SBATCH -t 4:00:00 #6:00:00 #48:00:00 #SBATCH -G A100:1 #V100:1 #2 #A100:1 #gtx1080:2 #v100:1 #H100:1 #SBATCH --output=/user/muth9/u12095/synapse-net/slurm_revision/slurm-%j.out #SBATCH -A nim00007 #SBATCH --constraint 80gb source ~/.bashrc conda activate synapse-net -python scripts/cooper/revision/surface_dice.py -i /mnt/ceph-hdd/cold/nim00007/AZ_prediction_new/endbulb_of_held/ \ - -gt /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/endbulb_of_held/ -v 7 --global_eval \ No newline at end of file +python scripts/cooper/revision/surface_dice.py -i /mnt/ceph-hdd/cold/nim00007/AZ_prediction_new/stem_for_eval/ -gt /mnt/ceph-hdd/cold/nim00007/new_AZ_train_data/stem_for_eval/ -v 7 \ No newline at end of file diff --git a/scripts/cooper/revision/evaluation_results/v7.xlsx b/scripts/cooper/revision/evaluation_results/v7.xlsx index 8a8e52e390f2b631c0d3c6ea6a973c53e755f050..db083d1020475d55aa9e75c40f3ead1a0fda9151 100644 GIT binary patch delta 3971 zcmZ8kXH-+$)(zayi!nmzNK+t?NS7i_=@1mqC_zM!YUoW+-~bj{q=h07s+T5&8d^Yl z5fSOV3xY^9h!hLF==iB_fxRvEQK1UMi}2ObwJz%6I$9nyIy6fvoLZn$h*8iU99oXf#7}(e zc-Uy`*V_kQu3L9K$}HNZE1%b&C_j!F^e>oFyvaQfj%m*ZN?JhwFancyn;;6Iul_=F zk)HjxBcmwmv~^Yx2yBd3WE2A2e0Wu$JuT6FJ08--R#&ZJOQ0FWbNvlUT#qi^IB;a1 zS$@HMN<*l3yRHQWc|uPP`*NPXj?8vK8r5`pxVrnp{=38eaP8OX7X1#&X8%!N(DC=a zuC7f=zQ8x9v6UJD{Kr5!fp2Sj{@;)6_Ep_}TxBU7`qD1(5vlj9?eOPkz?;%|YNVvE zcjCxX#$D57z|ltkc3nW0A#HwS&oufUuSuy4&cyf; z3fqbV2I%pWDN=BV&oTPMT&=~^lJy8X7?SPQq+z_UYAbh?IRA5hZ}oQmsWJAMp1|m# z!m|t08~(~Bxu0JC^x*t<2{;*BX*l|>fA~-s6x3hTRg*1i^@7Lyj-x4yAG0T_ z1b`59Qezz~nW6as1LLUSJJB^f%9e{ucYhhI(1KT9CVa_L{ZEYjurfKtSW0pgMhAriZU~ zbeV%9){^av(7CURm71NkcWq=LC)s~`i<3srt`sT-`DSK00qG^{m#3i>};Y>x&EFct(U>F zuBMS0F}9uDB<4?&+K@eWL^~$t%U6tNS@!mGcgGVPQCbhuNrlE%a)D)|bCSa#ZR>Z5 z%x!XkrzbF-*%RSD=MWqpb)DyrA6#|U(T+uB_zS4y3F%AwJbqj3H?FH7=(^YVZ4mx4 z<9i^Wduze!^>zDPdeQE;fpxr3D@*ijH<5e}x8TIc`THIcnz#cKl_5TfKC>C&hn~$b z>%$s4D(dKMoCqfULVk^u;psZs$A2^T^I>JQNH{T`D~b_`5eDr?^Sn@XWP*^ z-jZ_=#4>u3cnZBCMt|6fO2g%)cl2J_TMPmubxkm4E~{@0MV6dxS*=lGLWmADMA1va znPgzV%e^`Q%fz6mDYjDir3_~?3jpFv!V>gcb@A%q zF2qcEf%DlX^>hPY40oME!Xb&(%0-_)&n3`r&tve7MB?ciJ}&|zMF6iRg(ZRI!Rn2wC z9VcaVF-HWhR!TozxR3MXR@2Ds)Lmf&h@il|~A5eZ|XK6u4DNGo1&m~xq z^ujYfC2fq*NywBuTgccr?`UfQ83$32g5W4BXC|^zj5ktHJ{onW0rFUQq^w zi7@|`zl|CBbG6T-mBVgXF3ePL4QkPjw@+ru3(;VCYR!!rxJzU1d!=z^+9@ejczw%W z2HsCK32Y^W`GE9mT8=o=8&9np9gu=u-=#AA8a+AwGJVNZyqut9#|JW)cIC}2K{1ts z&v%orN56g^edWxXJ!bx}s&-FyW8094{<+|tajP-qgAidTzi=X$^t94gN{Ggdr`9{N zvr@ZjPsBT7Bt8tLEb&p{gK%vFF(YH)7?8_tgMXt}fb%}>C$4fNM8f`7qKSWRn>!)_ zT9o-jYIa_(hbR3ZftR`$%w?McPkJuX&HdCbbQKQNikZ_DGD8@!{gqhx%(O4`W_Q4- 
zEVRusc2VdG!t?Ii;`Yaza8%@Wu2Jxr{^!EaNp=0-tQM~))We=Ek7wDJ_Ei8$aNgv? z;w$xet}D@ItC+>fB#Wm`Q>z1%riR8ll9cylRk{{8(g7C^?{CU2&O7-3jMmz;WB3xys8RS<)7+w*iFJHl47xpVc zII$s9x$U@2A-O7x^mO#whTUWZw00m-7`iar<2}2i?%1u%ZV9MpKPek`)4nt&eCITi zJIlRP^J}VUUF@@}TR7SGJl8+bPOOrL*4|da3mE}^t9SgtpWdCGvF-}FS-Dg*LWvgS zALCDkL(Z{^8h2Y>f;vr6=AvgiMn5(rxtJxWywZ)jZwC5pL#1&IsX$^acv`9YjD!V3 z(6DCnus_^{1?W8v$3X}4z@)^=ZuY_9=DP;9%s4xs>QKMbX^XBn`<7HjOdeG@sX@#L^~(!p`L+Q$0%A z*EE|MwZF^wVNlMA>HkE7Drb?fnO@&74M4+u3&Z6oS2Gc-q=)vES89uQM8d1-2ZblZ zu<}#WqP-~sE%5$k*_>8|i^k~6ActD$2!_V$5&FJ@LJ#_Y0-of0UHyc4B-4A zjZcqJVI7)`HI^H%^;tmY^v7919vO54r0vrG^=I_+Lq zdV~8CqCz-ee;$#MJ1)pXHPqd#*)qGZ^wV<`2NRa~q$aTv*^wx7r^L6&b>x(ih0^wl zDik|KaX~dSpB(Ot7vksUUQO4@JBUuD|2FLR3bhpDRKYeqATC>zU#j<_vY2;I{ z+2EJZIo0(}wA{GEq;EV*!imOA7$sf+Z7`|ZHXu9nc=*g}GfbJbTZu1G?~wIcC%R;G z&!OlV12D?pP>-X6njCt{S@w3TmNG5-U;0j6t6!Qn4BZfG{e6dOtKipOf$bOsBK;Z~ z%jB2HqD-j*HV}8^#mQd7CmZ0%E$YX`YGq6A=m|uPNfbN=>&2VYn|?NQ&JrIf_%(iZ zJLwsq9HxzWw6+=|m;O_#p>S+R^#&s5l^iq8{^{yMh0h>5a*+W{i3tmB#p~P6ffosy-&cr z^0ixa!&M`D8^gh^Anq%JZ~UIu^*xRO+P|nV39rbt`ny&Oa${)zG#>GM z_|v${Yy9s<8K2sJBPM>APw5X#il2{W9d#`<;`~0}3w70d`ElueeKmY)M8E`G@ zO)7gk7j453YAcvlX~8!@0Dvp~y8wvrXX!*5i45%rlpi0d5)EiOi2${*LXRYhlkD_ZT_;R8!0bj+0zbb6d|PJDNcQ^vT=z;~|44zBF3F)&Ayc2*98dX~cB z&dttznZ=Z8gC7wtJ7a@M5WkR!@Q}jnVW-Ea1}0^(3NA#+Gr1QlC3j(9mKoxf(ouP2 zkg5e|07qkgWxr`)+sAK8*J|+vb(){Uhka^1=PSb=V|5RG4Q3pI@180+Gn#Z%5p0V7 zr3fY$n=i`ARJR3GXX5@PO%U~_F+CdqFpC5L_yFmCP%z=od|!WvI!An)NspkbIXB=m zKwub74w`%unpZyRx%_FpZ&!1-iyU4%Qa^K|XUx@|32+#LIC3+i4%wM6E#d>WB9x8~ z@zlLKb%M6erIuIPe?J~7Bxvf~YieB+?3Eu{Y$CY5*|>42QMfs}lD&}q*;}_YS4&$* zK+FB-<^o~=WaZ1wjg!gVm6iU*sr9*&zfLtGwzoDm{Vp9JMVu-eob2|&jzi`ej^u;u z4p@83#D^E;k3Dpcj&q^=E1ObphmPG(cksHQVagl6Gq9FAT@q|rZd;fzy*J}B4r}Y5 zS-z!okXQ5*zrEYHcy3I0hgv7#K6rYvK71G-FF+vb4xDCLJsN2C9RAU(+tJ_;+~0Oe zayhK)@BDo9c;e&K(~zCzXKhfzA34Wr2Pgh*Kdo(b<_|*DbhL9VTHA7$eg6o<7YGi{ zM4W7d?jPHXuj17_*0kGZ)_39xoDV1ZHxqSD?yc;Ht?WAo=bOXCKaTF#%*=<8JAZ^e z9Bv|f^=}@!S>O5D`{C7Q!$(JJuhlg@k#@CzXTG#xPDwvF3HwR;_~}&jZBw;5rhkGT z!p^tK7QmZ51igJecrIW3by3;!;{I?T7xgxooCI=o?8b%T6J+?i0E%J_V5~P#T6UIG z7JhSF#NMY^*?F7}B&+w%poVm9Fz!;%1D*&1++7lvO=o94D{PH=izEt}MiD{JQNaL2GL2DbXOw=Y_(k`wTrojf?p+m9Z|~bLZX} z;N`|T1~4H=)~eE2H=kp@hhNMefL@8>`|nUiUe+^rP*;lXRJLkk1JkFzRuUQs6VL81 z9v-BVoCr&Q_x78g&N4_wi*)}z#6e=lzpcMUVLJcJ%ev9EwPv3Nr<#>uNG<%)rQ>v@Ok&4X#3?LC(%Xy|9w)KlNEMlkw8X4W*-!bt^a{1A$(@{_9=ar=UtY^odfT|MtY-)W9 zGTZ1Wb)d0kClK5+?gx>3iC~1#@%3%k+iy0J%-_?0zqZrD+;j#K_*EnDeBeh7Bhr8d z_WRg%i1QbRn}l{h=^nG%Xc(i*N8rBH;k|v;8X;$@p&@mEFD}gM3nu&Rh_L; zGE(S@uw5{qiP&n2n$m|`(A$+o(S}8(fU$~3W^9|Ux9-=#16=H?$DolFG|`Tt3x&ZX z%_K}g?b0OeJ_pxYKyE!;XN^r#4Jy6@Ze57rRVMV5$UW$Jnd#>~+~+;G$~3t85-9SP z8_lS1U8+#76-g8J;4)Y}58CWy5i>r}m_4S1t`h(gBKX_D0uTm9h^ zO@50iUKNQDqSvoVCd>%}ei4CKGdN0maFxRq{cf+<&6hXHYntXpreg2eRi@cIf_f2s z$&%7MH#4iN#O&EKk>y{G4$hYrkS?g)s7UPm>c}5I`i$d^3O7{T#Cc}XM8s)Kw(sm2 zAzIqPR#$S8C?(~>dv-fY3Mu)G(R@tOt3wYbw+v?Xvo#&SF4}c>6+|+DtI<7k`9OC2 zwdy3ro-&k9?m-}W=d%5W_Q`GnAwkQ<^LPQx4?pvL`lz|6xcO&JRlwDwwYL#1wdxDs z?1n%72-uSGep*NeFn=5<%0~KYPL<&t=j)?s|3-S24p10lf*$fZJU&+8wD#jt**m1r zXz1y6Iqr4gG^YAYKa}(n;B6@RdUXThV6Ya{#cdP914m@S)A-5FUd`JCmy^3Sorog# zFJ^eAW>#sI5}>SCFN5AxT8KbfO8v5}7+ z4NY_$MqWF|8+DoV1j)F@ID(F*nH=K8)e>4O(kbs@Zq`E%|~tF zRYt}*T7C~K<)6~TEhyg2VnE3xn8UG}y&YtU&A)^mXxxi989YZ=4tz>l88z032hWw% ze$yODl;yAhh14Q6&nv*4p=U#*WA9pBDv|0FvDwIzp|roM{7?|cs5*d*b-$8VQ_-oW zgYbQN6(kvkik6D?vzoUxzOba>qogQk>1YPV+VWlIH9oh5YriPPd}$={_HA5={+n06 zoGEE2ygetcm|G@+!Rl@OowMAH?EOS99L;SR(UUzg93RFRx7hByJtO$el=u?rpJ{o` z+UQeK1tK|Oqs!`Q^NU`mvF*wj`YKmw`b)#Li|(cV6#Kh8dtDBGKdXfwDF|+1`&4}U 
zxW`H^lb%Pe(KOb0f~l-gysM_Q)?WOcaCU6I&{jIx#kY@uH4ak0@jjAA^~qQw%_nms zM8y~~NvAk~e2$eeU#KUYJQH^(2>P+h>|G9z>a#JLJ~`w2i}|Fe{w%wp4mkmyXWiPl zXn|Ar;|1(pN=qHT9sU#iM;zThHPr-j5lTJaYLGrdCS84_IEA=S9;Q`APG33B{z|~@ zftginkx4i!EzyVcwWRhhp8a6}aTub^fLt#=L0rggxfs9^;x}0!?;5)}ZU!Drt5wtz zN%CH+Hm}CsDtIL9ut3j3B|KM<-RQ5M-+#@}@d|)3dvaicvPK$WMpDqq8$`6+nlW38yLTE)hLOpgeP``(2*ct<7RJDWwJ`+ZB|*M03N4S2V+eJ<5(Z> ztCbge?VK)V3^kxJ=hEUxnOyPFUiPMtD;|=r&Y)$(JqZ@;>a5)^3{80HqAE4pte*%^ zA``hZs=^buRxM-J6Tvne?q)V}TdF=to?7TCojiLYm)y@vMo4*7y;lSnB($4gCXuI zaw%3o$I0`!?oF*dIP$tmwHP~2)!j5ktA*KM;AXCU@r_bbsBL@JE`y_Ac*~8q6I&-2 zCQAHdy|LxLp2c~EE<$*YAeS?&G*7buWkd#GZu)tvGkv&h;qTei^sbEVu?D_3&V8uG>Axt@-6?`>zMd4 zz~2FP@ci!p3S#=7K$d_O%ReNRJ}ICk`0MXKFaiMNivs}XX?*tI6$zY9mxqe{&iOZ> z{5Jyth>U`!--U|*ZkGpzIG}0N7lM=b4+zD1_{)Xd##x#I=|J@Vupp-4${MkqcAke0 z0AT(9I)F%^1TO&Kg7?7r%gg=x{?Ea54j2T;l`aK?{qAK7gV5dKPxpdJ2>lx2ah$<@ Y2n{Q#^#4184LH3TCdO0*`PJ+H01m9$xc~qF diff --git a/scripts/cooper/revision/run_az_evaluation.py b/scripts/cooper/revision/run_az_evaluation.py index 50ea0d6..8e91833 100644 --- a/scripts/cooper/revision/run_az_evaluation.py +++ b/scripts/cooper/revision/run_az_evaluation.py @@ -1,5 +1,6 @@ import argparse import os +from glob import glob import pandas as pd from common import get_paths, get_file_names, ALL_NAMES @@ -14,17 +15,37 @@ def run_az_evaluation(args): results = [] for dataset in args.datasets: print(dataset, ":") - file_names = get_file_names(dataset, split_folder, split_names=["test"]) + if args.in_path: + file_paths = glob(os.path.join(args.in_path, dataset, "*.h5")) + file_names = [os.path.basename(path) for path in file_paths] + else: + file_names = get_file_names(dataset, split_folder, split_names=["test"]) seg_paths, gt_paths = get_paths(dataset, file_names) result = az_evaluation( - seg_paths, gt_paths, seg_key=seg_key, gt_key="/labels/az_merged", + seg_paths, gt_paths, seg_key=seg_key, gt_key="/labels/az_merged_v6", criterion=args.criterion, dataset=[dataset] * len(seg_paths), threshold=args.threshold, ) results.append(result) results = pd.concat(results) output_path = f"/user/muth9/u12095/synapse-net/scripts/cooper/revision/evaluation_results/v{args.version}.xlsx" - results.to_excel(output_path, index=False) + + if os.path.exists(output_path): + # Read existing data + existing = pd.read_excel(output_path) + + # Ensure consistent column naming and types + if "tomo_name" in results.columns and "tomo_name" in existing.columns: + # Drop existing entries with matching "tomo_name" + existing = existing[~existing["tomo_name"].isin(results["tomo_name"])] + + # Combine: old (filtered) + new + combined = pd.concat([existing, results], ignore_index=True) + else: + combined = results + + # Save back to Excel + combined.to_excel(output_path, index=False) def visualize_az_evaluation(args): @@ -44,7 +65,7 @@ def visualize_az_evaluation(args): with open_file(seg_path, "r") as f: seg = f[seg_key][:].squeeze() with open_file(gt_path, "r") as f: - gt = f["/labels/az_merged"][:] + gt = f["/labels/az_merged_v6"][:] seg = seg > args.threshold @@ -66,6 +87,7 @@ def main(): parser.add_argument("--datasets", nargs="+", default=ALL_NAMES) # Set the threshold to None if the AZ prediction already a segmentation. parser.add_argument("--threshold", type=float, default=0.5) + parser.add_argument("--in_path", "-i", default=None) args = parser.parse_args() if args.visualize: