diff --git a/helpers/metrics.py b/helpers/metrics.py
index 241cd7f..47c2929 100644
--- a/helpers/metrics.py
+++ b/helpers/metrics.py
@@ -150,15 +150,7 @@ def get_metrics(tp_gdf, fp_gdf, fn_gdf, mismatch_gdf, id_classes=0, method='macr
     pw_k = by_class_dict.copy()
     rw_k = by_class_dict.copy()
 
-    by_class_dict = {key: None for key in id_classes}
-    tp_k = by_class_dict.copy()
-    fp_k = by_class_dict.copy()
-    fn_k = by_class_dict.copy()
-    p_k = by_class_dict.copy()
-    r_k = by_class_dict.copy()
-    count_k = by_class_dict.copy()
-    pw_k = by_class_dict.copy()
-    rw_k = by_class_dict.copy()
+    total_labels = len(tp_gdf) + len(fn_gdf) + len(mismatch_gdf)
 
     for id_cl in id_classes:
 
@@ -176,25 +168,27 @@ def get_metrics(tp_gdf, fp_gdf, fn_gdf, mismatch_gdf, id_classes=0, method='macr
         fn_count = pure_fn_count + mismatched_fn_count
         tp_count = 0 if tp_gdf.empty else len(tp_gdf[tp_gdf.det_class==id_cl])
 
-        tp_k[id_cl] = tp_count
         fp_k[id_cl] = fp_count
         fn_k[id_cl] = fn_count
-
-        p_k[id_cl] = 0 if tp_count == 0 else tp_count / (tp_count + fp_count)
-        r_k[id_cl] = 0 if tp_count == 0 else tp_count / (tp_count + fn_count)
-        count_k[id_cl] = 0 if tp_count == 0 else tp_count + fn_count
+        tp_k[id_cl] = tp_count
+
+        count_k[id_cl] = tp_count + fn_count
+        if tp_count > 0:
+            p_k[id_cl] = tp_count / (tp_count + fp_count)
+            r_k[id_cl] = tp_count / (tp_count + fn_count)
+
+        if (method == 'macro-weighted-average') & (total_labels > 0):
+            pw_k[id_cl] = (count_k[id_cl] / total_labels) * p_k[id_cl]
+            rw_k[id_cl] = (count_k[id_cl] / total_labels) * r_k[id_cl]
 
     if method == 'macro-average':
         precision = sum(p_k.values()) / len(id_classes)
         recall = sum(r_k.values()) / len(id_classes)
-    elif method == 'macro-weighted-average':
-        for id_cl in id_classes:
-            pw_k[id_cl] = 0 if sum(count_k.values()) == 0 else (count_k[id_cl] / sum(count_k.values())) * p_k[id_cl]
-            rw_k[id_cl] = 0 if sum(count_k.values()) == 0 else (count_k[id_cl] / sum(count_k.values())) * r_k[id_cl]
+    elif method == 'macro-weighted-average':
         precision = sum(pw_k.values()) / len(id_classes)
         recall = sum(rw_k.values()) / len(id_classes)
     elif method == 'micro-average':
-        if sum(tp_k.values()) == 0 and sum(fp_k.values()) == 0:
+        if sum(tp_k.values()) == 0:
             precision = 0
             recall = 0
         else:
@@ -224,4 +218,9 @@ def intersection_over_union(polygon1_shape, polygon2_shape):
     polygon_intersection = polygon1_shape.intersection(polygon2_shape).area
     polygon_union = polygon1_shape.area + polygon2_shape.area - polygon_intersection
 
-    return polygon_intersection / polygon_union
\ No newline at end of file
+    if polygon_union != 0:
+        iou = polygon_intersection / polygon_union
+    else:
+        iou = 0
+
+    return iou
\ No newline at end of file
diff --git a/helpers/misc.py b/helpers/misc.py
index 9f3d18b..1806f23 100644
--- a/helpers/misc.py
+++ b/helpers/misc.py
@@ -16,6 +16,15 @@ from shapely.validation import make_valid
 from rasterio.transform import from_bounds
 
+try:
+    try:
+        from helpers.metrics import intersection_over_union
+    except ModuleNotFoundError:
+        from metrics import intersection_over_union
+except Exception as e:
+    logger.error(f"Could not import some dependencies. Exception: {e}")
+    sys.exit(1)
+
 
 class BadFileExtensionException(Exception):
     "Raised when the file extension is different from the expected one"
 
@@ -144,29 +153,6 @@ def clip_row(row, fact=fact):
 
     return clipped_labels_gdf
 
-
-def intersection_over_union(polygon1_shape, polygon2_shape):
-    """Determine the intersection area over union area (IoU) of two polygons
-
-    Args:
-        polygon1_shape (geometry): first polygon
-        polygon2_shape (geometry): second polygon
-
-    Returns:
-        int: Unrounded ratio between the intersection and union area
-    """
-
-    # Calculate intersection and union, and the IoU
-    polygon_intersection = polygon1_shape.intersection(polygon2_shape).area
-    polygon_union = polygon1_shape.area + polygon2_shape.area - polygon_intersection
-
-    if polygon_union != 0:
-        iou = polygon_intersection / polygon_union
-    else:
-        iou = 0
-
-    return iou
-
-
 def format_logger(logger):
     logger.remove()
diff --git a/scripts/make_detections.py b/scripts/make_detections.py
index 4314266..2bf1ae3 100644
--- a/scripts/make_detections.py
+++ b/scripts/make_detections.py
@@ -62,6 +62,7 @@ def main(cfg_file_path):
     DETECTRON2_CFG_FILE = cfg['detectron2_config_file']
 
     WORKING_DIR = cfg['working_directory']
+    OUTPUT_DIR = cfg['output_folder'] if 'output_folder' in cfg.keys() else '.'
     SAMPLE_TAGGED_IMG_SUBDIR = cfg['sample_tagged_img_subfolder']
     LOG_SUBDIR = cfg['log_subfolder']
 
@@ -74,9 +75,8 @@ def main(cfg_file_path):
 
     os.chdir(WORKING_DIR)
     # let's make the output directories in case they don't exist
-    for DIR in [SAMPLE_TAGGED_IMG_SUBDIR, LOG_SUBDIR]:
-        if not os.path.exists(DIR):
-            os.makedirs(DIR)
+    for directory in [OUTPUT_DIR, SAMPLE_TAGGED_IMG_SUBDIR, LOG_SUBDIR]:
+        os.makedirs(directory, exist_ok=True)
 
     written_files = []
 
@@ -121,7 +121,7 @@
 
        logger.info(f"Making detections over the entire {dataset} dataset...")
 
-       detections_filename = f'{dataset}_detections_at_{threshold_str}_threshold.gpkg'
+       detections_filename = os.path.join(OUTPUT_DIR, f'{dataset}_detections_at_{threshold_str}_threshold.gpkg')
 
        for d in tqdm(DatasetCatalog.get(dataset)):
 
@@ -181,13 +181,14 @@
 
            im = cv2.imread(d["file_name"])
            outputs = predictor(im)
            v = Visualizer(im[:, :, ::-1], # [:, :, ::-1] is for RGB -> BGR conversion, cf. https://stackoverflow.com/questions/14556545/why-opencv-using-bgr-colour-space-instead-of-rgb
-               metadata=MetadataCatalog.get(dataset),
-               scale=1.0,
-               instance_mode=ColorMode.IMAGE_BW # remove the colors of unsegmented pixels
+                          metadata=MetadataCatalog.get(dataset),
+                          scale=1.0,
+                          instance_mode=ColorMode.IMAGE_BW # remove the colors of unsegmented pixels
            )
            v = v.draw_instance_predictions(outputs["instances"].to("cpu"))
-           cv2.imwrite(os.path.join(SAMPLE_TAGGED_IMG_SUBDIR, output_filename), v.get_image()[:, :, ::-1])
-           written_files.append(os.path.join(WORKING_DIR, SAMPLE_TAGGED_IMG_SUBDIR, output_filename))
+           filepath = os.path.join(SAMPLE_TAGGED_IMG_SUBDIR, output_filename)
+           cv2.imwrite(filepath, v.get_image()[:, :, ::-1])
+           written_files.append(os.path.join(WORKING_DIR, filepath))
 
            logger.success(DONE_MSG)