Skip to content

Commit bf8b1b7

Browse files
author
Matt Sokoloff
committed
add mask preprocessing to coco utils
1 parent eecae58 commit bf8b1b7

File tree

1 file changed

+72
-16
lines changed

1 file changed

+72
-16
lines changed

examples/integrations/detectron2/coco_utils.py

Lines changed: 72 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -59,20 +59,76 @@ def visualize_coco_examples(coco_examples, metadata_catalog, scale = 1.0, max_im
5959
return Image.fromarray(np.vstack(images))
6060

6161

62-
def visualize_object_inferences(coco_examples, metadata_catalog, predictor, scale = 1.0, max_images = 5, resize_dims = (512, 768)):
    """Run ``predictor`` on up to ``max_images`` COCO examples and stack the
    rendered detections into one tall image.

    Args:
        coco_examples (list[dict]): COCO-format records; each must have a
            'file_name' key pointing at an image on disk.
        metadata_catalog: detectron2 metadata passed to ``Visualizer`` for
            class names and colors.
        predictor: callable taking a BGR image array and returning a dict
            with an "instances" field (detectron2 ``DefaultPredictor``-like).
        scale (float): rendering scale forwarded to ``Visualizer``.
        max_images (int): maximum number of examples to render.
        resize_dims (tuple): size passed to ``cv2.resize``.
            NOTE(review): ``cv2.resize`` interprets this as (width, height);
            confirm the default's orientation is intended.

    Returns:
        PIL.Image.Image: all rendered predictions stacked vertically.
    """
    images = []
    for idx, example in enumerate(coco_examples):
        # BUG FIX: was `idx > max_images`, which rendered max_images + 1
        # examples before breaking.
        if idx >= max_images:
            break
        im = cv2.imread(example['file_name'])
        outputs = predictor(im)
        # cv2 loads BGR; the Visualizer expects RGB, hence the channel flip.
        v = Visualizer(im[:, :, ::-1], metadata_catalog, scale=scale)
        out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
        images.append(cv2.resize(out.get_image()[:, :, ::-1], resize_dims))
    # BUG FIX: the original dropped this value and implicitly returned None;
    # return the stacked image like visualize_coco_examples does.
    return Image.fromarray(np.vstack(images))
73-
74-
75-
76-
def visualize_panoptic_inferences():
    """Placeholder for panoptic inference visualization; not yet implemented."""
    pass
62+
def visualize_object_inferences(coco_examples, metadata_catalog, predictor, scale = 1.0, max_images = 5, resize_dims = (768, 512)):
    """Run ``predictor`` on up to ``max_images`` COCO examples and stack the
    rendered detections into one tall image.

    Args:
        coco_examples (list[dict]): COCO-format records; each must have a
            'file_name' key pointing at an image on disk.
        metadata_catalog: detectron2 metadata passed to ``Visualizer`` for
            class names and colors.
        predictor: callable taking a BGR image array and returning a dict
            with an "instances" field (detectron2 ``DefaultPredictor``-like).
        scale (float): rendering scale forwarded to ``Visualizer``.
        max_images (int): maximum number of examples to render.
        resize_dims (tuple): (width, height) passed to ``cv2.resize``.

    Returns:
        PIL.Image.Image: all rendered predictions stacked vertically.
    """
    images = []
    for idx, example in enumerate(coco_examples):
        # BUG FIX: was `idx > max_images`, which rendered max_images + 1
        # examples before breaking.
        if idx >= max_images:
            break
        im = cv2.imread(example['file_name'])
        outputs = predictor(im)
        # cv2 loads BGR; the Visualizer expects RGB, hence the channel flip.
        v = Visualizer(im[:, :, ::-1], metadata_catalog, scale=scale)
        out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
        images.append(cv2.resize(out.get_image()[:, :, ::-1], resize_dims))
    return Image.fromarray(np.vstack(images))
73+
74+
75+
76+
def _process_panoptic_to_semantic(input_panoptic, output_semantic, segments, id_map):
    """Convert one COCO panoptic PNG into a semantic-segmentation label PNG.

    Args:
        input_panoptic (str): path to the panoptic annotation image.
        output_semantic (str): destination path for the semantic label image.
        segments (list[dict]): "segments_info" entries, each carrying an
            "id" (panoptic segment id) and a "category_id".
        id_map (dict): maps original category ids to output semantic labels.
    """
    # Decode the RGB-encoded panoptic image into per-pixel segment ids.
    panoptic_ids = rgb2id(np.asarray(Image.open(input_panoptic), dtype=np.uint32))

    # Pixels not covered by any segment keep label 0.
    semantic = np.zeros_like(panoptic_ids, dtype=np.uint8)
    for segment in segments:
        semantic[panoptic_ids == segment["id"]] = id_map[segment["category_id"]]

    Image.fromarray(semantic).save(output_semantic)
86+
87+
88+
def separate_coco_semantic_from_panoptic(panoptic_json, panoptic_root, sem_seg_root, categories):
    """
    Create semantic segmentation annotations from panoptic segmentation
    annotations, to be used by PanopticFPN.

    It maps all thing categories to class 0, and maps all unlabeled pixels to class 255.
    It maps all stuff categories to contiguous ids starting from 1.

    Args:
        panoptic_json (str): path to the panoptic json file, in COCO's format.
        panoptic_root (str): a directory with panoptic annotation files, in COCO's format.
        sem_seg_root (str): a directory to output semantic annotation files
        categories (list[dict]): category metadata. Each dict needs to have:
            "id": corresponds to the "category_id" in the json annotations
            "isthing": 0 or 1
    """
    os.makedirs(sem_seg_root, exist_ok=True)

    stuff_ids = [k["id"] for k in categories if k["isthing"] == 0]
    thing_ids = [k["id"] for k in categories if k["isthing"] == 1]
    id_map = {}  # map from category id to id in the output semantic annotation
    # Labels are stored as uint8; 255 is reserved for "unlabeled" and 0 for
    # things, leaving at most 254 contiguous stuff labels.
    assert len(stuff_ids) <= 254
    for i, stuff_id in enumerate(stuff_ids):
        id_map[stuff_id] = i + 1
    for thing_id in thing_ids:
        id_map[thing_id] = 0
    id_map[0] = 255  # unlabeled pixels

    with open(panoptic_json) as f:
        obj = json.load(f)

    def iter_annotations():
        # Yield (input_path, output_path, segments) for each panoptic record.
        for anno in obj["annotations"]:
            file_name = anno["file_name"]
            segments = anno["segments_info"]
            # Renamed from `input`, which shadowed the builtin.
            input_path = os.path.join(panoptic_root, file_name)
            output_path = os.path.join(sem_seg_root, file_name)
            yield input_path, output_path, segments

    fn = functools.partial(_process_panoptic_to_semantic, id_map=id_map)
    futures = []
    with ThreadPoolExecutor(max_workers=12) as executor:
        for args in iter_annotations():
            futures.append(executor.submit(fn, *args))
        # `total=` lets tqdm render an actual progress bar for as_completed;
        # .result() re-raises any worker exception instead of hiding it.
        for future in tqdm(as_completed(futures), total=len(futures)):
            future.result()
78134

0 commit comments

Comments
 (0)