Sourcery Starbot ⭐ refactored tensorboy/pytorch_Realtime_Multi-Person_Pose_Estimation #158
base: master
Changes from all commits
@@ -40,7 +40,7 @@
 # update config file
 update_config(cfg, args)

-model = get_model('vgg19')
+model = get_model('vgg19')
 model.load_state_dict(torch.load(args.weight))
 model.cuda()
 model.float()
@@ -53,15 +53,15 @@
 while True:
     # Capture frame-by-frame
     ret, oriImg = video_capture.read()
-    shape_dst = np.min(oriImg.shape[0:2])
+    shape_dst = np.min(oriImg.shape[:2])

     with torch.no_grad():
         paf, heatmap, imscale = get_outputs(
             oriImg, model, 'rtpose')

     humans = paf_to_pose_cpp(heatmap, paf, cfg)

     out = draw_humans(oriImg, humans)

     # Display the resulting frame
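For reference, the two slices are equivalent; a quick standalone check with a dummy frame (the 480x640 size is arbitrary, NumPy only):

```python
import numpy as np

# Dummy BGR frame shaped like an OpenCV capture: (height, width, channels).
oriImg = np.zeros((480, 640, 3), dtype=np.uint8)

# shape[0:2] and shape[:2] are the same slice; dropping the explicit 0 changes nothing.
assert oriImg.shape[0:2] == oriImg.shape[:2] == (480, 640)

# The demo scales against the shorter image side.
shape_dst = np.min(oriImg.shape[:2])
print(shape_dst)  # 480
```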
@@ -124,14 +124,13 @@ def append_result(image_id, humans, upsample_keypoints, outputs):
     """
     for human in humans:
         one_result = {
-            "image_id": 0,
             "category_id": 1,
             "keypoints": [],
-            "score": 0
+            "score": 0,
+            "image_id": image_id,
         }
-        one_result["image_id"] = image_id
         keypoints = np.zeros((18, 3))

         all_scores = []
         for i in range(cfg.MODEL.NUM_KEYPOINTS):
             if i not in human.body_parts.keys():

@@ -146,7 +145,7 @@ def append_result(image_id, humans, upsample_keypoints, outputs):
                 keypoints[i, 2] = 1
                 score = human.body_parts[i].score
                 all_scores.append(score)

         keypoints = keypoints[ORDER_COCO,:]
         one_result["score"] = 1.
         one_result["keypoints"] = list(keypoints.reshape(51))
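The dict built by append_result is a COCO keypoint detection record; a simplified, hypothetical sketch of the same shape (make_result and the toy keypoints are illustrative, not the repo's code):

```python
import numpy as np

# Hypothetical helper mirroring the refactor above: image_id goes straight into
# the dict literal instead of being patched in after construction.
def make_result(image_id, keypoints_xyv):
    keypoints = np.asarray(keypoints_xyv, dtype=float)   # (17, 3): x, y, visibility
    return {
        "category_id": 1,                                # COCO category 1 = person
        "keypoints": keypoints.reshape(-1).tolist(),     # flat [x1, y1, v1, x2, ...]
        "score": 1.0,
        "image_id": image_id,
    }

result = make_result(42, [[100.0, 200.0, 2.0]] * 17)
assert len(result["keypoints"]) == 51                    # 17 keypoints * 3 values
```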
@@ -165,20 +164,19 @@ def append_result_legacy(image_id, person_to_joint_assoc, joint_list, outputs):

     for ridxPred in range(len(person_to_joint_assoc)):
         one_result = {
-            "image_id": 0,
             "category_id": 1,
             "keypoints": [],
-            "score": 0
+            "score": 0,
+            "image_id": image_id,
         }

-        one_result["image_id"] = image_id
         keypoints = np.zeros((17, 3))

         for part in range(17):
             ind = ORDER_COCO[part]
             index = int(person_to_joint_assoc[ridxPred, ind])

-            if -1 == index:
+            if index == -1:
                 keypoints[part, 0] = 0
                 keypoints[part, 1] = 0
                 keypoints[part, 2] = 0
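The keypoints[ORDER_COCO, :] step in append_result is plain integer fancy indexing; a toy illustration (the 4-row array and index values are made up, while the real ORDER_COCO picks 17 COCO joints out of the model's 18, which include a 'neck' joint COCO does not use):

```python
import numpy as np

keypoints = np.arange(4 * 3).reshape(4, 3)      # rows 0..3 in model order
TOY_ORDER = [2, 0, 3]                           # desired output order (drops row 1)

reordered = keypoints[TOY_ORDER, :]             # select and reorder rows in one step
print(reordered.shape)                          # (3, 3)
print(list(reordered.reshape(-1)))              # [6, 7, 8, 0, 1, 2, 9, 10, 11]
```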
@@ -248,31 +246,31 @@ def run_eval(image_dir, anno_file, vis_dir, model, preprocess):
     :returns: float, the reported mAP score
     """
     coco = COCO(anno_file)
-    cat_ids = coco.getCatIds(catNms=['person'])
+    cat_ids = coco.getCatIds(catNms=['person'])
     img_ids = coco.getImgIds(catIds=cat_ids)
-    print("Total number of validation images {}".format(len(img_ids)))
+    print(f"Total number of validation images {len(img_ids)}")

     # iterate all val images
     outputs = []
     print("Processing Images in validation set")
     for i in range(len(img_ids)):
         if i % 10 == 0 and i != 0:
-            print("Processed {} images".format(i))
+            print(f"Processed {i} images")
         img = coco.loadImgs(img_ids[i])[0]
         file_name = img['file_name']
         file_path = os.path.join(image_dir, file_name)

         oriImg = cv2.imread(file_path)
         # Get the shortest side of the image (either height or width)
-        shape_dst = np.min(oriImg.shape[0:2])
+        shape_dst = np.min(oriImg.shape[:2])

         # Get results of original image
         paf, heatmap, scale_img = get_outputs(oriImg, model, preprocess)

         humans = paf_to_pose_cpp(heatmap, paf, cfg)

         out = draw_humans(oriImg, humans)

         vis_path = os.path.join(vis_dir, file_name)
         cv2.imwrite(vis_path, out)
         # subset indicated how many peoples foun in this image.
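The .format() to f-string conversions are behaviour-preserving; a quick standalone equivalence check with made-up counts:

```python
# f-strings and str.format produce identical text here; the numbers are made up.
img_count, processed = 5000, 120

old_style = "Total number of validation images {}".format(img_count)
new_style = f"Total number of validation images {img_count}"
assert old_style == new_style

assert "Processed {} images".format(processed) == f"Processed {processed} images"
print(new_style)
```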
@@ -11,39 +11,36 @@
 from . import transforms, utils


 def kp_connections(keypoints):
-    kp_lines = [
-        [keypoints.index('neck'), keypoints.index('right_hip')],
+    return [
+        [keypoints.index('neck'), keypoints.index('right_hip')],
         [keypoints.index('right_hip'), keypoints.index('right_knee')],
         [keypoints.index('right_knee'), keypoints.index('right_ankle')],
-        [keypoints.index('neck'), keypoints.index('left_hip')],
+        [keypoints.index('neck'), keypoints.index('left_hip')],
         [keypoints.index('left_hip'), keypoints.index('left_knee')],
         [keypoints.index('left_knee'), keypoints.index('left_ankle')],
-        [keypoints.index('neck'), keypoints.index('right_shoulder')],
+        [keypoints.index('neck'), keypoints.index('right_shoulder')],
         [keypoints.index('right_shoulder'), keypoints.index('right_elbow')],
-        [keypoints.index('right_elbow'), keypoints.index('right_wrist')],
-        [keypoints.index('right_shoulder'), keypoints.index('right_eye')],
-        [keypoints.index('neck'), keypoints.index('left_shoulder')],
+        [keypoints.index('right_elbow'), keypoints.index('right_wrist')],
+        [keypoints.index('right_shoulder'), keypoints.index('right_eye')],
+        [keypoints.index('neck'), keypoints.index('left_shoulder')],
         [keypoints.index('left_shoulder'), keypoints.index('left_elbow')],
         [keypoints.index('left_elbow'), keypoints.index('left_wrist')],
-        [keypoints.index('left_shoulder'), keypoints.index('left_eye')],
-        [keypoints.index('neck'), keypoints.index('nose')],
+        [keypoints.index('left_shoulder'), keypoints.index('left_eye')],
+        [keypoints.index('neck'), keypoints.index('nose')],
         [keypoints.index('nose'), keypoints.index('right_eye')],
-        [keypoints.index('nose'), keypoints.index('left_eye')],
+        [keypoints.index('nose'), keypoints.index('left_eye')],
         [keypoints.index('right_eye'), keypoints.index('right_ear')],
-        [keypoints.index('left_eye'), keypoints.index('left_ear')]
+        [keypoints.index('left_eye'), keypoints.index('left_ear')],
     ]
-    return kp_lines


 def get_keypoints():
     """Get the COCO keypoints and their left/right flip coorespondence map."""
-    # Keypoints are not available in the COCO json for the test split, so we
-    # provide them here.
-    keypoints = [
+    return [
         'nose',
         'neck',
         'right_shoulder',
         'right_elbow',
-        'right_wrist',
+        'right_wrist',
         'left_shoulder',
         'left_elbow',
         'left_wrist',

Sourcery comment on lines -39 to +43: this refactoring removes the following comments (why?)
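kp_connections just turns keypoint names into index pairs; a toy, cut-down version (three made-up names and two limbs, not the full skeleton) shows the data structure:

```python
# Toy version of kp_connections: limbs become pairs of indices into the name list.
def kp_connections(keypoints):
    return [
        [keypoints.index('neck'), keypoints.index('nose')],
        [keypoints.index('nose'), keypoints.index('right_eye')],
    ]

names = ['nose', 'neck', 'right_eye']
print(kp_connections(names))  # [[1, 0], [0, 2]]
```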
@@ -53,12 +50,11 @@ def get_keypoints():
         'left_hip',
         'left_knee',
         'left_ankle',
-        'right_eye',
+        'right_eye',
         'left_eye',
         'right_ear',
-        'left_ear']
-
-    return keypoints
+        'left_ear',
+    ]


 def collate_images_anns_meta(batch):
     images = torch.utils.data.dataloader.default_collate([b[0] for b in batch])
@@ -124,16 +120,16 @@ def __init__(self, root, annFile, image_transform=None, target_transforms=None,
             self.filter_for_keypoint_annotations()
         if n_images:
             self.ids = self.ids[:n_images]
-        print('Images: {}'.format(len(self.ids)))
+        print(f'Images: {len(self.ids)}')

         self.preprocess = preprocess or transforms.Normalize()
         self.image_transform = image_transform or transforms.image_transform
         self.target_transforms = target_transforms

         self.HEATMAP_COUNT = len(get_keypoints())
         self.LIMB_IDS = kp_connections(get_keypoints())
         self.input_y = input_y
-        self.input_x = input_x
+        self.input_x = input_x
         self.stride = stride
         self.log = logging.getLogger(self.__class__.__name__)
@@ -257,13 +253,13 @@ def add_neck(self, keypoint):
         return keypoint

     def get_ground_truth(self, anns):

         grid_y = int(self.input_y / self.stride)
         grid_x = int(self.input_x / self.stride)
         channels_heat = (self.HEATMAP_COUNT + 1)
         channels_paf = 2 * len(self.LIMB_IDS)
-        heatmaps = np.zeros((int(grid_y), int(grid_x), channels_heat))
-        pafs = np.zeros((int(grid_y), int(grid_x), channels_paf))
+        heatmaps = np.zeros((grid_y, grid_x, channels_heat))
+        pafs = np.zeros((grid_y, grid_x, channels_paf))

         keypoints = []
         for ann in anns:
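The redundant int() casts go away because grid_y and grid_x are already ints; a quick shape check with illustrative numbers (the 368x368 input size and stride 8 are assumptions, not values read from this repo's config):

```python
import numpy as np

input_y, input_x, stride = 368, 368, 8
HEATMAP_COUNT = 18                      # keypoint names returned by get_keypoints()
LIMB_COUNT = 19                         # connection pairs returned by kp_connections()

grid_y = int(input_y / stride)          # already an int -- a second int() adds nothing
grid_x = int(input_x / stride)

heatmaps = np.zeros((grid_y, grid_x, HEATMAP_COUNT + 1))   # +1 background channel
pafs = np.zeros((grid_y, grid_x, 2 * LIMB_COUNT))          # x and y component per limb

print(heatmaps.shape, pafs.shape)       # (46, 46, 19) (46, 46, 38)
```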
@@ -286,7 +282,7 @@ def get_ground_truth(self, anns):
         # pafs
         for i, (k1, k2) in enumerate(self.LIMB_IDS):
             # limb
-            count = np.zeros((int(grid_y), int(grid_x)), dtype=np.uint32)
+            count = np.zeros((grid_y, grid_x), dtype=np.uint32)
             for joint in keypoints:
                 if joint[k1, 2] > 0.5 and joint[k2, 2] > 0.5:
                     centerA = joint[k1, :2]
@@ -20,8 +20,8 @@
 def putGaussianMaps(center, accumulate_confid_map, sigma, grid_y, grid_x, stride):

     start = stride / 2.0 - 0.5
-    y_range = [i for i in range(int(grid_y))]
-    x_range = [i for i in range(int(grid_x))]
+    y_range = list(range(int(grid_y)))
+    x_range = list(range(int(grid_x)))
     xx, yy = np.meshgrid(x_range, y_range)
     xx = xx * stride + start
     yy = yy * stride + start
@@ -32,5 +32,5 @@ def putGaussianMaps(center, accumulate_confid_map, sigma, grid_y, grid_x, stride):
     cofid_map = np.multiply(mask, cofid_map)
     accumulate_confid_map += cofid_map
     accumulate_confid_map[accumulate_confid_map > 1.0] = 1.0

     return accumulate_confid_map
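Taken together, the two putGaussianMaps hunks stamp a Gaussian bump onto a stride-spaced grid; a standalone re-statement with illustrative values (sigma, grid size, and the keypoint position are made up, and the exponent cutoff mask from the original is omitted):

```python
import numpy as np

def put_gaussian_map(center, accumulate_confid_map, sigma, grid_y, grid_x, stride):
    start = stride / 2.0 - 0.5                    # image-pixel centre of grid cell (0, 0)
    y_range = list(range(int(grid_y)))
    x_range = list(range(int(grid_x)))
    xx, yy = np.meshgrid(x_range, y_range)
    xx = xx * stride + start                      # grid coords -> image pixels
    yy = yy * stride + start
    d2 = (xx - center[0]) ** 2 + (yy - center[1]) ** 2
    confid_map = np.exp(-d2 / (2.0 * sigma ** 2))
    accumulate_confid_map += confid_map
    accumulate_confid_map[accumulate_confid_map > 1.0] = 1.0   # clip overlapping bumps
    return accumulate_confid_map

heat = np.zeros((46, 46))
heat = put_gaussian_map((99.5, 115.5), heat, sigma=7.0, grid_y=46, grid_x=46, stride=8)
print(heat.max())  # 1.0 -- this keypoint sits exactly on a grid sample
```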
@@ -37,8 +37,8 @@ def putVecMaps(centerA, centerB, accumulate_vec_map, count, grid_y, grid_x, stride):
     min_y = max(int(round(min(centerA[1], centerB[1]) - thre)), 0)
     max_y = min(int(round(max(centerA[1], centerB[1]) + thre)), grid_y)

-    range_x = list(range(int(min_x), int(max_x), 1))
-    range_y = list(range(int(min_y), int(max_y), 1))
+    range_x = list(range(int(min_x), int(max_x)))
+    range_y = list(range(int(min_y), int(max_y)))
     xx, yy = np.meshgrid(range_x, range_y)
     ba_x = xx - centerA[0]  # the vector from (x,y) to centerA
     ba_y = yy - centerA[1]
@@ -60,9 +60,9 @@ def putVecMaps(centerA, centerB, accumulate_vec_map, count, grid_y, grid_x, stride):

     mask = count == 0

-    count[mask == True] = 1
+    count[mask] = 1

     accumulate_vec_map = np.divide(accumulate_vec_map, count[:, :, np.newaxis])
-    count[mask == True] = 0
+    count[mask] = 0

     return accumulate_vec_map, count
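Here count tracks how many limb vectors were written to each grid cell, and overlapping vectors are averaged; a toy illustration of the boolean-mask indexing (grid size and overlap counts are made up):

```python
import numpy as np

# count[mask] indexes with the boolean array directly; "mask == True" only rebuilt
# the same array. Toy 3x3 grid with two PAF channels.
accumulate_vec_map = np.ones((3, 3, 2))
count = np.array([[0, 2, 1],
                  [1, 0, 3],
                  [0, 1, 2]], dtype=np.uint32)

mask = count == 0
count[mask] = 1                                    # avoid division by zero
accumulate_vec_map = np.divide(accumulate_vec_map, count[:, :, np.newaxis])
count[mask] = 0                                    # restore the true counts

print(accumulate_vec_map[0, 1])                    # [0.5 0.5] -- averaged over 2 limbs
```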
@@ -16,18 +16,13 @@
 def rtpose_preprocess(image):
     image = image.astype(np.float32)
     image = image / 256. - 0.5
-    image = image.transpose((2, 0, 1)).astype(np.float32)
-
-    return image
+    return image.transpose((2, 0, 1)).astype(np.float32)


 def inverse_rtpose_preprocess(image):
     image = image.astype(np.float32)
-    image = image.transpose((1, 2, 0)).astype(np.float32)
-    image = (image + 0.5) * 256.
-    image = image.astype(np.uint8)
-
-
-    return image
+    image = image.transpose((1, 2, 0)).astype(np.float32)
+    image = (image + 0.5) * 256.
+    return image.astype(np.uint8)


 def vgg_preprocess(image):
     image = image.astype(np.float32) / 255.
@@ -47,9 +42,7 @@ def inception_preprocess(image):
     image = image.copy()[:, :, ::-1]
     image = image.astype(np.float32)
     image = image / 128. - 1.
-    image = image.transpose((2, 0, 1)).astype(np.float32)
-
-    return image
+    return image.transpose((2, 0, 1)).astype(np.float32)


 def inverse_vgg_preprocess(image):
     means = [0.485, 0.456, 0.406]
@@ -70,9 +63,7 @@ def inverse_inception_preprocess(image):
     image = image.transpose((1, 2, 0)).astype(np.float32)
     image = image[:, :, ::-1]
     image = (image + 1.)*128.
-    image = image.astype(np.uint8)
-
-    return image
+    return image.astype(np.uint8)


 def ssd_preprocess(image):
     image = image.astype(np.float32)
@@ -93,9 +84,7 @@ def preprocess(image, mode):
         'inception': inception_preprocess,
         'ssd': ssd_preprocess
     }
-    if mode not in preprocessors:
-        return image
-    return preprocessors[mode](image)
+    return image if mode not in preprocessors else preprocessors[mode](image)


 def put_vec_maps(centerA, centerB, accumulate_vec_map, count, params_transform):
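The same dispatch idea in isolation, re-stated with the preprocessor table passed in explicitly (a deviation from the repo's module-level dict) and toy callables:

```python
# Look the mode up in a dict of callables; fall back to returning the image unchanged.
def preprocess(image, mode, preprocessors):
    return image if mode not in preprocessors else preprocessors[mode](image)

# Equivalently, and arguably clearer:
#     return preprocessors.get(mode, lambda img: img)(image)

toy_preprocessors = {'double': lambda img: img * 2}
print(preprocess(3, 'double', toy_preprocessors))   # 6
print(preprocess(3, 'unknown', toy_preprocessors))  # 3 -- unknown mode is a no-op
```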
@@ -132,8 +121,8 @@ def put_vec_maps(centerA, centerB, accumulate_vec_map, count, params_transform):
     min_y = max(int(round(min(centerA[1], centerB[1]) - thre)), 0)
     max_y = min(int(round(max(centerA[1], centerB[1]) + thre)), grid_y)

-    range_x = list(range(int(min_x), int(max_x), 1))
-    range_y = list(range(int(min_y), int(max_y), 1))
+    range_x = list(range(int(min_x), int(max_x)))
+    range_y = list(range(int(min_y), int(max_y)))
     xx, yy = np.meshgrid(range_x, range_y)
     ba_x = xx - centerA[0]  # the vector from (x,y) to centerA
     ba_y = yy - centerA[1]
Sourcery comment: Lines 45-62 refactored with the following changes: remove-redundant-slice-index