# cv2 and numpy (as np) are assumed to be imported at the top of the module
def calculate_sift(self, last_frame, new_frame, last_kp=None):
    # convert the incoming frame to grayscale and detect its SIFT features
    # (self.sift is assumed to be a cv2.SIFT instance created in __init__)
    frame_gray = cv2.cvtColor(new_frame, cv2.COLOR_BGR2GRAY)
    scene_kps, scene_desc = self.sift.detectAndCompute(frame_gray, None)
    if scene_desc is None:
        # no features at all in the scene frame; nothing to match against
        return None
    # k is assumed to index the precomputed template whose keypoints
    # (self.kps[k]) and descriptors (self.descs[k]) we match against;
    # self.template_index is a hypothetical attribute standing in for
    # however the caller selects that template
    k = self.template_index
    # find corresponding points between the template image and the input image
    bf = cv2.BFMatcher()
    matches = bf.knnMatch(self.descs[k], scene_desc, k=2)
    # apply Lowe's ratio test to the matches; this weeds out ambiguous matches
    good_keypoints = []
    for m, n in matches:
        if m.distance < self.good_thresh * n.distance:
            good_keypoints.append(m)
    # transform the keypoint data into arrays for the homography check:
    # template_pts gets the precomputed keypoints from the template image,
    # scene_img_pts gets the corresponding keypoints from the input image
    template_pts = np.float32(
        [self.kps[k][m.queryIdx].pt for m in good_keypoints]
    ).reshape(-1, 1, 2)
    scene_img_pts = np.float32(
        [scene_kps[m.trainIdx].pt for m in good_keypoints]
    ).reshape(-1, 1, 2)
    # if we can't find any matching keypoints, bail
    # (the scene image was probably nonexistent or very low quality)
    if scene_img_pts.shape[0] == 0:
        return None
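    # Hedged sketch of the homography check that the comments above set up; the
    # original excerpt stops before computing it. cv2.findHomography needs at
    # least 4 correspondences, and the 5.0 px RANSAC reprojection threshold
    # here is an assumed value, not one taken from the original code.
    if scene_img_pts.shape[0] >= 4:
        M, mask = cv2.findHomography(template_pts, scene_img_pts, cv2.RANSAC, 5.0)
        # M maps template coordinates into the scene; what to do with it
        # (e.g. warping the template outline) is left to the surrounding code.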
    # use OpenCV to calculate sparse (Lucas-Kanade) optical flow between the
    # previously stored grayscale frame and the current one; last_frame_gray,
    # last_frame_features, and lk_params are assumed to have been set by an
    # earlier call to store_as_last_frame / in __init__
    new_frame_matched_features, status, error = cv2.calcOpticalFlowPyrLK(
        self.last_frame_gray,
        frame_gray,
        self.last_frame_features,
        None,
        **self.lk_params
    )
    # report how the tracked features moved between the two frames
    self.publish_interframe_motion(
        self.last_frame_features,
        new_frame_matched_features,
        status,
        error
    )
    # save this frame's data so the next call can track against it
    self.store_as_last_frame(frame_gray)
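# The method above relies on helpers this excerpt does not show
# (store_as_last_frame, publish_interframe_motion) and an lk_params dict.
# The sketch below is an assumption about what store_as_last_frame and
# lk_params might look like, built from standard OpenCV calls; it is not the
# original implementation, and publish_interframe_motion is omitted because
# its message format isn't visible here.
def store_as_last_frame(self, frame_gray):
    # cache the grayscale frame and re-detect corner features so the next
    # calcOpticalFlowPyrLK call has a previous image and points to track
    self.last_frame_gray = frame_gray
    self.last_frame_features = cv2.goodFeaturesToTrack(
        frame_gray, maxCorners=200, qualityLevel=0.01, minDistance=7
    )

# assumed Lucas-Kanade parameters (typical OpenCV example values); the real
# values would live wherever self.lk_params is initialized, e.g. in __init__:
# self.lk_params = dict(
#     winSize=(15, 15),
#     maxLevel=2,
#     criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03),
# )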