def get_matches(self, train, corr):
    train_img = cv2.imread(train, 0)
    query_img = self.query
    # Initialize the SIFT detector (requires opencv-contrib; newer OpenCV
    # releases expose the same detector as cv2.SIFT_create())
    sift = cv2.xfeatures2d.SIFT_create()
    # Find the keypoints and descriptors with SIFT
    kp1, des1 = sift.detectAndCompute(train_img, None)
    kp2, des2 = sift.detectAndCompute(query_img, None)
    # Create a brute-force matcher and fetch the two nearest neighbours per descriptor
    bf = cv2.BFMatcher()
    try:
        matches = bf.knnMatch(des1, des2, k=2)
    except cv2.error:
        return False
    good_matches = []
    cluster = []
    for m, n in matches:
        # des1 (train image) was passed as the OpenCV query set, so
        # queryIdx indexes kp1 and trainIdx indexes kp2
        img2_idx = m.trainIdx
        img1_idx = m.queryIdx
        (x1, y1) = kp1[img1_idx].pt
        (x2, y2) = kp2[img2_idx].pt
        # print("Compare %d to %d and %d to %d" % (x1, x2, y1, y2))
        # Lowe's ratio test plus a positional filter on the query keypoint
        if m.distance < 0.8 * n.distance and y2 > self.yThreshold and x2 < self.xThreshold:
            good_matches.append([m])
            cluster.append([int(x2), int(y2)])
    # Not enough filtered matches to count as a hit
    if len(cluster) <= corr:
        return False
    # Fit a single-cluster KMeans so its centroid marks the match location
    self.kmeans = KMeans(n_clusters=1, random_state=0).fit(cluster)
    new_cluster = self.compare_distances(train_img, cluster)
    # Reject the result if fewer than half of the matches survive the distance check
    if len(new_cluster) == 0 or len(new_cluster) / len(cluster) < .5:
        return False
    img3 = cv2.drawMatchesKnn(
        train_img, kp1, query_img, kp2, good_matches, None, flags=2)
    if self._debug:
        self.images.append(img3)
        self.debug_matcher(img3)
    return True
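
Because get_matches reads several attributes from self (self.query, self.yThreshold, self.xThreshold, self._debug, self.images, self.kmeans) and calls self.compare_distances and self.debug_matcher, it needs an enclosing class. The sketch below shows one way such a class might be wired up and the method invoked; the class name Matcher, the constructor arguments, and the placeholder compare_distances/debug_matcher bodies are assumptions for illustration, not part of the original project.

import cv2
from sklearn.cluster import KMeans


class Matcher:
    """Hypothetical wrapper; names, defaults and placeholder bodies are assumptions."""

    def __init__(self, query_path, x_threshold=10000, y_threshold=0, debug=False):
        self.query = cv2.imread(query_path, 0)  # query screenshot, grayscale
        self.xThreshold = x_threshold           # keep matches left of this x
        self.yThreshold = y_threshold           # keep matches below this y
        self._debug = debug
        self.images = []
        self.kmeans = None

    def compare_distances(self, train_img, cluster):
        # Placeholder: the real method presumably drops points far from the
        # KMeans centroid; here every point is kept so the example runs.
        return cluster

    def debug_matcher(self, img):
        # Placeholder debug hook: dump the annotated match image to disk.
        cv2.imwrite("matches_debug.png", img)


# Attach the method defined above to the sketch class.
Matcher.get_matches = get_matches

# Usage: require more than 10 ratio-test matches for a positive result.
# matcher = Matcher("screenshot.png", debug=True)
# found = matcher.get_matches("template.png", corr=10)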