Thanks for the response. I made some changes using a different feature descriptor. I only tested it on one sample, but it did find the object, and the angle was within the expected range (near zero).
import cv2
import numpy as np
from matplotlib import pyplot as plt

def feature_detection_with_angle(train_img_path, scene_img_path, ratioThreshold=0.75):
    # Load both images as grayscale for SIFT
    sceneImage = cv2.imread(scene_img_path, cv2.IMREAD_GRAYSCALE)
    templateImage = cv2.imread(train_img_path, cv2.IMREAD_GRAYSCALE)

    # Detect keypoints and compute descriptors
    sift = cv2.SIFT_create()
    kpscene, descr1 = sift.detectAndCompute(sceneImage, None)
    kptemplate, descr2 = sift.detectAndCompute(templateImage, None)

    # FLANN matcher with a KD-tree index (FLANN_INDEX_KDTREE is 1, not 0)
    FLANN_INDEX_KDTREE = 1
    index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
    search_params = dict(checks=50)
    matcher = cv2.FlannBasedMatcher(index_params, search_params)
    matches = matcher.knnMatch(descr1, descr2, k=2)

    # Lowe's ratio test to keep only distinctive matches
    goodMatches = []
    for m, n in matches:
        if m.distance < ratioThreshold * n.distance:
            goodMatches.append(m)

    if len(goodMatches) > 4:
        # queryIdx indexes the scene keypoints, trainIdx the template keypoints
        pointsSceneGood = np.float32([kpscene[m.queryIdx].pt for m in goodMatches])[:10]
        pointsTemplateGood = np.float32([kptemplate[m.trainIdx].pt for m in goodMatches])[:10]

        # Homography that maps template coordinates into the scene
        homography, mask = cv2.findHomography(pointsTemplateGood, pointsSceneGood, cv2.RANSAC)
        if homography is not None:
            # In-plane rotation estimated from the upper-left 2x2 block of the homography
            theta_radians = np.arctan2(homography[1, 0], homography[0, 0])
            print('Angle: ' + str(np.degrees(theta_radians)))

            # Project the template corners into the scene (template dimensions, not scene)
            h, w = templateImage.shape
            corners_template = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)
            transformed_corners = cv2.perspectiveTransform(corners_template, homography)

    # Visualise the matches that survived the ratio test
    img3 = cv2.drawMatches(sceneImage, kpscene, templateImage, kptemplate, goodMatches, None,
                           flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
    plt.imshow(img3)
    plt.show()
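For reference, a minimal call might look like the sketch below; the file paths are hypothetical placeholders, not my actual test images. Note that the printed angle only reflects the in-plane rotation component of the estimated homography.

# Hypothetical example call; replace the paths with your own template and scene images.
feature_detection_with_angle('template.png', 'scene.png', ratioThreshold=0.75)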