# rotate_image.py — align an image to a template via ORB feature matching
# and RANSAC homography estimation.
import numpy as np
import imutils
import cv2
def rotate_image(image, template, maxFeatures=500, keepPercent=0.2, debug=False):
    """Align ``image`` to ``template`` using ORB features and a homography.

    NOTE(review): despite the name, this performs a full perspective
    alignment (rotation, translation, scale, skew), not a pure rotation.

    Parameters
    ----------
    image : numpy.ndarray
        BGR input image to be aligned.
    template : numpy.ndarray
        BGR reference image to align against.
    maxFeatures : int
        Maximum number of ORB keypoints detected in each image.
    keepPercent : float
        Fraction of the best (lowest-distance) matches to keep.
    debug : bool
        If True, show the matched keypoints and block until a key is pressed.

    Returns
    -------
    numpy.ndarray
        ``image`` warped into the template's coordinate frame, with the
        template's width and height.

    Raises
    ------
    ValueError
        If too few features or matches exist to estimate a homography,
        or if the homography estimation itself fails.
    """
    gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    gray_template = cv2.cvtColor(template, cv2.COLOR_BGR2GRAY)

    # Detect keypoints and compute ORB's binary descriptors in both images.
    orb = cv2.ORB_create(maxFeatures)
    (kpsA, descsA) = orb.detectAndCompute(gray_image, None)
    (kpsB, descsB) = orb.detectAndCompute(gray_template, None)

    # Fix: detectAndCompute returns descriptors=None on featureless input,
    # which previously made matcher.match fail with a cryptic OpenCV error.
    if descsA is None or descsB is None:
        raise ValueError("no ORB features detected in one of the images")

    # Hamming distance is the correct metric for ORB's binary descriptors.
    matcher = cv2.DescriptorMatcher_create(cv2.DESCRIPTOR_MATCHER_BRUTEFORCE_HAMMING)
    matches = sorted(matcher.match(descsA, descsB, None), key=lambda m: m.distance)

    # Keep the best matches. A homography needs at least 4 point pairs, so
    # never truncate below 4 (fixes keep == 0 when few matches are found,
    # which previously made findHomography crash).
    keep = max(int(len(matches) * keepPercent), 4)
    matches = matches[:keep]
    if len(matches) < 4:
        raise ValueError(
            "need at least 4 matches to estimate a homography, got %d" % len(matches)
        )

    if debug:
        # Visualize the surviving matches side by side for inspection.
        matchedVis = cv2.drawMatches(image, kpsA, template, kpsB, matches, None)
        matchedVis = imutils.resize(matchedVis, width=1000)
        cv2.imshow("Matched Keypoints", matchedVis)
        cv2.waitKey(0)

    # Gather matched coordinates: queryIdx indexes the input image's
    # keypoints, trainIdx the template's.
    ptsA = np.zeros((len(matches), 2), dtype=np.float32)
    ptsB = np.zeros((len(matches), 2), dtype=np.float32)
    for (i, m) in enumerate(matches):
        ptsA[i] = kpsA[m.queryIdx].pt
        ptsB[i] = kpsB[m.trainIdx].pt

    # RANSAC rejects outlier correspondences while fitting the 3x3 homography.
    (H, mask) = cv2.findHomography(ptsA, ptsB, method=cv2.RANSAC)
    # Fix: findHomography returns None on failure; the original then passed
    # None into warpPerspective, producing an opaque OpenCV assertion error.
    if H is None:
        raise ValueError("homography estimation failed (matches too inconsistent)")

    # Warp the input into the template's coordinate frame / size.
    (h, w) = template.shape[:2]
    return cv2.warpPerspective(image, H, (w, h))