import cv2
import numpy as np
import os

# Load the training and test images
path = input('Enter the path of the image: ')
image = cv2.imread(path)
path2 = input('Enter the path for testing image: ')
test_image = cv2.imread(path2)
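# A simple guard (one possible approach): cv2.imread returns None when a path
# cannot be read, so fail fast before resizing.
if image is None or test_image is None:
    raise SystemExit('Could not read one of the images; check the paths.')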

# Resize both images to the same dimensions
image = cv2.resize(image, (600, 600))
test_image = cv2.resize(test_image, (600, 600))

# Convert the images to grayscale (OpenCV loads images in BGR order)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
test_gray = cv2.cvtColor(test_image, cv2.COLOR_BGR2GRAY)

# Display the given and test images side by side
image_stack = np.concatenate((image, test_image), axis=1)
cv2.imshow('image VS test_image', image_stack)

# Implement the ORB algorithm
orb = cv2.ORB_create()
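# Note: ORB_create() keeps at most 500 keypoints by default; the limit can be
# raised if needed, e.g. cv2.ORB_create(nfeatures=1000).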

# Detect keypoints and compute ORB descriptors for both grayscale images
train_keypoints, train_descriptor = orb.detectAndCompute(gray, None)
test_keypoints, test_descriptor = orb.detectAndCompute(test_gray, None)

# Draw the detected keypoints on a copy of the training image
keypoints_image = cv2.drawKeypoints(image, train_keypoints, None, color=(0, 255, 0))

# Display the image with its keypoints
cv2.imshow('keypoints', keypoints_image)
# Print the number of keypoints detected in the given image
print("Number of Keypoints Detected In The Image: ", len(train_keypoints))

# Create a Brute-Force matcher object
bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
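# NORM_HAMMING is the appropriate distance for binary descriptors such as ORB,
# and crossCheck=True keeps only pairs that are each other's best match.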

# Match the ORB descriptors of the training image against the test image
matches = bf.match(train_descriptor, test_descriptor)

# Matches with a smaller distance are better, so sort them in ascending order
matches = sorted(matches, key=lambda x: x.distance)
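# Optionally, only the strongest matches could be visualized, for example:
#   good_matches = matches[:50]
# and passed to cv2.drawMatches below instead of the full list.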

# Draw the matches (flags=2 skips keypoints that have no match)
result = cv2.drawMatches(image, train_keypoints, test_image, test_keypoints, matches, None, flags=2)

# Display the matching points
cv2.imshow('result', result)

# Name the output image after the input file
image_name = os.path.splitext(os.path.basename(path))[0]
output = "./ORB Algorithm/" + image_name + "(featureMatched).jpg"
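# cv2.imwrite fails silently (returns False) when the target folder is missing,
# so make sure the "ORB Algorithm" folder exists before saving.
os.makedirs("./ORB Algorithm", exist_ok=True)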
cv2.imwrite(output, result)

# Print the total number of matching keypoints between the training and test images
print("\nNumber of Matching Keypoints Between The Input and Test Images: ", len(matches))
cv2.waitKey(0)
cv2.destroyAllWindows()