// compute ORB descriptors: intensity-centroid orientation + rotated BRIEF (rBRIEF)
void ComputeORB(const cv::Mat &img, vector<cv::KeyPoint> &keypoints, vector<DescType> &descriptors) {
const int half_patch_size = 8;   // orientation is computed over a 16x16 patch around the keypoint
const int half_boundary = 16;    // keypoints closer than this to the image border are skipped
int bad_points = 0;
for (auto &kp: keypoints) {
if (kp.pt.x < half_boundary || kp.pt.y < half_boundary ||
kp.pt.x >= img.cols - half_boundary || kp.pt.y >= img.rows - half_boundary) {
// outside
bad_points++;
descriptors.push_back({});
continue;
}
// The check above compares kp.pt.x and kp.pt.y against half_boundary and against the image
// width and height. A keypoint lying within half_boundary pixels of the border is counted as
// a "bad point": an empty (zero-length) descriptor is pushed in its place, since valid ORB
// descriptors have a fixed length, and `continue` moves on to the next keypoint.
float m01 = 0, m10 = 0;
for (int dx = -half_patch_size; dx < half_patch_size; ++dx) {
for (int dy = -half_patch_size; dy < half_patch_size; ++dy) {
uchar pixel = img.at<uchar>(kp.pt.y + dy, kp.pt.x + dx);
m10 += dx * pixel;
m01 += dy * pixel;
}
}
// The two nested loops above sweep the square patch of half-width half_patch_size centred on kp.
// For each pixel, img.at<uchar>(kp.pt.y + dy, kp.pt.x + dx) reads its intensity, with dx and dy
// running from -half_patch_size to half_patch_size - 1, and dx * pixel and dy * pixel are
// accumulated into m10 and m01. These are the first-order image moments of the patch; the
// direction of the intensity centroid relative to the patch centre, atan2(m01, m10), defines
// the keypoint orientation used below.
// orientation angle theta = atan2(m01, m10); only its sine and cosine are needed below
float m_sqrt = sqrt(m01 * m01 + m10 * m10) + 1e-18; // avoid divide by zero
float sin_theta = m01 / m_sqrt;
float cos_theta = m10 / m_sqrt;
// compute the rotated (steered) BRIEF descriptor: 256 comparisons packed into 8 x 32 bits
DescType desc(8, 0);
for (int i = 0; i < 8; i++) {
uint32_t d = 0;
for (int k = 0; k < 32; k++) {
int idx_pq = i * 32 + k;
cv::Point2f p(ORB_pattern[idx_pq * 4], ORB_pattern[idx_pq * 4 + 1]);
cv::Point2f q(ORB_pattern[idx_pq * 4 + 2], ORB_pattern[idx_pq * 4 + 3]);
// rotate with theta
cv::Point2f pp = cv::Point2f(cos_theta * p.x - sin_theta * p.y, sin_theta * p.x + cos_theta * p.y)
+ kp.pt;
cv::Point2f qq = cv::Point2f(cos_theta * q.x - sin_theta * q.y, sin_theta * q.x + cos_theta * q.y)
+ kp.pt;
if (img.at<uchar>(pp.y, pp.x) < img.at<uchar>(qq.y, qq.x)) {
d |= 1 << k;
}
}
desc[i] = d;
}
// The descriptor consists of 8 x 32-bit integers, i.e. 256 bits in total. Each bit is the result
// of one intensity comparison: a point pair (p, q) from the ORB_pattern sampling pattern is
// rotated by the keypoint orientation and shifted to the keypoint, and the bit is set when the
// pixel at p is darker than the pixel at q. A short usage sketch follows after this function.
descriptors.push_back(desc);
}
cout << "bad/total: " << bad_points << "/" << keypoints.size() << endl;
}
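// Illustrative usage sketch for ComputeORB (an addition for clarity, assuming DescType is
// std::vector<uint32_t> and that the OpenCV headers, ORB_pattern and <nmmintrin.h> are already
// included in this file): detect FAST corners, then compute one descriptor per keypoint.
// The FAST threshold of 40 is only an example value.
void DetectAndComputeORB(const cv::Mat &img, vector<cv::KeyPoint> &keypoints,
                         vector<DescType> &descriptors) {
  cv::FAST(img, keypoints, 40);             // FAST corner detection
  ComputeORB(img, keypoints, descriptors);  // intensity-centroid orientation + rBRIEF
}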
// brute-force matching by Hamming distance
void BfMatch(const vector<DescType> &desc1, const vector<DescType> &desc2, vector<cv::DMatch> &matches) {
const int d_max = 40;   // maximum Hamming distance (out of 256) to accept a match
for (size_t i1 = 0; i1 < desc1.size(); ++i1) {
if (desc1[i1].empty()) continue;
cv::DMatch m{static_cast<int>(i1), 0, 256};  // cast avoids a narrowing error; 256 means "no match yet"
for (size_t i2 = 0; i2 < desc2.size(); ++i2) {
if (desc2[i2].empty()) continue;
int distance = 0;
// Hamming distance: popcount of the XOR over the 8 32-bit words
// (_mm_popcnt_u32 needs SSE4.2 and <nmmintrin.h>, e.g. compile with -msse4.2)
for (int k = 0; k < 8; k++) {
distance += _mm_popcnt_u32(desc1[i1][k] ^ desc2[i2][k]);
}
if (distance < d_max && distance < m.distance) {
m.distance = distance;
m.trainIdx = i2;
}
}
if (m.distance < d_max) {
matches.push_back(m);
}
}
}
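// A minimal end-to-end sketch of how the two functions above fit together (a usage illustration
// added here, not part of the original pipeline): load two grayscale images, detect FAST corners,
// compute the hand-rolled ORB descriptors, match them, and show the result. The image file names
// are placeholders.
int DemoOrbMatch() {
  cv::Mat img1 = cv::imread("1.png", cv::IMREAD_GRAYSCALE);  // placeholder path
  cv::Mat img2 = cv::imread("2.png", cv::IMREAD_GRAYSCALE);  // placeholder path
  if (img1.empty() || img2.empty()) return -1;

  vector<cv::KeyPoint> kp1, kp2;
  vector<DescType> desc1, desc2;
  cv::FAST(img1, kp1, 40);           // illustrative FAST threshold
  cv::FAST(img2, kp2, 40);
  ComputeORB(img1, kp1, desc1);
  ComputeORB(img2, kp2, desc2);

  vector<cv::DMatch> matches;
  BfMatch(desc1, desc2, matches);
  cout << "matches: " << matches.size() << endl;

  cv::Mat image_show;
  cv::drawMatches(img1, kp1, img2, kp2, matches, image_show);
  cv::imshow("matches", image_show);
  cv::waitKey(0);
  return 0;
}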