🌟 特征提取魔法指南
🎨 在圖像處理的世界里,特征提取就像是尋找圖像的"指紋",讓我們能夠識別和理解圖像的獨特性。讓我們一起來探索這些神奇的特征提取術吧!
📚 目錄
- 基礎概念 - 特征的"體檢"
- Harris角點 - 圖像的"關節"
- SIFT特征 - 圖像的"全身體檢"
- SURF特征 - 圖像的"快速體檢"
- ORB特征 - 圖像的"經濟體檢"
- 特征匹配 - 圖像的"認親"
- 性能優化 - "體檢"的加速器
- 實戰應用 - "體檢"的實踐
1. 什麼是特征提取?
特征提取就像是給圖像做"體檢",主要目的是:
- 🔍 發現圖像中的關鍵信息
- 🎯 提取有意義的特征
- 🛠️ 降低數據維度
- 📊 提高識別效率
常見的特征包括:
- 角點特征(圖像的"關節")
- SIFT特征(圖像的"指紋")
- SURF特征(圖像的"快速指紋")
- ORB特征(圖像的"經濟指紋")
2. Harris角點檢測
2.1 基本原理
角點檢測就像是尋找圖像中的"關節",這些點通常具有以下特點:
- 在兩個方向上都有明顯變化
- 對旋轉和光照變化不敏感
- 具有局部唯一性
數學表達式:
Harris角點檢測的響應函數:
$$R = \det(M) - k \cdot \mathrm{trace}(M)^2$$
其中:
- $M$ 是自相關矩陣
- $k$ 是經驗常數(通常取0.04-0.06)
- $\det(M)$ 是矩陣的行列式
- $\mathrm{trace}(M)$ 是矩陣的跡
2.2 手動實現
C++實現
void harris_corner_detection(const Mat& src, Mat& dst,double k = 0.04, int blockSize = 3) {CV_Assert(!src.empty() && src.channels() == 1);// 計算梯度Mat dx, dy;Sobel(src, dx, CV_32F, 1, 0, 3);Sobel(src, dy, CV_32F, 0, 1, 3);// 計算自相關矩陣的元素Mat dx2 = dx.mul(dx);Mat dy2 = dy.mul(dy);Mat dxdy = dx.mul(dy);// 計算響應函數dst.create(src.size(), CV_32F);for (int y = 0; y < src.rows; y++) {for (int x = 0; x < src.cols; x++) {float a = 0, b = 0, c = 0;for (int i = -blockSize/2; i <= blockSize/2; i++) {for (int j = -blockSize/2; j <= blockSize/2; j++) {int ny = y + i;int nx = x + j;if (ny >= 0 && ny < src.rows && nx >= 0 && nx < src.cols) {a += dx2.at<float>(ny, nx);b += dxdy.at<float>(ny, nx);c += dy2.at<float>(ny, nx);}}}float det = a * c - b * b;float trace = a + c;dst.at<float>(y, x) = det - k * trace * trace;}}
}
Python實現
def harris_corner_detection_manual(image, k=0.04, block_size=3):"""手動實現Harris角點檢測參數:image: 輸入灰度圖像k: Harris角點檢測參數block_size: 鄰域大小"""if len(image.shape) == 3:gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)else:gray = image.copy()// 計算梯度dx = cv2.Sobel(gray, cv2.CV_32F, 1, 0, ksize=3)dy = cv2.Sobel(gray, cv2.CV_32F, 0, 1, ksize=3)// 計算自相關矩陣的元素dx2 = dx * dxdy2 = dy * dydxdy = dx * dy// 計算響應函數height, width = gray.shaperesponse = np.zeros((height, width), dtype=np.float32)offset = block_size // 2for y in range(offset, height - offset):for x in range(offset, width - offset):// 計算局部窗口內的自相關矩陣a = np.sum(dx2[y-offset:y+offset+1, x-offset:x+offset+1])b = np.sum(dxdy[y-offset:y+offset+1, x-offset:x+offset+1])c = np.sum(dy2[y-offset:y+offset+1, x-offset:x+offset+1])// 計算響應值det = a * c - b * btrace = a + cresponse[y, x] = det - k * trace * tracereturn response
3. SIFT特征
3.1 基本原理
SIFT(Scale-Invariant Feature Transform)就像是圖像的"全身體檢",不管圖像怎么變化(旋轉、縮放),都能找到穩定的特征點。
主要步驟:
-
尺度空間構建(多角度檢查):
$$L(x, y, \sigma) = G(x, y, \sigma) * I(x, y)$$
其中:
- $G(x, y, \sigma)$ 是高斯核
- $I(x, y)$ 是輸入圖像
- $\sigma$ 是尺度參數
-
關鍵點定位(找到重點):
$$D(x, y, \sigma) = L(x, y, k\sigma) - L(x, y, \sigma)$$
-
方向分配(確定朝向):
- 計算梯度方向直方圖
- 選擇主方向
3.2 手動實現
C++實現
void sift_features(const Mat& src, vector<KeyPoint>& keypoints,Mat& descriptors, int nfeatures = 0) {CV_Assert(!src.empty());// 構建高斯金字塔vector<Mat> gaussian_pyramid;buildGaussianPyramid(src, gaussian_pyramid, 4);// 構建DOG金字塔vector<Mat> dog_pyramid;buildDoGPyramid(gaussian_pyramid, dog_pyramid);// 檢測關鍵點detectKeypoints(dog_pyramid, keypoints);// 計算描述子computeDescriptors(gaussian_pyramid, keypoints, descriptors);
}void buildGaussianPyramid(const Mat& src, vector<Mat>& pyramid, int nOctaves) {pyramid.clear();Mat current = src.clone();for (int i = 0; i < nOctaves; i++) {// 存儲當前八度的圖像pyramid.push_back(current);// 下采樣Mat down;pyrDown(current, down);current = down;}
}void buildDoGPyramid(const vector<Mat>& gaussian_pyramid,vector<Mat>& dog_pyramid) {dog_pyramid.clear();for (size_t i = 0; i < gaussian_pyramid.size() - 1; i++) {Mat dog;subtract(gaussian_pyramid[i + 1], gaussian_pyramid[i], dog);dog_pyramid.push_back(dog);}
}void detectKeypoints(const vector<Mat>& dog_pyramid,vector<KeyPoint>& keypoints) {keypoints.clear();for (size_t i = 1; i < dog_pyramid.size() - 1; i++) {const Mat& prev = dog_pyramid[i - 1];const Mat& curr = dog_pyramid[i];const Mat& next = dog_pyramid[i + 1];for (int y = 1; y < curr.rows - 1; y++) {for (int x = 1; x < curr.cols - 1; x++) {// 檢查當前點是否為極值點float val = curr.at<float>(y, x);bool is_max = true;bool is_min = true;// 檢查3x3x3鄰域for (int dy = -1; dy <= 1 && (is_max || is_min); dy++) {for (int dx = -1; dx <= 1 && (is_max || is_min); dx++) {// 檢查相鄰層if (val <= prev.at<float>(y + dy, x + dx)) is_max = false;if (val >= prev.at<float>(y + dy, x + dx)) is_min = false;if (val <= next.at<float>(y + dy, x + dx)) is_max = false;if (val >= next.at<float>(y + dy, x + dx)) is_min = false;// 檢查當前層if (dx != 0 || dy != 0) {if (val <= curr.at<float>(y + dy, x + dx)) is_max = false;if (val >= curr.at<float>(y + dy, x + dx)) is_min = false;}}}if (is_max || is_min) {KeyPoint kp(x, y, 1.6 * pow(2, i/3.0));keypoints.push_back(kp);}}}}
}void computeDescriptors(const vector<Mat>& gaussian_pyramid,const vector<KeyPoint>& keypoints,Mat& descriptors) {descriptors.create(keypoints.size(), 128, CV_32F);for (size_t i = 0; i < keypoints.size(); i++) {const KeyPoint& kp = keypoints[i];float* desc = descriptors.ptr<float>(i);// 計算梯度方向直方圖Mat mag, angle;const Mat& img = gaussian_pyramid[kp.octave];Sobel(img, mag, CV_32F, 1, 0);Sobel(img, angle, CV_32F, 0, 1);// 計算描述子for (int y = -4; y < 4; y++) {for (int x = -4; x < 4; x++) {int px = kp.pt.x + x;int py = kp.pt.y + y;if (px >= 0 && px < img.cols && py >= 0 && py < img.rows) {float m = mag.at<float>(py, px);float a = angle.at<float>(py, px);// 將梯度方向量化為8個方向int bin = (int)(a * 8 / (2 * CV_PI)) % 8;desc[bin] += m;}}}// 歸一化描述子float norm = 0;for (int j = 0; j < 128; j++) {norm += desc[j] * desc[j];}norm = sqrt(norm);for (int j = 0; j < 128; j++) {desc[j] /= norm;}}
}
Python實現
def sift_features_manual(image, n_features=0):
    """Manual, simplified SIFT-style feature extraction.

    Args:
        image: input image (grayscale or BGR; BGR is converted).
        n_features: desired number of keypoints; 0 means unlimited
            (currently unused — kept for API compatibility).

    Returns:
        (keypoints, descriptors): list of cv2.KeyPoint and an
        (N, 128) float32 descriptor array.
    """
    if len(image.shape) == 3:
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    else:
        gray = image.copy()

    gaussian_pyramid = build_gaussian_pyramid(gray, n_octaves=4)
    dog_pyramid = build_dog_pyramid(gaussian_pyramid)
    keypoints = detect_keypoints(dog_pyramid)
    descriptors = compute_descriptors(gaussian_pyramid, keypoints)
    return keypoints, descriptors


def build_gaussian_pyramid(image, n_octaves):
    """Build n_octaves progressively Gaussian-blurred float32 levels.

    All levels keep the original resolution.
    BUGFIX: the previous version used cv2.pyrDown, which halved each
    level; cv2.subtract in build_dog_pyramid and the cross-level
    comparisons in detect_keypoints then failed on mismatched shapes.
    """
    pyramid = []
    current = image.astype(np.float32)
    sigma = 1.6
    for _ in range(n_octaves):
        pyramid.append(current.copy())
        current = cv2.GaussianBlur(current, (0, 0), sigma)
        sigma *= 1.6  # widen the kernel each level
    return pyramid


def build_dog_pyramid(gaussian_pyramid):
    """Difference-of-Gaussian levels from consecutive blur levels."""
    return [cv2.subtract(gaussian_pyramid[i + 1], gaussian_pyramid[i])
            for i in range(len(gaussian_pyramid) - 1)]


def detect_keypoints(dog_pyramid):
    """Keypoints are strict 3x3x3 extrema in the DoG stack."""
    keypoints = []
    for i in range(1, len(dog_pyramid) - 1):
        below = dog_pyramid[i - 1]
        curr = dog_pyramid[i]
        above = dog_pyramid[i + 1]  # renamed from `next` (shadowed a builtin)

        for y in range(1, curr.shape[0] - 1):
            for x in range(1, curr.shape[1] - 1):
                val = curr[y, x]
                is_max = is_min = True
                for dy in range(-1, 2):
                    for dx in range(-1, 2):
                        # Neighbouring scale levels.
                        if val <= below[y + dy, x + dx]:
                            is_max = False
                        if val >= below[y + dy, x + dx]:
                            is_min = False
                        if val <= above[y + dy, x + dx]:
                            is_max = False
                        if val >= above[y + dy, x + dx]:
                            is_min = False
                        # Same level, excluding the centre pixel.
                        if dx != 0 or dy != 0:
                            if val <= curr[y + dy, x + dx]:
                                is_max = False
                            if val >= curr[y + dy, x + dx]:
                                is_min = False
                    if not (is_max or is_min):
                        break
                if is_max or is_min:
                    keypoints.append(cv2.KeyPoint(x, y, 1.6 * (2 ** (i / 3.0))))
    return keypoints


def compute_descriptors(gaussian_pyramid, keypoints):
    """128-bin orientation-histogram descriptor per keypoint.

    BUGFIX: gradient samples were read with C++ syntax
    (``mag.at<float>(py, px)``), a SyntaxError in Python; the arrays are
    now indexed normally.  ``bin`` no longer shadows the builtin.
    """
    descriptors = np.zeros((len(keypoints), 128), dtype=np.float32)
    for i, kp in enumerate(keypoints):
        # kp.octave defaults to 0, so the full-resolution level is used.
        img = gaussian_pyramid[kp.octave]
        desc = descriptors[i]

        # Real magnitude/angle (radians in [0, 2*pi)) from Sobel gradients.
        mag, angle = cv2.cartToPolar(
            cv2.Sobel(img, cv2.CV_32F, 1, 0),
            cv2.Sobel(img, cv2.CV_32F, 0, 1))

        for y in range(-4, 4):
            for x in range(-4, 4):
                px = int(kp.pt[0] + x)
                py = int(kp.pt[1] + y)
                if 0 <= px < img.shape[1] and 0 <= py < img.shape[0]:
                    m = mag[py, px]
                    a = angle[py, px]
                    # Quantise the angle into 8 orientation bins.
                    hist_bin = int(a * 8 / (2 * np.pi)) % 8
                    desc[hist_bin] += m

        # L2-normalise; guard the all-zero histogram.
        norm = np.linalg.norm(desc)
        if norm > 0:
            desc /= norm
    return descriptors
4. SURF特征
4.1 基本原理
SURF(Speeded-Up Robust Features)就像是SIFT的"快速體檢版",用積分圖像和盒子濾波器加速計算。
核心思想:
$$H(x, y) = D_{xx}(x, y)\,D_{yy}(x, y) - \left(D_{xy}(x, y)\right)^2$$
其中:
- $D_{xx}$ 是x方向二階導
- $D_{yy}$ 是y方向二階導
- $D_{xy}$ 是xy方向二階導
4.2 手動實現
C++實現
void surf_features(const Mat& src, vector<KeyPoint>& keypoints,Mat& descriptors, int nfeatures = 0) {CV_Assert(!src.empty());// 計算積分圖Mat integral;integral(src, integral, CV_32F);// 使用Hessian矩陣檢測特征點detectSurfFeatures(integral, keypoints);// 計算描述子computeSurfDescriptors(integral, keypoints, descriptors);
}void detectSurfFeatures(const Mat& integral, vector<KeyPoint>& keypoints) {keypoints.clear();// 使用不同尺度的Hessian矩陣檢測特征點vector<float> scales = {1.2f, 1.6f, 2.0f, 2.4f, 2.8f};for (float scale : scales) {int size = (int)(scale * 9);if (size % 2 == 0) size++;// 計算Hessian矩陣的行列式Mat det = Mat::zeros(integral.rows, integral.cols, CV_32F);for (int y = size/2; y < integral.rows - size/2; y++) {for (int x = size/2; x < integral.cols - size/2; x++) {// 計算Dxx, Dyy, Dxyfloat dxx = calculateHessian(integral, x, y, size, 0);float dyy = calculateHessian(integral, x, y, size, 1);float dxy = calculateHessian(integral, x, y, size, 2);// 計算Hessian行列式float hessian = dxx * dyy - 0.81f * dxy * dxy;if (hessian > 0) {det.at<float>(y, x) = hessian;}}}// 非極大值抑制for (int y = size/2 + 1; y < det.rows - size/2 - 1; y++) {for (int x = size/2 + 1; x < det.cols - size/2 - 1; x++) {float val = det.at<float>(y, x);if (val > 0) {bool is_max = true;for (int dy = -1; dy <= 1 && is_max; dy++) {for (int dx = -1; dx <= 1 && is_max; dx++) {if (dx == 0 && dy == 0) continue;if (val <= det.at<float>(y + dy, x + dx)) {is_max = false;}}}if (is_max) {KeyPoint kp(x, y, size);keypoints.push_back(kp);}}}}}
}float calculateHessian(const Mat& integral, int x, int y, int size, int type) {int half = size / 2;float response = 0;switch (type) {case 0: // Dxxresponse = boxFilter(integral, x - half, y - half, size, half) -2 * boxFilter(integral, x - half/2, y - half, half, half) +boxFilter(integral, x, y - half, size, half);break;case 1: // Dyyresponse = boxFilter(integral, x - half, y - half, size, size) -2 * boxFilter(integral, x - half, y - half/2, size, half) +boxFilter(integral, x - half, y, size, size);break;case 2: // Dxyresponse = boxFilter(integral, x - half, y - half, size, size) +boxFilter(integral, x, y, size, size) -boxFilter(integral, x - half, y, size, size) -boxFilter(integral, x, y - half, size, size);break;}return response;
}float boxFilter(const Mat& integral, int x, int y, int width, int height) {int x1 = max(0, x);int y1 = max(0, y);int x2 = min(integral.cols - 1, x + width - 1);int y2 = min(integral.rows - 1, y + height - 1);return integral.at<float>(y2, x2) -integral.at<float>(y2, x1) -integral.at<float>(y1, x2) +integral.at<float>(y1, x1);
}void computeSurfDescriptors(const Mat& integral,const vector<KeyPoint>& keypoints,Mat& descriptors) {descriptors.create(keypoints.size(), 64, CV_32F);for (size_t i = 0; i < keypoints.size(); i++) {const KeyPoint& kp = keypoints[i];float* desc = descriptors.ptr<float>(i);// 計算主方向float angle = computeOrientation(integral, kp);// 計算描述子for (int y = -2; y < 2; y++) {for (int x = -2; x < 2; x++) {float dx = 0, dy = 0, abs_dx = 0, abs_dy = 0;// 計算4x4子區域內的Haar小波響應for (int sy = 0; sy < 5; sy++) {for (int sx = 0; sx < 5; sx++) {int px = kp.pt.x + x * 5 + sx;int py = kp.pt.y + y * 5 + sy;if (px >= 0 && px < integral.cols && py >= 0 && py < integral.rows) {float haar_x = haarWavelet(integral, px, py, 2, 0);float haar_y = haarWavelet(integral, px, py, 2, 1);// 旋轉到主方向float rot_x = haar_x * cos(angle) + haar_y * sin(angle);float rot_y = -haar_x * sin(angle) + haar_y * cos(angle);dx += rot_x;dy += rot_y;abs_dx += abs(rot_x);abs_dy += abs(rot_y);}}}// 存儲描述子int idx = (y + 2) * 16 + (x + 2) * 4;desc[idx] = dx;desc[idx + 1] = dy;desc[idx + 2] = abs_dx;desc[idx + 3] = abs_dy;}}// 歸一化描述子float norm = 0;for (int j = 0; j < 64; j++) {norm += desc[j] * desc[j];}norm = sqrt(norm);for (int j = 0; j < 64; j++) {desc[j] /= norm;}}
}float computeOrientation(const Mat& integral, const KeyPoint& kp) {float angle = 0;float max_response = 0;// 在60度扇形區域內搜索主方向for (float theta = 0; theta < 2 * CV_PI; theta += 0.1f) {float response = 0;for (int r = 6; r < 20; r += 2) {int x = kp.pt.x + r * cos(theta);int y = kp.pt.y + r * sin(theta);if (x >= 0 && x < integral.cols && y >= 0 && y < integral.rows) {response += haarWavelet(integral, x, y, 4, 0) +haarWavelet(integral, x, y, 4, 1);}}if (response > max_response) {max_response = response;angle = theta;}}return angle;
}float haarWavelet(const Mat& integral, int x, int y, int size, int type) {float response = 0;switch (type) {case 0: // x方向response = boxFilter(integral, x + size/2, y - size/2, size/2, size) -boxFilter(integral, x - size/2, y - size/2, size/2, size);break;case 1: // y方向response = boxFilter(integral, x - size/2, y + size/2, size, size/2) -boxFilter(integral, x - size/2, y - size/2, size, size/2);break;}return response;
}
Python實現
def surf_features_manual(image, n_features=0):
    """Manual SURF-style feature extraction.

    Args:
        image: input image (grayscale or BGR; BGR is converted).
        n_features: desired number of keypoints; 0 means unlimited
            (currently unused — kept for API compatibility).

    Returns:
        (keypoints, descriptors): list of cv2.KeyPoint and an
        (N, 64) float32 descriptor array.
    """
    if len(image.shape) == 3:
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    else:
        gray = image.copy()

    # Integral image makes every later box sum O(1).
    integral = cv2.integral(gray.astype(np.float32))

    keypoints = detect_surf_features(integral)
    descriptors = compute_surf_descriptors(integral, keypoints)
    return keypoints, descriptors


def detect_surf_features(integral):
    """Keypoints = positive 3x3 local maxima of the Hessian determinant,
    computed at a fixed set of filter scales."""
    keypoints = []
    scales = [1.2, 1.6, 2.0, 2.4, 2.8]

    for scale in scales:
        size = int(scale * 9)
        if size % 2 == 0:
            size += 1  # keep the filter size odd
        half = size // 2

        # Determinant-of-Hessian response map for this scale.
        det = np.zeros((integral.shape[0], integral.shape[1]), dtype=np.float32)
        for y in range(half, integral.shape[0] - half):
            for x in range(half, integral.shape[1] - half):
                dxx = calculate_hessian(integral, x, y, size, 0)
                dyy = calculate_hessian(integral, x, y, size, 1)
                dxy = calculate_hessian(integral, x, y, size, 2)
                # 0.81 ~= 0.9^2, SURF's weight balancing Dxy vs Dxx*Dyy.
                hessian = dxx * dyy - 0.81 * dxy * dxy
                if hessian > 0:
                    det[y, x] = hessian

        # 3x3 non-maximum suppression.
        for y in range(half + 1, det.shape[0] - half - 1):
            for x in range(half + 1, det.shape[1] - half - 1):
                val = det[y, x]
                if val <= 0:
                    continue
                is_peak = True
                for dy in range(-1, 2):
                    for dx in range(-1, 2):
                        if (dx or dy) and val <= det[y + dy, x + dx]:
                            is_peak = False
                            break
                    if not is_peak:
                        break
                if is_peak:
                    keypoints.append(cv2.KeyPoint(x, y, size))
    return keypoints


def calculate_hessian(integral, x, y, size, type):
    """Box-filter approximation of one second-derivative response.

    type: 0 -> Dxx, 1 -> Dyy, anything else -> Dxy.
    """
    half = size // 2
    if type == 0:  # Dxx: three boxes along x
        return (box_filter(integral, x - half, y - half, size, half)
                - 2 * box_filter(integral, x - half // 2, y - half, half, half)
                + box_filter(integral, x, y - half, size, half))
    if type == 1:  # Dyy: three boxes stacked along y
        return (box_filter(integral, x - half, y - half, size, size)
                - 2 * box_filter(integral, x - half, y - half // 2, size, half)
                + box_filter(integral, x - half, y, size, size))
    # Dxy: diagonal quadrants
    return (box_filter(integral, x - half, y - half, size, size)
            + box_filter(integral, x, y, size, size)
            - box_filter(integral, x - half, y, size, size)
            - box_filter(integral, x, y - half, size, size))


def box_filter(integral, x, y, width, height):
    """Rectangle sum read from the integral image, clipped to the image."""
    x1, y1 = max(0, x), max(0, y)
    x2 = min(integral.shape[1] - 1, x + width - 1)
    y2 = min(integral.shape[0] - 1, y + height - 1)
    return integral[y2, x2] - integral[y2, x1] - integral[y1, x2] + integral[y1, x1]


def compute_surf_descriptors(integral, keypoints):
    """64-dim descriptor: 4x4 grid of (dx, dy, |dx|, |dy|) sums of Haar
    responses rotated into the keypoint's dominant orientation."""
    descriptors = np.zeros((len(keypoints), 64), dtype=np.float32)

    for i, kp in enumerate(keypoints):
        desc = descriptors[i]
        angle = compute_orientation(integral, kp)

        for gy in range(-2, 2):
            for gx in range(-2, 2):
                dx = dy = abs_dx = abs_dy = 0
                # 5x5 Haar samples inside this grid cell.
                for sy in range(5):
                    for sx in range(5):
                        px = int(kp.pt[0] + gx * 5 + sx)
                        py = int(kp.pt[1] + gy * 5 + sy)
                        if 0 <= px < integral.shape[1] and 0 <= py < integral.shape[0]:
                            haar_x = haar_wavelet(integral, px, py, 2, 0)
                            haar_y = haar_wavelet(integral, px, py, 2, 1)
                            # Rotate the response into the dominant direction.
                            rot_x = haar_x * np.cos(angle) + haar_y * np.sin(angle)
                            rot_y = -haar_x * np.sin(angle) + haar_y * np.cos(angle)
                            dx += rot_x
                            dy += rot_y
                            abs_dx += abs(rot_x)
                            abs_dy += abs(rot_y)

                idx = (gy + 2) * 16 + (gx + 2) * 4
                desc[idx:idx + 4] = [dx, dy, abs_dx, abs_dy]

        # L2-normalise the descriptor.
        norm = np.linalg.norm(desc)
        if norm > 0:
            desc /= norm
    return descriptors


def compute_orientation(integral, kp):
    """Dominant orientation: the angle whose ring of Haar responses
    (radii 6..18) is largest; full circle scanned in 0.1 rad steps."""
    best_angle = 0
    best_response = 0
    for theta in np.arange(0, 2 * np.pi, 0.1):
        response = 0
        for r in range(6, 20, 2):
            x = int(kp.pt[0] + r * np.cos(theta))
            y = int(kp.pt[1] + r * np.sin(theta))
            if 0 <= x < integral.shape[1] and 0 <= y < integral.shape[0]:
                response += (haar_wavelet(integral, x, y, 4, 0)
                             + haar_wavelet(integral, x, y, 4, 1))
        if response > best_response:
            best_response = response
            best_angle = theta
    return best_angle


def haar_wavelet(integral, x, y, size, type):
    """Haar wavelet response: type 0 -> x direction, otherwise y."""
    h = size // 2
    if type == 0:
        return (box_filter(integral, x + h, y - h, h, size)
                - box_filter(integral, x - h, y - h, h, size))
    return (box_filter(integral, x - h, y + h, size, h)
            - box_filter(integral, x - h, y - h, size, h))
5. ORB特征
5.1 基本原理
ORB(Oriented FAST and Rotated BRIEF)就像是"經濟實惠型體檢",速度快、效果好、還不要錢!
主要組成:
-
FAST角點檢測:
- 檢測像素圓周上的強度變化
- 快速篩選候選點
-
BRIEF描述子:
- 二進制描述子
- 漢明距離匹配
5.2 手動實現
C++實現
void orb_features(const Mat& src, vector<KeyPoint>& keypoints,Mat& descriptors, int nfeatures = 500) {CV_Assert(!src.empty());// FAST角點檢測detectFASTFeatures(src, keypoints, nfeatures);// 計算方向computeOrientation(src, keypoints);// 計算rBRIEF描述子computeORBDescriptors(src, keypoints, descriptors);
}void detectFASTFeatures(const Mat& src, vector<KeyPoint>& keypoints,int nfeatures) {keypoints.clear();// FAST角點檢測參數const int threshold = 20;const int min_arc = 9;// 檢測角點for (int y = 3; y < src.rows - 3; y++) {for (int x = 3; x < src.cols - 3; x++) {uchar center = src.at<uchar>(y, x);int brighter = 0, darker = 0;// 檢查圓周上的像素for (int i = 0; i < 16; i++) {int dx = fast_circle[i][0];int dy = fast_circle[i][1];uchar pixel = src.at<uchar>(y + dy, x + dx);if (pixel > center + threshold) brighter++;else if (pixel < center - threshold) darker++;}// 判斷是否為角點if (brighter >= min_arc || darker >= min_arc) {keypoints.push_back(KeyPoint(x, y, 1));}}}// 如果特征點太多,選擇響應最強的nfeatures個if (nfeatures > 0 && keypoints.size() > nfeatures) {// 計算角點響應值vector<pair<float, int>> responses;for (size_t i = 0; i < keypoints.size(); i++) {float response = calculateFASTResponse(src, keypoints[i]);responses.push_back(make_pair(response, i));}// 按響應值排序sort(responses.begin(), responses.end(), greater<pair<float, int>>());// 選擇前nfeatures個特征點vector<KeyPoint> selected;for (int i = 0; i < nfeatures; i++) {selected.push_back(keypoints[responses[i].second]);}keypoints = selected;}
}float calculateFASTResponse(const Mat& src, const KeyPoint& kp) {float response = 0;uchar center = src.at<uchar>(kp.pt.y, kp.pt.x);// 計算與中心點的差異for (int i = 0; i < 16; i++) {int dx = fast_circle[i][0];int dy = fast_circle[i][1];uchar pixel = src.at<uchar>(kp.pt.y + dy, kp.pt.x + dx);response += abs(pixel - center);}return response;
}void computeOrientation(const Mat& src, vector<KeyPoint>& keypoints) {for (size_t i = 0; i < keypoints.size(); i++) {KeyPoint& kp = keypoints[i];float m01 = 0, m10 = 0;// 計算質心for (int y = -7; y <= 7; y++) {for (int x = -7; x <= 7; x++) {if (x*x + y*y <= 49) { // 圓形區域int px = kp.pt.x + x;int py = kp.pt.y + y;if (px >= 0 && px < src.cols && py >= 0 && py < src.rows) {float intensity = src.at<uchar>(py, px);m10 += x * intensity;m01 += y * intensity;}}}}// 計算方向kp.angle = atan2(m01, m10) * 180 / CV_PI;if (kp.angle < 0) kp.angle += 360;}
}void computeORBDescriptors(const Mat& src,const vector<KeyPoint>& keypoints,Mat& descriptors) {descriptors.create(keypoints.size(), 32, CV_8U);for (size_t i = 0; i < keypoints.size(); i++) {const KeyPoint& kp = keypoints[i];uchar* desc = descriptors.ptr<uchar>(i);// 計算描述子for (int j = 0; j < 32; j++) {int pattern[4][4] = {{0, 0, 0, 0},{0, 0, 0, 0},{0, 0, 0, 0},{0, 0, 0, 0}};// 生成隨機模式for (int k = 0; k < 4; k++) {for (int l = 0; l < 4; l++) {pattern[k][l] = (rand() % 2) ? 1 : -1;}}// 計算描述子的一位float sum = 0;for (int k = 0; k < 4; k++) {for (int l = 0; l < 4; l++) {int px = kp.pt.x + k - 2;int py = kp.pt.y + l - 2;if (px >= 0 && px < src.cols && py >= 0 && py < src.rows) {sum += pattern[k][l] * src.at<uchar>(py, px);}}}desc[j] = (sum > 0) ? 1 : 0;}}
}
Python實現
# (dx, dy) offsets of the 16 pixels on the radius-3 Bresenham circle used
# by FAST.  BUGFIX: this table was referenced but never defined, so every
# call raised NameError.
fast_circle = [
    (0, -3), (1, -3), (2, -2), (3, -1),
    (3, 0), (3, 1), (2, 2), (1, 3),
    (0, 3), (-1, 3), (-2, 2), (-3, 1),
    (-3, 0), (-3, -1), (-2, -2), (-1, -3),
]


def orb_features_manual(image, n_features=500):
    """Manual ORB-style feature extraction.

    Args:
        image: input image (grayscale or BGR; BGR is converted).
        n_features: desired number of keypoints.

    Returns:
        (keypoints, descriptors): list of cv2.KeyPoint and an
        (N, 32) uint8 descriptor array.
    """
    if len(image.shape) == 3:
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    else:
        gray = image.copy()

    keypoints = detect_fast_features(gray, n_features)
    keypoints = compute_orb_orientation(gray, keypoints)
    descriptors = compute_orb_descriptors(gray, keypoints)
    return keypoints, descriptors


def detect_fast_features(image, n_features):
    """FAST-style corner detection plus a response-based top-N cut.

    NOTE(review): counts brighter/darker circle pixels without requiring
    them to be contiguous — a simplification of the real FAST arc test.
    """
    keypoints = []
    threshold = 20
    min_arc = 9

    for y in range(3, image.shape[0] - 3):
        for x in range(3, image.shape[1] - 3):
            # BUGFIX: int() widening avoids uint8 wrap-around in
            # `center + threshold` / `center - threshold`.
            center = int(image[y, x])
            brighter = darker = 0

            for dx, dy in fast_circle:
                pixel = int(image[y + dy, x + dx])
                if pixel > center + threshold:
                    brighter += 1
                elif pixel < center - threshold:
                    darker += 1

            if brighter >= min_arc or darker >= min_arc:
                keypoints.append(cv2.KeyPoint(x, y, 1))

    # Keep only the n_features strongest corners.
    if n_features > 0 and len(keypoints) > n_features:
        responses = [(calculate_fast_response(image, kp), i)
                     for i, kp in enumerate(keypoints)]
        responses.sort(reverse=True)
        keypoints = [keypoints[idx] for _, idx in responses[:n_features]]
    return keypoints


def calculate_fast_response(image, kp):
    """Corner strength: sum of |circle pixel - center| over the circle.

    BUGFIX: values are widened to Python ints first; uint8 arithmetic
    silently wrapped around (e.g. 200 - 250 -> 206).
    """
    x0, y0 = int(kp.pt[0]), int(kp.pt[1])
    center = int(image[y0, x0])
    response = 0
    for dx, dy in fast_circle:
        response += abs(int(image[y0 + dy, x0 + dx]) - center)
    return response


def compute_orb_orientation(image, keypoints):
    """Assign an intensity-centroid orientation (degrees) to each keypoint.

    BUGFIX: renamed from `compute_orientation`, which silently redefined
    (and broke) the SURF helper of the same name defined earlier in this
    module.
    """
    for kp in keypoints:
        m01 = m10 = 0
        # Image moments over a circular patch of radius 7.
        for y in range(-7, 8):
            for x in range(-7, 8):
                if x * x + y * y <= 49:
                    px = int(kp.pt[0] + x)
                    py = int(kp.pt[1] + y)
                    if 0 <= px < image.shape[1] and 0 <= py < image.shape[0]:
                        intensity = int(image[py, px])
                        m10 += x * intensity
                        m01 += y * intensity

        kp.angle = np.arctan2(m01, m10) * 180 / np.pi
        if kp.angle < 0:
            kp.angle += 360
    return keypoints


def compute_orb_descriptors(image, keypoints):
    """32-byte descriptor; each byte holds a single 0/1 comparison result.

    NOTE(review): the +/-1 pattern is re-randomised per bit and per
    keypoint, so descriptors are neither reproducible nor comparable
    across calls — kept as-is to preserve the original (educational)
    behaviour.
    """
    descriptors = np.zeros((len(keypoints), 32), dtype=np.uint8)
    for i, kp in enumerate(keypoints):
        desc = descriptors[i]
        for j in range(32):
            pattern = np.random.choice([-1, 1], size=(4, 4))
            # One signed intensity sum over the 4x4 patch -> one bit.
            total = 0
            for k in range(4):
                for l in range(4):
                    px = int(kp.pt[0] + k - 2)
                    py = int(kp.pt[1] + l - 2)
                    if 0 <= px < image.shape[1] and 0 <= py < image.shape[0]:
                        total += pattern[k, l] * int(image[py, px])
            desc[j] = 1 if total > 0 else 0
    return descriptors
6. 特征匹配
6.1 基本原理
特征匹配就像是"認親",通過比較特征描述子來找到對應的特征點。
匹配策略:
-
暴力匹配:
- 遍歷所有可能
- 計算距離最小值
-
快速近似匹配:
- 構建搜索樹
- 快速查找最近鄰
6.2 手動實現
C++實現
void feature_matching(const Mat& src1, const Mat& src2,vector<DMatch>& matches,const vector<KeyPoint>& keypoints1,const vector<KeyPoint>& keypoints2,const Mat& descriptors1,const Mat& descriptors2) {matches.clear();// 暴力匹配for (int i = 0; i < descriptors1.rows; i++) {double minDist = DBL_MAX;int minIdx = -1;for (int j = 0; j < descriptors2.rows; j++) {double dist = 0;// 計算歐氏距離for (int k = 0; k < descriptors1.cols; k++) {double diff = descriptors1.at<float>(i,k) -descriptors2.at<float>(j,k);dist += diff * diff;}dist = sqrt(dist);if (dist < minDist) {minDist = dist;minIdx = j;}}if (minIdx >= 0) {DMatch match;match.queryIdx = i;match.trainIdx = minIdx;match.distance = minDist;matches.push_back(match);}}
}
Python實現
def feature_matching_manual(descriptors1, descriptors2, threshold=0.7):"""手動實現特征匹配參數:descriptors1: 第一幅圖像的特征描述子descriptors2: 第二幅圖像的特征描述子threshold: 匹配閾值"""matches = []# 暴力匹配for i in range(len(descriptors1)):dist = np.linalg.norm(descriptors2 - descriptors1[i], axis=1)idx1, idx2 = np.argsort(dist)[:2]# 比率測試if dist[idx1] < threshold * dist[idx2]:matches.append(cv2.DMatch(i, idx1, dist[idx1]))return matches
7. 代碼實現與優化
7.1 性能優化技巧
- SIMD加速:
// 使用AVX2指令集加速特征計算
// Sum `width` floats from src into *dst using AVX2.
// BUGFIX: the previous version always advanced in steps of 8 and read
// past the end of src whenever width was not a multiple of 8; the
// remainder is now handled with a scalar tail loop.
inline void compute_features_simd(const float* src, float* dst, int width) {
    alignas(32) float buffer[8];
    __m256 sum = _mm256_setzero_ps();

    // Vectorised portion: full 8-lane chunks only.
    int x = 0;
    for (; x + 8 <= width; x += 8) {
        __m256 data = _mm256_loadu_ps(src + x);
        sum = _mm256_add_ps(sum, data);
    }

    // Horizontal reduction of the 8 lanes.
    _mm256_store_ps(buffer, sum);
    float total = buffer[0] + buffer[1] + buffer[2] + buffer[3] +
                  buffer[4] + buffer[5] + buffer[6] + buffer[7];

    // Scalar tail for the remaining (width % 8) elements.
    for (; x < width; ++x) {
        total += src[x];
    }
    *dst = total;
}
- OpenMP并行化:
#pragma omp parallel for collapse(2)
for (int y = 0; y < src.rows; y++) {for (int x = 0; x < src.cols; x++) {// 處理每個像素}
}
- 內存優化:
// 使用連續內存訪問
Mat temp = src.clone();
temp = temp.reshape(1, src.total());
8. 實驗效果與應用
8.1 應用場景
-
圖像配準:
- 醫學圖像對齊
- 遙感圖像拼接
- 全景圖像合成
-
目標識別:
- 人臉識別
- 物體檢測
- 場景匹配
-
運動跟蹤:
- 視頻監控
- 手勢識別
- 增強現實
8.2 注意事項
-
特征提取過程中的注意點:
- 選擇合適的特征類型
- 考慮計算效率
- 注意特征的可區分性
-
算法選擇建議:
- 根據應用場景選擇
- 考慮實時性要求
- 權衡準確性和效率
總結
特征提取就像是給圖像做"體檢"!通過Harris角點檢測、SIFT、SURF、ORB等"檢查項目",我們可以發現圖像中隱藏的"特征"。在實際應用中,需要根據具體場景選擇合適的"檢查方案",就像醫生為每個病人制定個性化的體檢計劃一樣。
記住:好的特征提取就像是一個經驗豐富的"醫生",既要發現關鍵特征,又要保持效率!🏥
參考資料
- Harris C, Stephens M. A combined corner and edge detector[C]. Alvey vision conference, 1988
- Lowe D G. Distinctive image features from scale-invariant keypoints[J]. IJCV, 2004
- Bay H, et al. SURF: Speeded Up Robust Features[C]. ECCV, 2006
- Rublee E, et al. ORB: An efficient alternative to SIFT or SURF[C]. ICCV, 2011
- OpenCV官方文檔: https://docs.opencv.org/
- 更多資源: IP101項目主頁