{"id":1024458,"date":"2024-12-30T14:13:58","date_gmt":"2024-12-30T06:13:58","guid":{"rendered":"https:\/\/docs.pingcode.com\/ask\/ask-ask\/1024458.html"},"modified":"2024-12-30T14:14:01","modified_gmt":"2024-12-30T06:14:01","slug":"python%e5%a6%82%e4%bd%95%e6%8f%90%e5%8f%96%e5%9b%be%e7%89%87%e7%89%b9%e5%be%81-2","status":"publish","type":"post","link":"https:\/\/docs.pingcode.com\/ask\/1024458.html","title":{"rendered":"python\u5982\u4f55\u63d0\u53d6\u56fe\u7247\u7279\u5f81"},"content":{"rendered":"<p style=\"text-align:center;\" ><img decoding=\"async\" src=\"https:\/\/cdn-docs.pingcode.com\/wp-content\/uploads\/2024\/12\/39c03dac-275f-49dd-8202-07859219a4ba.webp?x-oss-process=image\/auto-orient,1\/format,webp\" alt=\"python\u5982\u4f55\u63d0\u53d6\u56fe\u7247\u7279\u5f81\" \/><\/p>\n<p><p> \u5728\u4f7f\u7528Python\u63d0\u53d6\u56fe\u7247\u7279\u5f81\u65f6\uff0c\u4e3b\u8981\u7684\u65b9\u6cd5\u5305\u62ec\u4f7f\u7528\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNN\uff09\u3001\u76f4\u65b9\u56fe\u3001\u989c\u8272\u7279\u5f81\u3001\u5f62\u72b6\u7279\u5f81\u3001\u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26\u7b49\u3002<strong>\u6700\u5e38\u7528\u7684\u65b9\u6cd5\u5305\u62ec\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNN\uff09\u3001\u76f4\u65b9\u56fe\u3001\u989c\u8272\u7279\u5f81\u3001\u5f62\u72b6\u7279\u5f81\u3001\u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26<\/strong>\u3002\u5176\u4e2d\uff0c\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNN\uff09\u662f\u6700\u4e3a\u5f3a\u5927\u548c\u5e7f\u6cdb\u4f7f\u7528\u7684\u65b9\u6cd5\uff0c\u5b83\u901a\u8fc7\u591a\u5c42\u7f51\u7edc\u7ed3\u6784\u81ea\u52a8\u5b66\u4e60\u56fe\u50cf\u7279\u5f81\uff0c\u9002\u7528\u4e8e\u5404\u79cd\u56fe\u50cf\u5206\u7c7b\u548c\u8bc6\u522b\u4efb\u52a1\u3002\u4e0b\u9762\uff0c\u6211\u4eec\u5c06\u8be6\u7ec6\u8ba8\u8bba\u8fd9\u4e9b\u65b9\u6cd5\u7684\u5177\u4f53\u5b9e\u73b0\u3002<\/p>\n<\/p>\n<p><h3>\u4e00\u3001\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNN\uff09<\/h3>\n<\/p>\n<p><p>\u5377\u79ef\u795e\u7
ecf\u7f51\u7edc\uff08CNN\uff09\u662f\u4e00\u79cd\u6df1\u5ea6\u5b66\u4e60\u7b97\u6cd5\uff0c\u7279\u522b\u9002\u7528\u4e8e\u56fe\u50cf\u6570\u636e\u7684\u5904\u7406\u548c\u5206\u6790\u3002\u5b83\u901a\u8fc7\u591a\u4e2a\u5377\u79ef\u5c42\u3001\u6c60\u5316\u5c42\u548c\u5168\u8fde\u63a5\u5c42\u6765\u63d0\u53d6\u56fe\u50cf\u7279\u5f81\u3002<\/p>\n<\/p>\n<p><h4>1. \u4f7f\u7528\u9884\u8bad\u7ec3\u6a21\u578b<\/h4>\n<\/p>\n<p><p>\u4f7f\u7528\u9884\u8bad\u7ec3\u6a21\u578b\uff08\u5982VGG16\u3001ResNet\u3001Inception\u7b49\uff09\u53ef\u4ee5\u5feb\u901f\u63d0\u53d6\u56fe\u50cf\u7279\u5f81\u3002\u9884\u8bad\u7ec3\u6a21\u578b\u5df2\u7ecf\u5728\u5927\u89c4\u6a21\u6570\u636e\u96c6\u4e0a\u8bad\u7ec3\u8fc7\uff0c\u53ef\u4ee5\u76f4\u63a5\u4f7f\u7528\u6765\u63d0\u53d6\u7279\u5f81\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from keras.applications.vgg16 import VGG16, preprocess_input<\/p>\n<p>from keras.preprocessing import image<\/p>\n<p>from keras.models import Model<\/p>\n<p>import numpy as np<\/p>\n<h2><strong>\u52a0\u8f7d\u9884\u8bad\u7ec3\u7684VGG16\u6a21\u578b<\/strong><\/h2>\n<p>base_model = VGG16(weights=&#39;imagenet&#39;)<\/p>\n<p>model = Model(inputs=base_model.input, outputs=base_model.get_layer(&#39;fc1&#39;).output)<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf\u5e76\u8fdb\u884c\u9884\u5904\u7406<\/strong><\/h2>\n<p>img_path = &#39;path_to_your_image.jpg&#39;<\/p>\n<p>img = image.load_img(img_path, target_size=(224, 224))<\/p>\n<p>img_data = image.img_to_array(img)<\/p>\n<p>img_data = np.expand_dims(img_data, axis=0)<\/p>\n<p>img_data = preprocess_input(img_data)<\/p>\n<h2><strong>\u63d0\u53d6\u56fe\u50cf\u7279\u5f81<\/strong><\/h2>\n<p>features = model.predict(img_data)<\/p>\n<p>print(features)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2. 
\u81ea\u5b9a\u4e49CNN<\/h4>\n<\/p>\n<p><p>\u9664\u4e86\u4f7f\u7528\u9884\u8bad\u7ec3\u6a21\u578b\u5916\uff0c\u8fd8\u53ef\u4ee5\u6839\u636e\u5177\u4f53\u4efb\u52a1\u81ea\u5b9a\u4e49CNN\u6a21\u578b\u3002\u8fd9\u79cd\u65b9\u6cd5\u53ef\u4ee5\u6839\u636e\u5177\u4f53\u9700\u6c42\u8bbe\u8ba1\u7f51\u7edc\u7ed3\u6784\uff0c\u4f46\u9700\u8981\u5927\u91cf\u6570\u636e\u548c\u8ba1\u7b97\u8d44\u6e90\u8fdb\u884c\u8bad\u7ec3\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from keras.models import Sequential<\/p>\n<p>from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense<\/p>\n<p>from keras.preprocessing.image import ImageDataGenerator<\/p>\n<h2><strong>\u6784\u5efa\u81ea\u5b9a\u4e49CNN\u6a21\u578b<\/strong><\/h2>\n<p>model = Sequential()<\/p>\n<p>model.add(Conv2D(32, (3, 3), activation=&#39;relu&#39;, input_shape=(64, 64, 3)))<\/p>\n<p>model.add(MaxPooling2D(pool_size=(2, 2)))<\/p>\n<p>model.add(Flatten())<\/p>\n<p>model.add(Dense(128, activation=&#39;relu&#39;))<\/p>\n<p>model.add(Dense(1, activation=&#39;sigmoid&#39;))<\/p>\n<p>model.compile(optimizer=&#39;adam&#39;, loss=&#39;binary_crossentropy&#39;, metrics=[&#39;accuracy&#39;])<\/p>\n<h2><strong>\u52a0\u8f7d\u548c\u9884\u5904\u7406\u56fe\u50cf\u6570\u636e<\/strong><\/h2>\n<p>datagen = ImageDataGenerator(rescale=1.0\/255)<\/p>\n<p>train_generator = datagen.flow_from_directory(&#39;path_to_training_data&#39;, target_size=(64, 64), batch_size=32, class_mode=&#39;binary&#39;)<\/p>\n<h2><strong>\u8bad\u7ec3\u6a21\u578b<\/strong><\/h2>\n<p>model.fit(train_generator, steps_per_epoch=8000, 
epochs=5)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e8c\u3001\u989c\u8272\u7279\u5f81<\/h3>\n<\/p>\n<p><p>\u989c\u8272\u7279\u5f81\u662f\u56fe\u50cf\u7279\u5f81\u63d0\u53d6\u4e2d\u6700\u57fa\u672c\u7684\u65b9\u6cd5\u4e4b\u4e00\uff0c\u5e38\u7528\u7684\u6709\u989c\u8272\u76f4\u65b9\u56fe\u3001\u989c\u8272\u77e9\u7b49\u3002\u989c\u8272\u76f4\u65b9\u56fe\u53ef\u4ee5\u53cd\u6620\u56fe\u50cf\u7684\u989c\u8272\u5206\u5e03\u60c5\u51b5\u3002<\/p>\n<\/p>\n<p><h4>1. \u989c\u8272\u76f4\u65b9\u56fe<\/h4>\n<\/p>\n<p><p>\u989c\u8272\u76f4\u65b9\u56fe\u662f\u7edf\u8ba1\u56fe\u50cf\u4e2d\u6bcf\u79cd\u989c\u8272\u51fa\u73b0\u6b21\u6570\u7684\u76f4\u65b9\u56fe\u3002\u53ef\u4ee5\u4f7f\u7528OpenCV\u5e93\u6765\u8ba1\u7b97\u989c\u8272\u76f4\u65b9\u56fe\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>import numpy as np<\/p>\n<p>from matplotlib import pyplot as plt<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<h2><strong>\u8ba1\u7b97\u989c\u8272\u76f4\u65b9\u56fe<\/strong><\/h2>\n<p>color = (&#39;b&#39;, &#39;g&#39;, &#39;r&#39;)<\/p>\n<p>for i, col in enumerate(color):<\/p>\n<p>    histr = cv2.calcHist([img], [i], None, [256], [0, 256])<\/p>\n<p>    plt.plot(histr, color=col)<\/p>\n<p>    plt.xlim([0, 256])<\/p>\n<p>plt.show()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2. 
\u989c\u8272\u77e9<\/h4>\n<\/p>\n<p><p>\u989c\u8272\u77e9\u5305\u62ec\u56fe\u50cf\u7684\u5747\u503c\u3001\u6807\u51c6\u5dee\u548c\u504f\u5ea6\uff0c\u53ef\u4ee5\u63cf\u8ff0\u56fe\u50cf\u7684\u989c\u8272\u5206\u5e03\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">def color_moments(image):<\/p>\n<p>    # \u8ba1\u7b97\u5747\u503c<\/p>\n<p>    mean = np.mean(image, axis=(0, 1))<\/p>\n<p>    # \u8ba1\u7b97\u6807\u51c6\u5dee<\/p>\n<p>    std = np.std(image, axis=(0, 1))<\/p>\n<p>    # \u8ba1\u7b97\u504f\u5ea6<\/p>\n<p>    skewness = np.mean((image - mean) ** 3, axis=(0, 1))<\/p>\n<p>    return np.concatenate([mean, std, skewness])<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<p>img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)<\/p>\n<h2><strong>\u63d0\u53d6\u989c\u8272\u77e9<\/strong><\/h2>\n<p>color_features = color_moments(img)<\/p>\n<p>print(color_features)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e09\u3001\u5f62\u72b6\u7279\u5f81<\/h3>\n<\/p>\n<p><p>\u5f62\u72b6\u7279\u5f81\u662f\u63cf\u8ff0\u56fe\u50cf\u4e2d\u7269\u4f53\u5f62\u72b6\u7684\u7279\u5f81\uff0c\u5e38\u7528\u7684\u65b9\u6cd5\u5305\u62ec\u8fb9\u7f18\u68c0\u6d4b\u3001\u8f6e\u5ed3\u63d0\u53d6\u3001Hu\u77e9\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1. 
\u8fb9\u7f18\u68c0\u6d4b<\/h4>\n<\/p>\n<p><p>\u8fb9\u7f18\u68c0\u6d4b\u53ef\u4ee5\u7a81\u51fa\u56fe\u50cf\u4e2d\u7684\u7269\u4f53\u8f6e\u5ed3\u3002\u5e38\u7528\u7684\u8fb9\u7f18\u68c0\u6d4b\u7b97\u6cd5\u6709Canny\u7b97\u6cd5\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>from matplotlib import pyplot as plt<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;, 0)<\/p>\n<h2><strong>\u8fdb\u884c\u8fb9\u7f18\u68c0\u6d4b<\/strong><\/h2>\n<p>edges = cv2.Canny(img, 100, 200)<\/p>\n<h2><strong>\u663e\u793a\u7ed3\u679c<\/strong><\/h2>\n<p>plt.subplot(121), plt.imshow(img, cmap=&#39;gray&#39;)<\/p>\n<p>plt.title(&#39;Original Image&#39;), plt.xticks([]), plt.yticks([])<\/p>\n<p>plt.subplot(122), plt.imshow(edges, cmap=&#39;gray&#39;)<\/p>\n<p>plt.title(&#39;Edge Image&#39;), plt.xticks([]), plt.yticks([])<\/p>\n<p>plt.show()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2. \u8f6e\u5ed3\u63d0\u53d6<\/h4>\n<\/p>\n<p><p>\u8f6e\u5ed3\u63d0\u53d6\u53ef\u4ee5\u83b7\u53d6\u56fe\u50cf\u4e2d\u7269\u4f53\u7684\u8fb9\u754c\u8f6e\u5ed3\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>from matplotlib import pyplot as plt<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<p>gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)<\/p>\n<h2><strong>\u63d0\u53d6\u8f6e\u5ed3<\/strong><\/h2>\n<p>contours, _ = cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)<\/p>\n<h2><strong>\u7ed8\u5236\u8f6e\u5ed3<\/strong><\/h2>\n<p>cv2.drawContours(img, contours, -1, (0, 255, 0), 3)<\/p>\n<h2><strong>\u663e\u793a\u7ed3\u679c<\/strong><\/h2>\n<p>plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))<\/p>\n<p>plt.show()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>3. 
Hu\u77e9<\/h4>\n<\/p>\n<p><p>Hu\u77e9\u662f\u901a\u8fc7\u8ba1\u7b97\u56fe\u50cf\u7684\u77e9\u6765\u63cf\u8ff0\u56fe\u50cf\u7684\u5f62\u72b6\u7279\u5f81\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<p>gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)<\/p>\n<h2><strong>\u63d0\u53d6\u8f6e\u5ed3<\/strong><\/h2>\n<p>contours, _ = cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)<\/p>\n<h2><strong>\u8ba1\u7b97Hu\u77e9<\/strong><\/h2>\n<p>for contour in contours:<\/p>\n<p>    moments = cv2.moments(contour)<\/p>\n<p>    huMoments = cv2.HuMoments(moments)<\/p>\n<p>    print(huMoments)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u56db\u3001\u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26<\/h3>\n<\/p>\n<p><p>\u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26\u662f\u56fe\u50cf\u7279\u5f81\u63d0\u53d6\u7684\u91cd\u8981\u65b9\u6cd5\u4e4b\u4e00\uff0c\u5e38\u7528\u7684\u7b97\u6cd5\u6709SIFT\u3001SURF\u3001ORB\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1. 
SIFT\uff08\u5c3a\u5ea6\u4e0d\u53d8\u7279\u5f81\u53d8\u6362\uff09<\/h4>\n<\/p>\n<p><p>SIFT\u662f\u4e00\u79cd\u5e38\u7528\u7684\u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26\u7b97\u6cd5\uff0c\u4f46\u7531\u4e8e\u4e13\u5229\u95ee\u9898\uff0cOpenCV 3.4.2\u53ca\u66f4\u9ad8\u7248\u672c\u4e2d\u9ed8\u8ba4\u4e0d\u518d\u5305\u542bSIFT\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<p>gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)<\/p>\n<h2><strong>\u521d\u59cb\u5316SIFT\u68c0\u6d4b\u5668<\/strong><\/h2>\n<p>sift = cv2.xfeatures2d.SIFT_create()<\/p>\n<h2><strong>\u68c0\u6d4b\u5173\u952e\u70b9\u548c\u63cf\u8ff0\u7b26<\/strong><\/h2>\n<p>keypoints, descriptors = sift.detectAndCompute(gray, None)<\/p>\n<h2><strong>\u7ed8\u5236\u5173\u952e\u70b9<\/strong><\/h2>\n<p>img = cv2.drawKeypoints(img, keypoints, None)<\/p>\n<p>cv2.imshow(&#39;SIFT keypoints&#39;, img)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2. 
ORB\uff08Oriented FAST and Rotated BRIEF\uff09<\/h4>\n<\/p>\n<p><p>ORB\u662f\u514d\u8d39\u7684\u3001\u5f00\u6e90\u7684\u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26\u7b97\u6cd5\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<p>gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)<\/p>\n<h2><strong>\u521d\u59cb\u5316ORB\u68c0\u6d4b\u5668<\/strong><\/h2>\n<p>orb = cv2.ORB_create()<\/p>\n<h2><strong>\u68c0\u6d4b\u5173\u952e\u70b9\u548c\u63cf\u8ff0\u7b26<\/strong><\/h2>\n<p>keypoints, descriptors = orb.detectAndCompute(gray, None)<\/p>\n<h2><strong>\u7ed8\u5236\u5173\u952e\u70b9<\/strong><\/h2>\n<p>img = cv2.drawKeypoints(img, keypoints, None)<\/p>\n<p>cv2.imshow(&#39;ORB keypoints&#39;, img)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e94\u3001\u6587\u672c\u7279\u5f81\u548c\u5143\u6570\u636e\u63d0\u53d6<\/h3>\n<\/p>\n<p><p>\u6709\u65f6\uff0c\u56fe\u50cf\u53ef\u80fd\u5305\u542b\u6587\u672c\u4fe1\u606f\u6216\u5143\u6570\u636e\uff0c\u8fd9\u4e9b\u4fe1\u606f\u4e5f\u53ef\u4ee5\u4f5c\u4e3a\u7279\u5f81\u6765\u4f7f\u7528\u3002<\/p>\n<\/p>\n<p><h4>1. OCR\uff08\u5149\u5b66\u5b57\u7b26\u8bc6\u522b\uff09<\/h4>\n<\/p>\n<p><p>OCR\u53ef\u4ee5\u7528\u4e8e\u4ece\u56fe\u50cf\u4e2d\u63d0\u53d6\u6587\u672c\u4fe1\u606f\uff0cTesseract\u662f\u5e38\u7528\u7684OCR\u5f15\u64ce\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>import pytesseract<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = cv2.imread(&#39;path_to_your_image.jpg&#39;)<\/p>\n<h2><strong>\u4f7f\u7528Tesseract\u8fdb\u884cOCR<\/strong><\/h2>\n<p>text = pytesseract.image_to_string(img)<\/p>\n<p>print(text)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2. 
\u5143\u6570\u636e\u63d0\u53d6<\/h4>\n<\/p>\n<p><p>\u56fe\u50cf\u6587\u4ef6\u901a\u5e38\u5305\u542b\u4e00\u4e9b\u5143\u6570\u636e\uff0c\u5982\u62cd\u6444\u65f6\u95f4\u3001\u76f8\u673a\u578b\u53f7\u7b49\uff0c\u8fd9\u4e9b\u4fe1\u606f\u53ef\u4ee5\u4f5c\u4e3a\u7279\u5f81\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from PIL import Image<\/p>\n<p>from PIL.ExifTags import TAGS<\/p>\n<h2><strong>\u52a0\u8f7d\u56fe\u50cf<\/strong><\/h2>\n<p>img = Image.open(&#39;path_to_your_image.jpg&#39;)<\/p>\n<h2><strong>\u63d0\u53d6\u5143\u6570\u636e<\/strong><\/h2>\n<p>exif_data = img._getexif()<\/p>\n<p>if exif_data is not None:<\/p>\n<p>    for tag, value in exif_data.items():<\/p>\n<p>        tag_name = TAGS.get(tag, tag)<\/p>\n<p>        print(f&quot;{tag_name}: {value}&quot;)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u516d\u3001\u7ed3\u5408\u591a\u79cd\u7279\u5f81<\/h3>\n<\/p>\n<p><p>\u5728\u5b9e\u9645\u5e94\u7528\u4e2d\uff0c\u901a\u5e38\u4f1a\u7ed3\u5408\u591a\u79cd\u7279\u5f81\uff08\u5982\u989c\u8272\u3001\u5f62\u72b6\u3001\u5173\u952e\u70b9\u7b49\uff09\u6765\u63d0\u9ad8\u56fe\u50cf\u7279\u5f81\u63d0\u53d6\u7684\u6548\u679c\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>import numpy as np<\/p>\n<p>from keras.applications.vgg16 import VGG16, preprocess_input<\/p>\n<p>from keras.preprocessing import image<\/p>\n<p>from keras.models import Model<\/p>\n<p>from matplotlib import pyplot as plt<\/p>\n<p>def extract_features(img_path):<\/p>\n<p>    # \u989c\u8272\u76f4\u65b9\u56fe<\/p>\n<p>    img = cv2.imread(img_path)<\/p>\n<p>    color_hist = cv2.calcHist([img], [0, 1, 2], None, [8, 8, 8], [0, 256, 0, 256, 0, 256])<\/p>\n<p>    color_hist = cv2.normalize(color_hist, color_hist).flatten()<\/p>\n<p>    # \u8fb9\u7f18\u68c0\u6d4b<\/p>\n<p>    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)<\/p>\n<p>    edges = cv2.Canny(gray, 100, 200)<\/p>\n<p>    edge_hist = cv2.calcHist([edges], [0], None, [256], [0, 256])<\/p>\n<p>    edge_hist = 
cv2.normalize(edge_hist, edge_hist).flatten()<\/p>\n<p>    # \u5173\u952e\u70b9\u68c0\u6d4b\u548c\u63cf\u8ff0\u7b26\uff08ORB\uff09<\/p>\n<p>    orb = cv2.ORB_create()<\/p>\n<p>    keypoints, descriptors = orb.detectAndCompute(gray, None)<\/p>\n<p>    if descriptors is not None:<\/p>\n<p>        descriptors = descriptors.flatten()<\/p>\n<p>    else:<\/p>\n<p>        descriptors = np.array([])<\/p>\n<p>    # CNN\u7279\u5f81\uff08VGG16\uff09<\/p>\n<p>    base_model = VGG16(weights=&#39;imagenet&#39;)<\/p>\n<p>    model = Model(inputs=base_model.input, outputs=base_model.get_layer(&#39;fc1&#39;).output)<\/p>\n<p>    img = image.load_img(img_path, target_size=(224, 224))<\/p>\n<p>    img_data = image.img_to_array(img)<\/p>\n<p>    img_data = np.expand_dims(img_data, axis=0)<\/p>\n<p>    img_data = preprocess_input(img_data)<\/p>\n<p>    cnn_features = model.predict(img_data).flatten()<\/p>\n<p>    # \u5408\u5e76\u6240\u6709\u7279\u5f81<\/p>\n<p>    features = np.concatenate([color_hist, edge_hist, descriptors, cnn_features])<\/p>\n<p>    return features<\/p>\n<h2><strong>\u793a\u4f8b<\/strong><\/h2>\n<p>img_path = &#39;path_to_your_image.jpg&#39;<\/p>\n<p>features = extract_features(img_path)<\/p>\n<p>print(features)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><p>\u901a\u8fc7\u7ed3\u5408\u591a\u79cd\u7279\u5f81\uff0c\u53ef\u4ee5\u83b7\u5f97\u66f4\u5168\u9762\u3001\u51c6\u786e\u7684\u56fe\u50cf\u8868\u793a\uff0c\u63d0\u9ad8\u56fe\u50cf\u5206\u7c7b\u548c\u8bc6\u522b\u7684\u6548\u679c\u3002<\/p>\n<\/p>\n<h2><strong>\u76f8\u5173\u95ee\u7b54FAQs\uff1a<\/strong><\/h2>\n<p> <strong>\u5982\u4f55\u4f7f\u7528Python\u63d0\u53d6\u56fe\u7247\u7684\u989c\u8272\u7279\u5f81\uff1f<\/strong><br 
\/>\u63d0\u53d6\u56fe\u7247\u7684\u989c\u8272\u7279\u5f81\u53ef\u4ee5\u901a\u8fc7\u8ba1\u7b97\u56fe\u7247\u7684\u989c\u8272\u76f4\u65b9\u56fe\u6765\u5b9e\u73b0\u3002\u4f7f\u7528OpenCV\u6216PIL\u5e93\u53ef\u4ee5\u975e\u5e38\u65b9\u4fbf\u5730\u83b7\u53d6\u56fe\u7247\u7684\u989c\u8272\u4fe1\u606f\u3002\u9996\u5148\uff0c\u8bfb\u53d6\u56fe\u7247\u5e76\u5c06\u5176\u8f6c\u6362\u4e3aHSV\u6216RGB\u8272\u5f69\u7a7a\u95f4\uff0c\u7136\u540e\u8ba1\u7b97\u6bcf\u4e2a\u989c\u8272\u901a\u9053\u7684\u76f4\u65b9\u56fe\u3002\u6700\u540e\uff0c\u5c06\u8fd9\u4e9b\u76f4\u65b9\u56fe\u5408\u5e76\u4e3a\u4e00\u4e2a\u7279\u5f81\u5411\u91cf\uff0c\u4f5c\u4e3a\u56fe\u7247\u7684\u989c\u8272\u7279\u5f81\u3002<\/p>\n<p><strong>\u63d0\u53d6\u56fe\u7247\u7279\u5f81\u65f6\uff0c\u4f7f\u7528\u54ea\u4e9b\u5e93\u6bd4\u8f83\u597d\uff1f<\/strong><br \/>\u5728Python\u4e2d\uff0cOpenCV\u3001PIL\uff08Pillow\uff09\u548cscikit-image\u662f\u63d0\u53d6\u56fe\u7247\u7279\u5f81\u7684\u5e38\u7528\u5e93\u3002OpenCV\u529f\u80fd\u5f3a\u5927\uff0c\u9002\u5408\u5904\u7406\u590d\u6742\u7684\u56fe\u50cf\u5904\u7406\u4efb\u52a1\uff1bPIL\u6613\u4e8e\u4f7f\u7528\uff0c\u9002\u5408\u7b80\u5355\u7684\u56fe\u50cf\u5904\u7406\uff1bscikit-image\u63d0\u4f9b\u4e86\u8bb8\u591a\u56fe\u50cf\u5904\u7406\u7b97\u6cd5\u548c\u7279\u5f81\u63d0\u53d6\u5de5\u5177\uff0c\u9002\u5408\u5b66\u672f\u7814\u7a76\u548c\u5e94\u7528\u5f00\u53d1\u3002<\/p>\n<p><strong>\u5982\u4f55\u63d0\u53d6\u56fe\u7247\u7684\u8fb9\u7f18\u7279\u5f81\uff1f<\/strong><br 
\/>\u63d0\u53d6\u8fb9\u7f18\u7279\u5f81\u901a\u5e38\u53ef\u4ee5\u4f7f\u7528Canny\u8fb9\u7f18\u68c0\u6d4b\u7b97\u6cd5\u3002\u4f7f\u7528OpenCV\u7684cv2.Canny\u51fd\u6570\u53ef\u4ee5\u8f7b\u677e\u5b9e\u73b0\u8fd9\u4e00\u529f\u80fd\u3002\u9996\u5148\uff0c\u5c06\u56fe\u7247\u8f6c\u6362\u4e3a\u7070\u5ea6\u56fe\uff0c\u7136\u540e\u5e94\u7528\u9ad8\u65af\u6a21\u7cca\u53bb\u566a\uff0c\u63a5\u7740\u4f7f\u7528Canny\u7b97\u6cd5\u68c0\u6d4b\u8fb9\u7f18\u3002\u63d0\u53d6\u5230\u7684\u8fb9\u7f18\u4fe1\u606f\u53ef\u4ee5\u4f5c\u4e3a\u56fe\u7247\u7684\u5173\u952e\u7279\u5f81\u4e4b\u4e00\uff0c\u7528\u4e8e\u540e\u7eed\u7684\u56fe\u50cf\u8bc6\u522b\u6216\u5206\u7c7b\u4efb\u52a1\u3002<\/p>\n","protected":false},"excerpt":{"rendered":"\u5728\u4f7f\u7528Python\u63d0\u53d6\u56fe\u7247\u7279\u5f81\u65f6\uff0c\u4e3b\u8981\u7684\u65b9\u6cd5\u5305\u62ec\u4f7f\u7528\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNN\uff09\u3001\u76f4\u65b9\u56fe\u3001\u989c\u8272\u7279\u5f81\u3001\u5f62\u72b6\u7279\u5f81\u3001\u5173\u952e\u70b9 
[&hellip;]","protected":false},"author":3,"featured_media":1024472,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"_acf_changed":false,"footnotes":""},"categories":[37],"tags":[],"acf":[],"_links":{"self":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1024458"}],"collection":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/users\/3"}],"replies":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/comments?post=1024458"}],"version-history":[{"count":"1","href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1024458\/revisions"}],"predecessor-version":[{"id":1024476,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1024458\/revisions\/1024476"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media\/1024472"}],"wp:attachment":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media?parent=1024458"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/categories?post=1024458"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/tags?post=1024458"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}