{"id":966654,"date":"2024-12-27T04:49:38","date_gmt":"2024-12-26T20:49:38","guid":{"rendered":"https:\/\/docs.pingcode.com\/ask\/ask-ask\/966654.html"},"modified":"2024-12-27T04:49:40","modified_gmt":"2024-12-26T20:49:40","slug":"python%e4%b8%ad%e5%a6%82%e4%bd%95%e6%b8%85%e9%99%a4%e8%83%8c%e6%99%af","status":"publish","type":"post","link":"https:\/\/docs.pingcode.com\/ask\/966654.html","title":{"rendered":"python\u4e2d\u5982\u4f55\u6e05\u9664\u80cc\u666f"},"content":{"rendered":"<p style=\"text-align:center;\" ><img decoding=\"async\" src=\"https:\/\/cdn-kb.worktile.com\/kb\/wp-content\/uploads\/2024\/04\/24182330\/93e0a2a3-b372-407f-a657-a36110a79329.webp\" alt=\"python\u4e2d\u5982\u4f55\u6e05\u9664\u80cc\u666f\" \/><\/p>\n<p><p> \u5728Python\u4e2d\u6e05\u9664\u80cc\u666f\u53ef\u4ee5\u901a\u8fc7\u591a\u79cd\u65b9\u6cd5\u5b9e\u73b0\uff0c\u4e3b\u8981\u5305\u62ec<strong>\u4f7f\u7528OpenCV\u5e93\u8fdb\u884c\u56fe\u50cf\u5904\u7406\u3001\u5229\u7528\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u8fdb\u884c\u56fe\u50cf\u5206\u5272<\/strong>\u3002\u5176\u4e2d\uff0cOpenCV\u5e93\u63d0\u4f9b\u4e86\u4e00\u4e9b\u7b80\u5355\u7684\u56fe\u50cf\u5904\u7406\u51fd\u6570\uff0c\u53ef\u4ee5\u5feb\u901f\u53bb\u9664\u80cc\u666f\uff0c\u800c\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u5219\u80fd\u591f\u63d0\u4f9b\u66f4\u7cbe\u786e\u7684\u80cc\u666f\u5206\u5272\u7ed3\u679c\u3002\u4e0b\u9762\u5c06\u8be6\u7ec6\u4ecb\u7ecd\u5982\u4f55\u4f7f\u7528\u8fd9\u4e9b\u65b9\u6cd5\u6765\u6e05\u9664\u56fe\u50cf\u80cc\u666f\u3002<\/p>\n<\/p>\n<p><h3>\u4e00\u3001\u4f7f\u7528OPENCV\u8fdb\u884c\u56fe\u50cf\u5904\u7406<\/h3>\n<\/p>\n<p><p>OpenCV\u662f\u4e00\u4e2a\u5f3a\u5927\u7684\u8ba1\u7b97\u673a\u89c6\u89c9\u5e93\uff0c\u63d0\u4f9b\u4e86\u591a\u79cd\u56fe\u50cf\u5904\u7406\u529f\u80fd\u3002\u4ee5\u4e0b\u662f\u4e00\u4e9b\u5e38\u7528\u7684\u65b9\u6cd5\u6765\u6e05\u9664\u80cc\u666f\uff1a<\/p>\n<\/p>\n<p><h4>1.1 
\u4f7f\u7528\u989c\u8272\u5206\u5272<\/h4>\n<\/p>\n<p><p>\u901a\u8fc7\u989c\u8272\u5206\u5272\u53ef\u4ee5\u6709\u6548\u53bb\u9664\u5355\u8272\u80cc\u666f\u3002\u6211\u4eec\u53ef\u4ee5\u5229\u7528OpenCV\u4e2d\u7684\u989c\u8272\u7a7a\u95f4\u8f6c\u6362\u529f\u80fd\uff0c\u5c06\u56fe\u50cf\u4eceRGB\u7a7a\u95f4\u8f6c\u6362\u5230HSV\u7a7a\u95f4\uff0c\u7136\u540e\u901a\u8fc7\u8bbe\u5b9a\u989c\u8272\u8303\u56f4\u6765\u5206\u5272\u51fa\u524d\u666f\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>import numpy as np<\/p>\n<h2><strong>\u8bfb\u53d6\u56fe\u50cf<\/strong><\/h2>\n<p>image = cv2.imread(&#39;image.jpg&#39;)<\/p>\n<h2><strong>\u8f6c\u6362\u989c\u8272\u7a7a\u95f4<\/strong><\/h2>\n<p>hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)<\/p>\n<h2><strong>\u5b9a\u4e49\u989c\u8272\u8303\u56f4<\/strong><\/h2>\n<p>lower_color = np.array([0, 0, 0])<\/p>\n<p>upper_color = np.array([180, 255, 30])<\/p>\n<h2><strong>\u521b\u5efa\u63a9\u7801<\/strong><\/h2>\n<p>mask = cv2.inRange(hsv, lower_color, upper_color)<\/p>\n<h2><strong>\u5e94\u7528\u63a9\u7801<\/strong><\/h2>\n<p>result = cv2.bitwise_and(image, image, mask=~mask)<\/p>\n<p>cv2.imshow(&#39;Result&#39;, result)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>1.2 \u4f7f\u7528GrabCut\u7b97\u6cd5<\/h4>\n<\/p>\n<p><p>GrabCut\u662f\u4e00\u79cd\u57fa\u4e8e\u56fe\u5272\u7684\u4ea4\u4e92\u5f0f\u5206\u5272\u7b97\u6cd5\uff0c\u9002\u7528\u4e8e\u590d\u6742\u80cc\u666f\u7684\u53bb\u9664\u3002\u7528\u6237\u9700\u8981\u63d0\u4f9b\u4e00\u4e2a\u7c97\u7565\u7684\u8fb9\u754c\u6846\uff0c\u7b97\u6cd5\u4f1a\u81ea\u52a8\u4f18\u5316\u5e76\u5206\u5272\u524d\u666f\u548c\u80cc\u666f\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import cv2<\/p>\n<p>import numpy as np<\/p>\n<h2><strong>\u8bfb\u53d6\u56fe\u50cf<\/strong><\/h2>\n<p>image = 
cv2.imread(&#39;image.jpg&#39;)<\/p>\n<h2><strong>\u521b\u5efa\u521d\u59cb\u63a9\u7801<\/strong><\/h2>\n<p>mask = np.zeros(image.shape[:2], np.uint8)<\/p>\n<h2><strong>\u521b\u5efa\u6a21\u578b<\/strong><\/h2>\n<p>bgdModel = np.zeros((1, 65), np.float64)<\/p>\n<p>fgdModel = np.zeros((1, 65), np.float64)<\/p>\n<h2><strong>\u5b9a\u4e49\u77e9\u5f62\u6846<\/strong><\/h2>\n<p>rect = (50, 50, 450, 290)<\/p>\n<h2><strong>\u5e94\u7528GrabCut\u7b97\u6cd5<\/strong><\/h2>\n<p>cv2.grabCut(image, mask, rect, bgdModel, fgdModel, 5, cv2.GC_INIT_WITH_RECT)<\/p>\n<h2><strong>\u4fee\u6539\u63a9\u7801<\/strong><\/h2>\n<p>mask2 = np.where((mask == 2) | (mask == 0), 0, 1).astype(&#39;uint8&#39;)<\/p>\n<p>result = image * mask2[:, :, np.newaxis]<\/p>\n<p>cv2.imshow(&#39;Result&#39;, result)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e8c\u3001\u5229\u7528\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u8fdb\u884c\u56fe\u50cf\u5206\u5272<\/h3>\n<\/p>\n<p><p>\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u5728\u56fe\u50cf\u5206\u5272\u4efb\u52a1\u4e0a\u6709\u7740\u51fa\u8272\u7684\u8868\u73b0\uff0c\u53ef\u4ee5\u5b9e\u73b0\u7cbe\u51c6\u7684\u80cc\u666f\u53bb\u9664\u3002\u4e0b\u9762\u4ecb\u7ecd\u5982\u4f55\u4f7f\u7528\u9884\u8bad\u7ec3\u6a21\u578b\u6765\u5b9e\u73b0\u8fd9\u4e00\u529f\u80fd\u3002<\/p>\n<\/p>\n<p><h4>2.1 \u4f7f\u7528DeepLab\u6a21\u578b<\/h4>\n<\/p>\n<p><p>DeepLab\u662f\u4e00\u4e2a\u5f3a\u5927\u7684\u56fe\u50cf\u5206\u5272\u6a21\u578b\uff0c\u53ef\u4ee5\u7528\u4e8e\u80cc\u666f\u53bb\u9664\u3002\u6211\u4eec\u53ef\u4ee5\u4f7f\u7528TensorFlow\u6216PyTorch\u52a0\u8f7d\u9884\u8bad\u7ec3\u7684DeepLab\u6a21\u578b\uff0c\u5e76\u5bf9\u56fe\u50cf\u8fdb\u884c\u5206\u5272\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import torch<\/p>\n<p>from torchvision import models<\/p>\n<p>from torchvision.transforms import transforms<\/p>\n<p>from PIL import Image<\/p>\n<p>import numpy as 
np<\/p>\n<h2><strong>\u52a0\u8f7d\u9884\u8bad\u7ec3DeepLab\u6a21\u578b<\/strong><\/h2>\n<p>model = models.segmentation.deeplabv3_resnet101(pretrained=True).eval()<\/p>\n<h2><strong>\u56fe\u50cf\u9884\u5904\u7406<\/strong><\/h2>\n<p>transform = transforms.Compose([<\/p>\n<p>    transforms.ToTensor(),<\/p>\n<p>    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),<\/p>\n<p>])<\/p>\n<h2><strong>\u8bfb\u53d6\u56fe\u50cf<\/strong><\/h2>\n<p>input_image = Image.open(&#39;image.jpg&#39;)<\/p>\n<p>input_tensor = transform(input_image)<\/p>\n<p>input_batch = input_tensor.unsqueeze(0)<\/p>\n<h2><strong>\u68c0\u67e5CUDA\u662f\u5426\u53ef\u7528<\/strong><\/h2>\n<p>if torch.cuda.is_available():<\/p>\n<p>    input_batch = input_batch.to(&#39;cuda&#39;)<\/p>\n<p>    model.to(&#39;cuda&#39;)<\/p>\n<h2><strong>\u6267\u884c\u524d\u5411\u4f20\u64ad<\/strong><\/h2>\n<p>with torch.no_grad():<\/p>\n<p>    output = model(input_batch)[&#39;out&#39;][0]<\/p>\n<p>output_predictions = output.argmax(0)<\/p>\n<h2><strong>\u521b\u5efa\u63a9\u7801<\/strong><\/h2>\n<p>mask = output_predictions.byte().cpu().numpy()<\/p>\n<p>mask = np.where(mask == 15, 255, 0).astype(np.uint8)  # \u5047\u8bbe15\u662f\u524d\u666f\u7c7b<\/p>\n<h2><strong>\u5e94\u7528\u63a9\u7801<\/strong><\/h2>\n<p>result = cv2.bitwise_and(np.array(input_image), np.array(input_image), mask=mask)<\/p>\n<p>cv2.imshow(&#39;Result&#39;, result)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2.2 \u4f7f\u7528U-Net\u6a21\u578b<\/h4>\n<\/p>\n<p><p>U-Net\u662f\u4e00\u79cd\u5e38\u7528\u7684\u56fe\u50cf\u5206\u5272\u7f51\u7edc\uff0c\u7279\u522b\u9002\u5408\u533b\u5b66\u56fe\u50cf\u7684\u5206\u5272\uff0c\u4f46\u540c\u6837\u53ef\u4ee5\u7528\u4e8e\u5176\u4ed6\u56fe\u50cf\u7684\u80cc\u666f\u53bb\u9664\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from keras.models import load_model<\/p>\n<p>from keras.preprocessing.image import img_to_array, 
load_img<\/p>\n<p>import numpy as np<\/p>\n<p>import cv2<\/p>\n<h2><strong>\u52a0\u8f7d\u9884\u8bad\u7ec3U-Net\u6a21\u578b<\/strong><\/h2>\n<p>model = load_model(&#39;unet_model.h5&#39;)<\/p>\n<h2><strong>\u8bfb\u53d6\u56fe\u50cf<\/strong><\/h2>\n<p>image = load_img(&#39;image.jpg&#39;, target_size=(256, 256))<\/p>\n<p>image = img_to_array(image) \/ 255.0<\/p>\n<p>image = np.expand_dims(image, axis=0)<\/p>\n<h2><strong>\u6267\u884c\u9884\u6d4b<\/strong><\/h2>\n<p>mask = model.predict(image)[0]<\/p>\n<p>mask = (mask &gt; 0.5).astype(np.uint8) * 255<\/p>\n<h2><strong>\u5e94\u7528\u63a9\u7801<\/strong><\/h2>\n<p>result = cv2.bitwise_and(np.array(image[0] * 255, dtype=np.uint8), np.array(image[0] * 255, dtype=np.uint8), mask=mask)<\/p>\n<p>cv2.imshow(&#39;Result&#39;, result)<\/p>\n<p>cv2.waitKey(0)<\/p>\n<p>cv2.destroyAllWindows()<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e09\u3001\u603b\u7ed3<\/h3>\n<\/p>\n<p><p><strong>\u5728Python\u4e2d\u6e05\u9664\u80cc\u666f\u7684\u65b9\u6cd5\u591a\u79cd\u591a\u6837\uff0cOpenCV\u63d0\u4f9b\u4e86\u4e00\u4e9b\u7b80\u5355\u5feb\u901f\u7684\u5de5\u5177\uff0c\u800c\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u5219\u63d0\u4f9b\u4e86\u66f4\u4e3a\u7cbe\u786e\u7684\u7ed3\u679c\u3002\u9009\u62e9\u5408\u9002\u7684\u65b9\u6cd5\u53d6\u51b3\u4e8e\u5177\u4f53\u7684\u5e94\u7528\u573a\u666f\u548c\u5bf9\u7ed3\u679c\u7684\u8981\u6c42<\/strong>\u3002\u5bf9\u4e8e\u7b80\u5355\u7684\u80cc\u666f\u53bb\u9664\u4efb\u52a1\uff0cOpenCV\u7684\u989c\u8272\u5206\u5272\u548cGrabCut\u7b97\u6cd5\u5df2\u7ecf\u8db3\u591f\uff1b\u800c\u5bf9\u4e8e\u590d\u6742\u80cc\u666f\u7684\u7cbe\u786e\u5206\u5272\uff0c\u4f7f\u7528\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u5982DeepLab\u548cU-Net\u5219\u662f\u66f4\u597d\u7684\u9009\u62e9\u3002\u901a\u8fc7\u7ed3\u5408\u8fd9\u4e9b\u65b9\u6cd5\uff0c\u5f00\u53d1\u8005\u53ef\u4ee5\u6709\u6548\u5730\u89e3\u51b3\u56fe\u50cf\u80cc\u666f\u53bb\u9664\u7684\u95ee\u9898\u3002<\/p>\n<\/p>\n<h2><strong>\u76f8\u5173\u95ee\u7b54FAQs\uff1a<\/strong><\/h2>\n<p> 
<strong>\u5982\u4f55\u5728Python\u4e2d\u4f7f\u7528OpenCV\u6e05\u9664\u56fe\u50cf\u80cc\u666f\uff1f<\/strong><br \/>OpenCV\u662f\u4e00\u4e2a\u5f3a\u5927\u7684\u8ba1\u7b97\u673a\u89c6\u89c9\u5e93\uff0c\u53ef\u4ee5\u6709\u6548\u5730\u5904\u7406\u56fe\u50cf\u3002\u8981\u6e05\u9664\u56fe\u50cf\u80cc\u666f\uff0c\u53ef\u4ee5\u4f7f\u7528\u56fe\u50cf\u5206\u5272\u6280\u672f\uff0c\u6bd4\u5982GrabCut\u7b97\u6cd5\u3002\u9996\u5148\uff0c\u52a0\u8f7d\u56fe\u50cf\u5e76\u521b\u5efa\u4e00\u4e2a\u63a9\u7801\uff0c\u7136\u540e\u5b9a\u4e49\u524d\u666f\u548c\u80cc\u666f\u7684\u533a\u57df\uff0c\u6700\u540e\u8c03\u7528GrabCut\u7b97\u6cd5\u8fdb\u884c\u5904\u7406\uff0c\u6700\u540e\u63d0\u53d6\u51fa\u6e05\u6670\u7684\u524d\u666f\u56fe\u50cf\u3002<\/p>\n<p><strong>Python\u4e2d\u6709\u54ea\u4e9b\u5e93\u53ef\u4ee5\u7528\u4e8e\u80cc\u666f\u53bb\u9664\uff1f<\/strong><br \/>\u9664\u4e86OpenCV\uff0cPython\u8fd8\u6709\u5176\u4ed6\u4e00\u4e9b\u5e93\u53ef\u4ee5\u5e2e\u52a9\u5b9e\u73b0\u80cc\u666f\u53bb\u9664\u3002\u6bd4\u5982\uff0cPillow\u5e93\u63d0\u4f9b\u4e86\u57fa\u672c\u7684\u56fe\u50cf\u5904\u7406\u529f\u80fd\uff0c\u9002\u5408\u7b80\u5355\u7684\u80cc\u666f\u53bb\u9664\uff1b\u800crembg\u5e93\u4e13\u95e8\u7528\u4e8e\u53bb\u9664\u56fe\u7247\u80cc\u666f\uff0c\u4f7f\u7528\u7b80\u5355\u4e14\u6548\u679c\u663e\u8457\u3002\u6839\u636e\u9700\u6c42\u9009\u62e9\u5408\u9002\u7684\u5e93\uff0c\u53ef\u4ee5\u63d0\u9ad8\u5de5\u4f5c\u6548\u7387\u3002<\/p>\n<p><strong>\u5982\u4f55\u4f7f\u7528\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u8fdb\u884c\u80cc\u666f\u53bb\u9664\uff1f<\/strong><br \/>\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\uff0c\u5982U-Net\u548cMask 
R-CNN\uff0c\u80fd\u591f\u5b9e\u73b0\u9ad8\u8d28\u91cf\u7684\u80cc\u666f\u53bb\u9664\u3002\u7528\u6237\u53ef\u4ee5\u4f7f\u7528\u9884\u8bad\u7ec3\u6a21\u578b\uff0c\u52a0\u8f7d\u81ea\u5df1\u7684\u6570\u636e\u96c6\u8fdb\u884c\u5fae\u8c03\uff0c\u6216\u8005\u76f4\u63a5\u4f7f\u7528\u5f00\u6e90\u4ee3\u7801\u5e93\u4e2d\u7684\u6a21\u578b\u3002\u5728\u8bad\u7ec3\u5b8c\u6210\u540e\uff0c\u6a21\u578b\u53ef\u4ee5\u51c6\u786e\u5730\u8bc6\u522b\u56fe\u50cf\u4e2d\u7684\u524d\u666f\u4e0e\u80cc\u666f\uff0c\u4ece\u800c\u5b9e\u73b0\u81ea\u52a8\u5316\u80cc\u666f\u6e05\u9664\u3002<\/p>\n","protected":false},"excerpt":{"rendered":"\u5728Python\u4e2d\u6e05\u9664\u80cc\u666f\u53ef\u4ee5\u901a\u8fc7\u591a\u79cd\u65b9\u6cd5\u5b9e\u73b0\uff0c\u4e3b\u8981\u5305\u62ec\u4f7f\u7528OpenCV\u5e93\u8fdb\u884c\u56fe\u50cf\u5904\u7406\u3001\u5229\u7528\u6df1\u5ea6\u5b66\u4e60\u6a21\u578b\u8fdb\u884c\u56fe\u50cf [&hellip;]","protected":false},"author":3,"featured_media":966659,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"_acf_changed":false,"footnotes":""},"categories":[37],"tags":[],"acf":[],"_links":{"self":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/966654"}],"collection":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/users\/3"}],"replies":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/comments?post=966654"}],"version-history":[{"count":"1","href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/966654\/revisions"}],"predecessor-version":[{"id":966662,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/966654\/revisions\/966662"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media\/966659"}],"wp:attachment":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/med
ia?parent=966654"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/categories?post=966654"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/tags?post=966654"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}