{"id":1160515,"date":"2025-01-13T19:05:40","date_gmt":"2025-01-13T11:05:40","guid":{"rendered":"https:\/\/docs.pingcode.com\/ask\/ask-ask\/1160515.html"},"modified":"2025-01-13T19:05:43","modified_gmt":"2025-01-13T11:05:43","slug":"%e5%a6%82%e4%bd%95%e7%94%a8python%e8%87%aa%e5%8a%a8%e5%a4%84%e7%90%86%e6%96%87%e7%ab%a0","status":"publish","type":"post","link":"https:\/\/docs.pingcode.com\/ask\/1160515.html","title":{"rendered":"\u5982\u4f55\u7528Python\u81ea\u52a8\u5904\u7406\u6587\u7ae0"},"content":{"rendered":"<p style=\"text-align:center;\" ><img decoding=\"async\" src=\"https:\/\/cdn-kb.worktile.com\/kb\/wp-content\/uploads\/2024\/04\/25201937\/09670586-3a44-4661-b705-acc15b758486.webp\" alt=\"\u5982\u4f55\u7528Python\u81ea\u52a8\u5904\u7406\u6587\u7ae0\" \/><\/p>\n<p><p> <strong>\u5982\u4f55\u7528Python\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u6838\u5fc3\u89c2\u70b9\u6709\uff1a\u4f7f\u7528\u81ea\u7136\u8bed\u8a00\u5904\u7406\u5e93\u3001\u6587\u672c\u9884\u5904\u7406\u3001\u5173\u952e\u8bcd\u63d0\u53d6\u3001\u60c5\u611f\u5206\u6790\u3001\u81ea\u52a8\u6458\u8981\u751f\u6210\u3002<\/strong> \u5176\u4e2d\uff0c\u6587\u672c\u9884\u5904\u7406\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u57fa\u7840\u6b65\u9aa4\uff0c\u6d89\u53ca\u5230\u53bb\u9664\u505c\u7528\u8bcd\u3001\u6807\u70b9\u7b26\u53f7\uff0c\u8fdb\u884c\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8\uff0c\u786e\u4fdd\u540e\u7eed\u5904\u7406\u7684\u51c6\u786e\u6027\u548c\u6709\u6548\u6027\u3002\u6587\u672c\u9884\u5904\u7406\u7684\u826f\u597d\u5b9e\u73b0\u80fd\u591f\u6781\u5927\u5730\u63d0\u9ad8\u6574\u4e2a\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u8fc7\u7a0b\u7684\u8d28\u91cf\u548c\u6548\u7387\u3002<\/p>\n<\/p>\n<hr>\n<p><h3>\u4e00\u3001\u4f7f\u7528\u81ea\u7136\u8bed\u8a00\u5904\u7406\u5e93<\/h3>\n<\/p>\n<p><p>\u81ea\u7136\u8bed\u8a00\u5904\u7406\uff08NLP\uff09\u5e93\u662fPython\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u57fa\u7840\u5de5\u5177\uff0c\u8fd9\u4e9b\u5e93\u63d0\u4f9b\u4e86\u4e30\u5bcc\u7684\u529f\u80fd\u548c\u65b9\u6cd5\u6765\u5904\u7406\u548c\u5206\u6790\u6587\u672c\u6570\u636e\u3002\u5e38\u7528\u7684\u81ea\u7136\u8bed\u8a00\u5904\u7406\u5e93\u5305\u62ecNLTK\u3001spaCy\u3001TextBlob\u3001Gensim\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001NLTK<\/h4>\n<\/p>\n<p><p>NLTK\uff08Natural Language Toolkit\uff09\u662f\u4e00\u4e2a\u529f\u80fd\u5f3a\u5927\u7684NLP\u5e93\uff0c\u63d0\u4f9b\u4e86\u4e30\u5bcc\u7684\u5de5\u5177\u548c\u8d44\u6e90\u6765\u5904\u7406\u6587\u672c\u6570\u636e\u3002\u5b83\u5305\u542b\u4e86\u5927\u91cf\u7684\u6587\u672c\u5904\u7406\u548c\u5206\u6790\u529f\u80fd\uff0c\u5982\u5206\u8bcd\u3001\u8bcd\u6027\u6807\u6ce8\u3001\u547d\u540d\u5b9e\u4f53\u8bc6\u522b\u3001\u8bed\u6cd5\u89e3\u6790\u7b49\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import nltk<\/p>\n<p>from nltk.tokenize import word_tokenize<\/p>\n<p>from nltk.corpus import stopwords<\/p>\n<h2><strong>\u4e0b\u8f7d\u5fc5\u8981\u7684\u6570\u636e\u5305<\/strong><\/h2>\n<p>nltk.download(&#39;punkt&#39;)<\/p>\n<p>nltk.download(&#39;stopwords&#39;)<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u5206\u8bcd<\/strong><\/h2>\n<p>tokens = word_tokenize(text)<\/p>\n<h2><strong>\u53bb\u9664\u505c\u7528\u8bcd<\/strong><\/h2>\n<p>stop_words = set(stopwords.words(&#39;english&#39;))<\/p>\n<p>filtered_tokens = [word for word in tokens if word.lower() not in stop_words]<\/p>\n<p>print(filtered_tokens)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001spaCy<\/h4>\n<\/p>\n<p><p>spaCy\u662f\u4e00\u4e2a\u5de5\u4e1a\u7ea7\u7684NLP\u5e93\uff0c\u5177\u6709\u9ad8\u6548\u548c\u5feb\u901f\u7684\u7279\u70b9\uff0c\u9002\u7528\u4e8e\u5927\u89c4\u6a21\u7684\u6587\u672c\u6570\u636e\u5904\u7406\u3002\u5b83\u63d0\u4f9b\u4e86\u5206\u8bcd\u3001\u8bcd\u6027\u6807\u6ce8\u3001\u4f9d\u5b58\u89e3\u6790\u3001\u547d\u540d\u5b9e\u4f53\u8bc6\u522b\u7b49\u529f\u80fd\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import spacy<\/p>\n<h2><strong>\u52a0\u8f7d\u82f1\u8bed\u6a21\u578b<\/strong><\/h2>\n<p>nlp = spacy.load(&#39;en_core_web_sm&#39;)<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u5904\u7406\u6587\u672c<\/strong><\/h2>\n<p>doc = nlp(text)<\/p>\n<h2><strong>\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8<\/strong><\/h2>\n<p>for token in doc:<\/p>\n<p>    print(token.text, token.pos_)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e8c\u3001\u6587\u672c\u9884\u5904\u7406<\/h3>\n<\/p>\n<p><p>\u6587\u672c\u9884\u5904\u7406\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u91cd\u8981\u6b65\u9aa4\uff0c\u786e\u4fdd\u6587\u672c\u6570\u636e\u7684\u8d28\u91cf\u548c\u4e00\u81f4\u6027\uff0c\u4e3a\u540e\u7eed\u7684\u5206\u6790\u548c\u5904\u7406\u5960\u5b9a\u57fa\u7840\u3002\u5e38\u89c1\u7684\u6587\u672c\u9884\u5904\u7406\u6b65\u9aa4\u5305\u62ec\u53bb\u9664\u505c\u7528\u8bcd\u3001\u6807\u70b9\u7b26\u53f7\uff0c\u8fdb\u884c\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8\u3002<\/p>\n<\/p>\n<p><h4>1\u3001\u53bb\u9664\u505c\u7528\u8bcd\u548c\u6807\u70b9\u7b26\u53f7<\/h4>\n<\/p>\n<p><p>\u505c\u7528\u8bcd\u548c\u6807\u70b9\u7b26\u53f7\u901a\u5e38\u5728\u6587\u672c\u5206\u6790\u4e2d\u6ca1\u6709\u5b9e\u9645\u610f\u4e49\uff0c\u9700\u8981\u5728\u9884\u5904\u7406\u9636\u6bb5\u53bb\u9664\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import string<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u53bb\u9664\u6807\u70b9\u7b26\u53f7<\/strong><\/h2>\n<p>text = text.translate(str.maketrans(&#39;&#39;, &#39;&#39;, string.punctuation))<\/p>\n<h2><strong>\u5206\u8bcd<\/strong><\/h2>\n<p>tokens = word_tokenize(text)<\/p>\n<h2><strong>\u53bb\u9664\u505c\u7528\u8bcd<\/strong><\/h2>\n<p>filtered_tokens = [word for word in tokens if word.lower() not in stop_words]<\/p>\n<p>print(filtered_tokens)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8<\/h4>\n<\/p>\n<p><p>\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8\u662f\u6587\u672c\u9884\u5904\u7406\u7684\u91cd\u8981\u6b65\u9aa4\uff0c\u6709\u52a9\u4e8e\u540e\u7eed\u7684\u6587\u672c\u5206\u6790\u548c\u5904\u7406\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\"># \u4f7f\u7528spaCy\u8fdb\u884c\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8<\/p>\n<p>doc = nlp(text)<\/p>\n<h2><strong>\u5206\u8bcd\u548c\u8bcd\u6027\u6807\u6ce8<\/strong><\/h2>\n<p>for token in doc:<\/p>\n<p>    print(token.text, token.pos_)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e09\u3001\u5173\u952e\u8bcd\u63d0\u53d6<\/h3>\n<\/p>\n<p><p>\u5173\u952e\u8bcd\u63d0\u53d6\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u91cd\u8981\u6b65\u9aa4\uff0c\u6709\u52a9\u4e8e\u8bc6\u522b\u548c\u63d0\u53d6\u6587\u672c\u4e2d\u7684\u91cd\u8981\u4fe1\u606f\u3002\u5e38\u7528\u7684\u5173\u952e\u8bcd\u63d0\u53d6\u65b9\u6cd5\u5305\u62ecTF-IDF\u3001RAKE\u3001TextRank\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001TF-IDF<\/h4>\n<\/p>\n<p><p>TF-IDF\uff08Term Frequency-Inverse Document Frequency\uff09\u662f\u4e00\u79cd\u5e38\u7528\u7684\u5173\u952e\u8bcd\u63d0\u53d6\u65b9\u6cd5\uff0c\u901a\u8fc7\u8ba1\u7b97\u8bcd\u8bed\u5728\u6587\u6863\u4e2d\u7684\u9891\u7387\u548c\u5728\u6574\u4e2a\u8bed\u6599\u5e93\u4e2d\u7684\u9006\u9891\u7387\u6765\u8861\u91cf\u8bcd\u8bed\u7684\u91cd\u8981\u6027\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from sklearn.feature_extraction.text import TfidfVectorizer<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>documents = [&quot;Python is a powerful programming language.&quot;, <\/p>\n<p>             &quot;Machine learning is a fascinating field.&quot;]<\/p>\n<h2><strong>\u8ba1\u7b97TF-IDF<\/strong><\/h2>\n<p>vectorizer = TfidfVectorizer()<\/p>\n<p>tfidf_matrix = vectorizer.fit_transform(documents)<\/p>\n<h2><strong>\u8f93\u51faTF-IDF\u77e9\u9635<\/strong><\/h2>\n<p>print(tfidf_matrix.toarray())<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001RAKE<\/h4>\n<\/p>\n<p><p>RAKE\uff08Rapid Automatic Keyword Extraction\uff09\u662f\u4e00\u79cd\u7b80\u5355\u9ad8\u6548\u7684\u5173\u952e\u8bcd\u63d0\u53d6\u7b97\u6cd5\uff0c\u901a\u8fc7\u5206\u6790\u8bcd\u8bed\u7684\u5171\u73b0\u5173\u7cfb\u6765\u63d0\u53d6\u5173\u952e\u8bcd\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from rake_nltk import Rake<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u4f7f\u7528RAKE\u63d0\u53d6\u5173\u952e\u8bcd<\/strong><\/h2>\n<p>rake = Rake()<\/p>\n<p>rake.extract_keywords_from_text(text)<\/p>\n<p>keywords = rake.get_ranked_phrases()<\/p>\n<p>print(keywords)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u56db\u3001\u60c5\u611f\u5206\u6790<\/h3>\n<\/p>\n<p><p>\u60c5\u611f\u5206\u6790\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u91cd\u8981\u5e94\u7528\uff0c\u901a\u8fc7\u5206\u6790\u6587\u672c\u4e2d\u7684\u60c5\u611f\u503e\u5411\u6765\u8bc6\u522b\u548c\u5206\u7c7b\u6587\u672c\u7684\u60c5\u611f\u3002\u5e38\u7528\u7684\u60c5\u611f\u5206\u6790\u5e93\u5305\u62ecTextBlob\u3001VADER\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001TextBlob<\/h4>\n<\/p>\n<p><p>TextBlob\u662f\u4e00\u4e2a\u7b80\u5355\u6613\u7528\u7684NLP\u5e93\uff0c\u63d0\u4f9b\u4e86\u6587\u672c\u5904\u7406\u548c\u60c5\u611f\u5206\u6790\u529f\u80fd\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from textblob import TextBlob<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u60c5\u611f\u5206\u6790<\/strong><\/h2>\n<p>blob = TextBlob(text)<\/p>\n<p>sentiment = blob.sentiment<\/p>\n<p>print(sentiment)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001VADER<\/h4>\n<\/p>\n<p><p>VADER\uff08Valence Aware Dictionary and sEntiment Reasoner\uff09\u662f\u4e00\u79cd\u4e13\u4e3a\u793e\u4ea4\u5a92\u4f53\u6587\u672c\u8bbe\u8ba1\u7684\u60c5\u611f\u5206\u6790\u5de5\u5177\uff0c\u5177\u6709\u9ad8\u6548\u548c\u51c6\u786e\u7684\u7279\u70b9\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u4f7f\u7528VADER\u8fdb\u884c\u60c5\u611f\u5206\u6790<\/strong><\/h2>\n<p>analyzer = SentimentIntensityAnalyzer()<\/p>\n<p>sentiment = analyzer.polarity_scores(text)<\/p>\n<p>print(sentiment)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e94\u3001\u81ea\u52a8\u6458\u8981\u751f\u6210<\/h3>\n<\/p>\n<p><p>\u81ea\u52a8\u6458\u8981\u751f\u6210\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u9ad8\u7ea7\u5e94\u7528\uff0c\u901a\u8fc7\u63d0\u53d6\u6587\u672c\u4e2d\u7684\u91cd\u8981\u4fe1\u606f\u6765\u751f\u6210\u7b80\u6d01\u7684\u6458\u8981\u3002\u5e38\u7528\u7684\u81ea\u52a8\u6458\u8981\u751f\u6210\u65b9\u6cd5\u5305\u62ec\u63d0\u53d6\u5f0f\u6458\u8981\u548c\u751f\u6210\u5f0f\u6458\u8981\u3002<\/p>\n<\/p>\n<p><h4>1\u3001\u63d0\u53d6\u5f0f\u6458\u8981<\/h4>\n<\/p>\n<p><p>\u63d0\u53d6\u5f0f\u6458\u8981\u901a\u8fc7\u63d0\u53d6\u539f\u6587\u4e2d\u7684\u91cd\u8981\u53e5\u5b50\u6765\u751f\u6210\u6458\u8981\uff0c\u5e38\u7528\u7684\u65b9\u6cd5\u5305\u62ecTextRank\u3001LSA\u7b49\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from gensim.summarization import summarize<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language. It is widely used in various fields such as data science, machine learning, web development, etc. Python has a simple syntax and a large number of libraries, making it easy to learn and use.&quot;<\/p>\n<h2><strong>\u4f7f\u7528Gensim\u751f\u6210\u6458\u8981<\/strong><\/h2>\n<p>summary = summarize(text)<\/p>\n<p>print(summary)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001\u751f\u6210\u5f0f\u6458\u8981<\/h4>\n<\/p>\n<p><p>\u751f\u6210\u5f0f\u6458\u8981\u901a\u8fc7\u751f\u6210\u65b0\u7684\u53e5\u5b50\u6765\u751f\u6210\u6458\u8981\uff0c\u5e38\u7528\u7684\u65b9\u6cd5\u5305\u62ec\u5e8f\u5217\u5230\u5e8f\u5217\uff08Seq2Seq\uff09\u6a21\u578b\u3001Transformer\u7b49\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from transformers import pipeline<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is a powerful programming language. It is widely used in various fields such as data science, machine learning, web development, etc. Python has a simple syntax and a large number of libraries, making it easy to learn and use.&quot;<\/p>\n<h2><strong>\u4f7f\u7528Transformer\u751f\u6210\u6458\u8981<\/strong><\/h2>\n<p>summarizer = pipeline(&quot;summarization&quot;)<\/p>\n<p>summary = summarizer(text, max_length=50, min_length=25, do_sample=False)<\/p>\n<p>print(summary)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u516d\u3001\u6587\u672c\u5206\u7c7b<\/h3>\n<\/p>\n<p><p>\u6587\u672c\u5206\u7c7b\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u91cd\u8981\u5e94\u7528\uff0c\u901a\u8fc7\u5c06\u6587\u672c\u5206\u914d\u5230\u9884\u5b9a\u4e49\u7684\u7c7b\u522b\u4e2d\u6765\u5b9e\u73b0\u5206\u7c7b\u3002\u5e38\u7528\u7684\u6587\u672c\u5206\u7c7b\u65b9\u6cd5\u5305\u62ec\u6734\u7d20\u8d1d\u53f6\u65af\u3001\u652f\u6301\u5411\u91cf\u673a\u3001\u6df1\u5ea6\u5b66\u4e60\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001\u6734\u7d20\u8d1d\u53f6\u65af<\/h4>\n<\/p>\n<p><p>\u6734\u7d20\u8d1d\u53f6\u65af\u662f\u4e00\u79cd\u7b80\u5355\u9ad8\u6548\u7684\u6587\u672c\u5206\u7c7b\u65b9\u6cd5\uff0c\u9002\u7528\u4e8e\u5c0f\u89c4\u6a21\u6587\u672c\u6570\u636e\u5206\u7c7b\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from sklearn.feature_extraction.text import CountVectorizer<\/p>\n<p>from sklearn.n<a href=\"https:\/\/docs.pingcode.com\/blog\/59162.html\" target=\"_blank\">AI<\/a>ve_bayes import MultinomialNB<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>documents = [&quot;Python is a powerful programming language.&quot;, <\/p>\n<p>             &quot;Machine learning is a fascinating field.&quot;]<\/p>\n<p>labels = [&quot;programming&quot;, &quot;machine_learning&quot;]<\/p>\n<h2><strong>\u7279\u5f81\u63d0\u53d6<\/strong><\/h2>\n<p>vectorizer = CountVectorizer()<\/p>\n<p>X = vectorizer.fit_transform(documents)<\/p>\n<h2><strong>\u8bad\u7ec3\u6734\u7d20\u8d1d\u53f6\u65af\u5206\u7c7b\u5668<\/strong><\/h2>\n<p>classifier = MultinomialNB()<\/p>\n<p>classifier.fit(X, labels)<\/p>\n<h2><strong>\u6d4b\u8bd5\u5206\u7c7b\u5668<\/strong><\/h2>\n<p>test_document = [&quot;Python is widely used in data science.&quot;]<\/p>\n<p>test_X = vectorizer.transform(test_document)<\/p>\n<p>predicted_label = classifier.predict(test_X)<\/p>\n<p>print(predicted_label)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001\u6df1\u5ea6\u5b66\u4e60<\/h4>\n<\/p>\n<p><p>\u6df1\u5ea6\u5b66\u4e60\u65b9\u6cd5\uff0c\u5982\u5377\u79ef\u795e\u7ecf\u7f51\u7edc\uff08CNN\uff09\u3001\u5faa\u73af\u795e\u7ecf\u7f51\u7edc\uff08RNN\uff09\u3001Transformer\u7b49\uff0c\u9002\u7528\u4e8e\u5927\u89c4\u6a21\u6587\u672c\u6570\u636e\u5206\u7c7b\uff0c\u5177\u6709\u8f83\u9ad8\u7684\u5206\u7c7b\u51c6\u786e\u7387\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from keras.preprocessing.text import Tokenizer<\/p>\n<p>from keras.preprocessing.sequence import pad_sequences<\/p>\n<p>from keras.models import Sequential<\/p>\n<p>from keras.layers import Embedding, LSTM, Dense<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>documents = [&quot;Python is a powerful programming language.&quot;, <\/p>\n<p>             &quot;Machine learning is a fascinating field.&quot;]<\/p>\n<p>labels = [1, 0]<\/p>\n<h2><strong>\u6587\u672c\u9884\u5904\u7406<\/strong><\/h2>\n<p>tokenizer = Tokenizer(num_words=5000)<\/p>\n<p>tokenizer.fit_on_texts(documents)<\/p>\n<p>X = tokenizer.texts_to_sequences(documents)<\/p>\n<p>X = pad_sequences(X, maxlen=100)<\/p>\n<h2><strong>\u6784\u5efaLSTM\u6a21\u578b<\/strong><\/h2>\n<p>model = Sequential()<\/p>\n<p>model.add(Embedding(input_dim=5000, output_dim=128, input_length=100))<\/p>\n<p>model.add(LSTM(units=128))<\/p>\n<p>model.add(Dense(units=1, activation=&#39;sigmoid&#39;))<\/p>\n<h2><strong>\u7f16\u8bd1\u6a21\u578b<\/strong><\/h2>\n<p>model.compile(optimizer=&#39;adam&#39;, loss=&#39;binary_crossentropy&#39;, metrics=[&#39;accuracy&#39;])<\/p>\n<h2><strong>\u8bad\u7ec3\u6a21\u578b<\/strong><\/h2>\n<p>model.fit(X, labels, epochs=10, batch_size=32)<\/p>\n<h2><strong>\u6d4b\u8bd5\u6a21\u578b<\/strong><\/h2>\n<p>test_document = [&quot;Python is widely used in data science.&quot;]<\/p>\n<p>test_X = tokenizer.texts_to_sequences(test_document)<\/p>\n<p>test_X = pad_sequences(test_X, maxlen=100)<\/p>\n<p>predicted_label = model.predict(test_X)<\/p>\n<p>print(predicted_label)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e03\u3001\u6587\u672c\u751f\u6210<\/h3>\n<\/p>\n<p><p>\u6587\u672c\u751f\u6210\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u9ad8\u7ea7\u5e94\u7528\uff0c\u901a\u8fc7\u751f\u6210\u65b0\u7684\u6587\u672c\u5185\u5bb9\u6765\u5b9e\u73b0\u81ea\u52a8\u5316\u5199\u4f5c\u3002\u5e38\u7528\u7684\u6587\u672c\u751f\u6210\u65b9\u6cd5\u5305\u62ec\u5e8f\u5217\u5230\u5e8f\u5217\uff08Seq2Seq\uff09\u6a21\u578b\u3001GPT\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001\u5e8f\u5217\u5230\u5e8f\u5217\uff08Seq2Seq\uff09\u6a21\u578b<\/h4>\n<\/p>\n<p><p>\u5e8f\u5217\u5230\u5e8f\u5217\u6a21\u578b\u901a\u8fc7\u7f16\u7801\u5668-\u89e3\u7801\u5668\u67b6\u6784\u6765\u5b9e\u73b0\u6587\u672c\u751f\u6210\uff0c\u9002\u7528\u4e8e\u673a\u5668\u7ffb\u8bd1\u3001\u5bf9\u8bdd\u751f\u6210\u7b49\u4efb\u52a1\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from keras.models import Model<\/p>\n<p>from keras.layers import Input, LSTM, Dense<\/p>\n<h2><strong>\u793a\u4f8b\u6570\u636e<\/strong><\/h2>\n<p>input_texts = [&quot;Python is a powerful programming language.&quot;]<\/p>\n<p>target_texts = [&quot;Python \u662f\u4e00\u79cd\u5f3a\u5927\u7684\u7f16\u7a0b\u8bed\u8a00\u3002&quot;]<\/p>\n<h2><strong>\u6587\u672c\u9884\u5904\u7406<\/strong><\/h2>\n<p>input_tokenizer = Tokenizer(num_words=5000)<\/p>\n<p>target_tokenizer = Tokenizer(num_words=5000)<\/p>\n<p>input_tokenizer.fit_on_texts(input_texts)<\/p>\n<p>target_tokenizer.fit_on_texts(target_texts)<\/p>\n<p>input_sequences = input_tokenizer.texts_to_sequences(input_texts)<\/p>\n<p>target_sequences = target_tokenizer.texts_to_sequences(target_texts)<\/p>\n<p>input_sequences = pad_sequences(input_sequences, maxlen=100)<\/p>\n<p>target_sequences = pad_sequences(target_sequences, maxlen=100)<\/p>\n<h2><strong>\u6784\u5efaSeq2Seq\u6a21\u578b<\/strong><\/h2>\n<p>encoder_inputs = Input(shape=(None,))<\/p>\n<p>encoder_embedding = Embedding(input_dim=5000, output_dim=128)(encoder_inputs)<\/p>\n<p>encoder_lstm = LSTM(units=128, return_state=True)<\/p>\n<p>encoder_outputs, state_h, state_c = encoder_lstm(encoder_embedding)<\/p>\n<p>encoder_states = [state_h, state_c]<\/p>\n<p>decoder_inputs = Input(shape=(None,))<\/p>\n<p>decoder_embedding = Embedding(input_dim=5000, output_dim=128)(decoder_inputs)<\/p>\n<p>decoder_lstm = LSTM(units=128, return_sequences=True, return_state=True)<\/p>\n<p>decoder_outputs, _, _ = decoder_lstm(decoder_embedding, initial_state=encoder_states)<\/p>\n<p>decoder_dense = Dense(units=5000, activation=&#39;softmax&#39;)<\/p>\n<p>decoder_outputs = decoder_dense(decoder_outputs)<\/p>\n<p>model = Model([encoder_inputs, decoder_inputs], decoder_outputs)<\/p>\n<h2><strong>\u7f16\u8bd1\u6a21\u578b<\/strong><\/h2>\n<p>model.compile(optimizer=&#39;adam&#39;, loss=&#39;sparse_categorical_crossentropy&#39;, metrics=[&#39;accuracy&#39;])<\/p>\n<h2><strong>\u8bad\u7ec3\u6a21\u578b<\/strong><\/h2>\n<p>model.fit([input_sequences, target_sequences], target_sequences, epochs=10, batch_size=32)<\/p>\n<h2><strong>\u6d4b\u8bd5\u6a21\u578b<\/strong><\/h2>\n<p>test_input_text = [&quot;Python is a powerful programming language.&quot;]<\/p>\n<p>test_input_sequences = input_tokenizer.texts_to_sequences(test_input_text)<\/p>\n<p>test_input_sequences = pad_sequences(test_input_sequences, maxlen=100)<\/p>\n<p>predicted_sequence = model.predict([test_input_sequences, target_sequences])<\/p>\n<p>print(predicted_sequence)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001GPT<\/h4>\n<\/p>\n<p><p>GPT\uff08Generative Pre-trained Transformer\uff09\u662f\u4e00\u79cd\u57fa\u4e8eTransformer\u7684\u6587\u672c\u751f\u6210\u6a21\u578b\uff0c\u5177\u6709\u5f3a\u5927\u7684\u6587\u672c\u751f\u6210\u80fd\u529b\uff0c\u9002\u7528\u4e8e\u81ea\u52a8\u5316\u5199\u4f5c\u3001\u5bf9\u8bdd\u751f\u6210\u7b49\u4efb\u52a1\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from transformers import GPT2LMHeadModel, GPT2Tokenizer<\/p>\n<h2><strong>\u52a0\u8f7dGPT\u6a21\u578b\u548c\u5206\u8bcd\u5668<\/strong><\/h2>\n<p>model_name = &#39;gpt2&#39;<\/p>\n<p>model = GPT2LMHeadModel.from_pretrained(model_name)<\/p>\n<p>tokenizer = GPT2Tokenizer.from_pretrained(model_name)<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>input_text = &quot;Python is a powerful programming language.&quot;<\/p>\n<h2><strong>\u6587\u672c\u751f\u6210<\/strong><\/h2>\n<p>input_ids = tokenizer.encode(input_text, return_tensors=&#39;pt&#39;)<\/p>\n<p>output = model.generate(input_ids, max_length=50)<\/p>\n<p>generated_text = tokenizer.decode(output[0], skip_special_tokens=True)<\/p>\n<p>print(generated_text)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u516b\u3001\u6587\u672c\u76f8\u4f3c\u5ea6\u8ba1\u7b97<\/h3>\n<\/p>\n<p><p>\u6587\u672c\u76f8\u4f3c\u5ea6\u8ba1\u7b97\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u5e38\u89c1\u4efb\u52a1\uff0c\u901a\u8fc7\u8ba1\u7b97\u6587\u672c\u4e4b\u95f4\u7684\u76f8\u4f3c\u5ea6\u6765\u5b9e\u73b0\u6587\u672c\u5339\u914d\u3001\u63a8\u8350\u7b49\u529f\u80fd\u3002\u5e38\u7528\u7684\u6587\u672c\u76f8\u4f3c\u5ea6\u8ba1\u7b97\u65b9\u6cd5\u5305\u62ec\u4f59\u5f26\u76f8\u4f3c\u5ea6\u3001Jaccard\u76f8\u4f3c\u5ea6\u3001Word2Vec\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001\u4f59\u5f26\u76f8\u4f3c\u5ea6<\/h4>\n<\/p>\n<p><p>\u4f59\u5f26\u76f8\u4f3c\u5ea6\u901a\u8fc7\u8ba1\u7b97\u4e24\u4e2a\u6587\u672c\u5411\u91cf\u7684\u4f59\u5f26\u503c\u6765\u8861\u91cf\u6587\u672c\u4e4b\u95f4\u7684\u76f8\u4f3c\u5ea6\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from sklearn.feature_extraction.text import TfidfVectorizer<\/p>\n<p>from sklearn.metrics.pairwise import cosine_similarity<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>documents = [&quot;Python is a powerful programming language.&quot;, <\/p>\n<p>             &quot;Machine learning is a fascinating field.&quot;]<\/p>\n<h2><strong>\u8ba1\u7b97TF-IDF<\/strong><\/h2>\n<p>vectorizer = TfidfVectorizer()<\/p>\n<p>tfidf_matrix = vectorizer.fit_transform(documents)<\/p>\n<h2><strong>\u8ba1\u7b97\u4f59\u5f26\u76f8\u4f3c\u5ea6<\/strong><\/h2>\n<p>cosine_sim = cosine_similarity(tfidf_matrix[0:1], tfidf_matrix)<\/p>\n<p>print(cosine_sim)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001Word2Vec<\/h4>\n<\/p>\n<p><p>Word2Vec\u901a\u8fc7\u5c06\u8bcd\u8bed\u6620\u5c04\u5230\u5411\u91cf\u7a7a\u95f4\u6765\u8ba1\u7b97\u6587\u672c\u4e4b\u95f4\u7684\u76f8\u4f3c\u5ea6\uff0c\u9002\u7528\u4e8e\u5927\u89c4\u6a21\u6587\u672c\u6570\u636e\u76f8\u4f3c\u5ea6\u8ba1\u7b97\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from gensim.models import Word2Vec<\/p>\n<p>from nltk.tokenize import word_tokenize<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>documents = [&quot;Python is a powerful programming language.&quot;, <\/p>\n<p>             &quot;Machine learning is a fascinating field.&quot;]<\/p>\n<h2><strong>\u5206\u8bcd<\/strong><\/h2>\n<p>tokenized_documents = [word_tokenize(doc.lower()) for doc in documents]<\/p>\n<h2><strong>\u8bad\u7ec3Word2Vec\u6a21\u578b<\/strong><\/h2>\n<p>model = Word2Vec(sentences=tokenized_documents, vector_size=100, window=5, min_count=1, workers=4)<\/p>\n<h2><strong>\u8ba1\u7b97\u8bcd\u8bed\u76f8\u4f3c\u5ea6<\/strong><\/h2>\n<p>similarity = model.wv.similarity(&#39;python&#39;, &#39;machine&#39;)<\/p>\n<p>print(similarity)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u4e5d\u3001\u547d\u540d\u5b9e\u4f53\u8bc6\u522b<\/h3>\n<\/p>\n<p><p>\u547d\u540d\u5b9e\u4f53\u8bc6\u522b\uff08NER\uff09\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u91cd\u8981\u4efb\u52a1\uff0c\u901a\u8fc7\u8bc6\u522b\u548c\u5206\u7c7b\u6587\u672c\u4e2d\u7684\u5b9e\u4f53\u6765\u5b9e\u73b0\u4fe1\u606f\u62bd\u53d6\u3002\u5e38\u7528\u7684NER\u65b9\u6cd5\u5305\u62ec\u89c4\u5219\u57fa\u65b9\u6cd5\u3001\u7edf\u8ba1\u5b66\u4e60\u65b9\u6cd5\u3001\u6df1\u5ea6\u5b66\u4e60\u65b9\u6cd5\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001spaCy NER<\/h4>\n<\/p>\n<p><p>spaCy\u63d0\u4f9b\u4e86\u5f3a\u5927\u7684NER\u529f\u80fd\uff0c\u80fd\u591f\u8bc6\u522b\u6587\u672c\u4e2d\u7684\u4eba\u540d\u3001\u5730\u540d\u3001\u7ec4\u7ec7\u7b49\u5b9e\u4f53\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">import spacy<\/p>\n<h2><strong>\u52a0\u8f7d\u82f1\u8bed\u6a21\u578b<\/strong><\/h2>\n<p>nlp = spacy.load(&#39;en_core_web_sm&#39;)<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is developed by Guido van Rossum.&quot;<\/p>\n<h2><strong>\u547d\u540d\u5b9e\u4f53\u8bc6\u522b<\/strong><\/h2>\n<p>doc = nlp(text)<\/p>\n<p>for ent in doc.ents:<\/p>\n<p>    print(ent.text, ent.label_)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h4>2\u3001\u6df1\u5ea6\u5b66\u4e60 NER<\/h4>\n<\/p>\n<p><p>\u6df1\u5ea6\u5b66\u4e60\u65b9\u6cd5\u901a\u8fc7\u8bad\u7ec3\u795e\u7ecf\u7f51\u7edc\u6a21\u578b\u6765\u5b9e\u73b0NER\uff0c\u5177\u6709\u8f83\u9ad8\u7684\u8bc6\u522b\u51c6\u786e\u7387\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from transformers import BertTokenizer, BertForTokenClassification<\/p>\n<p>from transformers import pipeline<\/p>\n<h2><strong>\u52a0\u8f7dBERT\u6a21\u578b\u548c\u5206\u8bcd\u5668<\/strong><\/h2>\n<p>model_name = &#39;dbmdz\/bert-large-cased-finetuned-conll03-english&#39;<\/p>\n<p>tokenizer = BertTokenizer.from_pretrained(model_name)<\/p>\n<p>model = BertForTokenClassification.from_pretrained(model_name)<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>text = &quot;Python is developed by Guido van Rossum.&quot;<\/p>\n<h2><strong>\u547d\u540d\u5b9e\u4f53\u8bc6\u522b<\/strong><\/h2>\n<p>ner_pipeline = pipeline(&#39;ner&#39;, model=model, tokenizer=tokenizer)<\/p>\n<p>entities = ner_pipeline(text)<\/p>\n<p>print(entities)<\/p>\n<p><\/code><\/pre>\n<\/p>\n<p><h3>\u5341\u3001\u4e3b\u9898\u6a21\u578b<\/h3>\n<\/p>\n<p><p>\u4e3b\u9898\u6a21\u578b\u662f\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u91cd\u8981\u65b9\u6cd5\uff0c\u901a\u8fc7\u8bc6\u522b\u548c\u63d0\u53d6\u6587\u672c\u4e2d\u7684\u4e3b\u9898\u6765\u5b9e\u73b0\u6587\u672c\u5206\u7c7b\u3001\u4fe1\u606f\u68c0\u7d22\u7b49\u4efb\u52a1\u3002\u5e38\u7528\u7684\u4e3b\u9898\u6a21\u578b\u65b9\u6cd5\u5305\u62ecLDA\u3001NMF\u7b49\u3002<\/p>\n<\/p>\n<p><h4>1\u3001LDA<\/h4>\n<\/p>\n<p><p>LDA\uff08Latent Dirichlet Allocation\uff09\u662f\u4e00\u79cd\u5e38\u7528\u7684\u4e3b\u9898\u6a21\u578b\u65b9\u6cd5\uff0c\u901a\u8fc7\u5047\u8bbe\u6587\u672c\u7531\u591a\u4e2a\u4e3b\u9898\u6df7\u5408\u751f\u6210\u6765\u63d0\u53d6\u6587\u672c\u4e2d\u7684\u4e3b\u9898\u3002<\/p>\n<\/p>\n<p><pre><code class=\"language-python\">from sklearn.feature_extraction.text import CountVectorizer<\/p>\n<p>from sklearn.decomposition import LatentDirichletAllocation<\/p>\n<h2><strong>\u793a\u4f8b\u6587\u672c<\/strong><\/h2>\n<p>documents = [&quot;Python is a powerful programming language.&quot;, <\/p>\n<p>             &quot;Machine learning is a fascinating field.&quot;]<\/p>\n<h2><strong>\u7279\u5f81\u63d0\u53d6<\/strong><\/h2>\n<p>vectorizer = CountVectorizer()<\/p>\n<p>X = vectorizer.fit_transform(documents)<\/p>\n<h2><strong>\u8bad\u7ec3LDA\u6a21\u578b<\/strong><\/h2>\n<p>lda = LatentDirichletAllocation(n_components=2, random_state=0)<\/p>\n<p>lda.fit(X)<\/p>\n<h2><strong>\u8f93\u51fa\u4e3b\u9898\u8bcd<\/strong><\/h2>\n<p>for index, topic in enumerate(lda.components_):<\/p>\n<p>    print(f&quot;Topic {index+1}:&quot;)<\/p>\n<p>    print([vectorizer<\/p>\n<p><\/code><\/pre>\n<\/p>\n<h2><strong>\u76f8\u5173\u95ee\u7b54FAQs\uff1a<\/strong><\/h2>\n<p> <strong>\u5982\u4f55\u7528Python\u5904\u7406\u4e0d\u540c\u683c\u5f0f\u7684\u6587\u7ae0\uff1f<\/strong><br \/>Python\u63d0\u4f9b\u4e86\u4e30\u5bcc\u7684\u5e93\u548c\u5de5\u5177\uff0c\u53ef\u4ee5\u5904\u7406\u591a\u79cd\u683c\u5f0f\u7684\u6587\u7ae0\uff0c\u4f8b\u5982\u6587\u672c\u6587\u4ef6\u3001Markdown\u3001HTML\u7b49\u3002\u4f7f\u7528<code>pandas<\/code>\u53ef\u4ee5\u8f7b\u677e\u8bfb\u53d6CSV\u6216Excel\u6587\u4ef6\uff0c<code>BeautifulSoup<\/code>\u9002\u5408\u89e3\u6790HTML\uff0c<code>markdown<\/code>\u5e93\u53ef\u4ee5\u5c06Markdown\u8f6c\u6362\u4e3aHTML\u683c\u5f0f\u3002\u6839\u636e\u6587\u7ae0\u683c\u5f0f\u7684\u4e0d\u540c\uff0c\u9009\u62e9\u5408\u9002\u7684\u5e93\u548c\u65b9\u6cd5\uff0c\u53ef\u4ee5\u9ad8\u6548\u5730\u5904\u7406\u548c\u5206\u6790\u6587\u7ae0\u5185\u5bb9\u3002<\/p>\n<p><strong>Python\u4e2d\u6709\u54ea\u4e9b\u5e93\u53ef\u4ee5\u5e2e\u52a9\u6211\u81ea\u52a8\u5316\u6587\u7ae0\u5904\u7406\uff1f<\/strong><br \/>\u5728Python\u4e2d\uff0c\u6709\u591a\u4e2a\u5e93\u53ef\u4ee5\u5e2e\u52a9\u81ea\u52a8\u5316\u6587\u7ae0\u5904\u7406\u3002<code>nltk<\/code>\u548c<code>spaCy<\/code>\u662f\u5904\u7406\u81ea\u7136\u8bed\u8a00\u7684\u91cd\u8981\u5de5\u5177\uff0c\u80fd\u591f\u8fdb\u884c\u5206\u8bcd\u3001\u8bcd\u6027\u6807\u6ce8\u7b49\u64cd\u4f5c\u3002<code>gensim<\/code>\u9002\u5408\u8fdb\u884c\u4e3b\u9898\u5efa\u6a21\uff0c\u800c<code>PyPDF2<\/code>\u548c<code>pdfminer<\/code>\u5219\u7528\u4e8e\u5904\u7406PDF\u6587\u4ef6\u3002\u6b64\u5916\uff0c<code>docx<\/code>\u5e93\u53ef\u4ee5\u5904\u7406Word\u6587\u6863\u3002\u8fd9\u4e9b\u5e93\u5404\u5177\u7279\u8272\uff0c\u80fd\u591f\u6ee1\u8db3\u4e0d\u540c\u7684\u9700\u6c42\u3002<\/p>\n<p><strong>\u5982\u4f55\u7528Python\u81ea\u52a8\u751f\u6210\u6587\u7ae0\u6458\u8981\uff1f<\/strong><br \/>\u751f\u6210\u6587\u7ae0\u6458\u8981\u53ef\u4ee5\u901a\u8fc7\u81ea\u7136\u8bed\u8a00\u5904\u7406\u6280\u672f\u5b9e\u73b0\u3002\u4f7f\u7528<code>nltk<\/code>\u6216<code>spaCy<\/code>\u63d0\u53d6\u5173\u952e\u8bcd\u6216\u91cd\u8981\u53e5\u5b50\uff0c\u53ef\u4ee5\u521b\u5efa\u7b80\u6d01\u7684\u6458\u8981\u3002\u6b64\u5916\uff0c<code>transformers<\/code>\u5e93\u4e2d\u7684\u9884\u8bad\u7ec3\u6a21\u578b\uff0c\u5982BERT\u548cGPT\uff0c\u53ef\u4ee5\u751f\u6210\u66f4\u4e3a\u81ea\u7136\u548c\u8fde\u8d2f\u7684\u6458\u8981\u3002\u901a\u8fc7\u5bf9\u8f93\u5165\u6587\u7ae0\u8fdb\u884c\u5206\u6790\uff0c\u53ef\u4ee5\u81ea\u52a8\u5316\u751f\u6210\u9ad8\u8d28\u91cf\u7684\u6458\u8981\uff0c\u8282\u7701\u65f6\u95f4\u548c\u7cbe\u529b\u3002<\/p>\n","protected":false},"excerpt":{"rendered":"\u5982\u4f55\u7528Python\u81ea\u52a8\u5904\u7406\u6587\u7ae0\u7684\u6838\u5fc3\u89c2\u70b9\u6709\uff1a\u4f7f\u7528\u81ea\u7136\u8bed\u8a00\u5904\u7406\u5e93\u3001\u6587\u672c\u9884\u5904\u7406\u3001\u5173\u952e\u8bcd\u63d0\u53d6\u3001\u60c5\u611f\u5206\u6790\u3001\u81ea\u52a8\u6458\u8981\u751f\u6210 [&hellip;]","protected":false},"author":3,"featured_media":1160520,"comment_status":"closed","ping_status":"","sticky":false,"template":"","format":"standard","meta":{"_acf_changed":false,"footnotes":""},"categories":[37],"tags":[],"acf":[],"_links":{"self":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1160515"}],"collection":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/users\/3"}],"replies":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/comments?post=1160515"}],"version-history":[{"count":"1","href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1160515\/revisions"}],"predecessor-version":[{"id":1160522,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/posts\/1160515\/revisions\/1160522"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media\/1160520"}],"wp:attachment":[{"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/media?parent=1160515"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/categories?post=1160515"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/docs.pingcode.com\/wp-json\/wp\/v2\/tags?post=1160515"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}