{"id":6207,"date":"2024-08-15T15:01:01","date_gmt":"2024-08-15T07:01:01","guid":{"rendered":""},"modified":"2024-08-15T15:01:01","modified_gmt":"2024-08-15T07:01:01","slug":"\u722c\u866b\u6280\u672f\u6709\u54ea\u4e9b\u65b9\u6cd5_\u96f6\u57fa\u7840\u5982\u4f55\u5b66\u722c\u866b\u6280\u672f","status":"publish","type":"post","link":"https:\/\/mushiming.com\/6207.html","title":{"rendered":"\u722c\u866b\u6280\u672f\u6709\u54ea\u4e9b\u65b9\u6cd5_\u96f6\u57fa\u7840\u5982\u4f55\u5b66\u722c\u866b\u6280\u672f"},"content":{"rendered":"
A few days ago, someone asked me a question on the WeChat official account (Python爬虫及算法): how to use a crawler to meet the following requirement. The page to crawl is shown below (URL: https://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0):
[Figure: screenshot of the page to crawl, with the list of people outlined in red]
Our requirement is to crawl the names and descriptions of the people inside the red box (there are 500 records; the screenshot shows only part of them). To see a description, click the person's name, as in the figure below:
[Figure: an individual person's page, showing the name and its description]
This means we need to crawl 500 such pages, i.e. make 500 HTTP requests (let's go with that assumption for now), and then extract the name and description from each page. Some entries are not people or have no description; those we can skip. Finally, the URL of each page can be found after the person's name on the first page; for example, George Washington's page suffix is Q23.
That is roughly the crawling requirement.
First, the crawling strategy: from the first page (https://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0) obtain the URLs of the 500 people, then crawl each of those 500 pages for the person's name and description, skipping any page without a description.
Next, we will implement this crawler in four different ways and analyse the pros and cons of each, hoping to give readers a better feel for crawling. The four implementations are: the general (synchronous) method, the concurrent method, the asynchronous method, and the Scrapy framework.
#### General method

The general method is the synchronous one: mainly requests + BeautifulSoup, executed sequentially. The complete Python code is as follows:
```python
import requests
from bs4 import BeautifulSoup
import time

# start time
t1 = time.time()
print('#' * 50)

url = "http://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0"
# request headers
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'}
# send the HTTP request
req = requests.get(url, headers=headers)
# parse the page
soup = BeautifulSoup(req.text, "lxml")
# find the records that contain the name and description
human_list = soup.find(id='mw-whatlinkshere-list')('li')

urls = []
# collect the URLs
for human in human_list:
    url = human.find('a')['href']
    urls.append('https://www.wikidata.org' + url)

# get the name and description from each page
def parser(url):
    req = requests.get(url)
    # parse the fetched text into HTML with BeautifulSoup
    soup = BeautifulSoup(req.text, "lxml")
    # get the name and description
    name = soup.find('span', class_="wikibase-title-label")
    desc = soup.find('span', class_="wikibase-descriptionview-text")
    if name is not None and desc is not None:
        print('%-40s,\t%s' % (name.text, desc.text))

for url in urls:
    parser(url)

t2 = time.time()  # end time
print('General method, total time: %s' % (t2 - t1))
print('#' * 50)
```

The output is as follows (the middle part is omitted and replaced with ......):
```
##################################################
George Washington , first President of the United States
Douglas Adams , British author and humorist (1952–2001)
......
Willoughby Newton , Politician from Virginia, USA
Mack Wilberg , American conductor
General method, total time: 724.43
##################################################
```

With the synchronous method, the total time is about 725 seconds, i.e. more than 12 minutes.
The general method is straightforward and easy to implement, but it is inefficient and slow. So let's try concurrency.
#### Concurrent method
The concurrent method uses multiple threads to speed up the general method. We use the concurrent.futures module and set the number of threads to 20 (the actual number reached may be lower, depending on the machine). The complete Python code is as follows:
```python
import requests
from bs4 import BeautifulSoup
import time
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED

# start time
t1 = time.time()
print('#' * 50)

url = "http://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0"
# request headers
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'}
# send the HTTP request
req = requests.get(url, headers=headers)
# parse the page
soup = BeautifulSoup(req.text, "lxml")
# find the records that contain the name and description
human_list = soup.find(id='mw-whatlinkshere-list')('li')

urls = []
# collect the URLs
for human in human_list:
    url = human.find('a')['href']
    urls.append('https://www.wikidata.org' + url)

# get the name and description from each page
def parser(url):
    req = requests.get(url)
    # parse the fetched text into HTML with BeautifulSoup
    soup = BeautifulSoup(req.text, "lxml")
    # get the name and description
    name = soup.find('span', class_="wikibase-title-label")
    desc = soup.find('span', class_="wikibase-descriptionview-text")
    if name is not None and desc is not None:
        print('%-40s,\t%s' % (name.text, desc.text))

# speed up the crawl with a thread pool
executor = ThreadPoolExecutor(max_workers=20)
# submit() takes the function first, followed by any arguments to pass to it
future_tasks = [executor.submit(parser, url) for url in urls]
# wait for all tasks to complete before moving on
wait(future_tasks, return_when=ALL_COMPLETED)

t2 = time.time()  # end time
print('Concurrent method, total time: %s' % (t2 - t1))
print('#' * 50)
```

The output is as follows (the middle part is omitted and replaced with ......):
```
##################################################
Larry Sanger , American former professor, co-founder of Wikipedia, founder of Citizendium and other projects
Ken Jennings , American game show contestant and writer
......
Antoine de Saint-Exupery , French writer and aviator
Michael Jackson , American singer, songwriter and dancer
Concurrent method, total time: 226.33
##################################################
```

With multithreading, the crawler takes about 227 seconds, roughly a third of the general method's time, a clear speedup! Multithreading improves speed noticeably, but the pages are processed in no particular order, and thread switching carries overhead: the more threads, the larger the overhead.
For a speed comparison between the multithreaded and general methods, see the article: Python爬虫之多线程下载豆瓣Top250电影图片 (multithreaded download of the Douban Top 250 movie posters).
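One side note on the ordering issue mentioned above: if you want the results back in the same order as the input URLs, Executor.map can replace the submit/wait pattern. This is only a minimal sketch (the fetch_one helper and the single-URL urls list are illustrative, not part of the original code):

```python
import requests
from bs4 import BeautifulSoup
from concurrent.futures import ThreadPoolExecutor

def fetch_one(url):
    """Fetch one Wikidata page and return (name, description), or None if either is missing."""
    req = requests.get(url)
    soup = BeautifulSoup(req.text, "lxml")
    name = soup.find('span', class_="wikibase-title-label")
    desc = soup.find('span', class_="wikibase-descriptionview-text")
    if name is not None and desc is not None:
        return name.text, desc.text
    return None

# In the article `urls` holds all 500 URLs collected from the first page;
# a single entry is used here just to keep the sketch runnable.
urls = ['https://www.wikidata.org/wiki/Q23']  # Q23 = George Washington

# executor.map yields results in the same order as `urls`,
# even though the underlying requests run concurrently.
with ThreadPoolExecutor(max_workers=20) as executor:
    for result in executor.map(fetch_one, urls):
        if result is not None:
            print('%-40s,\t%s' % result)
```

The trade-off is that map hides the individual futures, so per-task error handling is less flexible than with submit.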
#### Asynchronous method
The asynchronous approach is an effective way to speed up a crawler: aiohttp handles HTTP requests asynchronously, and asyncio provides asynchronous I/O. Note that aiohttp requires Python 3.5.3 or later. The complete Python code for the asynchronous crawler is as follows:
```python
import requests
from bs4 import BeautifulSoup
import time
import aiohttp
import asyncio

# start time
t1 = time.time()
print('#' * 50)

url = "http://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0"
# request headers
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'}
# send the HTTP request
req = requests.get(url, headers=headers)
# parse the page
soup = BeautifulSoup(req.text, "lxml")
# find the records that contain the name and description
human_list = soup.find(id='mw-whatlinkshere-list')('li')

urls = []
# collect the URLs
for human in human_list:
    url = human.find('a')['href']
    urls.append('https://www.wikidata.org' + url)

# asynchronous HTTP request
async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()

# parse the page
async def parser(html):
    # parse the fetched text into HTML with BeautifulSoup
    soup = BeautifulSoup(html, "lxml")
    # get the name and description
    name = soup.find('span', class_="wikibase-title-label")
    desc = soup.find('span', class_="wikibase-descriptionview-text")
    if name is not None and desc is not None:
        print('%-40s,\t%s' % (name.text, desc.text))

# download a page and extract its name and description
async def download(url):
    async with aiohttp.ClientSession() as session:
        try:
            html = await fetch(session, url)
            await parser(html)
        except Exception as err:
            print(err)

# use asyncio for asynchronous I/O
loop = asyncio.get_event_loop()
tasks = [asyncio.ensure_future(download(url)) for url in urls]
tasks = asyncio.gather(*tasks)
loop.run_until_complete(tasks)

t2 = time.time()  # end time
print('Async method, total time: %s' % (t2 - t1))
print('#' * 50)
```

The output is as follows (the middle part is omitted and replaced with ......):
```
##################################################
Frédéric Taddeï , French journalist and TV host
Gabriel Gonzáles Videla , Chilean politician
......
Denmark , sovereign state and Scandinavian country in northern Europe
Usain Bolt , Jamaican sprinter and soccer player
Async method, total time: 126.86
##################################################
```

Clearly, the asynchronous version combines two speed-up techniques, asynchrony and concurrency, so the improvement is obvious: roughly one sixth of the general method's time. The asynchronous method is efficient, but it requires mastering asynchronous programming, which takes some time to learn.
For a speed comparison between the asynchronous and general methods, see the article: 利用aiohttp实现异步爬虫 (implementing an asynchronous crawler with aiohttp).
If 127 seconds still feels slow, try the following asynchronous code (the only difference from the previous version is that regular expressions replace BeautifulSoup for parsing the pages):
```python
import requests
from bs4 import BeautifulSoup
import time
import aiohttp
import asyncio
import re

# start time
t1 = time.time()
print('#' * 50)

url = "http://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0"
# request headers
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'}
# send the HTTP request
req = requests.get(url, headers=headers)
# parse the page
soup = BeautifulSoup(req.text, "lxml")
# find the records that contain the name and description
human_list = soup.find(id='mw-whatlinkshere-list')('li')

urls = []
# collect the URLs
for human in human_list:
    url = human.find('a')['href']
    urls.append('https://www.wikidata.org' + url)

# asynchronous HTTP request
async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()

# parse the page
async def parser(html):
    # extract the name and description with regular expressions
    try:
        name = re.findall(r'<span class="wikibase-title-label">(.+?)</span>', html)[0]
        desc = re.findall(r'<span class="wikibase-descriptionview-text">(.+?)</span>', html)[0]
        print('%-40s,\t%s' % (name, desc))
    except Exception as err:
        pass

# download a page and extract its name and description
async def download(url):
    async with aiohttp.ClientSession() as session:
        try:
            html = await fetch(session, url)
            await parser(html)
        except Exception as err:
            print(err)

# use asyncio for asynchronous I/O
loop = asyncio.get_event_loop()
tasks = [asyncio.ensure_future(download(url)) for url in urls]
tasks = asyncio.gather(*tasks)
loop.run_until_complete(tasks)

t2 = time.time()  # end time
print('Async method (regular expressions), total time: %s' % (t2 - t1))
print('#' * 50)
```

The output is as follows (the middle part is omitted and replaced with ......):
```
##################################################
Dejen Gebremeskel , Ethiopian long-distance runner
Erik Kynard , American high jumper
......
Buzz Aldrin , American astronaut
Egon Krenz , former General Secretary of the Socialist Unity Party of East Germany
Async method (regular expressions), total time: 16.4824
##################################################
```

16.5 seconds, only about 1/43 of the general method's time. Astonishingly fast (thanks to a reader for trying this out). My own asynchronous version used BeautifulSoup to parse the pages and took 127 seconds; I did not expect switching to regular expressions to produce such a striking result. Evidently, convenient as BeautifulSoup is, its synchronous parsing still limited the speed of the asynchronous version. The downside of this approach is that when the content you need to extract is complex, plain regular expressions are no longer up to the job and you need another solution.
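If the content is too complex for regular expressions but you still do not want BeautifulSoup's synchronous parsing to stall the event loop, one possible compromise (not from the original article) is to keep aiohttp for the downloads and push the parsing into a thread pool with loop.run_in_executor. A minimal sketch, with illustrative helper names and a single-URL list to keep it runnable:

```python
import asyncio

import aiohttp
from bs4 import BeautifulSoup

def parse_page(html):
    """Synchronous BeautifulSoup parsing, intended to run in a worker thread."""
    soup = BeautifulSoup(html, "lxml")
    name = soup.find('span', class_="wikibase-title-label")
    desc = soup.find('span', class_="wikibase-descriptionview-text")
    if name is not None and desc is not None:
        return name.text, desc.text
    return None

async def download(session, url, loop):
    async with session.get(url) as response:
        html = await response.text()
    # Hand the CPU-bound parsing to the default thread pool so the event
    # loop stays free to drive the other downloads in the meantime.
    result = await loop.run_in_executor(None, parse_page, html)
    if result is not None:
        print('%-40s,\t%s' % result)

async def main(urls, loop):
    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(download(session, url, loop) for url in urls))

# In the article `urls` holds all 500 URLs; one entry keeps the sketch runnable.
urls = ['https://www.wikidata.org/wiki/Q23']  # Q23 = George Washington

loop = asyncio.get_event_loop()
loop.run_until_complete(main(urls, loop))
```

This keeps BeautifulSoup's robustness while moving its cost off the event loop; whether it approaches the regex version's speed will depend on how many worker threads the machine can keep busy.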
#### The Scrapy crawler framework
Finally, we solve the same task with the well-known Python crawler framework Scrapy. The crawler project is named wikiDataScrapy, with the following structure:
[Figure: directory structure of the wikiDataScrapy project]
In settings.py, set "ROBOTSTXT_OBEY = False".
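For reference, the relevant part of settings.py might look like the sketch below. Only the ROBOTSTXT_OBEY switch comes from the article; the commented-out settings are optional extras, not requirements:

```python
# wikiDataScrapy/settings.py (excerpt)

BOT_NAME = 'wikiDataScrapy'

SPIDER_MODULES = ['wikiDataScrapy.spiders']
NEWSPIDER_MODULE = 'wikiDataScrapy.spiders'

# The only change the article asks for: do not honour robots.txt
ROBOTSTXT_OBEY = False

# Optional tweaks (assumptions, not from the article):
# USER_AGENT = 'Mozilla/5.0 ...'   # reuse the browser UA from the scripts above
# CONCURRENT_REQUESTS = 16         # Scrapy's default concurrency level
```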
Next, modify items.py as follows:

```python
# -*- coding: utf-8 -*-
import scrapy


class WikidatascrapyItem(scrapy.Item):
    # define the fields for your item here like:
    name = scrapy.Field()
    desc = scrapy.Field()
```

Then create wikiSpider.py under the spiders folder with the following code:
```python
import scrapy.cmdline
from wikiDataScrapy.items import WikidatascrapyItem
import requests
from bs4 import BeautifulSoup


# get the 500 URLs to request, using requests + BeautifulSoup
def get_urls():
    url = "http://www.wikidata.org/w/index.php?title=Special:WhatLinksHere/Q5&limit=500&from=0"
    # request headers
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36'}
    # send the HTTP request
    req = requests.get(url, headers=headers)
    # parse the page
    soup = BeautifulSoup(req.text, "lxml")
    # find the records that contain the name and description
    human_list = soup.find(id='mw-whatlinkshere-list')('li')

    urls = []
    # collect the URLs
    for human in human_list:
        url = human.find('a')['href']
        urls.append('https://www.wikidata.org' + url)

    # print(urls)
    return urls


# crawl with the Scrapy framework
class bookSpider(scrapy.Spider):
    name = 'wikiScrapy'  # spider name
    start_urls = get_urls()  # the 500 URLs to crawl

    def parse(self, response):
        item = WikidatascrapyItem()
        # name and description
        item['name'] = response.css('span.wikibase-title-label').xpath('text()').extract_first()
        item['desc'] = response.css('span.wikibase-descriptionview-text').xpath('text()').extract_first()
        yield item


# run the spider and export the results to a CSV file
scrapy.cmdline.execute(['scrapy', 'crawl', 'wikiScrapy', '-o', 'wiki.csv', '-t', 'csv'])
```

The output is as follows (only the final Scrapy stats summary is shown):
```
{'downloader/request_bytes': ,
 'downloader/request_count': 500,
 'downloader/request_method_count/GET': 500,
 'downloader/response_bytes': ,
 'downloader/response_count': 500,
 'downloader/response_status_count/200': 500,
 'finish_reason': 'finished',
 'finish_time': datetime.datetime(2018, 10, 16, 9, 49, 15, ),
 'item_scraped_count': 500,
 'log_count/DEBUG': 1001,
 'log_count/INFO': 8,
 'response_received_count': 500,
 'scheduler/dequeued': 500,
 'scheduler/dequeued/memory': 500,
 'scheduler/enqueued': 500,
 'scheduler/enqueued/memory': 500,
 'start_time': datetime.datetime(2018, 10, 16, 9, 48, 44, 58673)}
```

As you can see, all 500 pages were crawled successfully in about 31 seconds, which is also quite respectable. Now look at the generated wiki.csv file, which contains all of the scraped names and descriptions, as shown below:
[Figure: the generated wiki.csv file]
As you can see, the columns of the exported CSV file are not in a fixed order. As for the extra blank lines in Scrapy's CSV output, see this Stack Overflow answer: https://stackoverflow.com/questions//scrapy-csv-file-has-uniform-empty-rows/# .
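A quick way to sanity-check the exported file is to load it back with pandas (pandas is not used in the article; this is just an assumption for illustration). read_csv skips blank lines by default, so the empty rows mentioned above disappear on reading:

```python
import pandas as pd

# skip_blank_lines=True is the default, so the empty rows that Scrapy's
# CSV export can produce are dropped when the file is read back.
df = pd.read_csv('wiki.csv', skip_blank_lines=True)
print(df.shape)   # roughly (500, 2) if every item had both fields
print(df.head())
```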
The advantage of building the crawler with Scrapy is that it is a mature framework: it supports asynchrony and concurrency, and it is fairly fault-tolerant (for instance, the spider above does not explicitly handle pages where the name or description is missing, and Scrapy copes anyway). However, if you need to tweak the middleware frequently, writing your own crawler may still be the better choice, and in terms of speed it did not beat our hand-written asynchronous crawler. That said, the built-in CSV export is genuinely handy.
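If you would rather keep pages without a name or description out of wiki.csv altogether, a small tweak to the spider's parse() method would do it. This is only a sketch mirroring the None checks used in the earlier scripts, not part of the original spider:

```python
    def parse(self, response):
        name = response.css('span.wikibase-title-label').xpath('text()').extract_first()
        desc = response.css('span.wikibase-descriptionview-text').xpath('text()').extract_first()
        # Only yield an item when both fields are present, mirroring the
        # `if name is not None and desc is not None` check used earlier.
        if name and desc:
            item = WikidatascrapyItem()
            item['name'] = name
            item['desc'] = desc
            yield item
```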
#### Summary
This article covered a lot of ground, comparing four crawling methods; each has its own pros and cons, as discussed above. Of course, in practice the most advanced tool or method is not automatically the best one: analyse each problem on its own terms~

(This post is not original writing; it is reposted from jclian91: https://www.cnblogs.com/jclian91/p/979...)