Single-threaded, multi-task asynchronous crawler

A single thread runs an event loop that switches between coroutines whenever one is waiting on the network, so the two requests below overlap instead of running back to back.

import asyncio
import time

import aiohttp
# import requests  # only needed for the blocking version commented out below
start = time.time()
urls = [
    'http://127.0.0.1:5000/bobo',
    'http://127.0.0.1:5000/jay'
]

# Blocking version for comparison: requests.get() blocks the whole thread,
# so coroutines written this way would still run one after another.
# async def get_request(url):
#     page_text = requests.get(url).text
#     return page_text

async def get_request(url):
    async with aiohttp.ClientSession() as session:
        # session.get() already returns an async context manager,
        # so no extra await is needed in front of it
        async with session.get(url=url) as response:
            page_text = await response.text()
            print(page_text)
    return page_text
tasks = []
for url in urls:
    c = get_request(url)             # calling a coroutine function only creates a coroutine object
    task = asyncio.ensure_future(c)  # wrap it in a Task so the event loop can schedule it
    tasks.append(task)

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait(tasks))  # block until every task finishes

print(time.time() - start)
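
On newer Python versions, creating an event loop implicitly via asyncio.get_event_loop() is deprecated in favor of asyncio.run(). A minimal sketch of the same crawler on the modern API (Python 3.7+), using asyncio.run() and asyncio.gather() with one shared session; this is an equivalent rewrite, not the original author's code:

# Same crawler on the modern asyncio API: asyncio.run() creates and closes
# the event loop, and gather() schedules all coroutines concurrently.
import asyncio
import time

import aiohttp

urls = [
    'http://127.0.0.1:5000/bobo',
    'http://127.0.0.1:5000/jay',
]

async def get_request(session, url):
    async with session.get(url) as response:
        return await response.text()

async def main():
    # Sharing one ClientSession across requests is the idiomatic aiohttp pattern.
    async with aiohttp.ClientSession() as session:
        pages = await asyncio.gather(*(get_request(session, url) for url in urls))
        for page in pages:
            print(page)

start = time.time()
asyncio.run(main())
print(time.time() - start)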

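The two URLs above point at a local test server that the original post does not include. For a reproducible timing test, here is a hypothetical Flask counterpart; the route names match the URLs, but the 2-second delay per endpoint is an assumption made so the concurrency gain is visible:

# Hypothetical test server for the URLs above (not part of the original post).
# The 2-second sleep is an assumed artificial delay.
import time

from flask import Flask

app = Flask(__name__)

@app.route('/bobo')
def bobo():
    time.sleep(2)  # simulate a slow response
    return 'hello bobo'

@app.route('/jay')
def jay():
    time.sleep(2)
    return 'hello jay'

if __name__ == '__main__':
    app.run()  # serves on http://127.0.0.1:5000 by default

Against a server like this, the two overlapped requests should finish in roughly one delay period rather than the sum of both, which is what the timing print above demonstrates.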

Original post: https://www.cnblogs.com/well-666/p/12203022.html