在編寫爬蟲時,性能的消耗主要在IO請求中,當單進程單線程模式下請求URL時必然會引起等待,從而使得請求整體變慢。
1、同步執行
import requests


def fetch_async(url):
    """Fetch *url* synchronously and return the requests Response object."""
    return requests.get(url)


url_list = ['http://www.github.com', 'http://www.bing.com']
# One request at a time: each call blocks until the previous finishes.
for url in url_list:
    fetch_async(url)
2、多線程執行
from concurrent.futures import ThreadPoolExecutor
import requests


def fetch_async(url):
    """Fetch *url* and return the requests Response object."""
    return requests.get(url)


url_list = ['http://www.github.com', 'http://www.bing.com']

# Fan the requests out over 5 worker threads, then wait for them all.
pool = ThreadPoolExecutor(5)
for url in url_list:
    pool.submit(fetch_async, url)
pool.shutdown(wait=True)
3、多線程+回調函數
from concurrent.futures import ThreadPoolExecutor
import requests


def fetch_async(url):
    """Fetch *url* and return the requests Response object."""
    return requests.get(url)


def callback(future):
    """Completion hook: print the finished future's result."""
    print(future.result())


url_list = ['http://www.github.com', 'http://www.bing.com']

# Same thread-pool fan-out, but each future reports back via a callback.
pool = ThreadPoolExecutor(5)
for url in url_list:
    v = pool.submit(fetch_async, url)
    v.add_done_callback(callback)
pool.shutdown(wait=True)
4、多進程執行
from concurrent.futures import ProcessPoolExecutor
import requests


def fetch_async(url):
    """Fetch *url* and return the requests Response object."""
    response = requests.get(url)
    return response


url_list = ['http://www.github.com', 'http://www.bing.com']

# BUGFIX: pool creation/submission must be guarded. With the 'spawn' start
# method (default on Windows and macOS) worker processes re-import this
# module, and an unguarded ProcessPoolExecutor at module level would try to
# spawn workers recursively and fail.
if __name__ == '__main__':
    pool = ProcessPoolExecutor(5)
    for url in url_list:
        pool.submit(fetch_async, url)
    pool.shutdown(wait=True)
5、多進程+回調函數
from concurrent.futures import ProcessPoolExecutor
import requests


def fetch_async(url):
    """Fetch *url* and return the requests Response object."""
    response = requests.get(url)
    return response


def callback(future):
    """Completion hook: print the finished future's result."""
    print(future.result())


url_list = ['http://www.github.com', 'http://www.bing.com']

# BUGFIX: pool creation/submission must be guarded. With the 'spawn' start
# method (default on Windows and macOS) worker processes re-import this
# module, and an unguarded ProcessPoolExecutor at module level would try to
# spawn workers recursively and fail.
if __name__ == '__main__':
    pool = ProcessPoolExecutor(5)
    for url in url_list:
        v = pool.submit(fetch_async, url)
        v.add_done_callback(callback)
    pool.shutdown(wait=True)
通過上述代碼均可以完成對請求性能的提高,而多線程和多進程的缺點是在IO阻塞時會造成線程和進程的浪費,所以首選異步IO:
1、asyncio 1
import asyncio


# BUGFIX: @asyncio.coroutine / `yield from` generator coroutines were
# deprecated in Python 3.8 and removed in 3.11; native `async def`
# coroutines are the supported form and behave identically here.
async def func1():
    """Coroutine that simulates a 5-second IO wait around two prints."""
    print('before...func1......')
    await asyncio.sleep(5)
    print('end...func1......')


tasks = [func1(), func1()]

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
2、asyncio 2
import asyncio


# BUGFIX: converted from the removed @asyncio.coroutine / `yield from`
# style (gone since Python 3.11) to a native coroutine.
async def fetch_async(host, url='/'):
    """Send a raw HTTP/1.0 GET over an asyncio connection and print the reply.

    :param host: server hostname, connected on port 80
    :param url: request path
    """
    print(host, url)
    reader, writer = await asyncio.open_connection(host, 80)
    request_header_content = """GET %s HTTP/1.0\r\nHost: %s\r\n\r\n""" % (url, host,)
    request_header_content = bytes(request_header_content, encoding='utf-8')
    writer.write(request_header_content)
    await writer.drain()
    # HTTP/1.0 without keep-alive: server closes, so read() drains everything.
    text = await reader.read()
    print(host, url, text)
    writer.close()


tasks = [
    fetch_async('www.cnblogs.com', '/wupeiqi/'),
    fetch_async('dig.chouti.com', '/pic/show?nid=4073644713430508&lid=10273091'),
]
loop = asyncio.get_event_loop()
results = loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
3、asyncio+aiohttp
import aiohttp
import asyncio


# BUGFIX: the bare-coroutine `aiohttp.request('GET', url)` API and
# @asyncio.coroutine are both long removed. A ClientSession used as an
# async context manager is the supported aiohttp client API, and it also
# closes the response automatically (replacing the manual response.close()).
async def fetch_async(url):
    """GET *url* with aiohttp and print the response object."""
    print(url)
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            # data = await response.read()
            # print(url, data)
            print(url, response)


tasks = [fetch_async('http://www.google.com/'), fetch_async('http://www.chouti.com/')]
event_loop = asyncio.get_event_loop()
results = event_loop.run_until_complete(asyncio.gather(*tasks))
event_loop.close()
4、asyncio+requests
import asyncio
import requests


# BUGFIX: converted from the removed @asyncio.coroutine / `yield from`
# style (gone since Python 3.11) to a native coroutine.
async def fetch_async(func, *args):
    """Run the blocking *func(*args)* in the default executor.

    This keeps the event loop responsive while a synchronous library
    (here: requests) performs blocking IO in a worker thread.
    """
    loop = asyncio.get_event_loop()
    future = loop.run_in_executor(None, func, *args)
    response = await future
    print(response.url, response.content)


tasks = [
    fetch_async(requests.get, 'http://www.cnblogs.com/wupeiqi/'),
    fetch_async(requests.get, 'http://dig.chouti.com/pic/show?nid=4073644713430508&lid=10273091'),
]
loop = asyncio.get_event_loop()
results = loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
5、gevent+requests
import gevent
import requests
from gevent import monkey

# Patch blocking stdlib calls so requests yields to other greenlets on IO.
monkey.patch_all()


def fetch_async(method, url, req_kwargs):
    """Send one HTTP request and print the final URL plus response body."""
    print(method, url, req_kwargs)
    response = requests.request(method=method, url=url, **req_kwargs)
    print(response.url, response.content)


# ##### send the requests #####
gevent.joinall([
    gevent.spawn(fetch_async, method='get', url='https://www.python.org/', req_kwargs={}),
    gevent.spawn(fetch_async, method='get', url='https://www.yahoo.com/', req_kwargs={}),
    gevent.spawn(fetch_async, method='get', url='https://github.com/', req_kwargs={}),
])

# ##### send the requests (a Pool caps the number of concurrent greenlets) #####
# from gevent.pool import Pool
# pool = Pool(None)
# gevent.joinall([
#     pool.spawn(fetch_async, method='get', url='https://www.python.org/', req_kwargs={}),
#     pool.spawn(fetch_async, method='get', url='https://www.yahoo.com/', req_kwargs={}),
#     pool.spawn(fetch_async, method='get', url='https://www.github.com/', req_kwargs={}),
# ])
6、grequests
import grequests

request_list = [
    grequests.get('http://httpbin.org/delay/1', timeout=0.001),
    grequests.get('http://fakedomain/'),
    grequests.get('http://httpbin.org/status/500'),
]

# ##### run and collect the response list #####
# response_list = grequests.map(request_list)
# print(response_list)

# ##### run and collect the response list (with exception handling) #####
# def exception_handler(request, exception):
#     print(request, exception)
#     print("Request failed")

# response_list = grequests.map(request_list, exception_handler=exception_handler)
# print(response_list)
7、Twisted 示例
from twisted.web.client import getPage, defer
from twisted.internet import reactor


def all_done(arg):
    """Stop the reactor once every deferred in the list has fired."""
    reactor.stop()


def callback(contents):
    """Print the body of one completed page fetch."""
    print(contents)


url_list = ['http://www.bing.com', 'http://www.baidu.com', ]
deferred_list = []
for url in url_list:
    deferred = getPage(bytes(url, encoding='utf8'))
    deferred.addCallback(callback)
    deferred_list.append(deferred)

# Aggregate all fetches; fire all_done when the last one completes.
dlist = defer.DeferredList(deferred_list)
dlist.addBoth(all_done)
reactor.run()
8、tornado
from tornado.httpclient import AsyncHTTPClient
from tornado.httpclient import HTTPRequest
from tornado import ioloop


def handle_response(response):
    """Handle one finished fetch.

    NOTE(review): to stop the IO loop you would maintain a counter of
    pending requests and call ioloop.IOLoop.current().stop() once it
    reaches zero; this example never stops on its own.

    :param response:
    :return:
    """
    if response.error:
        print("Error:", response.error)
    else:
        print(response.body)


def func():
    """Kick off one async fetch per URL; results arrive via handle_response."""
    url_list = [
        'http://www.baidu.com',
        'http://www.bing.com',
    ]
    for url in url_list:
        print(url)
        http_client = AsyncHTTPClient()
        http_client.fetch(HTTPRequest(url), handle_response)


ioloop.IOLoop.current().add_callback(func)
ioloop.IOLoop.current().start()
Twisted 更多
from twisted.internet import reactor
from twisted.web.client import getPage
import urllib.parse


def one_done(arg):
    """Print the POST response body and stop the reactor."""
    print(arg)
    reactor.stop()


# BUGFIX: the original text used typographic quotes (‘…’, b’…’), which are
# a Python syntax error; they are replaced with ASCII quotes here.
post_data = urllib.parse.urlencode({'check_data': 'adf'})
post_data = bytes(post_data, encoding='utf8')
headers = {b'Content-Type': b'application/x-www-form-urlencoded'}
response = getPage(
    bytes('http://dig.chouti.com/login', encoding='utf8'),
    method=bytes('POST', encoding='utf8'),
    postdata=post_data,
    cookies={},
    headers=headers,
)
response.addBoth(one_done)
reactor.run()
以上均是Python內置以及第三方模塊提供異步IO請求模塊,使用簡便大大提高效率,而對于異步IO請求的本質則是【非阻塞Socket】+【IO多路復用】:
異步IO
import select
import socket
import time


class AsyncTimeoutException(TimeoutError):
    """Raised when a request exceeds its allotted timeout."""

    def __init__(self, msg):
        self.msg = msg
        super(AsyncTimeoutException, self).__init__(msg)


class HttpContext(object):
    """Bundle the request and response state for one in-flight HTTP request.

    sock: non-blocking client socket used for the request
    host / port: target server
    method / url / data: request line, path and request body
    callback: invoked as callback(context, response_bytes, exc) on completion
    timeout: seconds allowed before the request is aborted
    """

    def __init__(self, sock, host, port, method, url, data, callback, timeout=5):
        self.sock = sock
        self.callback = callback
        self.host = host
        self.port = port
        self.method = method
        self.url = url
        self.data = data
        self.timeout = timeout
        self.__start_time = time.time()
        self.__buffer = []  # accumulated response chunks (bytes)

    def is_timeout(self):
        """Return True if the request has been pending longer than *timeout*.

        BUGFIX: the original returned None on the not-timed-out path; an
        explicit False keeps the truthiness identical but the contract clear.
        """
        current_time = time.time()
        if (self.__start_time + self.timeout) < current_time:
            return True
        return False

    def fileno(self):
        """File descriptor of the socket, so select() can watch this object."""
        return self.sock.fileno()

    def write(self, data):
        """Append one chunk of response bytes to the buffer."""
        self.__buffer.append(data)

    def finish(self, exc=None):
        """Invoke the callback with the assembled response, or with *exc*."""
        if not exc:
            response = b''.join(self.__buffer)
            self.callback(self, response, exc)
        else:
            self.callback(self, None, exc)

    def send_request_data(self):
        """Build the raw HTTP/1.0 request bytes to send to the server."""
        content = """%s %s HTTP/1.0\r\nHost: %s\r\n\r\n%s""" % (
            self.method.upper(), self.url, self.host, self.data,)
        return content.encode(encoding='utf8')


class AsyncRequest(object):
    """A minimal select()-based event loop multiplexing many HTTP requests."""

    def __init__(self):
        self.fds = []          # contexts still expecting readable data
        self.connections = []  # contexts still connecting / request not yet sent

    def add_request(self, host, port, method, url, data, callback, timeout):
        """Register one request: start a non-blocking connect and track it."""
        client = socket.socket()
        client.setblocking(False)
        try:
            client.connect((host, port))
        except BlockingIOError:
            # Expected for a non-blocking connect: completion is reported
            # later via the writable set of select().
            pass
        req = HttpContext(client, host, port, method, url, data, callback, timeout)
        self.connections.append(req)
        self.fds.append(req)

    def check_conn_timeout(self):
        """Abort every tracked request that has exceeded its timeout."""
        timeout_list = [context for context in self.connections if context.is_timeout()]
        for context in timeout_list:
            context.finish(AsyncTimeoutException('請求超時'))
            # BUGFIX: guard the removal — the writable handler may already
            # have dropped the context from fds, and an unconditional
            # remove() would raise ValueError.
            if context in self.fds:
                self.fds.remove(context)
            self.connections.remove(context)

    def running(self):
        """Event loop: send requests once connected, read replies, enforce timeouts.

        NOTE(review): a context is removed from `connections` as soon as its
        request is sent, so the timeout only covers the connect phase — a
        server that accepts but never answers keeps the loop alive; confirm
        whether that is intended.
        """
        while True:
            r, w, e = select.select(self.fds, self.connections, self.fds, 0.05)
            if not self.fds:
                return
            for context in r:
                sock = context.sock
                while True:
                    try:
                        data = sock.recv(8096)
                        if not data:
                            # Peer closed the connection: response complete.
                            self.fds.remove(context)
                            context.finish()
                            break
                        context.write(data)
                    except BlockingIOError:
                        # No more data available right now.
                        break
                    except TimeoutError as exc:
                        self.fds.remove(context)
                        # BUGFIX: the context is normally no longer in
                        # `connections` (removed when the request was sent);
                        # an unconditional remove() would raise ValueError.
                        if context in self.connections:
                            self.connections.remove(context)
                        context.finish(exc)
                        break
            for context in w:
                # Connect has completed; send the request bytes exactly once.
                if context in self.fds:
                    data = context.send_request_data()
                    context.sock.sendall(data)
                    self.connections.remove(context)
            self.check_conn_timeout()


if __name__ == '__main__':
    def callback_func(context, response, ex):
        """
        :param context: HttpContext carrying the request details
        :param response: response body bytes (None when an error occurred)
        :param ex: the exception raised, or None on success
        """
        print(context, response, ex)

    obj = AsyncRequest()
    url_list = [
        {'host': 'www.google.com', 'port': 80, 'method': 'GET', 'url': '/',
         'data': '', 'timeout': 5, 'callback': callback_func},
        {'host': 'www.baidu.com', 'port': 80, 'method': 'GET', 'url': '/',
         'data': '', 'timeout': 5, 'callback': callback_func},
        {'host': 'www.bing.com', 'port': 80, 'method': 'GET', 'url': '/',
         'data': '', 'timeout': 5, 'callback': callback_func},
    ]
    for item in url_list:
        print(item)
        obj.add_request(**item)
    obj.running()
此文為轉載!!!