多进程实现并发请求

Posted jintian

tags:

篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了多进程实现并发请求相关的知识,希望对你有一定的参考价值。

协程:在一个线程内指定切换

协程+异步IO

aiohttp      asyncio  模块

封装http数据包   异步IO

技术图片
"""
可以实现并发
但是,请求发送出去后和返回之前,中间时期进程空闲
编写方式:
    - 直接返回处理
    - 通过回调函数处理
"""

########### 编写方式一 ###########
"""
from concurrent.futures import ProcessPoolExecutor
import requests
import time

def task(url):
    response = requests.get(url)
    print(url,response)
    # 写正则表达式


pool = ProcessPoolExecutor(7)
url_list = [
    'http://www.cnblogs.com/wupeiqi',
    'http://huaban.com/favorite/beauty/',
    'http://www.bing.com',
    'http://www.zhihu.com',
    'http://www.sina.com',
    'http://www.baidu.com',
    'http://www.autohome.com.cn',
]
for url in url_list:
    pool.submit(task,url)

pool.shutdown(wait=True)
"""

########### 编写方式二 ###########
from concurrent.futures import ProcessPoolExecutor
import requests
import time

def task(url):
    """Fetch *url* in a worker process and return the Response.

    The Response object is pickled back to the parent process, where the
    registered done-callback inspects it.
    """
    return requests.get(url)

def done(future, *args, **kwargs):
    """Completion callback: report the result of one finished request.

    Runs in the parent process when a submitted future finishes.
    ``future.result()`` re-raises any exception raised in the worker, so
    guard it — one failed URL should produce an error line, not abort the
    callback for that future.
    """
    try:
        response = future.result()
    except Exception as exc:  # report the failure instead of crashing the callback
        print('request failed:', exc)
        return
    print(response.status_code, response.content)

# Guard against re-execution when worker processes re-import this module —
# required by the "spawn" start method (Windows/macOS) for ProcessPoolExecutor.
if __name__ == '__main__':
    pool = ProcessPoolExecutor(7)
    # NOTE: original text had these URLs unquoted, which is a SyntaxError.
    url_list = [
        'http://www.cnblogs.com/wupeiqi',
        'http://huaban.com/favorite/beauty/',
        'http://www.bing.com',
        'http://www.zhihu.com',
        'http://www.sina.com',
        'http://www.baidu.com',
        'http://www.autohome.com.cn',
    ]
    # Fan each URL out to a worker process; report via the done-callback.
    for url in url_list:
        v = pool.submit(task, url)
        v.add_done_callback(done)

    # Block until every submitted request has finished.
    pool.shutdown(wait=True)
多进程.py
技术图片
import asyncio

"""
@asyncio.coroutine
def task():
    print('before...task......')
    yield from asyncio.sleep(5) # 发送Http请求,支持TCP获取结果..
    print('end...task......')


tasks = [task(), task()]

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
"""


"""
import asyncio


@asyncio.coroutine
def task(host, url='/'):
    print('start',host,url)
    reader, writer = yield from asyncio.open_connection(host, 80)

    request_header_content = "GET %s HTTP/1.0\r\nHost: %s\r\n\r\n" % (url, host,)
    request_header_content = bytes(request_header_content, encoding='utf-8')

    writer.write(request_header_content)
    yield from writer.drain()
    text = yield from reader.read()
    print('end',host, url, text)
    writer.close()

tasks = [
    task('www.cnblogs.com', '/wupeiqi/'),
    task('dig.chouti.com', '/pic/show?nid=4073644713430508&lid=10273091')
]

loop = asyncio.get_event_loop()
results = loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
"""

"""
import aiohttp
import asyncio


@asyncio.coroutine
def fetch_async(url):
    print(url)
    response = yield from aiohttp.request('GET', url)
    print(url, response)
    response.close()


tasks = [fetch_async('http://www.baidu.com/'), fetch_async('http://www.chouti.com/')]

event_loop = asyncio.get_event_loop()
results = event_loop.run_until_complete(asyncio.gather(*tasks))
event_loop.close()
"""

import asyncio
import requests


async def task(func, *args):
    """Run the blocking callable ``func(*args)`` in the default executor.

    ``run_in_executor`` hands the blocking call (e.g. ``requests.get``) to a
    thread pool and yields control back to the event loop, so several tasks
    overlap their network waits.  The result is expected to be Response-like
    (exposing ``.url`` and ``.content``).

    Rewritten from ``@asyncio.coroutine``/``yield from`` (removed in
    Python 3.11) to native ``async``/``await``; ``get_running_loop`` replaces
    the deprecated in-coroutine ``get_event_loop``.
    """
    print(func, args)
    loop = asyncio.get_running_loop()
    # e.g. func=requests.get, args=('http://www.cnblogs.com/wupeiqi/',)
    future = loop.run_in_executor(None, func, *args)
    response = await future
    print(response.url, response.content)


# NOTE: original text had these URLs as bare tokens, which is a SyntaxError —
# they must be string literals.
tasks = [
    task(requests.get, 'http://www.cnblogs.com/wupeiqi/'),
    task(requests.get, 'http://dig.chouti.com/pic/show?nid=4073644713430508&lid=10273091'),
]

# Drive both coroutines to completion on one event loop.
loop = asyncio.get_event_loop()
results = loop.run_until_complete(asyncio.gather(*tasks))
loop.close()
asyncio.py

技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片技术图片

以上是关于多进程实现并发请求的主要内容,如果未能解决你的问题,请参考以下文章

多进程实现并发服务器(TCP)

python socket多线程和多进程

爬虫性能分析

linux 多进程并发服务__关于子进程回收的方法

nginx 多进程 + io多路复用 实现高并发

TCP编程:多进程(fork)并发处理客户端请求