파이썬, multiple HTTP request (synchronous,multiprocessing,multithreading,asyncio)

 

처리 속도 (빠름) asyncio, multithreading > multiprocessing > synchronous (느림)

 

 

https://www.youtube.com/watch?v=R4Oz8JUuM4s 

 

[소스 코드] https://github.com/nikhilkumarsingh/async-http-requests-tut

 

GitHub - nikhilkumarsingh/async-http-requests-tut: Making multiple HTTP requests using Python (synchronous, multiprocessing, mul

Making multiple HTTP requests using Python (synchronous, multiprocessing, multithreading, asyncio) - GitHub - nikhilkumarsingh/async-http-requests-tut: Making multiple HTTP requests using Python (s...

github.com

 

[ requirements.txt ]

requests
aiohttp

timer

 

[ timer.py ]

import timeit

def timer(number, repeat):
    """Decorator factory that benchmarks a function at decoration time.

    Unusually, the timing happens when the decorator is APPLIED (not when
    the decorated function is later called): ``timeit.repeat`` runs *func*
    ``number`` times per run, for ``repeat`` runs, and the mean run time
    is printed immediately.

    Args:
        number: executions per timing run (``timeit`` ``number``).
        repeat: how many timing runs to average over (``timeit`` ``repeat``).

    Returns:
        A decorator that times *func* and returns *func* unchanged, so the
        decorated name stays callable. (Previously the wrapper implicitly
        returned ``None``, clobbering the decorated name.)
    """
    def wrapper(func):
        runs = timeit.repeat(func, number=number, repeat=repeat)
        print(sum(runs) / len(runs))
        return func  # keep the original callable bound to its name
    return wrapper

 

----------------------------------------------------------------------------

[ test_synchronous.py ]

import requests
from timer import timer

URL = 'https://httpbin.org/uuid'

def fetch(session, url):
    """GET *url* via the shared requests Session and print the 'uuid' field.

    Bug fix: ``requests`` has no ``ssl=`` keyword (that is aiohttp syntax),
    so ``session.get(url, ssl=False)`` raised ``TypeError``. The requests
    equivalent for skipping certificate verification is ``verify=False``.
    """
    with session.get(url, verify=False) as response:
        print(response.json()['uuid'])

 

# 테스트 실행
@timer(1, 1)
def main():
    """Time 100 sequential GETs through a single pooled Session."""
    remaining = 100
    with requests.Session() as session:
        while remaining > 0:
            fetch(session, URL)
            remaining -= 1

 

----------------------------------------------------------------------------

[ test_multiprocessing.py ]

from multiprocessing.pool import Pool
import requests
from timer import timer

URL = 'https://httpbin.org/uuid'

def fetch(session, url):
    """GET *url* via the (pickled) requests Session and print the 'uuid' field.

    Bug fix: ``requests`` has no ``ssl=`` keyword (that is aiohttp syntax),
    so ``session.get(url, ssl=False)`` raised ``TypeError``. The requests
    equivalent for skipping certificate verification is ``verify=False``.
    """
    with session.get(url, verify=False) as response:
        print(response.json()['uuid'])

 

# 테스트 실행
@timer(1, 1)
def main():
    """Time 100 GETs fanned out over a process pool.

    NOTE: the Session is pickled into each worker task, so workers do not
    actually share one connection pool.
    """
    with Pool() as pool, requests.Session() as session:
        tasks = [(session, URL)] * 100
        pool.starmap(fetch, tasks)

 

----------------------------------------------------------------------------

[ test_multithreading.py ]

from concurrent.futures import ThreadPoolExecutor
import requests
from timer import timer

URL = 'https://httpbin.org/uuid'

def fetch(session, url):
    """GET *url* via the shared requests Session and print the 'uuid' field.

    Bug fix: ``requests`` has no ``ssl=`` keyword (that is aiohttp syntax),
    so ``session.get(url, ssl=False)`` raised ``TypeError``. The requests
    equivalent for skipping certificate verification is ``verify=False``.
    """
    with session.get(url, verify=False) as response:
        print(response.json()['uuid'])

 

# 테스트 실행
@timer(1, 5)
def main():
    """Time 100 concurrent GETs across 100 threads sharing one Session.

    Fix: the explicit ``executor.shutdown(wait=True)`` was redundant —
    exiting the ``with ThreadPoolExecutor(...)`` block already calls
    ``shutdown(wait=True)``, so all submitted work completes before the
    timing stops.

    NOTE: ``executor.map`` submits all tasks eagerly but yields results
    lazily; since the results are never consumed here, any exception
    raised inside ``fetch`` is silently discarded.
    """
    with ThreadPoolExecutor(max_workers=100) as executor:
        with requests.Session() as session:
            executor.map(fetch, [session] * 100, [URL] * 100)

----------------------------------------------------------------------------

[ test_asyncio.py ]

import asyncio
import aiohttp
from timer import timer

URL = 'https://httpbin.org/uuid'

async def fetch(session, url):
    """GET *url* with aiohttp (certificate check disabled) and print the
    'uuid' field of the JSON body."""
    async with session.get(url, ssl=False) as response:
        payload = await response.json()
        print(payload['uuid'])

async def main():
    """Run 100 concurrent fetches over a single aiohttp ClientSession."""
    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(fetch(session, URL) for _ in range(100)))

 

# 테스트 실행
@timer(1, 5)
def func():
    """Synchronous entry point timed by @timer: drives the async main()
    to completion on a fresh event loop via asyncio.run()."""
    asyncio.run(main())
----------------------------------------------------------------------------

 

반응형

+ Recent posts