Problem: I'd like to use aiohttp producer tasks to download a large number of files, and aiofiles consumer tasks that pull the completed downloads from a shared queue and write them to disk.
Here's my draft structure — any thoughts, please?
async def producer(session, url, queue):
    """Download *url* with *session* and enqueue its body for a consumer.

    Puts a ``(url, data)`` tuple on *queue* so the consumer knows which
    file the bytes belong to.

    Args:
        session: an ``aiohttp.ClientSession`` (or compatible object whose
            ``get()`` returns an async context manager with ``read()``).
        url: the URL to download.
        queue: an ``asyncio.Queue`` shared with the consumers.
    """
    async with session.get(url) as r:
        # Bug in the draft: queue.put(r.read()) enqueued an *un-awaited*
        # coroutine, and the response would be closed by the time a
        # consumer tried to use it.  Read the body fully while the
        # response is still open, then enqueue the bytes.
        data = await r.read()
    await queue.put((url, data))
async def consumer(queue, out_dir="."):
    """Forever pull ``(url, data)`` items from *queue* and write them to disk.

    Runs until cancelled by ``main()`` — a single consumer handles many
    items, which is why the body loops (the draft processed one item and
    returned).  ``task_done()`` is called in a ``finally`` so a failed
    write cannot deadlock ``queue.join()``.

    Args:
        queue: an ``asyncio.Queue`` of ``(url, data)`` tuples.
        out_dir: directory the files are written into (default: cwd).
    """
    while True:
        url, data = await queue.get()
        try:
            # Derive a filename from the URL path; fall back for bare hosts.
            name = os.path.basename(urlsplit(url).path) or "index.html"
            async with aiofiles.open(os.path.join(out_dir, name), "wb") as f:
                await f.write(data)
        finally:
            # Always balance the get(), even if the write raised,
            # so queue.join() in main() can complete.
            queue.task_done()
async def main(urls, number_of_consumers=4):
    """Download *urls* concurrently and write each body to disk.

    One producer task per URL feeds a bounded queue; a fixed pool of
    consumer tasks drains it.  The bound gives backpressure so fast
    downloads cannot buffer every file in memory at once.

    Args:
        urls: iterable of URLs to fetch.
        number_of_consumers: size of the writer pool.
    """
    # maxsize bounds memory: producers block once the writers fall behind.
    queue = asyncio.Queue(maxsize=number_of_consumers * 2)
    async with aiohttp.ClientSession() as session:
        producers = [
            asyncio.create_task(producer(session, url, queue))
            for url in urls
        ]
        consumers = [
            asyncio.create_task(consumer(queue))
            for _ in range(number_of_consumers)
        ]
        # Wait for the downloads *before* join(): if a producer fails we
        # surface its exception here instead of hanging on join() waiting
        # for items that will never arrive.
        await asyncio.gather(*producers)
        # Block until every enqueued item has been written (task_done()).
        await queue.join()
        # Deadlock in the draft: consumers loop forever, so gathering them
        # after join() never returned.  Cancel the pool once the queue is
        # drained, then reap the CancelledErrors.
        for task in consumers:
            task.cancel()
        await asyncio.gather(*consumers, return_exceptions=True)