How to request multiple URLs using aiohttp

This is how the official aiohttp documentation requests a single URL. What should I do to request multiple URLs and get their content?

import aiohttp
import asyncio

async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()

async def main():
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, "http://python.org")
        print(html)

loop = asyncio.get_event_loop()
loop.run_until_complete(main())

This is my attempt for multiple URLs:

urls = ['http://baidu.com', 'http://qq.com']

async def get(url):
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, url)
        print(html)

tasks = [get(x) for x in urls]
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(*tasks))
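Would sharing a single ClientSession for all requests and collecting the results with asyncio.gather be the better approach? A minimal sketch of that idea (it repeats the fetch helper from above so it runs on its own, and uses asyncio.run instead of the explicit event loop):

import asyncio
import aiohttp

urls = ['http://baidu.com', 'http://qq.com']

async def fetch(session, url):
    async with session.get(url) as response:
        return await response.text()

async def main():
    # One shared ClientSession for every request; gather runs the fetches concurrently.
    async with aiohttp.ClientSession() as session:
        pages = await asyncio.gather(*(fetch(session, url) for url in urls))
        for url, html in zip(urls, pages):
            print(url, len(html))

asyncio.run(main())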