### Output and error output ###

3
Exception ignored in: <function _ProactorBasePipeTransport.__del__ at 0x000001FB7EF135E0>
Traceback (most recent call last):
  File "C:\Python39\lib\asyncio\proactor_events.py", line 116, in __del__
    self.close()
  File "C:\Python39\lib\asyncio\proactor_events.py", line 108, in close
    self._loop.call_soon(self._call_connection_lost, None)
  File "C:\Python39\lib\asyncio\base_events.py", line 751, in call_soon
    self._check_closed()
  File "C:\Python39\lib\asyncio\base_events.py", line 515, in _check_closed
    raise RuntimeError('Event loop is closed')
RuntimeError: Event loop is closed

(the same "Exception ignored" traceback is printed two more times, once per connection)

[Finished in 1.1s]

### Code below ###

import aiohttp
import asyncio


async def get_page(session, url):
    # Fetch one page and return its body as text.
    async with session.get(url) as r:
        return await r.text()


async def get_all(session, urls):
    # Schedule one task per URL and gather the responses concurrently.
    tasks = []
    for url in urls:
        task = asyncio.create_task(get_page(session, url))
        tasks.append(task)
    results = await asyncio.gather(*tasks)
    return results


async def main(urls):
    # Share a single ClientSession across all requests.
    async with aiohttp.ClientSession() as session:
        data = await get_all(session, urls)
        return data


if __name__ == "__main__":
    urls = [
        "https://books.toscrape.com/catalogue/page-1.html",
        "https://books.toscrape.com/catalogue/page-2.html",
        "https://books.toscrape.com/catalogue/page-3.html",
    ]
    results = asyncio.run(main(urls))
    print(len(results))
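### Possible workaround (not part of the original paste) ###

The script itself runs (it prints 3), and the "Event loop is closed" messages appear at interpreter shutdown, when the proactor pipe transports are finalized after asyncio.run() has already closed the loop. A commonly suggested workaround on Windows (where Python 3.8+ defaults to the ProactorEventLoop) is to switch to the selector event loop policy before calling asyncio.run(). The snippet below is only a minimal sketch of that idea applied to the same kind of script; whether it is appropriate depends on not needing proactor-only features such as subprocess support.

import sys
import asyncio

import aiohttp


async def get_page(session, url):
    async with session.get(url) as r:
        return await r.text()


async def main(urls):
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(get_page(session, u) for u in urls))


if __name__ == "__main__":
    # Assumption: the tracebacks come from ProactorEventLoop transports being
    # garbage-collected after the loop is closed. Using the selector policy
    # avoids the _ProactorBasePipeTransport.__del__ path entirely.
    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    urls = [
        "https://books.toscrape.com/catalogue/page-1.html",
        "https://books.toscrape.com/catalogue/page-2.html",
        "https://books.toscrape.com/catalogue/page-3.html",
    ]
    results = asyncio.run(main(urls))
    print(len(results))

Another workaround sometimes suggested is adding a short await asyncio.sleep(0.25) at the end of main() so the underlying connections can finish closing before the loop shuts down, but the policy switch above is the more common fix for this particular traceback.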