async/await
The async and await keywords are Python's native syntax for asynchronous programming. They let you write concurrent code that doesn't block the main thread while waiting for I/O operations like network requests, file reads, or database queries.
Syntax
async def function_name(parameters):
    result = await some_async_operation()
    return result
How It Works
async def
The async keyword marks a function as a coroutine. When you call an async function, it returns a coroutine object without executing the function body:
async def fetch_data():
    print("Fetching data...")
    return {"data": 42}

# Calling an async function returns a coroutine object
coro = fetch_data()
print(coro)
# <coroutine object fetch_data at 0x...>
To actually run the coroutine, you need an event loop:
import asyncio

async def main():
    result = await fetch_data()
    print(result)

# Run the coroutine
asyncio.run(main())
# Fetching data...
# {'data': 42}
await
The await keyword pauses the execution of a coroutine until another coroutine or awaitable completes:
import asyncio

async def step_one():
    await asyncio.sleep(1)  # Simulate I/O
    return "Step 1 done"

async def step_two():
    await asyncio.sleep(0.5)
    return "Step 2 done"

async def main():
    # Sequential execution (takes ~1.5s total)
    result1 = await step_one()
    result2 = await step_two()
    print(result1, result2)

asyncio.run(main())
Running Tasks Concurrently
Use asyncio.gather() to run multiple coroutines concurrently:
import asyncio

async def fetch_url(url):
    await asyncio.sleep(1)  # Simulate network request
    return f"Got {url}"

async def main():
    # Run both concurrently (takes ~1s, not 2s)
    results = await asyncio.gather(
        fetch_url("http://example.com"),
        fetch_url("http://python.org")
    )
    for r in results:
        print(r)

asyncio.run(main())
# Got http://example.com
# Got http://python.org
Common Use Cases
HTTP Requests
import asyncio
import aiohttp

async def fetch_all(urls):
    async with aiohttp.ClientSession() as session:
        tasks = [session.get(url) for url in urls]
        responses = await asyncio.gather(*tasks)
        return [await r.text() for r in responses]

# Concurrent HTTP requests
urls = ["http://example.com", "http://python.org", "http://github.com"]
html_pages = asyncio.run(fetch_all(urls))
Database Operations
import asyncio
import asyncpg

async def query_users():
    conn = await asyncpg.connect(host="localhost", database="mydb")
    try:
        users = await conn.fetch("SELECT * FROM users LIMIT 10")
        return users
    finally:
        await conn.close()

users = asyncio.run(query_users())
File I/O (with aiofiles)
import asyncio
import aiofiles

async def read_files():
    async with aiofiles.open("file1.txt") as f1, \
               aiofiles.open("file2.txt") as f2:
        content1, content2 = await f1.read(), await f2.read()
        return content1, content2

text1, text2 = asyncio.run(read_files())
Web Servers
from aiohttp import web

async def handle_request(request):
    name = request.query.get("name", "World")
    return web.Response(text=f"Hello, {name}!")

async def create_app():
    app = web.Application()
    app.router.add_get("/", handle_request)
    return app

web.run_app(create_app())
Background Tasks
import asyncio

async def periodic_task():
    while True:
        print("Running background task...")
        await asyncio.sleep(60)  # Run every minute

async def main():
    # Run periodic task in background while doing other work
    task = asyncio.create_task(periodic_task())
    await asyncio.sleep(5)
    print("Main work done")
    task.cancel()
    await asyncio.gather(task, return_exceptions=True)

asyncio.run(main())
Task Management
create_task
Schedule a coroutine to run soon:
async def background_task():
    await asyncio.sleep(3)
    print("Background task complete!")

async def main():
    task = asyncio.create_task(background_task())
    print("Task scheduled")
    await asyncio.sleep(1)
    print("Doing other work...")
    await task  # Wait for it to complete

asyncio.run(main())
# Task scheduled
# Doing other work...
# Background task complete!
Task Groups (Python 3.11+)
async def main():
    async with asyncio.TaskGroup() as tg:
        task1 = tg.create_task(fetch_url("http://example.com"))
        task2 = tg.create_task(fetch_url("http://python.org"))
    # All tasks complete when exiting the block
    print(task1.result(), task2.result())
Timeout Handling
import asyncio

async def slow_operation():
    await asyncio.sleep(10)
    return "Done"

async def main():
    try:
        result = await asyncio.wait_for(slow_operation(), timeout=5)
    except asyncio.TimeoutError:
        print("Operation timed out!")

asyncio.run(main())
# Operation timed out!
Error Handling
async def risky_operation():
    await asyncio.sleep(1)
    raise ValueError("Something went wrong")

async def main():
    try:
        await risky_operation()
    except ValueError as e:
        print(f"Caught: {e}")

asyncio.run(main())
# Caught: Something went wrong
async vs sync
Not everything needs to be async. Use async when you have I/O-bound operations that would benefit from concurrency:
# Good async candidates:
# - Network requests
# - Database queries
# - File I/O (with aiofiles)
# - External API calls
# Keep sync (no async needed):
# - CPU-bound calculations
# - Simple string operations
# - Data transformations
See Also
- asyncio module — async programming utilities
- Threading in Python — thread-based concurrency
- Multiprocessing in Python — process-based parallelism
- concurrent.futures — thread/process pools