Entendiendo la programación asíncrona
La programación asíncrona permite que tu programa maneje múltiples operaciones concurrentemente sin hilos. Es especialmente útil para tareas limitadas por E/S como solicitudes web, consultas a bases de datos y operaciones de archivos. El módulo asyncio de Python proporciona la base para la programación asíncrona usando la sintaxis async/await.
Cuándo usar async
- Tareas limitadas por E/S: Solicitudes HTTP, consultas a bases de datos, E/S de archivos
- Alta concurrencia: Manejar miles de conexiones simultáneas
- Servidores web: FastAPI, aiohttp y vistas async de Django
- NO para tareas limitadas por CPU: Usa multiprocessing para computación pesada
Conceptos básicos de async/await
import asyncio
# Define an async function (coroutine)
async def greet(name: str) -> str:
    """Greet *name* after a simulated 1-second async I/O wait.

    Returns the greeting string so callers can collect results.
    """
    print(f"Starting to greet {name}")
    await asyncio.sleep(1)  # stands in for a network/disk wait
    print(f"Done greeting {name}")
    return f"Hello, {name}!"
# Run a single coroutine
async def main():
    """Await a single greeting and print its result."""
    greeting = await greet("Alice")
    print(greeting)


asyncio.run(main())
# Run multiple coroutines concurrently with gather
async def main_concurrent():
    """Run three greetings at once and print each returned string."""
    greetings = await asyncio.gather(
        greet("Alice"),
        greet("Bob"),
        greet("Charlie"),
    )
    for greeting in greetings:
        print(greeting)


asyncio.run(main_concurrent())
# All three greetings run concurrently!
# Total time: ~1 second (not 3 seconds)
Async en el mundo real: solicitudes HTTP
import asyncio
import aiohttp # pip install aiohttp
import time
# Synchronous approach (slow)
import requests
def fetch_sync(urls):
    """Fetch each URL one after another (blocking).

    Args:
        urls: Iterable of URL strings.

    Returns:
        List of HTTP status codes, in the same order as *urls*.
    """
    # Each requests.get() blocks until the response arrives, so the
    # total time is the SUM of all request times — the slow baseline
    # the async version below is compared against.
    return [requests.get(url).status_code for url in urls]
# Async approach (fast)
async def fetch_url(session, url):
    """GET *url* via the shared aiohttp *session*; report URL and status."""
    async with session.get(url) as response:
        status = response.status
    return {"url": url, "status": status}
async def fetch_all(urls):
    """Fetch every URL concurrently, reusing one HTTP session for all."""
    async with aiohttp.ClientSession() as session:
        pending = [fetch_url(session, u) for u in urls]
        return await asyncio.gather(*pending)
async def main():
    """Time three concurrent fetches against slow test endpoints."""
    urls = [
        "https://httpbin.org/delay/1",
        "https://httpbin.org/delay/1",
        "https://httpbin.org/delay/1",
    ]
    started = time.perf_counter()
    results = await fetch_all(urls)
    elapsed = time.perf_counter() - started
    for item in results:
        print(f"{item['url']}: {item['status']}")
    print(f"Total time: {elapsed:.2f}s")  # ~1s instead of ~3s
asyncio.run(main())
Tasks y TaskGroups
import asyncio
# Creating tasks for concurrent execution
async def process_item(item: str, delay: float) -> str:
    """Pretend to work on *item* for *delay* seconds, then report it."""
    await asyncio.sleep(delay)
    return f"Processed: {item}"
async def main():
    """Start three tasks immediately, then collect them in order."""
    # create_task schedules each coroutine right away, so all three
    # run concurrently even though we await them one at a time.
    tasks = [
        asyncio.create_task(process_item("A", 2)),
        asyncio.create_task(process_item("B", 1)),
        asyncio.create_task(process_item("C", 3)),
    ]
    first, second, third = [await t for t in tasks]
    print(first, second, third)
# TaskGroup (Python 3.11+) - better error handling
async def main_taskgroup():
    """Structured concurrency: TaskGroup (3.11+) awaits all tasks and
    cancels siblings if any of them raises."""
    async with asyncio.TaskGroup() as tg:
        tasks = [
            tg.create_task(process_item("A", 2)),
            tg.create_task(process_item("B", 1)),
            tg.create_task(process_item("C", 3)),
        ]
    # Leaving the async-with block guarantees every task has finished.
    print(tasks[0].result(), tasks[1].result(), tasks[2].result())


asyncio.run(main_taskgroup())
# Timeouts
async def main_timeout():
    """Cancel a slow operation once the 2-second deadline expires."""
    try:
        # asyncio.timeout (3.11+) cancels the enclosed await on expiry
        # and re-raises the cancellation as TimeoutError.
        async with asyncio.timeout(2):
            await asyncio.sleep(10)
    except TimeoutError:
        print("Operation timed out!")


asyncio.run(main_timeout())
Iteradores y generadores async
import asyncio
# Async generator
async def async_countdown(n: int):
    """Asynchronously yield n, n-1, ..., 1, pausing 0.5s after each value."""
    for value in range(n, 0, -1):
        yield value
        await asyncio.sleep(0.5)
async def main():
    """Print each value produced by the async countdown."""
    async for value in async_countdown(5):
        print(value)


asyncio.run(main())
# Async context manager
class AsyncDatabase:
    """Toy async context manager simulating a database connection."""

    async def __aenter__(self):
        # Entering the context "opens" the connection.
        print("Connecting to database...")
        await asyncio.sleep(0.5)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Leaving the context always "closes" it, even on error.
        print("Closing database connection...")
        await asyncio.sleep(0.1)

    async def query(self, sql: str):
        """Simulate running *sql* and return one canned result row."""
        await asyncio.sleep(0.2)
        return [{"id": 1, "name": "Alice"}]
async def main():
    """Open the fake database, run one query, and show the rows."""
    async with AsyncDatabase() as db:
        rows = await db.query("SELECT * FROM users")
        print(rows)


asyncio.run(main())
Puntos clave
- async/await para E/S: Usa async para operaciones de red, base de datos y archivos
- gather para concurrencia: Ejecuta múltiples coroutines a la vez
- TaskGroup para seguridad: TaskGroup (Python 3.11+) proporciona concurrencia estructurada
- Usa aiohttp: Para solicitudes HTTP async en lugar de requests