
Async RabbitMQ worker utilities

Project description

WorkerLib - asynchronous work with RabbitMQ

Quick start

import asyncio
from workerlib import WorkerPool

async def task_handler(data: dict) -> bool:
    print(f"Processing: {data}")
    return True

async def main():
    async with WorkerPool() as pool:
        pool.add_worker("tasks", task_handler)
        await pool.send("tasks", {"id": 1, "cmd": "start"})
        await asyncio.sleep(2)  # give the worker time to process the message

asyncio.run(main())

Message format

JSON messages

The library automatically serializes a dict to JSON when sending:

# Send a simple message
await pool.send("queue", {
    "event": "user_created",
    "user_id": 123,
    "email": "user@example.com",
    "timestamp": "2024-01-15T10:30:00Z"
})

# Send nested data structures
await pool.send("queue", {
    "type": "order",
    "data": {
        "order_id": "ORD-12345",
        "items": [
            {"id": 1, "quantity": 2},
            {"id": 2, "quantity": 1}
        ],
        "total": 299.99
    },
    "metadata": {
        "source": "api",
        "version": "1.0"
    }
})
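
On the consuming side, the handler presumably receives the already-deserialized dict, consistent with the task_handler(data: dict) signature in the quick start. A minimal sketch of a handler for the nested order message above, under that assumption (order_handler is illustrative, not part of the library):

async def order_handler(data: dict) -> bool:
    # Assumes the JSON body has already been deserialized back into a dict
    order = data["data"]
    total_items = sum(item["quantity"] for item in order["items"])
    print(f"Order {order['order_id']}: {total_items} items, total {order['total']}")
    return True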

Core examples

  1. Pool with multiple workers
import asyncio
from workerlib import WorkerPool, ErrorHandlingStrategy

# Stub handlers so the example runs end to end
async def email_handler(data: dict) -> bool:
    print(f"Sending email to {data['to']}")
    return True

async def payment_handler(data: dict) -> bool:
    print(f"Charging {data['amount']}")
    return True

async def main():
    async with WorkerPool() as pool:
        # Email worker with a DLQ
        pool.add_worker(
            queue_name="emails",
            handler=email_handler,
            error_strategy=ErrorHandlingStrategy.RETRY_THEN_DLQ,
            prefetch_count=5
        )
        
        # Payment handler
        pool.add_worker(
            queue_name="payments",
            handler=payment_handler,
            error_strategy=ErrorHandlingStrategy.REQUEUE_END
        )
        
        # Send the tasks
        await pool.send("emails", {"to": "user@test.com"})
        await pool.send("payments", {"amount": 100})
  2. Custom connection and retry
from workerlib import ConnectionParams, RetryConfig

params = ConnectionParams(
    host="rabbit.local",
    username="admin",
    password="secret"
)

retry_config = RetryConfig(
    max_attempts=3,
    initial_delay=1.0,
    backoff_factor=2.0
)

async with WorkerPool(connection_params=params) as pool:
    pool.add_worker(
        queue_name="critical",
        handler=critical_handler,
        retry_config=retry_config
    )
  3. Error handling
from workerlib import ErrorHandlingStrategy

# Options:
# IGNORE - ignore the error
# REQUEUE_END - requeue at the end of the queue, with a delay
# REQUEUE_FRONT - requeue at the front of the queue
# DLQ - send to the Dead Letter Queue
# RETRY_THEN_DLQ - retry, then send to the DLQ

pool.add_worker(
    queue_name="tasks",
    handler=my_handler,
    error_strategy=ErrorHandlingStrategy.RETRY_THEN_DLQ,
    dlq_enabled=True,
    requeue_delay=5.0  # delay before the message is reprocessed
)
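
How a handler signals failure is an assumption here: the sketch below presumes that a raised exception (or a False return, mirroring the quick-start signature) marks the message as failed and triggers the configured strategy.

async def my_handler(data: dict) -> bool:
    # Hypothetical failure condition; presumably a raised exception
    # counts as a failure and kicks off RETRY_THEN_DLQ
    if "id" not in data:
        raise ValueError("message is missing required field 'id'")
    return True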
  4. Individual components
from workerlib import (
    RabbitMQConnection,
    RabbitMQQueue,
    RabbitMQConsumer,
    RabbitMQProducer,
    QueueConfig
)

# Manual setup
connection = RabbitMQConnection()
await connection.connect()

queue = RabbitMQQueue(connection, QueueConfig(name="my_queue"))

producer = RabbitMQProducer(connection, queue)
await producer.send({"test": "data"})

consumer = RabbitMQConsumer(queue, my_handler)
await consumer.consume()
  5. Batch sending
async with WorkerPool() as pool:
    messages = [
        {"id": i, "data": f"item_{i}"}
        for i in range(100)
    ]
    
    tasks = [
        pool.send("batch_queue", msg)
        for msg in messages
    ]
    
    await asyncio.gather(*tasks)
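
asyncio.gather fires all 100 sends at once. If you need to cap the number of in-flight publishes, a standard asyncio pattern works on top of pool.send; a sketch (send_batch and the limit of 10 are illustrative):

import asyncio
from typing import List

async def send_batch(pool, queue: str, messages: List[dict], limit: int = 10) -> None:
    # Bound concurrent publishes with a semaphore
    sem = asyncio.Semaphore(limit)

    async def send_one(msg: dict) -> None:
        async with sem:
            await pool.send(queue, msg)

    await asyncio.gather(*(send_one(m) for m in messages))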
  6. Metrics
async with WorkerPool() as pool:
    pool.add_worker("monitored", handler)
    
    # Send the tasks
    for i in range(10):
        await pool.send("monitored", {"task": i})
    
    # Fetch the metrics
    metrics = pool.get_metrics("monitored")
    print(f"Processed: {metrics['consumer']['processed']}")
    print(f"Failed: {metrics['consumer']['failed']}")
  7. FastAPI integration
from fastapi import FastAPI
from workerlib import WorkerPool

app = FastAPI()
worker_pool = WorkerPool(auto_start=False)

@app.on_event("startup")
async def startup():
    await worker_pool.start()
    worker_pool.add_worker("api_tasks", task_handler)

@app.on_event("shutdown")
async def shutdown():
    await worker_pool.stop()

@app.post("/task")
async def create_task(data: dict):
    await worker_pool.send("api_tasks", data)
    return {"status": "queued"}
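
Once the app is running (for example, via uvicorn on localhost:8000), the endpoint can be exercised with any HTTP client; a quick check using httpx (a third-party client; the URL and payload are illustrative):

import asyncio
import httpx

async def submit_task():
    async with httpx.AsyncClient() as client:
        resp = await client.post(
            "http://localhost:8000/task",
            json={"id": 1, "cmd": "start"},
        )
        print(resp.json())  # expected: {"status": "queued"}

asyncio.run(submit_task())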

Configuration

ConnectionParams

ConnectionParams(
    host="127.0.0.1",
    port=5672,
    username="guest",
    password="guest",
    heartbeat=60,
    timeout=10
)
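
In a real deployment you would typically pull these values from the environment rather than hard-coding them; a sketch using only the fields shown above (the environment variable names are illustrative):

import os
from workerlib import ConnectionParams

params = ConnectionParams(
    host=os.environ.get("RABBITMQ_HOST", "127.0.0.1"),
    port=int(os.environ.get("RABBITMQ_PORT", "5672")),
    username=os.environ.get("RABBITMQ_USER", "guest"),
    password=os.environ.get("RABBITMQ_PASSWORD", "guest"),
)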

QueueConfig

QueueConfig(
    name="queue_name",
    durable=True,
    prefetch_count=1
)

RetryConfig

RetryConfig(
    max_attempts=3,
    initial_delay=1.0,
    backoff_factor=2.0,
    max_delay=60.0
)
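
Assuming standard exponential backoff semantics (delay = initial_delay * backoff_factor ** (attempt - 1), capped at max_delay), the values above would wait 1, 2, and 4 seconds between attempts; a quick check (retry_delays is illustrative, not a library function):

def retry_delays(max_attempts=3, initial_delay=1.0, backoff_factor=2.0, max_delay=60.0):
    # Hypothetical schedule; the library's exact formula may differ
    return [min(initial_delay * backoff_factor ** n, max_delay)
            for n in range(max_attempts)]

print(retry_delays())  # [1.0, 2.0, 4.0]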

Installation

pip install workerlib

Requirements: Python 3.8+, aio_pika
