RabbitMQ implemented and tested

This commit is contained in:
2025-08-17 21:14:46 +03:00
parent 9543d136aa
commit 61529f7d94
43 changed files with 5433 additions and 315 deletions

View File

@@ -1,18 +1,22 @@
# NOTE(review): two FROM lines survive from the diff (old 3.12 vs new 3.11);
# as written this is a two-stage build where the final image is 3.11-slim.
FROM python:3.12-slim
FROM python:3.11-slim
# NOTE(review): combined ENV line plus the two single-var lines are old/new
# diff residue; the values agree, so the duplication is harmless.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
# Virtualenv is created by entrypoint.sh on first boot; PATH prefers it.
ENV VIRTUAL_ENV=/opt/venv
ENV PRISMA_SCHEMA_PATH=/app/services/database/schema.prisma
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
# Makes `services.common...` style imports resolvable from /app.
ENV PYTHONPATH=/app
WORKDIR /
COPY app/services/database/pyproject.toml ./
COPY app/services/database/README.md ./
COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/database/ ./app/services/database/
RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN apt-get update && apt-get install -y bash
RUN mkdir -p /app/data
RUN chmod +x /app/services/database/entrypoint.sh
# NOTE(review): two CMD lines — only the last one takes effect; the service
# now boots through the entrypoint script rather than `python -m`.
CMD ["python", "-m", "app.services.database.main"]
CMD ["bash", "/app/services/database/entrypoint.sh"]

View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Bootstrap the database service: build the venv and Prisma client on first
# run, make sure the query-engine binary is cached, then exec the service.
# Fix: added `set -euo pipefail` — previously a failed pip/prisma step was
# ignored and the script exec'd a half-initialized service anyway.
set -euo pipefail

VENV_PATH="/opt/venv"
REQUIREMENTS_PATH="/app/services/database/requirements.txt"
SCHEMA_PATH="/app/services/database/schema.prisma"
PRISMA_BINARY_PATH="/root/.cache/prisma-python/binaries"

# First boot: the venv does not exist yet — create it and install everything.
if [ ! -x "$VENV_PATH/bin/python" ]; then
    python -m venv "$VENV_PATH"
    . "$VENV_PATH/bin/activate"
    "$VENV_PATH/bin/pip" install pip --upgrade
    "$VENV_PATH/bin/pip" install -r "$REQUIREMENTS_PATH"
    "$VENV_PATH/bin/prisma" generate --schema "$SCHEMA_PATH"
fi

# Fetch the Prisma query engine if its cached binary is missing (e.g. a
# recreated container with a fresh cache volume).
if ! find "$PRISMA_BINARY_PATH" -type f -name "prisma-query-engine-debian-openssl-3.0.x" 2>/dev/null | grep -q .; then
    "$VENV_PATH/bin/pip" install prisma
    "$VENV_PATH/bin/prisma" py fetch
fi

# exec replaces the shell so signals (SIGTERM from docker stop) reach Python.
exec "$VENV_PATH/bin/python" -u /app/services/database/main.py

View File

@@ -1,28 +1,48 @@
import os
import uuid
import asyncio
# NOTE(review): both the old and the new import of ServiceBaseAsync survive
# from the diff; the later `services.common` one wins at runtime (the image
# presumably sets PYTHONPATH=/app — confirm against the Dockerfile).
from app.services.common.service_base_async import ServiceBaseAsync
from prisma_client import PrismaService
from services.common.service_base_async import ServiceBaseAsync

# Tunables, all overridable from the environment.
PRODUCE_ENABLED = os.getenv("PRODUCE_ENABLED", "true").lower() == "true"
PRODUCE_BATCH = int(os.getenv("PRODUCE_BATCH", "3"))  # how many jobs per produce tick
TASK_TYPE = os.getenv("TASK_TYPE", "db-task")  # job type (also embedded in the task_id)
CONSUME_SLEEP_SEC = float(os.getenv("CONSUME_SLEEP_SEC", "0.5"))  # simulated processing time (seconds)
# Fixed business ids so repeated produce ticks yield deterministic task ids.
STATIC_IDS = ["2c47f1073a9d4f05aad6c15484894a72", "65827e3452b545d6845e050a503401f3", "5c663088f09d4062b4e567f47335fb1a"]
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-event")
PROCESS_SEC = 10  # seconds of simulated work per message
async def produce(service: ServiceBaseAsync):
    """Legacy producer: enqueue one deterministic job per static business id.

    Shadowed by the Prisma-backed ``produce`` defined below; kept as diff
    residue of the pre-RabbitMQ flow.
    """
    for business_id in STATIC_IDS:
        # Deterministic id makes repeated ticks idempotent on the queue.
        task_key = f"{TASK_TYPE}:{business_id}"
        body = {"id": business_id, "op": "sync", "source": "db-service"}
        await service.enqueue(body, TASK_TYPE, task_id=task_key)
    print(f"[DB] produce tick attempted ids={','.join(STATIC_IDS)}")
async def produce(svc: ServiceBaseAsync):
    """Fetch a page of account records via Prisma and enqueue each row as a
    'database.account.records' event.

    Fix: the closing log f-string was missing the trailing ``'`` after
    ``{svc.produce_key}``, producing an unbalanced-quote message.
    """
    prisma_service = PrismaService()
    async with prisma_service._asession() as db:
        result = await db.account_records.find_many(take=10, skip=0)
        # Project rows down to plain JSON-friendly dicts before publishing.
        result: list = prisma_service.to_dict(
            result,
            select={"id": True, "uu_id": True, "iban": True, "bank_reference_code": True, "bank_date": True, "bank_balance": True},
        )
        for row in result:
            # uu_id doubles as the idempotent task id on the broker.
            await svc.enqueue(task_id=row["uu_id"], payload=row, type_="database.account.records")
            # Throttle: simulated per-row processing time.
            await asyncio.sleep(PROCESS_SEC)
    print(f"Produced From Database Producer: {len(result)} events to '{svc.produce_key}'")
async def consume(service: ServiceBaseAsync, job: dict):
    """Legacy consumer: simulate a processing delay, then log the task id."""
    delay = CONSUME_SLEEP_SEC
    await asyncio.sleep(delay)
    print(f"[DB] consumed task={job['task_id']}")
async def handle_mail_publish(svc: ServiceBaseAsync, job):
    """Consumer for the 'mail.service.publish' routing key.

    Simulates PROCESS_SEC seconds of work, acknowledges the in-flight
    delivery on the broker, then logs the job payload.
    """
    await asyncio.sleep(PROCESS_SEC)
    # Acknowledge the current delivery — presumably the RabbitMQ message
    # this job was consumed from; see ServiceBaseAsync.ack_current.
    await svc.ack_current()
    print("Database Consumer from mail:", job)
async def handle_mongo_publish(svc: ServiceBaseAsync, job):
    """Consumer for the 'mongo.service.publish' routing key.

    Simulates PROCESS_SEC seconds of work, acks the delivery, logs the job.

    Fix: dropped the unused ``PrismaService()`` local — instantiating it
    spawned a background event-loop thread per message for no reason.
    """
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Database Consumer from mongo:", job)
async def consume_default(svc: ServiceBaseAsync, job):
    """Fallback consumer for routing keys without a dedicated handler:
    simulate work, log, and park the message on the dead-letter queue.

    Fix: dropped the unused ``PrismaService()`` local — instantiating it
    spawned a background event-loop thread per message for no reason.
    """
    await asyncio.sleep(PROCESS_SEC)
    print("Database Consumer default (DLQ):", job.get("task_id"))
    await svc.dlq_current(job, error="unsupported_routing_key")
if __name__ == "__main__":
    # Fix: the old `asyncio.run(ServiceBaseAsync(produce, consume).run())`
    # startup line was left in place above the new one; since asyncio.run
    # blocks, the handler-based service below would never have started.
    svc = ServiceBaseAsync(
        produce_fn=produce,
        consume_fn=consume_default,
        handlers={
            "mail.service.publish": handle_mail_publish,
            "mongo.service.publish": handle_mongo_publish,
        },
    )
    asyncio.run(svc.run())

View File

@@ -0,0 +1,189 @@
import asyncio
import time
import logging
import uvloop
import threading
import datetime
import uuid
from typing import Optional, AsyncGenerator, Any, TypeVar, Union
from contextlib import asynccontextmanager
from prisma import Prisma
# NOTE(review): _PrismaModel is a private symbol of prisma-client-py and may
# break on library upgrades — confirm it is actually needed.
from prisma.client import _PrismaModel

# Bound TypeVar for typed helpers returning Prisma models; appears unused in
# this file — TODO confirm before removing.
_PrismaModelT = TypeVar('_PrismaModelT', bound='_PrismaModel')

logger = logging.getLogger("prisma-service")
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
# Silence the HTTP-client chatter from the Prisma engine connection.
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)
class PrismaService:
    """Thread-backed wrapper around the Prisma client.

    Runs a private uvloop event loop in a daemon thread ("PrismaLoop") so
    synchronous callers can drive Prisma coroutines via ``_submit``; async
    callers that already own a loop use ``_aconnect``/``_asession`` directly.

    Fixes over the original:
    - removed the dead ``_lock`` coroutine method: it was shadowed on every
      instance by the ``asyncio.Lock`` assigned in ``__init__`` and only
      locked a throwaway local lock anyway;
    - ``to_dict`` no longer raises TypeError when ``select`` is None, and no
      longer stringifies ints/floats/datetimes twice — the old if/if/.../else
      chain attached its ``else`` only to the final ``isinstance(v, bool)``
      test, so every non-bool value ended up as ``str(v)``;
    - ``_run`` actually rejects being called from inside a running loop; the
      old guard raised inside the same ``try`` whose ``except RuntimeError``
      then executed the coroutine regardless.
    """

    def __init__(self) -> None:
        # Guards lazy connect/disconnect in the async-context API.
        self._lock = asyncio.Lock()
        self._loop: Optional[asyncio.AbstractEventLoop] = None
        self._thread: Optional[threading.Thread] = None
        self._client: Optional[Prisma] = None
        self.result: Optional[Any] = None
        self.select: Optional[dict] = None
        self._start_loop_thread()

    def _loop_runner(self) -> None:
        """Thread target: install uvloop and run a private loop forever."""
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)
        try:
            self._loop.run_forever()
        finally:
            self._loop.close()

    def _start_loop_thread(self) -> None:
        """Spawn the background loop thread and wait until the loop exists."""
        t = threading.Thread(target=self._loop_runner, name="PrismaLoop", daemon=True)
        t.start()
        self._thread = t
        # Cheap poll until _loop_runner has published the loop object.
        while self._loop is None:
            time.sleep(0.005)

    def _submit(self, coro):
        """Run *coro* on the background loop and block for its result.

        Raises:
            RuntimeError: if the background loop is absent or stopped.
        """
        if self._loop is None or not self._loop.is_running():
            raise RuntimeError("PrismaService event loop is not running.")
        fut = asyncio.run_coroutine_threadsafe(coro, self._loop)
        return fut.result()

    async def _aconnect(self) -> Prisma:
        """Connect (once) and return the shared client. Not lock-protected —
        intended for single-task use on the background loop."""
        if self._client is not None:
            return self._client
        logger.info("Connecting Prisma client...")
        client = Prisma()
        await client.connect()
        self._client = client
        logger.info("Prisma client connected.")
        return self._client

    async def _adisconnect(self) -> None:
        """Disconnect and forget the shared client, if any."""
        if self._client is not None:
            logger.info("Disconnecting Prisma client...")
            try:
                await self._client.disconnect()
            finally:
                # Always drop the reference, even if disconnect raised.
                self._client = None
            logger.info("Prisma client disconnected.")

    @asynccontextmanager
    async def _asession(self) -> AsyncGenerator[Prisma, None]:
        """Async context manager yielding a connected client (no auto-close)."""
        yield await self._aconnect()

    async def _connect(self) -> Prisma:
        """Lock-protected connect for concurrent async callers."""
        if self._client is not None:
            return self._client
        async with self._lock:
            # Re-check under the lock: another task may have connected first.
            if self._client is None:
                logger.info("Connecting Prisma client...")
                client = Prisma()
                await client.connect()
                self._client = client
                logger.info("Prisma client connected.")
            return self._client

    async def _disconnect(self) -> None:
        """Lock-protected disconnect; always clears the cached client."""
        async with self._lock:
            if self._client is not None:
                try:
                    logger.info("Disconnecting Prisma client...")
                    await self._client.disconnect()
                    logger.info("Prisma client disconnected.")
                finally:
                    self._client = None

    @staticmethod
    def _coerce(value: Any) -> Any:
        """Map one field value onto a JSON-friendly primitive."""
        # bool must be tested first: bool is an int subclass.
        if isinstance(value, bool):
            return value
        if isinstance(value, (datetime.datetime, uuid.UUID)):
            return str(value)
        if isinstance(value, (int, float)):
            return value
        return str(value)

    @staticmethod
    def to_dict(result: Union[list, Any], select: dict = None):
        """Project Prisma model row(s) into plain dict(s).

        Args:
            result: a Prisma model instance, or a list of them (anything
                whose iteration yields ``(key, value)`` pairs).
            select: keys to keep; ``None`` keeps every key.

        Returns:
            A dict, or a list of dicts, of JSON-friendly values.
        """
        def wanted(key: str) -> bool:
            return select is None or key in select

        if isinstance(result, list):
            return [
                {k: PrismaService._coerce(v) for k, v in row if wanted(k)}
                for row in result
            ]
        return {k: PrismaService._coerce(v) for k, v in result if wanted(k)}

    @asynccontextmanager
    async def _session(self) -> AsyncGenerator[Prisma, None]:
        """Async session that logs and re-raises any database error."""
        client = await self._connect()
        try:
            yield client
        except Exception:
            logger.exception("Database operation error")
            raise

    def _run(self, coro):
        """Drive *coro* to completion from synchronous code.

        Raises:
            RuntimeError: when called from inside a running event loop.
        """
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            pass  # no running loop — safe to create one below
        else:
            raise RuntimeError("Async run is not allowed. Use sync methods instead.")
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        with asyncio.Runner() as runner:
            return runner.run(coro)

    def disconnect(self) -> None:
        """Tear down: close the client, stop the loop, join the thread."""
        try:
            self._submit(self._adisconnect())
        finally:
            if self._loop and self._loop.is_running():
                self._loop.call_soon_threadsafe(self._loop.stop)
            if self._thread and self._thread.is_alive():
                self._thread.join(timeout=2.0)
            self._loop = None
            self._thread = None

View File

@@ -7,14 +7,17 @@ name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
requires-python = ">=3.11,<4.0"
authors = [
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]
dependencies = [
"nats-py>=2.6.0",
"aio-pika>=9.4.1",
"prometheus-client>=0.20.0",
"uvloop>=0.19.0"
"uvloop>=0.19.0",
"prisma==0.9.1",
"asyncio==3.4.3",  # NOTE(review): PyPI 'asyncio' is an obsolete stdlib backport that can shadow the standard library — confirm and drop
"arrow>=1.3.0"
]
[project.optional-dependencies]
@@ -34,3 +37,4 @@ profile = "black"
[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]

View File

@@ -0,0 +1,6 @@
aio-pika>=9.4.1
prometheus-client>=0.20.0
uvloop>=0.19.0
prisma==0.9.1
asyncio==3.4.3  # NOTE(review): PyPI 'asyncio' is a stale stdlib backport; likely unintended — confirm and remove
arrow>=1.3.0

File diff suppressed because it is too large Load Diff