rabbitmq implemented and tested
This commit is contained in:
parent 9543d136aa
commit 61529f7d94
@@ -66,14 +66,12 @@ def initialize_service():


if __name__ == "__main__":

logger.info("Starting IsBank Email Service")
print(f"Starting Service Mail Reader.")
runner = initialize_service()
-normal_sleep_time = 10
-error_sleep_time = 30
-max_consecutive_errors = 5
-extended_error_sleep = 120
-consecutive_errors = 0
+consecutive_errors, normal_sleep_time, error_sleep_time = 0, 10, 30
+max_consecutive_errors, extended_error_sleep = 5, 120

while True:
try:
@@ -96,6 +94,7 @@ if __name__ == "__main__":
sleep(extended_error_sleep)
else:
sleep(error_sleep_time)

except socket.error as e:
consecutive_errors += 1
logger.error(f"Email connection error (attempt {consecutive_errors}): {str(e)}")
@@ -114,6 +113,7 @@ if __name__ == "__main__":
sleep(extended_error_sleep)
else:
sleep(error_sleep_time)

except Exception as e:
consecutive_errors += 1
logger.error(f"Unexpected error (attempt {consecutive_errors}): {str(e)}")
@@ -455,7 +455,6 @@ class EmailServiceRunner:
redis_handler: Redis handler for Redis operations
email_service: Email service for email operations
"""
-# Use MailReaderService singleton for Redis operations
self.redis_handler = redis_handler
self.email_service = email_service
self.mails = None
@@ -84,6 +84,11 @@ class PrismaService:
while self._loop is None:
time.sleep(0.005)

+async def _lock(self):
+lock = asyncio.Lock()
+async with lock:
+return

async def _connect(self) -> Prisma:
if self._client is not None:
return self._client
@@ -1,79 +1,147 @@
import os
+import json
import uuid
import asyncio
-import json
+import fnmatch
+import aio_pika

-from typing import Any, Dict, Awaitable, Callable, Optional
+from core.utils import now_ms
+from contextvars import ContextVar
+from aio_pika.abc import AbstractIncomingMessage
+from typing import Any, Dict, Awaitable, Callable, Optional, List, NamedTuple

-from app.core.utils import now_ms
-from app.core import metrics

-from nats.aio.client import Client as NATS
-from nats.js.api import StreamConfig, ConsumerConfig, AckPolicy
-from nats.errors import NoRespondersError
+class _MsgCtx(NamedTuple):
+msg: AbstractIncomingMessage
+rk: str
+attempts: int


+_MSG_CTX: ContextVar[_MsgCtx | None] = ContextVar("_MSG_CTX", default=None)


class ServiceBaseAsync:
"""
-JetStream-based base class:
-- TASKS subject: publish + consume
-- PUBLISH subject: event publishing (enqueued / duplicate_skipped / done / retry / failed)
-- Dedup: Nats-Msg-Id = task_id (JetStream does not write inside the duplicate window)
-- Retry: msg.nak(); msg.term() once MAX_DELIVER is exceeded ("failed" when there is no DLQ)
+RabbitMQ-based async service skeleton.
+- Topic exchange: EXCHANGE_EVENTS (default: app.events)
+- Multiple consume bindings: CONSUME_BINDINGS="parser.publish,mail.publish"
+- Does not consume its own output: payload.source == SERVICE_NAME -> ACK & skip
+- Retry: retry queue with TTL (RETRY_DELAY_MS), then dead-lettered back to main
+- Above the max attempts, DLQ: q.<service>.events.dlq
+- Handler map: routing key -> dedicated callback (patterns supported)
+- Backward compatibility: enqueue(payload, type_, routing_key=None, message_id=None)
"""

def __init__(
self,
produce_fn: Callable[["ServiceBaseAsync"], Awaitable[None]],
consume_fn: Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]],
+handlers: Optional[Dict[str, Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]]]] = None,
):
-self.nats_url = os.getenv("NATS_URL", "nats://nats:4222")
+self.service_name = os.getenv("SERVICE_NAME", "db-service")
+self.amqp_url = os.getenv("RABBITMQ_URL", "amqp://guest:guest@localhost/")
+self.exchange_name = os.getenv("EXCHANGE_EVENTS", "app.events")
+self.produce_key: str = os.getenv("PRODUCE_KEY", f"{self.service_name}.publish")

-self.stream_name = os.getenv("JS_STREAM", "ACCOUNT_SERVICES_DATABASE")
-self.tasks_subject = os.getenv("JS_TASKS_SUBJECT", "ACCOUNT.SERVICES.DATABASE.TASKS")
-self.publish_subject = os.getenv("JS_PUBLISH_SUBJECT", "ACCOUNT.SERVICES.DATABASE.PUBLISH")
+raw = os.getenv("CONSUME_BINDINGS", "")
+self.consume_bindings: List[str] = [s.strip() for s in raw.split(",") if s.strip()]
+base = self.service_name.replace("/", "_")

-self.durable = os.getenv("JS_DURABLE", "DB_WORKERS")
-self.batch_size = int(os.getenv("BATCH_SIZE", "5"))
-self.ack_wait_sec = int(os.getenv("ACK_WAIT_SEC", "30"))
-self.max_deliver = int(os.getenv("MAX_DELIVER", "3"))
-self.retry_enabled = os.getenv("RETRY_ENABLED", "true").lower() == "true"
-self.dedup_header = os.getenv("DEDUP_HEADER", "Nats-Msg-Id")
+self.queue_main = f"q.{base}.events"
+self.queue_retry = f"{self.queue_main}.retry"
+self.queue_dlq = f"{self.queue_main}.dlq"
+self.retry_delay_ms = int(os.getenv("RETRY_DELAY_MS", "5000"))
+self.max_retries = int(os.getenv("MAX_RETRIES", "3"))
+self.prefetch = int(os.getenv("PREFETCH", "16"))
+self.ignore_self = os.getenv("IGNORE_SELF_PRODUCED", "true").lower() == "true"

self.produce_fn = produce_fn
self.consume_fn = consume_fn

-self.nc: Optional[NATS] = None
-self.js = None
+self.handlers = handlers or {}
+self.conn: Optional[aio_pika.RobustConnection] = None
+self.chan: Optional[aio_pika.RobustChannel] = None
+self.ex: Optional[aio_pika.Exchange] = None

-async def run(self) -> None:
-metrics.start_server()
-self.nc = NATS()
-await self.nc.connect(self.nats_url)
-self.js = self.nc.jetstream()
+async def _connect_with_retry(self, max_wait: int = 300):
+delay = 1
+deadline = asyncio.get_event_loop().time() + (max_wait or 10**9)
+last_err = None
+while True:

-await self._ensure_stream_and_consumer()
-await asyncio.gather(self._produce_loop(), self._consume_loop())

-async def _ensure_stream_and_consumer(self) -> None:
try:
-await self.js.add_stream(StreamConfig(name=self.stream_name, subjects=[self.tasks_subject, self.publish_subject]))
-print(f"[js] stream created: {self.stream_name}")
-except Exception:
-pass
-try:
-await self.js.add_consumer(
-self.stream_name,
-ConsumerConfig(
-durable_name=self.durable, ack_policy=AckPolicy.EXPLICIT,
-ack_wait=self.ack_wait_sec, max_deliver=self.max_deliver, filter_subject=self.tasks_subject),
-)
-print(f"[js] consumer created: durable={self.durable}")
-except Exception:
-pass
+conn = await aio_pika.connect_robust(self.amqp_url, client_properties={"connection_name": self.service_name}, timeout=10)
+print(f"[amqp] connected: {self.amqp_url} : {self.service_name} : {self.exchange_name} : {str(self.consume_bindings)}")
+return conn
+except Exception as e:
+last_err = e
+now = asyncio.get_event_loop().time()
+if now + delay > deadline:
+raise last_err
+await asyncio.sleep(delay)
+delay = min(delay * 2, 10)

-async def _produce_loop(self) -> None:
+async def run(self):
+self.conn = await self._connect_with_retry()
+self.chan = await self.conn.channel()
+await self.chan.set_qos(prefetch_count=self.prefetch)
+self.ex = await self.chan.declare_exchange(self.exchange_name, aio_pika.ExchangeType.TOPIC, durable=True)
+self.ex_retry = await self.chan.declare_exchange(f"{self.exchange_name}.retry", aio_pika.ExchangeType.TOPIC, durable=True)
+self.ex_dlx = await self.chan.declare_exchange(f"{self.exchange_name}.dlx", aio_pika.ExchangeType.TOPIC, durable=True)
+args_main = {"x-dead-letter-exchange": f"{self.exchange_name}.retry", "x-queue-mode": "lazy"}
+q_main = await self.chan.declare_queue(self.queue_main, durable=True, arguments=args_main)
+args_retry = {"x-message-ttl": self.retry_delay_ms, "x-dead-letter-exchange": self.exchange_name}
+q_retry = await self.chan.declare_queue(self.queue_retry, durable=True, arguments=args_retry)
+q_dlq = await self.chan.declare_queue(self.queue_dlq, durable=True)
+await q_dlq.bind(self.ex_dlx, routing_key="#")
+if not self.consume_bindings:
+print("[warn] No CONSUME_BINDINGS configured; only producing.")
+for rk in (self.consume_bindings or []):
+await q_main.bind(self.ex, routing_key=rk)
+await q_retry.bind(self.ex_retry, routing_key=rk)
+await q_main.consume(self._on_message, no_ack=False)
+await asyncio.gather(self._produce_loop())

+async def enqueue(self, task_id: str, payload: Dict[str, Any], type_: Optional[str] = None, routing_key: Optional[str] = None, message_id: Optional[str] = None) -> str:
+assert self.ex is not None
+payload.setdefault("task_id", task_id)
+payload.setdefault("source", self.service_name)
+body = json.dumps({"task_id": task_id, "type": type_, "payload": payload, "created_at": now_ms()}).encode()
+msg = aio_pika.Message(body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=message_id or task_id, headers={"x-attempts": 0})
+rk = routing_key or self.produce_key
+await self.ex.publish(msg, routing_key=rk)
+return task_id

+async def ack_current(self) -> None:
+ctx = _MSG_CTX.get()
+if ctx and ctx.msg:
+await ctx.msg.ack()

+async def nack_current(self, requeue: bool = False) -> None:
+ctx = _MSG_CTX.get()
+if ctx and ctx.msg:
+await ctx.msg.nack(requeue=requeue)

+async def retry_current(self, job: dict, attempts: int | None = None) -> None:
+"""Copy to the retry queue and ACK the original."""
+ctx = _MSG_CTX.get()
+if not (ctx and ctx.msg):
+return
+att = attempts if attempts is not None else (ctx.attempts + 1)
+await self._publish_retry(ctx.msg, job, att)
+await ctx.msg.ack()

+async def dlq_current(self, job: dict, error: str | None = None) -> None:
+"""Send to the DLQ and ACK the original."""
+ctx = _MSG_CTX.get()
+if not (ctx and ctx.msg):
+return
+await self._publish_dlq(ctx.msg, job, error=error)
+await ctx.msg.ack()

+def register_handler(self, pattern: str, fn: Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]]):
+self.handlers[pattern] = fn

+async def _produce_loop(self):
while True:
try:
await self.produce_fn(self)
@@ -81,85 +149,61 @@ class ServiceBaseAsync:
print(f"[produce] ERROR: {e}")
await asyncio.sleep(2)

-async def _consume_loop(self) -> None:
-sub = await self.js.pull_subscribe(self.tasks_subject, durable=self.durable)
-while True:
+async def _on_message(self, msg: AbstractIncomingMessage):
+async with msg.process(ignore_processed=True, requeue=False):
try:
-msgs = await sub.fetch(self.batch_size, timeout=2)
+job = json.loads(msg.body.decode())
except Exception:
-msgs = []
-if not msgs:
-await asyncio.sleep(0.2)
-continue
+job = {"payload": {}, "task_id": None}
+src = (job.get("payload") or {}).get("source")
+if self.ignore_self and src == self.service_name:
+return
+attempts = 0

-for msg in msgs:
-job = self._decode_msg(msg)
-attempts = self._delivery_attempts(msg)
try:
-await self.consume_fn(self, job)
-await msg.ack()
-await self._publish({"task_id": job.get("task_id"), "status": "done"})
+attempts = int(msg.headers.get("x-attempts", 0))
+except Exception:
+pass

+handler = self._resolve_handler(msg.routing_key) or self.consume_fn
+meta = job.setdefault("_meta", {})
+meta["routing_key"] = msg.routing_key
+meta["attempts"] = attempts
+meta["exchange"] = self.exchange_name
+ctx_token = _MSG_CTX.set(_MsgCtx(msg=msg, rk=msg.routing_key, attempts=attempts))
+try:
+await handler(self, job)
except Exception as e:
-err = str(e)
-if (not self.retry_enabled) or (attempts >= self.max_deliver):
-await msg.term()
-await self._publish({"task_id": job.get("task_id"), "status": "failed", "error": err})
+if attempts + 1 >= self.max_retries:
+await self._publish_dlq(msg, job, error=str(e))
+await msg.ack()
else:
-await msg.nak()
-await self._publish({"task_id": job.get("task_id"), "status": "retry", "attempts": attempts, "error": err})
+await self._publish_retry(msg, job, attempts + 1)
+await msg.ack()
+finally:
+_MSG_CTX.reset(ctx_token)

-async def enqueue(self, payload: Dict[str, Any], type_: str, task_id: Optional[str] = None) -> str:
-"""
-Dedup: Nats-Msg-Id = task_id
-If it is a duplicate, publish.duplicate returns True and JetStream does not write.
-"""
-_task_id = task_id or payload.get("task_id") or str(uuid.uuid4())
-payload.setdefault("task_id", _task_id)
+def _resolve_handler(self, routing_key: str):
+if routing_key in self.handlers:
+return self.handlers[routing_key]
+for pat, fn in self.handlers.items():
+if fnmatch.fnmatch(routing_key, pat):
+return fn
+return None

-task = {"task_id": _task_id, "type": type_, "payload": payload, "created_at": now_ms(), "_attempts": 0}
-data = json.dumps(task).encode()
-try:
-ack = await self.js.publish(self.tasks_subject, data, headers={self.dedup_header: _task_id})
-except NoRespondersError:
-raise RuntimeError("NATS/JetStream not available")
+async def _publish_retry(self, msg: AbstractIncomingMessage, job: Dict[str, Any], attempts: int):
+chan = self.chan; assert chan is not None
+retry_ex = await chan.get_exchange(f"{self.exchange_name}.retry")
+rk = msg.routing_key
+body = json.dumps(job).encode()
+m = aio_pika.Message(body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=msg.message_id, headers={"x-attempts": attempts})
+await retry_ex.publish(m, routing_key=rk)

-if getattr(ack, "duplicate", False):
-# await self._publish({"task_id": _task_id, "status": "duplicate_skipped"})
-return _task_id
-await self._publish({"task_id": _task_id, "status": "enqueued"})
-return _task_id
-async def _publish(self, event: Dict[str, Any]) -> None:
-evt = dict(event)
-evt.setdefault("ts", now_ms())
-evt.setdefault("queue", self.tasks_subject)
-try:
-metrics.observe(evt.get("status","unknown"), evt["queue"], evt.get("type"))
-except Exception:
-pass
-try:
-await self.js.publish(self.publish_subject, json.dumps(evt).encode())
-except Exception:
-pass
+async def _publish_dlq(self, msg: AbstractIncomingMessage, job: Dict[str, Any], error: Optional[str] = None):
+chan = self.chan; assert chan is not None
+dlx_ex = await chan.get_exchange(f"{self.exchange_name}.dlx")
+body_obj = dict(job)
+if error:
+body_obj.setdefault("_error", str(error))
+body = json.dumps(body_obj).encode()
+m = aio_pika.Message(body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=msg.message_id, headers={"x-attempts": msg.headers.get("x-attempts", 0)})
+await dlx_ex.publish(m, routing_key=msg.routing_key)

-@staticmethod
-def _decode_msg(msg) -> Dict[str, Any]:
-try:
-obj = json.loads(msg.data.decode())
-if "payload" in obj and isinstance(obj["payload"], str):
-try:
-obj["payload"] = json.loads(obj["payload"])
-except Exception:
-pass
-return obj
-except Exception:
-return {"payload": {}, "task_id": None}

-@staticmethod
-def _delivery_attempts(msg) -> int:
-try:
-return msg.metadata.num_delivered
-except Exception:
-return 1
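For orientation only (not part of this commit): a minimal sketch of a service built on the new ServiceBaseAsync, showing how the ContextVar-backed helpers ack_current, retry_current and dlq_current added above can be driven from inside a handler. The import path mirrors the one used by the database service elsewhere in this diff, and the "parser.publish" routing key is taken from the docstring example; the exception types and the body of the handler are illustrative assumptions.

import asyncio

# Assumed import path; the database service in this commit imports the class the same way.
from services.common.service_base_async import ServiceBaseAsync


async def produce(svc: ServiceBaseAsync) -> None:
    # Publish one illustrative event; routing key falls back to PRODUCE_KEY.
    await svc.enqueue(task_id="demo-1", payload={"op": "noop"}, type_="demo.event")
    await asyncio.sleep(30)


async def handle_parser_publish(svc: ServiceBaseAsync, job: dict) -> None:
    try:
        ...  # real work on job["payload"] would go here (placeholder)
    except TimeoutError:
        # Transient failure: copy to the TTL retry queue and ACK the original.
        await svc.retry_current(job)
        return
    except ValueError as exc:
        # Permanent failure: park the message in q.<service>.events.dlq.
        await svc.dlq_current(job, error=str(exc))
        return
    await svc.ack_current()


async def consume_default(svc: ServiceBaseAsync, job: dict) -> None:
    # Fallback for routing keys without a dedicated handler.
    await svc.ack_current()


if __name__ == "__main__":
    svc = ServiceBaseAsync(
        produce_fn=produce,
        consume_fn=consume_default,
        handlers={"parser.publish": handle_parser_publish},
    )
    asyncio.run(svc.run())

If a handler neither acks nor raises, _on_message's msg.process() context manager resolves the delivery on exit, so explicit ack/retry/dlq calls are only needed when the handler wants to decide the outcome itself.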
@@ -1,18 +1,22 @@
-FROM python:3.12-slim
+FROM python:3.11-slim

-ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV VIRTUAL_ENV=/opt/venv
+ENV PRISMA_SCHEMA_PATH=/app/services/database/schema.prisma
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
ENV PYTHONPATH=/app

WORKDIR /

-COPY app/services/database/pyproject.toml ./
COPY app/services/database/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/database/ ./app/services/database/

-RUN pip install --upgrade pip && pip install --no-cache-dir .
+RUN apt-get update && apt-get install -y bash
RUN mkdir -p /app/data
+RUN chmod +x /app/services/database/entrypoint.sh

-CMD ["python", "-m", "app.services.database.main"]
+CMD ["bash", "/app/services/database/entrypoint.sh"]
@@ -0,0 +1,21 @@
+#!/bin/bash

+VENV_PATH="/opt/venv"
+REQUIREMENTS_PATH="/app/services/database/requirements.txt"
+SCHEMA_PATH="/app/services/database/schema.prisma"
+PRISMA_BINARY_PATH="/root/.cache/prisma-python/binaries"

+if [ ! -x "$VENV_PATH/bin/python" ]; then
+python -m venv "$VENV_PATH"
+. "$VENV_PATH/bin/activate"
+"$VENV_PATH/bin/pip" install pip --upgrade
+"$VENV_PATH/bin/pip" install -r "$REQUIREMENTS_PATH"
+"$VENV_PATH/bin/prisma" generate --schema "$SCHEMA_PATH"
+fi

+if ! find "$PRISMA_BINARY_PATH" -type f -name "prisma-query-engine-debian-openssl-3.0.x" 2>/dev/null | grep -q .; then
+"$VENV_PATH/bin/pip" install prisma
+"$VENV_PATH/bin/prisma" py fetch
+fi

+exec "$VENV_PATH/bin/python" -u /app/services/database/main.py
@@ -1,28 +1,48 @@
import os
-import uuid
import asyncio

-from app.services.common.service_base_async import ServiceBaseAsync
+from prisma_client import PrismaService
+from services.common.service_base_async import ServiceBaseAsync


-PRODUCE_ENABLED = os.getenv("PRODUCE_ENABLED", "true").lower() == "true"
-PRODUCE_BATCH = int(os.getenv("PRODUCE_BATCH", "3"))  # how many jobs per produce tick
-TASK_TYPE = os.getenv("TASK_TYPE", "db-task")  # task type (also used in task_id)
-CONSUME_SLEEP_SEC = float(os.getenv("CONSUME_SLEEP_SEC", "0.5"))  # simulated processing time (seconds)
-STATIC_IDS = ["2c47f1073a9d4f05aad6c15484894a72", "65827e3452b545d6845e050a503401f3", "5c663088f09d4062b4e567f47335fb1a"]
+PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
+PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
+EVENT_TYPE = os.getenv("EVENT_TYPE", "db-event")
+PROCESS_SEC = 10


-async def produce(service: ServiceBaseAsync):
-for biz_id in STATIC_IDS:
-deterministic_task_id = f"{TASK_TYPE}:{biz_id}"
-payload = {"id": biz_id, "op": "sync", "source": "db-service"}
-await service.enqueue(payload, TASK_TYPE, task_id=deterministic_task_id)
-print(f"[DB] produce tick attempted ids={','.join(STATIC_IDS)}")
+async def produce(svc: ServiceBaseAsync):
+prisma_service = PrismaService()
+async with prisma_service._asession() as db:
+result = await db.account_records.find_many(take=10, skip=0)
+result: list = prisma_service.to_dict(result, select={"id": True, "uu_id": True, "iban": True, "bank_reference_code": True, "bank_date": True, "bank_balance": True})
+for row in result:
+await svc.enqueue(task_id=row["uu_id"], payload=row, type_="database.account.records")
+await asyncio.sleep(PROCESS_SEC)
+print(f"Produced From Database Producer: {len(result)} events to '{svc.produce_key}")


-async def consume(service: ServiceBaseAsync, job: dict):
-await asyncio.sleep(CONSUME_SLEEP_SEC)
-print(f"[DB] consumed task={job['task_id']}")
+async def handle_mail_publish(svc: ServiceBaseAsync, job):
+await asyncio.sleep(PROCESS_SEC)
+await svc.ack_current()
+print("Database Consumer from mail:", job)


+async def handle_mongo_publish(svc: ServiceBaseAsync, job):
+prisma_service = PrismaService()
+await asyncio.sleep(PROCESS_SEC)
+await svc.ack_current()
+print("Database Consumer from mongo:", job)


+async def consume_default(svc: ServiceBaseAsync, job):
+prisma_service = PrismaService()
+await asyncio.sleep(PROCESS_SEC)
+print("Database Consumer default (DLQ):", job.get("task_id"))
+await svc.dlq_current(job, error="unsupported_routing_key")


if __name__ == "__main__":
-asyncio.run(ServiceBaseAsync(produce, consume).run())
+svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"mail.service.publish": handle_mail_publish, "mongo.service.publish": handle_mongo_publish})
+asyncio.run(svc.run())
@ -0,0 +1,189 @@
|
||||||
|
import asyncio
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
import uvloop
|
||||||
|
import threading
|
||||||
|
import datetime
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from typing import Optional, AsyncGenerator, Any, TypeVar, Union
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from prisma import Prisma
|
||||||
|
from prisma.client import _PrismaModel
|
||||||
|
|
||||||
|
|
||||||
|
_PrismaModelT = TypeVar('_PrismaModelT', bound='_PrismaModel')
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("prisma-service")
|
||||||
|
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
|
||||||
|
logging.getLogger("httpx").setLevel(logging.WARNING)
|
||||||
|
logging.getLogger("httpcore").setLevel(logging.WARNING)
|
||||||
|
|
||||||
|
|
||||||
|
class PrismaService:
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
self._loop: Optional[asyncio.AbstractEventLoop] = None
|
||||||
|
self._thread: Optional[threading.Thread] = None
|
||||||
|
self._client: Optional[Prisma] = None
|
||||||
|
self.result: Optional[Any] = None
|
||||||
|
self.select: Optional[dict] = None
|
||||||
|
self._start_loop_thread()
|
||||||
|
|
||||||
|
def _loop_runner(self) -> None:
|
||||||
|
|
||||||
|
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||||
|
self._loop = asyncio.new_event_loop()
|
||||||
|
asyncio.set_event_loop(self._loop)
|
||||||
|
try:
|
||||||
|
self._loop.run_forever()
|
||||||
|
finally:
|
||||||
|
self._loop.close()
|
||||||
|
|
||||||
|
def _submit(self, coro):
|
||||||
|
|
||||||
|
if self._loop is None or not self._loop.is_running():
|
||||||
|
raise RuntimeError("PrismaService event loop is not running.")
|
||||||
|
fut = asyncio.run_coroutine_threadsafe(coro, self._loop)
|
||||||
|
return fut.result()
|
||||||
|
|
||||||
|
async def _lock(self):
|
||||||
|
|
||||||
|
lock = asyncio.Lock()
|
||||||
|
async with lock:
|
||||||
|
return
|
||||||
|
|
||||||
|
async def _aconnect(self) -> Prisma:
|
||||||
|
|
||||||
|
if self._client is not None:
|
||||||
|
return self._client
|
||||||
|
logger.info("Connecting Prisma client...")
|
||||||
|
client = Prisma()
|
||||||
|
await client.connect()
|
||||||
|
self._client = client
|
||||||
|
logger.info("Prisma client connected.")
|
||||||
|
return self._client
|
||||||
|
|
||||||
|
async def _adisconnect(self) -> None:
|
||||||
|
|
||||||
|
if self._client is not None:
|
||||||
|
logger.info("Disconnecting Prisma client...")
|
||||||
|
try:
|
||||||
|
await self._client.disconnect()
|
||||||
|
finally:
|
||||||
|
self._client = None
|
||||||
|
logger.info("Prisma client disconnected.")
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def _asession(self) -> AsyncGenerator[Prisma, None]:
|
||||||
|
yield await self._aconnect()
|
||||||
|
|
||||||
|
def _start_loop_thread(self) -> None:
|
||||||
|
t = threading.Thread(target=self._loop_runner, name="PrismaLoop", daemon=True)
|
||||||
|
t.start()
|
||||||
|
self._thread = t
|
||||||
|
while self._loop is None:
|
||||||
|
time.sleep(0.005)
|
||||||
|
|
||||||
|
async def _connect(self) -> Prisma:
|
||||||
|
|
||||||
|
if self._client is not None:
|
||||||
|
return self._client
|
||||||
|
async with self._lock:
|
||||||
|
if self._client is None:
|
||||||
|
logger.info("Connecting Prisma client...")
|
||||||
|
client = Prisma()
|
||||||
|
await client.connect()
|
||||||
|
self._client = client
|
||||||
|
logger.info("Prisma client connected.")
|
||||||
|
return self._client
|
||||||
|
|
||||||
|
async def _disconnect(self) -> None:
|
||||||
|
|
||||||
|
async with self._lock:
|
||||||
|
if self._client is not None:
|
||||||
|
try:
|
||||||
|
logger.info("Disconnecting Prisma client...")
|
||||||
|
await self._client.disconnect()
|
||||||
|
logger.info("Prisma client disconnected.")
|
||||||
|
finally:
|
||||||
|
self._client = None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def to_dict(result: Union[list, Any], select: dict = None):
|
||||||
|
if isinstance(result, list):
|
||||||
|
list_result = []
|
||||||
|
for item_iter in result:
|
||||||
|
item = {}
|
||||||
|
for k, v in item_iter:
|
||||||
|
if k not in select:
|
||||||
|
continue
|
||||||
|
if isinstance(v, datetime.datetime):
|
||||||
|
item[k] = str(v)
|
||||||
|
if isinstance(v, uuid.UUID):
|
||||||
|
item[k] = str(v)
|
||||||
|
if isinstance(v, int):
|
||||||
|
item[k] = int(v)
|
||||||
|
if isinstance(v, float):
|
||||||
|
item[k] = float(v)
|
||||||
|
if isinstance(v, bool):
|
||||||
|
item[k] = bool(v)
|
||||||
|
else:
|
||||||
|
item[k] = str(v)
|
||||||
|
list_result.append(item)
|
||||||
|
return list_result
|
||||||
|
else:
|
||||||
|
dict_result = {}
|
||||||
|
for k,v in result:
|
||||||
|
if k not in select:
|
||||||
|
continue
|
||||||
|
if isinstance(v, datetime.datetime):
|
||||||
|
dict_result[k] = str(v)
|
||||||
|
if isinstance(v, uuid.UUID):
|
||||||
|
dict_result[k] = str(v)
|
||||||
|
if isinstance(v, int):
|
||||||
|
dict_result[k] = int(v)
|
||||||
|
if isinstance(v, float):
|
||||||
|
dict_result[k] = float(v)
|
||||||
|
if isinstance(v, bool):
|
||||||
|
dict_result[k] = bool(v)
|
||||||
|
else:
|
||||||
|
dict_result[k] = str(v)
|
||||||
|
return dict_result
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def _session(self) -> AsyncGenerator[Prisma, None]:
|
||||||
|
|
||||||
|
client = await self._connect()
|
||||||
|
try:
|
||||||
|
yield client
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Database operation error")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def _run(self, coro):
|
||||||
|
|
||||||
|
try:
|
||||||
|
asyncio.get_running_loop()
|
||||||
|
raise RuntimeError("Async run is not allowed. Use sync methods instead.")
|
||||||
|
except RuntimeError as e:
|
||||||
|
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||||
|
with asyncio.Runner() as runner:
|
||||||
|
return runner.run(coro)
|
||||||
|
|
||||||
|
def disconnect(self) -> None:
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._submit(self._adisconnect())
|
||||||
|
finally:
|
||||||
|
if self._loop and self._loop.is_running():
|
||||||
|
self._loop.call_soon_threadsafe(self._loop.stop)
|
||||||
|
if self._thread and self._thread.is_alive():
|
||||||
|
self._thread.join(timeout=2.0)
|
||||||
|
self._loop = None
|
||||||
|
self._thread = None
|
||||||
|
|
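For orientation only (not part of this commit): a minimal sketch of the PrismaService usage pattern this commit relies on in the database producer — open an _asession(), query, then filter the rows with to_dict. The account_records model and the select keys are taken from the producer elsewhere in this diff; the import path and the five-row query are assumptions.

import asyncio

# Assumed import path; the database service imports it as "from prisma_client import PrismaService".
from prisma_client import PrismaService


async def dump_recent_records() -> list:
    service = PrismaService()
    # _asession() lazily connects the shared Prisma client and yields it.
    async with service._asession() as db:
        rows = await db.account_records.find_many(take=5, skip=0)
    # to_dict keeps only the selected keys and coerces non-JSON-friendly values to strings.
    return PrismaService.to_dict(rows, select={"id": True, "uu_id": True, "iban": True})


if __name__ == "__main__":
    print(asyncio.run(dump_recent_records()))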
@@ -7,14 +7,17 @@ name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
-requires-python = ">=3.11"
+requires-python = ">=3.11,<4.0"
authors = [
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]
dependencies = [
-"nats-py>=2.6.0",
+"aio-pika>=9.4.1",
"prometheus-client>=0.20.0",
-"uvloop>=0.19.0"
+"uvloop>=0.19.0",
+"prisma==0.9.1",
+"asyncio==3.4.3",
+"arrow>=1.3.0"
]

[project.optional-dependencies]
@@ -34,3 +37,4 @@ profile = "black"
[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]
@@ -0,0 +1,6 @@
+aio-pika>=9.4.1
+prometheus-client>=0.20.0
+uvloop>=0.19.0
+prisma==0.9.1
+asyncio==3.4.3
+arrow>=1.3.0
File diff suppressed because it is too large.
@@ -0,0 +1,18 @@
+import os

+from ..config import ConfigServices

+class IsBankConfig:

+MAILBOX: str = os.getenv("MAILBOX", "bilgilendirme@ileti.isbank.com.tr")
+AUTHORIZE_IBAN: str = os.getenv("AUTHORIZE_IBAN", "4245-0093333")
+NO_ATTACHMENT_FOLDER: str = "NoAttachment"
+COMPLETED_FOLDER: str = "Completed"
+SERVICE_NAME: str = "IsBankEmailService"
+TASK_DATA_PREFIX: str = ConfigServices.MAIN_TASK_PREFIX
+TASK_MAILID_INDEX_PREFIX: str = ConfigServices.TASK_MAILID_INDEX_PREFIX
+TASK_UUID_INDEX_PREFIX: str = ConfigServices.TASK_UUID_INDEX_PREFIX
+TASK_SEEN_PREFIX: str = ConfigServices.TASK_SEEN_PREFIX
+SERVICE_PREFIX: str = ConfigServices.SERVICE_PREFIX_MAIL_READER
+NEXT_SERVICE_PREFIX: str = ConfigServices.SERVICE_PREFIX_MAIL_PARSER
@@ -0,0 +1,29 @@
+import sys

+from time import sleep
+from logging import getLogger, basicConfig, INFO, StreamHandler, FileHandler

+from ..mail_handler import EmailReaderService
+from .params import IsBankConfig


+format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+handlers = [StreamHandler(sys.stdout), FileHandler('isbank_email_service.log')]
+basicConfig(level=INFO, format=format, handlers=handlers)
+logger = getLogger(IsBankConfig.SERVICE_NAME)


+def initialize_service():
+"""Initialize the service with proper error handling"""
+try:
+logger.info("Creating EmailReaderService")
+email_service = EmailReaderService(IsBankConfig())

+logger.info("Connecting to email service")
+email_service.login_and_connect()
+return email_service
+except Exception as e:
+logger.error(f"Service initialization failed: {str(e)}")
+sleep(5)
+return initialize_service()
@@ -0,0 +1,155 @@
+import os
+from re import TEMPLATE
+from pydantic import BaseModel
+from typing import Any, List, Optional, Union


+class FromToHeader(BaseModel):

+display_name: Optional[str]
+username: Optional[str]
+domain: Optional[str]
+mail: Optional[str]


+class MailReader(BaseModel):

+id: str
+subject: str
+from_: FromToHeader
+to: List[FromToHeader]
+date: str
+body_text: str


+class MailParser(BaseModel):

+filename: str
+content_type: str
+charset: str
+data: str

+class FinderIban(BaseModel):

+filename: str
+iban: str
+bank_date: str
+channel_branch: str
+currency_value: float
+balance: float
+additional_balance: float
+process_name: str
+process_type: str
+process_comment: str
+bank_reference_code: str


+class FinderComment(FinderIban):

+build_id: Optional[int] = None
+build_uu_id: Optional[str] = None
+decision_book_id: Optional[int] = None
+decision_book_uu_id: Optional[str] = None


+class RedisData(BaseModel):
+MailReader: MailReader
+MailParser: List[MailParser]
+FinderIban: List[FinderIban]
+FinderComment: List[FinderComment]


+class Status:
+PENDING: str = "PENDING"
+IN_PROGRESS: str = "IN_PROGRESS"
+COMPLETED: str = "COMPLETED"
+FAILED: str = "FAILED"


+class RedisTaskObject(BaseModel):
+task: str
+data: RedisData
+completed: bool
+service: str
+status: str
+created_at: str
+is_completed: bool


+class MailSendModel(BaseModel):
+receivers: List[str]
+subject: str
+template_name: str
+data: dict


+class RedisMailSender(BaseModel):
+task: RedisTaskObject
+data: MailSendModel
+completed: bool
+service: str
+status: str
+created_at: str
+completed: bool


+class EmailConfig:

+HOST: str = os.getenv("EMAIL_HOST", "10.10.2.34")
+USERNAME: str = os.getenv("EMAIL_USERNAME", "isbank@mehmetkaratay.com.tr")
+PASSWORD: str = os.getenv("EMAIL_PASSWORD", "system")
+PORT: int = int(os.getenv("EMAIL_PORT", 993))

+@classmethod
+def as_dict(cls):
+return dict(host=EmailConfig.HOST, port=EmailConfig.PORT, username=EmailConfig.USERNAME, password=EmailConfig.PASSWORD)


+class RedisConfig:

+HOST: str = os.getenv("REDIS_HOST", "10.10.2.15")
+PASSWORD: str = os.getenv("REDIS_PASSWORD", "your_strong_password_here")
+PORT: int = int(os.getenv("REDIS_PORT", 6379))
+DB: int = int(os.getenv("REDIS_DB", 0))

+@classmethod
+def as_dict(cls):
+return dict(host=RedisConfig.HOST, port=int(RedisConfig.PORT), password=RedisConfig.PASSWORD, db=int(RedisConfig.DB))


+class MailReaderMainConfig:

+MAILBOX: str
+AUTHORIZE_IBAN: str
+NO_ATTACHMENT_FOLDER: str
+COMPLETED_FOLDER: str
+TASK_DATA_PREFIX: str
+TASK_MAILID_INDEX_PREFIX: str
+TASK_UUID_INDEX_PREFIX: str
+TASK_SEEN_PREFIX: str
+SERVICE_PREFIX: str
+NEXT_SERVICE_PREFIX: str


+class ConfigServices:

+MAIN_TASK_PREFIX: str = "BANK:SERVICES:TASK:DATA"

+TASK_MAILID_INDEX_PREFIX: str = "BANK:SERVICES:TASK:MAILID"
+TASK_UUID_INDEX_PREFIX: str = "BANK:SERVICES:TASK:UUID"
+TASK_SEEN_PREFIX: str = "BANK:SERVICES:TASK:SEEN"
+TASK_DELETED_PREFIX: str = "BANK:SERVICES:TASK:DELETED"
+TASK_COMMENT_PARSER: str = "BANK:SERVICES:TASK:COMMENT:PARSER"
+TASK_PREDICT_RESULT: str = "BANK:SERVICES:TASK:COMMENT:RESULT"

+SERVICE_PREFIX_MAIL_READER: str = "MailReader"
+SERVICE_PREFIX_MAIL_PARSER: str = "MailParser"
+SERVICE_PREFIX_FINDER_IBAN: str = "FinderIban"
+SERVICE_PREFIX_FINDER_COMMENT: str = "FinderComment"
+SERVICE_PREFIX_MAIL_SENDER: str = "MailSender"

+TEMPLATE_ACCOUNT_RECORDS: str = "template_accounts.html"


+paramsRedisData = Union[MailReader, MailParser, FinderIban, FinderComment]
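For orientation only (not part of this commit): a minimal sketch of the task envelope these models describe — a MailReader payload wrapped in RedisData, then in RedisTaskObject. All field values are invented placeholders, the import path is an assumption, and the Redis key layout shown at the end is a guess at how MAIN_TASK_PREFIX is combined with the task id.

from datetime import datetime, timezone

# Assumed import path for the models defined above.
from app.services.mail.config import (
    ConfigServices, FromToHeader, MailReader, RedisData, RedisTaskObject, Status,
)

sender = FromToHeader(display_name="Isbank", username="bilgilendirme",
                      domain="ileti.isbank.com.tr", mail="bilgilendirme@ileti.isbank.com.tr")
mail = MailReader(id="123", subject="Account activity", from_=sender, to=[sender],
                  date=str(datetime.now(timezone.utc)), body_text="...")

task = RedisTaskObject(
    task="IsBankEmailService_123",  # generate_unique_with_mail_id-style id (placeholder)
    data=RedisData(MailReader=mail, MailParser=[], FinderIban=[], FinderComment=[]),
    completed=False,
    service=ConfigServices.SERVICE_PREFIX_MAIL_READER,
    status=Status.PENDING,
    created_at=str(datetime.now(timezone.utc)),
    is_completed=False,
)

# Illustrative key layout (assumption): <MAIN_TASK_PREFIX>:<task id>
redis_key = f"{ConfigServices.MAIN_TASK_PREFIX}:{task.task}"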
@@ -0,0 +1,381 @@
+import os
+import socket
+import logging

+from functools import wraps
+from base64 import b64encode
+from time import sleep
+from datetime import datetime
+from typing import List, Dict, Any, Union, TypeVar, Tuple

+from email.message import EmailMessage
+from email.policy import default as policy
+from email.headerregistry import UniqueDateHeader, UniqueAddressHeader, UniqueUnstructuredHeader
+from email.parser import BytesParser
+from imaplib import IMAP4_SSL, IMAP4

+from .config import EmailConfig, MailReaderMainConfig


+logger = logging.getLogger('Email Reader Service')

+T = TypeVar('T')


+def retry_on_connection_error(max_retries: int = 3, delay: int = 5, backoff: int = 2, exceptions=(Exception,)):
+"""
+Retry decorator with exponential backoff for handling connection errors

+Args:
+max_retries: Maximum number of retries
+delay: Initial delay between retries in seconds
+backoff: Backoff multiplier
+exceptions: Tuple of exceptions to catch
+Returns: Decorated function
+"""
+def decorator(func):
+@wraps(func)
+def wrapper(*args, **kwargs):
+mtries, mdelay = max_retries, delay
+while mtries > 0:
+try:
+return func(*args, **kwargs)
+except exceptions as e:
+logger.warning(f"Connection error in {func.__name__}: {str(e)}, retrying in {mdelay}s...")
+sleep(mdelay)
+mtries -= 1
+mdelay *= backoff
+return func(*args, **kwargs)
+return wrapper
+return decorator


+class Mails:
+"""Class representing an email with attachments and metadata"""

+def __init__(self, mail_id: bytes, mail_data: bytes):
+"""
+Initialize a mail object
+Args: mail_id: Unique identifier for the email, mail_data: Raw email data
+"""
+self.id: bytes = mail_id
+self.raw_data: bytes = mail_data
+self.attachments: List[Dict[str, Union[str, bytes]]] = []
+self.message: EmailMessage = BytesParser(policy=policy).parsebytes(mail_data)
+self.subject: UniqueUnstructuredHeader = self.message.get('Subject', '') or ''
+self.from_: UniqueAddressHeader = self.message.get('From', '') or ''
+self.to: UniqueAddressHeader = self.message.get('To', '') or ''
+self.date: UniqueDateHeader = self.message.get('Date', '') or ''
+self.body_text: str = self._get_body_text()
+self._extract_attachments()

+def to_dict(self) -> Dict[str, Any]:
+"""
+Convert mail object to dictionary representation
+Returns: Dictionary representation of mail
+"""
+return {
+'id': self.id.decode('utf-8'),
+'attachments': [{
+'filename': attachment['filename'], 'content_type': attachment['content_type'], 'charset': attachment['charset'],
+'data': b64encode(attachment['data']).decode(attachment['charset'], errors='replace')
+} for attachment in self.attachments],
+'subject': str(self.subject),
+'from_': {
+"display_name": self.from_.addresses[0].display_name, "username": self.from_.addresses[0].username,
+"domain": self.from_.addresses[0].domain, "mail": f"{self.from_.addresses[0].username}@{self.from_.addresses[0].domain}"
+},
+'to': [
+{
+"display_name": address.display_name, "username": address.username, "domain": address.domain,
+"mail": f"{address.username}@{address.domain}" } for address in self.to.addresses
+], 'date': str(self.date.datetime), 'body_text': str(self.body_text)
+}

+def _get_body_text(self) -> str:
+"""
+Extract plain text body from email
+Returns: Plain text body of email
+"""
+body = self.message.get_body(preferencelist=('plain',))
+if body is not None:
+return body.get_content() or ''
+if self.message.is_multipart():
+for part in self.message.walk():
+if part.get_content_type() == 'text/plain' and (part.get_content_disposition() or '') != 'attachment':
+try:
+return part.get_content() or ''
+except Exception:
+payload = part.get_payload(decode=True) or b''
+return payload.decode(part.get_content_charset() or 'utf-8', errors='replace')
+else:
+if self.message.get_content_type() == 'text/plain':
+try:
+return self.message.get_content() or ''
+except Exception:
+payload = self.message.get_payload(decode=True) or b''
+return payload.decode(self.message.get_content_charset() or 'utf-8', errors='replace')
+return ''

+def _extract_attachments(self) -> None:
+"""Extract attachments from email"""
+for part in self.message.walk():
+if part.get_content_disposition() == 'attachment':
+filename = part.get_filename()
+if not filename:
+continue
+data = part.get_payload(decode=True) or b''
+charset = part.get_charset() or 'utf-8'
+self.attachments.append({'filename': filename, 'content_type': part.get_content_type(), 'data': data, 'charset': charset})

+def save_attachments(self, folder: str) -> None:
+"""
+Save attachments to folder
+Args: folder: Folder to save attachments to
+"""
+os.makedirs(folder, exist_ok=True)
+for att in self.attachments:
+with open(os.path.join(folder, att['filename']), 'wb') as f:
+f.write(att['data'])


+class EmailReaderService:

+"""Service for reading emails from mailbox with improved connection resilience"""

+def __init__(self, config: MailReaderMainConfig):
+"""
+Initialize email reader service
+Args: config: Application configuration
+"""
+self.email_config = EmailConfig()
+self.config = config
+self.mail = None
+self.data: List[Mails] = []
+self.mail_count = 0
+self.is_connected = False
+self.connect_imap()

+def connect_imap(self) -> bool:
+"""
+Establish IMAP connection with retry mechanism
+Returns: True if connection successful, False otherwise
+"""
+try:
+if self.mail:
+try:
+self.mail.close()
+self.mail.logout()
+except Exception:
+pass
+logger.info(f"Connecting to IMAP server {self.email_config.HOST}:{self.email_config.PORT}")
+self.mail = IMAP4_SSL(self.email_config.HOST, self.email_config.PORT)
+self.is_connected = True
+return True
+except (socket.error, IMAP4.error) as e:
+logger.error(f"Failed to connect to IMAP server: {str(e)}")
+self.is_connected = False
+return False

+@retry_on_connection_error(max_retries=3, delay=5, exceptions=(socket.error, IMAP4.error, OSError))
+def login_and_connect(self) -> bool:
+"""
+Login to IMAP server and connect to inbox with retry mechanism
+Returns: True if login successful, False otherwise
+Raises: ConnectionError: If connection cannot be established
+"""
+if not self.is_connected:
+if not self.connect_imap():
+raise ConnectionError("Cannot establish connection to IMAP server")

+try:
+logger.info(f"Logging in as {self.email_config.USERNAME}")
+self.mail.login(self.email_config.USERNAME, self.email_config.PASSWORD)
+self._connect_inbox()
+logger.info("Successfully logged in and connected to inbox")
+return True
+except (socket.error, IMAP4.error) as e:
+logger.error(f"Login failed: {str(e)}")
+self.is_connected = False
+raise

+@retry_on_connection_error(max_retries=2, delay=3, exceptions=(socket.error, IMAP4.error, OSError))
+def refresh(self) -> Tuple[List[Mails], int, int]:
+"""
+Refresh mail data with connection retry
+Returns: Tuple of (mail data, mail count, data length)
+"""
+try:
+self.mail_count = self._fetch_count()
+self.data = self._fetch_all()
+return self.data, self.mail_count, len(self.data)
+except (socket.error, IMAP4.error) as e:
+logger.error(f"Refresh failed, attempting to reconnect: {str(e)}")
+self.connect_imap()
+self.login_and_connect()
+self.mail_count = self._fetch_count()
+self.data = self._fetch_all()
+return self.data, self.mail_count, len(self.data)

+@retry_on_connection_error(max_retries=2, delay=2, exceptions=(socket.error, IMAP4.error))
+def _connect_inbox(self) -> None:
+"""
+Connect to INBOX with retry mechanism
+Raises: IMAP4.error: If connection to INBOX fails
+"""
+logger.info("Selecting INBOX folder")
+status, _ = self.mail.select("INBOX")
+if status != 'OK':
+error_msg = "Failed to connect to INBOX"
+logger.error(error_msg)
+raise IMAP4.error(error_msg)

+@retry_on_connection_error(max_retries=2, delay=2, exceptions=(socket.error, IMAP4.error))
+def _fetch_count(self) -> int:
+"""
+Fetch mail count with retry mechanism
+Returns: Number of emails
+Raises: IMAP4.error: If fetching mail count fails
+"""
+try:
+status, uids = self.mail.uid('SORT', '(REVERSE DATE)', 'UTF-8', 'ALL', 'FROM', f'"{self.config.MAILBOX}"')
+if status != 'OK':
+raise IMAP4.error("Failed to get mail count")
+count = len(uids[0].split()) if uids[0] else 0
+logger.info(f"Found {count} emails from {self.config.MAILBOX}")
+return count
+except (socket.error, IMAP4.error) as e:
+logger.error(f"Error fetching mail count: {str(e)}")
+raise

+@retry_on_connection_error(max_retries=2, delay=2, exceptions=(socket.error, IMAP4.error))
+def _fetch_all(self) -> List[Mails]:
+"""
+Fetch all mails with retry mechanism
+Returns: List of mail objects
+Raises: IMAP4.error: If fetching mails fails
+"""
+self.data = []
+try:
+status, uids = self.mail.uid('SORT', '(REVERSE DATE)', 'UTF-8', 'ALL', 'FROM', f'"{self.config.MAILBOX}"')
+if status != 'OK':
+raise IMAP4.error("Mail search failed")
+if not uids[0]:
+logger.info("No emails found matching criteria")
+return self.data
+uid_list = uids[0].split()
+logger.info(f"Processing {len(uid_list)} emails")
+for uid in uid_list:
+try:
+status, msg_data = self.mail.uid('fetch', uid, '(RFC822)')
+if status == 'OK' and msg_data[0] is not None:
+self.data.append(Mails(uid, msg_data[0][1]))
+except Exception as e:
+logger.warning(f"Failed to fetch email with UID {uid}: {str(e)}")
+continue
+logger.info(f"Successfully fetched {len(self.data)} emails")
+return self.data
+except (socket.error, IMAP4.error) as e:
+logger.error(f"Error fetching emails: {str(e)}")
+raise

+@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
+def move_to_folder(self, uid: Union[str, bytes], folder: str):
+"""
+Move message to folder with retry mechanism
+Args: uid: Email UID, folder: Destination folder
+"""
+try:
+log_uid = uid
+if isinstance(uid, bytes):
+log_uid = uid.decode('utf-8', errors='replace')
+elif isinstance(uid, str):
+uid = uid.encode('utf-8')
+logger.info(f"Moving email {log_uid} to {folder} folder")
+self.mail.uid('MOVE', uid, folder)
+self.commit()
+return True
+except Exception as e:
+logger.error(f"Failed to move email to folder: {str(e)}")
+return False

+@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
+def copy_to_folder(self, uid: Union[str, bytes], folder: str):
+"""
+Copy message to folder with retry mechanism
+Args: uid: Email UID, folder: Destination folder
+"""
+try:
+log_uid = uid
+if isinstance(uid, bytes):
+log_uid = uid.decode('utf-8', errors='replace')
+elif isinstance(uid, str):
+uid = uid.encode('utf-8')
+logger.info(f"Copying email {log_uid} to {folder} folder")
+self.mail.uid('COPY', uid, folder)
+self.commit()
+return True
+except Exception as e:
+logger.error(f"Failed to copy email to folder: {str(e)}")
+return False

+@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
+def mark_no_attachment(self, uid: Union[str, bytes]):
+"""
+Move message to no attachment folder with retry mechanism
+Args: uid: Email UID
+"""
+self.move_to_folder(uid, self.config.NO_ATTACHMENT_FOLDER)

+@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
+def mark_completed(self, uid: Union[str, bytes]):
+"""
+Move message to completed folder with retry mechanism
+Args: uid: Email UID
+"""
+self.move_to_folder(uid, self.config.COMPLETED_FOLDER)

+@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
+def delete(self, uid):
+"""
+Delete message with retry mechanism
+Args: uid: Email UID
+"""
+try:
+log_uid = uid
+if isinstance(uid, bytes):
+log_uid = uid.decode('utf-8', errors='replace')
+logger.info(f"Marking email {log_uid} for deletion")
+self.mail.uid('STORE', uid, '+FLAGS', r'(\Deleted)')
+except Exception as e:
+logger.error(f"Failed to delete email: {str(e)}")
+raise

+@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
+def commit(self):
+"""
+Commit pending operations with retry mechanism
+Raises: Exception: If commit fails
+"""
+try:
+logger.info("Committing changes (expunge)")
+self.mail.expunge()
+except Exception as e:
+logger.error(f"Failed to commit changes: {str(e)}")
+raise

+def logout(self):
+"""Logout from IMAP server"""
+if self.mail and self.is_connected:
+try:
+logger.info("Logging out from IMAP server")
+self.mail.close()
+self.mail.logout()
+self.is_connected = False
+except Exception as e:
+logger.warning(f"Logout failed: {str(e)}")

+@property
+def count(self):
+"""Get count of emails"""
+return len(self.data)
@ -1,29 +1,104 @@
import os
-import uuid
import asyncio
+from typing import List
+
+from app.services.mail.IsBank.runner import initialize_service
from app.services.common.service_base_async import ServiceBaseAsync
+
+from .mail_handler import Mails
+from .IsBank.params import IsBankConfig

-PRODUCE_ENABLED = os.getenv("PRODUCE_ENABLED", "true").lower() == "true"
-PRODUCE_BATCH = int(os.getenv("PRODUCE_BATCH", "3"))  # how many jobs per produce tick
-TASK_TYPE = os.getenv("TASK_TYPE", "db-task")  # job type (also used in the task_id)
-CONSUME_SLEEP_SEC = float(os.getenv("CONSUME_SLEEP_SEC", "0.5"))  # simulated processing time (seconds)
-STATIC_IDS = ["2c47f1073a9d4f05aad6c15484894a74", "65827e3452b545d6845e050a503401f4", "5c663088f09d4062b4e567f47335fb1e"]
-
-
-async def produce(service: ServiceBaseAsync):
-    for biz_id in STATIC_IDS:
-        deterministic_task_id = f"{TASK_TYPE}:{biz_id}"
-        payload = {"id": biz_id, "op": "sync", "source": "db-service"}
-        await service.enqueue(payload, TASK_TYPE, task_id=deterministic_task_id)
-    print(f"[DB] produce tick attempted ids={','.join(STATIC_IDS)}")
-
-
-async def consume(service: ServiceBaseAsync, job: dict):
-    await asyncio.sleep(CONSUME_SLEEP_SEC)
-    print(f"[DB] consumed task={job['task_id']} attempts={job.get('_attempts', 0)}")
+PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
+PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
+EVENT_TYPE = os.getenv("EVENT_TYPE", "db-event")
+
+_produced = False
+PROCESS_SEC = 10
+email_service = initialize_service()
+
+
+def generate_unique_with_mail_id(mail_id: str, service_prefix: str):
+    return f"{service_prefix}_{mail_id}"
+
+
+def process_mail_with_attachments(mail: Mails, mail_id: str):
+    """
+    Process an email with attachments using MailReaderService
+    Args: mail: Mail object, mail_id: Mail ID
+    Raises: Exception: If processing mail fails
+    """
+    try:
+        mail_to_dict = mail.to_dict()
+        task_uuid = generate_unique_with_mail_id(mail_id, IsBankConfig.SERVICE_NAME)
+        process_mail_dict = dict(mail_id=mail_id, mail_data=mail_to_dict, service_prefix=email_service.config.SERVICE_PREFIX)
+        return task_uuid, process_mail_dict
+    except Exception as e:
+        print(f"Email Service Runner Error processing mail {mail_id}: {str(e)}")
+        raise
+
+
+def drop():
+    """Clean up resources"""
+    try:
+        email_service.commit()
+    except Exception as e:
+        print(f"Error during commit on drop: {str(e)}")
+    try:
+        email_service.logout()
+    except Exception as e:
+        print(f"Error during logout on drop: {str(e)}")
+
+
+async def produce(svc: ServiceBaseAsync):
+    mails, count, length = email_service.refresh()
+    for mail in mails:
+        if not getattr(mail, 'id', None):
+            print("Skipping email with no ID")
+            continue
+        mail_id, mail_dict = mail.id.decode('utf-8'), mail.to_dict()
+        try:
+            if mail.attachments:
+                if any([str(attachment['filename']).lower().endswith('.pdf') for attachment in mail_dict['attachments']]):
+                    email_service.mark_no_attachment(mail_id)
+                else:
+                    task_uuid, process_mail_dict = process_mail_with_attachments(mail, mail_id)
+                    await svc.enqueue(task_id=task_uuid, payload=process_mail_dict, type_="mail.service.isbank")
+            else:
+                email_service.mark_no_attachment(mail_id)
+        except Exception as e:
+            print(f"Error processing email {mail_id}: {str(e)}")
+            continue
+    await asyncio.sleep(PROCESS_SEC)
+
+
+async def handle_from_parser(svc: ServiceBaseAsync, job):
+    print("Mail Consumer from parser:", job)
+    await asyncio.sleep(PROCESS_SEC)
+    return
+
+
+async def handle_database_publish(svc: ServiceBaseAsync, job):
+    await asyncio.sleep(PROCESS_SEC)
+    print("Mail Consumer from database:", job)
+    return
+
+
+async def handle_from_mail(svc: ServiceBaseAsync, job):
+    await asyncio.sleep(PROCESS_SEC)
+    print("Mail Consumer from mail:", job)
+    return
+
+
+async def consume_default(svc, job):
+    await asyncio.sleep(PROCESS_SEC)
+    print("Mail Consumer default:", job)
+    return


if __name__ == "__main__":
-    asyncio.run(ServiceBaseAsync(produce, consume).run())
+    svc = ServiceBaseAsync(produce, consume_default,
+        handlers={"parser.publish": handle_from_parser, "mail.publish": handle_from_mail, "database.publish": handle_database_publish}
+    )
+    asyncio.run(svc.run())
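How ServiceBaseAsync maps incoming routing keys to the handlers dict above lives in the shared base (changed earlier in this commit), not here. Note that the keys used here ("parser.publish", "mail.publish", "database.publish") are not literally equal to the routing keys bound in docker-compose (for example "parser.mail.publish"), so either pattern matching or the consume_default fallback is implied. A minimal sketch of such a lookup, assuming fnmatch-style patterns (an assumption suggested by the fnmatch import in the shared base, not the actual implementation):

import fnmatch


def resolve_handler(routing_key: str, handlers: dict, default):
    """Return the first handler whose key matches the routing key, else the default consumer."""
    for pattern, handler in handlers.items():
        if fnmatch.fnmatch(routing_key, pattern):
            return handler
    return default

# e.g. resolve_handler("mail.service.publish", handlers, consume_default)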
@ -11,9 +11,15 @@ requires-python = ">=3.11"
authors = [
    { name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]

dependencies = [
-    "redis>=5.0.0",
-    "aiosqlite>=0.19.0",
+    "aio-pika>=9.4.1",
+    "prometheus-client>=0.20.0",
+    "uvloop>=0.19.0",
+    "arrow>=1.3.0",
+    "pydantic>=2.0.0",
+    "pydantic-settings>=2.0.0",
+    "email-validator>=2.0.0",
]

[project.optional-dependencies]
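The dependency swap above (redis and aiosqlite out, aio-pika in) reflects the move from Redis Streams to RabbitMQ. For reference, a minimal aio-pika publish to a topic exchange looks roughly like the sketch below; the broker URL, exchange name and routing key are taken from the docker-compose changes later in this commit, and the payload shape is only illustrative:

import json
import asyncio
import aio_pika


async def publish_once():
    # broker URL and exchange name as configured in docker-compose.yml
    connection = await aio_pika.connect_robust("amqp://admin:admin@rabbitmq:5672/")
    async with connection:
        channel = await connection.channel()
        exchange = await channel.declare_exchange(
            "app.events", aio_pika.ExchangeType.TOPIC, durable=True
        )
        body = json.dumps({"task_id": "example", "payload": {}}).encode()
        await exchange.publish(
            aio_pika.Message(body=body, content_type="application/json"),
            routing_key="mail.service.publish",
        )


if __name__ == "__main__":
    asyncio.run(publish_once())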
@ -5,14 +5,14 @@ ENV PYTHONPATH=/app

WORKDIR /

-COPY app/services/queue/pyproject.toml ./
-COPY app/services/queue/README.md ./
+COPY app/services/mongo/pyproject.toml ./
+COPY app/services/mongo/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
-COPY app/services/queue/ ./app/services/queue/
+COPY app/services/mongo/ ./app/services/mongo/

RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN mkdir -p /app/data

-CMD ["python", "-m", "app.services.queue.main"]
+CMD ["python", "-m", "app.services.mongo.main"]
@ -0,0 +1,40 @@
import os
import uuid
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync


PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
PROCESS_SEC = 10


async def produce(svc: ServiceBaseAsync):
    await asyncio.sleep(PROCESS_SEC)
    print(f"Produced From Mongo Producer: {len([1,2])} events to '{svc.produce_key}'")


async def handle_db_publish(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Mongo Consumer from db:", job["task_id"])


async def handle_mail_publish(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Mongo Consumer from mail:", job["task_id"])


async def consume_default(svc, job):
    await asyncio.sleep(PROCESS_SEC)
    print("Mongo Consumer default:", job["task_id"])
    return


if __name__ == "__main__":

    svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"database.service.publish": handle_db_publish, "mail.service.publish": handle_mail_publish})
    asyncio.run(svc.run())
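svc.ack_current() comes from ServiceBaseAsync, whose RabbitMQ implementation is not shown in this hunk. A plausible shape, assuming the in-flight aio_pika message is stashed in a ContextVar while a handler runs (an assumption suggested by the ContextVar and AbstractIncomingMessage imports in the shared base, not a confirmed detail):

from contextvars import ContextVar
from typing import Optional

from aio_pika.abc import AbstractIncomingMessage

# the in-flight message for the handler currently running in this task
_current_message: ContextVar[Optional[AbstractIncomingMessage]] = ContextVar(
    "_current_message", default=None
)


async def ack_current() -> None:
    """Acknowledge the message the current handler is processing, if any."""
    message = _current_message.get()
    if message is not None:
        await message.ack()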
@ -11,9 +11,11 @@ requires-python = ">=3.11"
authors = [
    { name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]

dependencies = [
-    "redis>=5.0.0",
-    "aiosqlite>=0.19.0",
+    "aio-pika>=9.4.1",
+    "prometheus-client>=0.20.0",
+    "uvloop>=0.19.0"
]

[project.optional-dependencies]
@ -0,0 +1,18 @@
FROM python:3.12-slim

ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
ENV PYTHONPATH=/app

WORKDIR /

COPY app/services/parser/comment/pyproject.toml ./
COPY app/services/parser/comment/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/parser/comment/ ./app/services/parser/comment/

RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN mkdir -p /app/data

CMD ["python", "-m", "app.services.parser.comment.main"]
@ -0,0 +1,30 @@
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync


PROCESS_SEC = 10


async def handle_mail_publish(svc: ServiceBaseAsync, job: dict):
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Mail Consumer parsed:", job)
    # await svc.ack_current()
    # await svc.enqueue({"source": "parser-mail", "from_task": job}, "parser-mail-done", routing_key="parser.comment.publish")


async def consume_default(svc: ServiceBaseAsync, job):
    print("Parser Mail Consumer default:", job)
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()


async def produce(_svc: ServiceBaseAsync):
    print("Parser Mail Producer produce")
    await asyncio.sleep(PROCESS_SEC)


if __name__ == "__main__":

    svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"mail.service.publish": handle_mail_publish})
    asyncio.run(svc.run())
@ -0,0 +1,37 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
    { name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]

dependencies = [
    "aio-pika>=9.4.1",
    "prometheus-client>=0.20.0",
    "uvloop>=0.19.0"
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4",
    "black>=23.0",
    "isort>=5.12"
]

[tool.black]
line-length = 88
target-version = ["py311"]

[tool.isort]
profile = "black"

[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]
@ -0,0 +1,18 @@
FROM python:3.12-slim

ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
ENV PYTHONPATH=/app

WORKDIR /

COPY app/services/parser/excel/pyproject.toml ./
COPY app/services/parser/excel/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/parser/excel/ ./app/services/parser/excel/

RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN mkdir -p /app/data

CMD ["python", "-m", "app.services.parser.excel.main"]
@ -0,0 +1,40 @@
import os
import uuid
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync


PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
PROCESS_SEC = 10


async def produce(svc: ServiceBaseAsync):
    await asyncio.sleep(PROCESS_SEC)
    print(f"Parser Excel Producer produced {len([1,2])} events to '{svc.produce_key}'")


async def handle_from_parser(svc: ServiceBaseAsync, job):
    print("Parser Excel Consumer from parser:", job)
    await svc.ack_current()
    return


async def handle_from_mail(svc: ServiceBaseAsync, job):
    print("Parser Excel Consumer from mail:", job)
    await svc.ack_current()
    return


async def consume_default(svc, job):
    print("Parser Excel Consumer default:", job)
    await svc.ack_current()
    return


if __name__ == "__main__":

    svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"parser.publish": handle_from_parser, "mail.publish": handle_from_mail})
    asyncio.run(svc.run())
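The excel parser's handlers are still placeholder stubs. As a purely hypothetical sketch of what handle_from_mail could grow into, assuming the payload enqueued by the mail service above arrives under a "payload" key and that mail_data carries an attachments list with "filename" entries (neither detail is confirmed by this diff):

async def handle_from_mail(svc, job):
    payload = job.get("payload", {})  # assumed: payload as enqueued by the mail service
    mail_id = payload.get("mail_id")
    attachments = payload.get("mail_data", {}).get("attachments", [])
    excel_files = [
        a for a in attachments
        if str(a.get("filename", "")).lower().endswith((".xlsx", ".xls"))
    ]
    print(f"Parser Excel: mail {mail_id} has {len(excel_files)} spreadsheet attachment(s)")
    # actual spreadsheet parsing would go here
    await svc.ack_current()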
@ -0,0 +1,37 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
    { name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]

dependencies = [
    "aio-pika>=9.4.1",
    "prometheus-client>=0.20.0",
    "uvloop>=0.19.0"
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4",
    "black>=23.0",
    "isort>=5.12"
]

[tool.black]
line-length = 88
target-version = ["py311"]

[tool.isort]
profile = "black"

[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]
@ -0,0 +1,18 @@
FROM python:3.12-slim

ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
ENV PYTHONPATH=/app

WORKDIR /

COPY app/services/parser/mail/pyproject.toml ./
COPY app/services/parser/mail/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/parser/mail/ ./app/services/parser/mail/

RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN mkdir -p /app/data

CMD ["python", "-m", "app.services.parser.mail.main"]
@ -0,0 +1,42 @@
import os
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync


PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
PROCESS_SEC = 10


async def produce(svc: ServiceBaseAsync):
    await asyncio.sleep(PROCESS_SEC)
    print(f"Parser Mail Producer produced {len([1,2])} events to '{svc.produce_key}'")


async def handle_db_publish(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Parser Mail Consumer from db:", job)


async def handle_mongo_publish(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Parser Mail Consumer from mongo:", job)


async def consume_default(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Mail Consumer default:", job)
    return


if __name__ == "__main__":

    svc = ServiceBaseAsync(
        produce_fn=produce, consume_fn=consume_default,
        handlers={"database.service.publish": handle_db_publish, "mongo.service.publish": handle_mongo_publish},
    )
    asyncio.run(svc.run())
@ -0,0 +1,37 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
    { name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]

dependencies = [
    "aio-pika>=9.4.1",
    "prometheus-client>=0.20.0",
    "uvloop>=0.19.0"
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4",
    "black>=23.0",
    "isort>=5.12"
]

[tool.black]
line-length = 88
target-version = ["py311"]

[tool.isort]
profile = "black"

[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]
@ -0,0 +1,18 @@
FROM python:3.12-slim

ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
ENV PYTHONPATH=/app

WORKDIR /

COPY app/services/parser/payment/pyproject.toml ./
COPY app/services/parser/payment/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/parser/payment/ ./app/services/parser/payment/

RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN mkdir -p /app/data

CMD ["python", "-m", "app.services.parser.payment.main"]
@ -0,0 +1,43 @@
import os
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync


PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")

PROCESS_SEC = 10


async def produce(svc: ServiceBaseAsync):
    await asyncio.sleep(PROCESS_SEC)
    print(f"Parser Payment Producer produced {len([1,2])} events to '{svc.produce_key}'")


async def handle_from_parser(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Payment Consumer from parser:", job)
    await svc.ack_current()
    return


async def handle_from_mail(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Payment Consumer from mail:", job)
    await svc.ack_current()
    return


async def consume_default(svc: ServiceBaseAsync, job):
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Payment Consumer default:", job)
    await svc.ack_current()
    return


if __name__ == "__main__":

    svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"parser.publish": handle_from_parser, "mail.publish": handle_from_mail})
    asyncio.run(svc.run())
@ -0,0 +1,37 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
    { name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]

dependencies = [
    "aio-pika>=9.4.1",
    "prometheus-client>=0.20.0",
    "uvloop>=0.19.0"
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4",
    "black>=23.0",
    "isort>=5.12"
]

[tool.black]
line-length = 88
target-version = ["py311"]

[tool.isort]
profile = "black"

[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]
@ -1,17 +0,0 @@
import uuid
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync

async def produce(service: ServiceBaseAsync):
    print(f"Queue Reader Service up and running.")
    while True:
        await asyncio.sleep(1)

async def consume(service: ServiceBaseAsync, job: dict):
    await asyncio.sleep(0.1)
    print(f"Queue Sender Service up and running. Job: {job}")


if __name__ == "__main__":
    asyncio.run(ServiceBaseAsync(produce, consume).run())
@ -7,40 +7,41 @@ volumes:
  sqlite_data:
  prom_data:
  grafana_data:
-  nats_data:
-  nui_data:
+  rabbitmq_data:

services:
-  nats:
-    image: nats:latest
-    command: ["-js", "-m", "8222"]
-    ports:
-      - "4222:4222"
-      - "8222:8222"
-    volumes:
-      - ./app/core/nats/nats.conf:/etc/nats/nats.conf:ro
-      - nats_data:/data/jetstream
-    networks: [servicesNetwork]
-    restart: unless-stopped
-
-  nats-exporter:
-    image: natsio/prometheus-nats-exporter:latest
-    command:
-      - "-varz"
-      - "-connz"
-      - "-subz"
-      - "-routez"
-      - "-jsz=all"
-      - "http://nats:8222"
-    depends_on: [nats]
-    expose:
-      - "7777"
+  rabbitmq:
+    image: rabbitmq:3.13-management
+    container_name: rabbitmq
+    ports:
+      - "127.0.0.1:5672:5672"
+      - "127.0.0.1:15672:15672"
+      - "127.0.0.1:15692:15692"
+    environment:
+      RABBITMQ_DEFAULT_USER: admin
+      RABBITMQ_DEFAULT_PASS: admin
+    command: >
+      sh -lc "rabbitmq-plugins enable --offline rabbitmq_prometheus && exec docker-entrypoint.sh rabbitmq-server"
+    healthcheck:
+      test: ["CMD", "rabbitmq-diagnostics", "-q", "ping"]
+      interval: 5s
+      timeout: 3s
+      retries: 20
+      start_period: 10s
+    volumes:
+      - rabbitmq_data:/var/lib/rabbitmq
    networks: [servicesNetwork]
    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"

  prometheus:
    image: prom/prometheus:latest
-    depends_on: [nats-exporter]
+    depends_on: [rabbitmq]
    networks: [servicesNetwork]
    volumes:
      - ./monitor/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
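Each service defined below is expected to declare a queue on the app.events topic exchange and bind it to the routing keys listed in its CONSUME_BINDINGS variable, with PREFETCH bounding unacknowledged deliveries. A sketch of that setup with aio-pika, assuming the queue is simply named after SERVICE_NAME (the real wiring lives in ServiceBaseAsync, not in this file):

import os
import aio_pika


async def setup_consumer(connection):
    # connection: result of aio_pika.connect_robust(os.environ["RABBITMQ_URL"])
    service_name = os.getenv("SERVICE_NAME", "mail-service")
    bindings = os.getenv("CONSUME_BINDINGS", "").split(",")
    prefetch = int(os.getenv("PREFETCH", "16"))

    channel = await connection.channel()
    await channel.set_qos(prefetch_count=prefetch)  # cap unacked deliveries per consumer
    exchange = await channel.declare_exchange(
        os.getenv("EXCHANGE_EVENTS", "app.events"), aio_pika.ExchangeType.TOPIC, durable=True
    )
    queue = await channel.declare_queue(service_name, durable=True)
    for key in filter(None, bindings):
        await queue.bind(exchange, routing_key=key.strip())
    return queue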
@ -52,6 +53,11 @@ services:
    ports:
      - "9090:9090"
    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"

  grafana:
    image: grafana/grafana:latest
@ -67,104 +73,199 @@ services:
    ports:
      - "3000:3000"
    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"

-  nats-ui:
-    image: ghcr.io/nats-nui/nui:latest
-    ports:
-      - "127.0.0.1:31311:31311"
+  mongo-service:
+    build:
+      context: .
+      dockerfile: app/services/mongo/Dockerfile
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
+    env_file: [.env]
+    environment:
+      SERVICE_NAME: "mongo-service"
+      PRODUCE_KEY: "mongo.service.publish"
+      CONSUME_BINDINGS: "database.service.publish,mail.service.publish"
+      RABBITMQ_URL: "amqp://admin:admin@rabbitmq:5672/"
+      EXCHANGE_EVENTS: "app.events"
+      PRODUCE_BURST: "10"
+      PRODUCE_ONCE: "true"
+      EVENT_TYPE: "mongo-event"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
    networks: [servicesNetwork]
-    volumes:
-      - nui_data:/db
    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options: { max-size: "10m", max-file: "3" }

  db-service:
    build:
      context: .
      dockerfile: app/services/database/Dockerfile
-    depends_on: [nats]
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
    networks: [servicesNetwork]
    env_file: [.env]
    environment:
-      NATS_URL: "nats://nats:4222"
-      JS_STREAM: "ACCOUNT_SERVICES_DATABASE"
-      JS_TASKS_SUBJECT: "ACCOUNT.SERVICES.DATABASE.TASKS"
-      JS_PUBLISH_SUBJECT: "ACCOUNT.SERVICES.DATABASE.PUBLISH"
-      JS_DURABLE: "DB_WORKERS"
-      BATCH_SIZE: "5"
-      ACK_WAIT_SEC: "30"
-      MAX_DELIVER: "3"
-      SQLITE_PATH: "/app/data/queue.db"
-      TASK_TYPE: "db-task"
-      CONSUME_SLEEP_SEC: "0.5"
-      SERVICE_NAME: "db-service"
-      METRICS_PORT: "8000"
+      SERVICE_NAME: "database-service"
+      PRODUCE_KEY: "database.service.publish"
+      CONSUME_BINDINGS: "mail.service.publish,mongo.service.publish"
+      RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
+      EXCHANGE_EVENTS: "app.events"
+      PRODUCE_ONCE: "true"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
    volumes:
-      - sqlite_data:/app/data
+      - ./app/services/database/venv:/opt/venv
+      - ./app/services/database/.prisma-cache:/root/.cache/prisma-python
    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"

-  # mail-service:
-  #   build:
-  #     context: .
-  #     dockerfile: app/services/mail/Dockerfile
-  #   volumes:
-  #     - sqlite_data:/app/data
-  #   env_file: [.env]
-  #   environment:
-  #     REDIS_STREAM_PUBLISH: ACCOUNT:SERVICES:MAIL:PUBLISH
-  #     REDIS_STREAM_TASKS: ACCOUNT:SERVICES:MAIL:TASKS
-  #     CONSUME_BACKLOG: true
-  #   depends_on: [redis]
-  #   networks: [servicesNetwork]
-  #   restart: unless-stopped
-  #   logging:
-  #     driver: "json-file"
-  #     options:
-  #       max-size: "10m"
-  #       max-file: "3"
+  mail-service:
+    build:
+      context: .
+      dockerfile: app/services/mail/Dockerfile
+    env_file: [.env]
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
+    environment:
+      SERVICE_NAME: "mail-service"
+      PRODUCE_KEY: "mail.service.publish"
+      CONSUME_BINDINGS: "database.service.publish,mongo.service.publish"
+      RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
+      EXCHANGE_EVENTS: "app.events"
+      PRODUCE_ONCE: "true"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
+    networks: [servicesNetwork]
+    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"

-  # queue-service:
-  #   build:
-  #     context: .
-  #     dockerfile: app/services/queue/Dockerfile
-  #   volumes:
-  #     - sqlite_data:/app/data
-  #   env_file: [.env]
-  #   environment:
-  #     REDIS_STREAM_PUBLISH: ACCOUNT:SERVICES:QUEUE:PUBLISH
-  #     REDIS_STREAM_TASKS: ACCOUNT:SERVICES:QUEUE:TASKS
-  #   depends_on: [redis]
-  #   networks: [servicesNetwork]
-  #   restart: unless-stopped
-  #   logging:
-  #     driver: "json-file"
-  #     options:
-  #       max-size: "10m"
-  #       max-file: "3"
+  parser-mail-service:
+    build:
+      context: .
+      dockerfile: app/services/parser/mail/Dockerfile
+    env_file: [.env]
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
+    environment:
+      SERVICE_NAME: "parser-mail-service"
+      RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
+      EXCHANGE_EVENTS: "app.events"
+      CONSUME_BINDINGS: "mail.service.publish"
+      PRODUCE_KEY: "parser.mail.publish"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
+    networks: [servicesNetwork]
+    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"

-  # tester:
-  #   build:
-  #     context: .
-  #     dockerfile: app/services/test/Dockerfile
-  #   volumes:
-  #     - sqlite_data:/app/data
-  #   env_file: [.env]
-  #   environment:
-  #     REDIS_STREAM_DATABASE_PUBLISH: ACCOUNT:SERVICES:DATABASE:PUBLISH
-  #     REDIS_STREAM_DATABASE_TASKS: ACCOUNT:SERVICES:DATABASE:TASKS
-  #     REDIS_STREAM_MAIL_PUBLISH: ACCOUNT:SERVICES:MAIL:PUBLISH
-  #     REDIS_STREAM_MAIL_TASKS: ACCOUNT:SERVICES:MAIL:TASKS
-  #     REDIS_STREAM_QUEUE_PUBLISH: ACCOUNT:SERVICES:QUEUE:PUBLISH
-  #     REDIS_STREAM_QUEUE_TASKS: ACCOUNT:SERVICES:QUEUE:TASKS
-  #   depends_on:
-  #     - redis
-  #     # - db-service
-  #     # - mail-service
-  #     # - queue-service
-  #   networks: [servicesNetwork]
-  #   restart: "no"
-  #   logging:
-  #     driver: "json-file"
-  #     options:
-  #       max-size: "10m"
-  #       max-file: "3"
+  parser-excel-service:
+    build:
+      context: .
+      dockerfile: app/services/parser/excel/Dockerfile
+    env_file: [.env]
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
+    environment:
+      SERVICE_NAME: "parser-excel-service"
+      RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
+      EXCHANGE_EVENTS: "app.events"
+      CONSUME_BINDINGS: "parser.mail.publish"
+      PRODUCE_KEY: "parser.excel.publish"
+      PRODUCE_ONCE: "true"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
+    networks: [servicesNetwork]
+    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"
+
+  parser-comment-service:
+    build:
+      context: .
+      dockerfile: app/services/parser/comment/Dockerfile
+    env_file: [.env]
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
+    environment:
+      SERVICE_NAME: "parser-comment-service"
+      RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
+      EXCHANGE_EVENTS: "app.events"
+      CONSUME_BINDINGS: "parser.excel.publish"
+      PRODUCE_KEY: "parser.comment.publish"
+      PRODUCE_ONCE: "true"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
+    networks: [servicesNetwork]
+    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"
+
+  parser-payment-service:
+    build:
+      context: .
+      dockerfile: app/services/parser/payment/Dockerfile
+    env_file: [.env]
+    depends_on:
+      rabbitmq:
+        condition: service_healthy
+    environment:
+      SERVICE_NAME: "parser-payment-service"
+      RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
+      EXCHANGE_EVENTS: "app.events"
+      CONSUME_BINDINGS: "parser.comment.publish"
+      PRODUCE_KEY: "parser.payment.publish"
+      PRODUCE_ONCE: "true"
+      RETRY_DELAY_MS: "5000"
+      MAX_RETRIES: "3"
+      PREFETCH: "16"
+      IGNORE_SELF_PRODUCED: "true"
+    networks: [servicesNetwork]
+    restart: unless-stopped
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"
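Taken together, the PRODUCE_KEY / CONSUME_BINDINGS pairs configured above wire the services as follows on the app.events exchange: mail-service, db-service and mongo-service subscribe to each other's publish keys (mail.service.publish, database.service.publish, mongo.service.publish), while the parser services form a chain:

mail.service.publish -> parser-mail-service -> parser.mail.publish -> parser-excel-service
    -> parser.excel.publish -> parser-comment-service -> parser.comment.publish
    -> parser-payment-service -> parser.payment.publish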