task services added

This commit is contained in:
2025-08-15 22:30:21 +03:00
parent 456203f5cf
commit 9543d136aa
38 changed files with 1065 additions and 0 deletions

View File

View File

@@ -0,0 +1,165 @@
import os
import uuid
import asyncio
import json
from typing import Any, Dict, Awaitable, Callable, Optional
from app.core.utils import now_ms
from app.core import metrics
from nats.aio.client import Client as NATS
from nats.js.api import StreamConfig, ConsumerConfig, AckPolicy
from nats.errors import NoRespondersError
class ServiceBaseAsync:
    """JetStream-based service skeleton.

    - TASKS subject: work items are published to and consumed from here.
    - PUBLISH subject: lifecycle events (enqueued / done / retry / failed).
    - Dedup: the Nats-Msg-Id header is set to task_id, so JetStream drops
      re-publishes of the same task inside its duplicate window.
    - Retry: failed messages are nak()'ed; once MAX_DELIVER is reached the
      message is term()'ed (no DLQ configured, so the task ends as "failed").
    """

    def __init__(
        self,
        produce_fn: Callable[["ServiceBaseAsync"], Awaitable[None]],
        consume_fn: Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]],
    ):
        # All knobs come from the environment so the same image can run any service.
        self.nats_url = os.getenv("NATS_URL", "nats://nats:4222")
        self.stream_name = os.getenv("JS_STREAM", "ACCOUNT_SERVICES_DATABASE")
        self.tasks_subject = os.getenv("JS_TASKS_SUBJECT", "ACCOUNT.SERVICES.DATABASE.TASKS")
        self.publish_subject = os.getenv("JS_PUBLISH_SUBJECT", "ACCOUNT.SERVICES.DATABASE.PUBLISH")
        self.durable = os.getenv("JS_DURABLE", "DB_WORKERS")
        self.batch_size = int(os.getenv("BATCH_SIZE", "5"))
        self.ack_wait_sec = int(os.getenv("ACK_WAIT_SEC", "30"))
        self.max_deliver = int(os.getenv("MAX_DELIVER", "3"))
        self.retry_enabled = os.getenv("RETRY_ENABLED", "true").lower() == "true"
        self.dedup_header = os.getenv("DEDUP_HEADER", "Nats-Msg-Id")
        self.produce_fn = produce_fn
        self.consume_fn = consume_fn
        # Set in run(); None until connected.
        self.nc: Optional[NATS] = None
        self.js = None

    async def run(self) -> None:
        """Entry point: start metrics, connect to NATS, ensure JS assets, then
        run the produce and consume loops forever (they never return)."""
        metrics.start_server()
        self.nc = NATS()
        await self.nc.connect(self.nats_url)
        self.js = self.nc.jetstream()
        await self._ensure_stream_and_consumer()
        await asyncio.gather(self._produce_loop(), self._consume_loop())

    async def _ensure_stream_and_consumer(self) -> None:
        """Best-effort creation of the stream and the durable consumer.

        Fix: errors were silently swallowed; they are now logged so a genuine
        misconfiguration is visible (on restart the usual error is a benign
        "already exists").
        """
        try:
            await self.js.add_stream(
                StreamConfig(name=self.stream_name, subjects=[self.tasks_subject, self.publish_subject])
            )
            print(f"[js] stream created: {self.stream_name}")
        except Exception as e:
            print(f"[js] stream not created ({self.stream_name}): {e}")
        try:
            await self.js.add_consumer(
                self.stream_name,
                ConsumerConfig(
                    durable_name=self.durable,
                    ack_policy=AckPolicy.EXPLICIT,
                    ack_wait=self.ack_wait_sec,
                    max_deliver=self.max_deliver,
                    filter_subject=self.tasks_subject,
                ),
            )
            print(f"[js] consumer created: durable={self.durable}")
        except Exception as e:
            print(f"[js] consumer not created (durable={self.durable}): {e}")

    async def _produce_loop(self) -> None:
        """Call the user-supplied produce_fn forever, pausing 2s between ticks."""
        while True:
            try:
                await self.produce_fn(self)
            except Exception as e:
                print(f"[produce] ERROR: {e}")
            await asyncio.sleep(2)

    async def _consume_loop(self) -> None:
        """Pull-consume tasks in batches; ack on success, nak/term on failure."""
        sub = await self.js.pull_subscribe(self.tasks_subject, durable=self.durable)
        while True:
            try:
                msgs = await sub.fetch(self.batch_size, timeout=2)
            except asyncio.TimeoutError:
                # No work available — nats' fetch TimeoutError subclasses
                # asyncio.TimeoutError, so this catches the expected idle case.
                msgs = []
            except Exception as e:
                # Fix: unexpected fetch errors were silently swallowed.
                print(f"[consume] fetch ERROR: {e}")
                msgs = []
            if not msgs:
                await asyncio.sleep(0.2)
                continue
            for msg in msgs:
                job = self._decode_msg(msg)
                attempts = self._delivery_attempts(msg)
                try:
                    await self.consume_fn(self, job)
                    await msg.ack()
                    await self._publish({"task_id": job.get("task_id"), "status": "done"})
                except Exception as e:
                    err = str(e)
                    if (not self.retry_enabled) or (attempts >= self.max_deliver):
                        # Out of attempts (or retries disabled): drop the message.
                        await msg.term()
                        await self._publish({"task_id": job.get("task_id"), "status": "failed", "error": err})
                    else:
                        # Redeliver after ack_wait.
                        await msg.nak()
                        await self._publish({"task_id": job.get("task_id"), "status": "retry", "attempts": attempts, "error": err})

    async def enqueue(self, payload: Dict[str, Any], type_: str, task_id: Optional[str] = None) -> str:
        """Publish a task onto the TASKS subject and return its task_id.

        Dedup: the Nats-Msg-Id header is set to task_id; if JetStream has seen
        it within the duplicate window, the ack comes back with duplicate=True
        and nothing is stored (we then return silently, without an event).

        Raises:
            RuntimeError: when JetStream has no responders (server unavailable).
        """
        _task_id = task_id or payload.get("task_id") or str(uuid.uuid4())
        payload.setdefault("task_id", _task_id)
        task = {"task_id": _task_id, "type": type_, "payload": payload, "created_at": now_ms(), "_attempts": 0}
        data = json.dumps(task).encode()
        try:
            ack = await self.js.publish(self.tasks_subject, data, headers={self.dedup_header: _task_id})
        except NoRespondersError as e:
            # Fix: chain the cause so the original NATS error is not lost.
            raise RuntimeError("NATS/JetStream not available") from e
        if getattr(ack, "duplicate", False):
            # Duplicate within the dedup window: nothing was written.
            return _task_id
        await self._publish({"task_id": _task_id, "status": "enqueued"})
        return _task_id

    async def _publish(self, event: Dict[str, Any]) -> None:
        """Best-effort lifecycle event: record a metric and publish to PUBLISH.

        Both steps deliberately swallow errors — eventing must never take the
        worker down.
        """
        evt = dict(event)
        evt.setdefault("ts", now_ms())
        evt.setdefault("queue", self.tasks_subject)
        try:
            metrics.observe(evt.get("status", "unknown"), evt["queue"], evt.get("type"))
        except Exception:
            pass
        try:
            await self.js.publish(self.publish_subject, json.dumps(evt).encode())
        except Exception:
            pass

    @staticmethod
    def _decode_msg(msg) -> Dict[str, Any]:
        """Decode a task envelope; tolerate a double-encoded payload string.

        Returns a minimal empty envelope when the message body is not JSON.
        """
        try:
            obj = json.loads(msg.data.decode())
            if "payload" in obj and isinstance(obj["payload"], str):
                try:
                    obj["payload"] = json.loads(obj["payload"])
                except Exception:
                    pass  # leave payload as the raw string
            return obj
        except Exception:
            return {"payload": {}, "task_id": None}

    @staticmethod
    def _delivery_attempts(msg) -> int:
        """Return JetStream's delivery count for msg, defaulting to 1."""
        try:
            return msg.metadata.num_delivered
        except Exception:
            return 1

View File

@@ -0,0 +1,18 @@
# Slim Python 3.12 base image for the database service.
FROM python:3.12-slim
# No .pyc files; unbuffered stdout so container logs appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
# NOTE(review): with WORKDIR /, the COPYs below land under /app, and
# `python -m app...` resolves via the cwd entry on sys.path. PYTHONPATH=/app
# would look for /app/app, which does not exist — confirm intent.
ENV PYTHONPATH=/app
WORKDIR /
# Packaging metadata for the pip install below.
COPY app/services/database/pyproject.toml ./
COPY app/services/database/README.md ./
# Shared code: core utilities and the common service base.
COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
# This service's own code.
COPY app/services/database/ ./app/services/database/
RUN pip install --upgrade pip && pip install --no-cache-dir .
# Scratch/persistence directory used by the services.
RUN mkdir -p /app/data
CMD ["python", "-m", "app.services.database.main"]

View File

@@ -0,0 +1,28 @@
import os
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync

# Env-configurable knobs.
PRODUCE_ENABLED = os.getenv("PRODUCE_ENABLED", "true").lower() == "true"  # gate for the produce side
PRODUCE_BATCH = int(os.getenv("PRODUCE_BATCH", "3"))  # jobs per produce tick — NOTE(review): currently unused
TASK_TYPE = os.getenv("TASK_TYPE", "db-task")  # task type (also embedded in the task_id)
CONSUME_SLEEP_SEC = float(os.getenv("CONSUME_SLEEP_SEC", "0.5"))  # simulated processing time (seconds)

# Fixed business ids: re-enqueueing the same ids every tick exercises JetStream dedup.
STATIC_IDS = ["2c47f1073a9d4f05aad6c15484894a72", "65827e3452b545d6845e050a503401f3", "5c663088f09d4062b4e567f47335fb1a"]


async def produce(service: ServiceBaseAsync):
    """Enqueue one deterministic task per static id (duplicates are skipped by dedup)."""
    if not PRODUCE_ENABLED:
        # Fix: PRODUCE_ENABLED existed but was never checked; honor it so
        # producing can actually be disabled via the environment.
        return
    for biz_id in STATIC_IDS:
        deterministic_task_id = f"{TASK_TYPE}:{biz_id}"
        payload = {"id": biz_id, "op": "sync", "source": "db-service"}
        await service.enqueue(payload, TASK_TYPE, task_id=deterministic_task_id)
    print(f"[DB] produce tick attempted ids={','.join(STATIC_IDS)}")


async def consume(service: ServiceBaseAsync, job: dict):
    """Simulate processing for CONSUME_SLEEP_SEC, then log the consumed task id."""
    await asyncio.sleep(CONSUME_SLEEP_SEC)
    print(f"[DB] consumed task={job['task_id']}")


if __name__ == "__main__":
    asyncio.run(ServiceBaseAsync(produce, consume).run())

View File

@@ -0,0 +1,36 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async task services over NATS JetStream with Prometheus metrics"
readme = "README.md"
requires-python = ">=3.11"
authors = [
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]
dependencies = [
"nats-py>=2.6.0",
"prometheus-client>=0.20.0",
"uvloop>=0.19.0"
]
[project.optional-dependencies]
dev = [
"pytest>=7.4",
"black>=23.0",
"isort>=5.12"
]
[tool.black]
line-length = 88
target-version = ["py311"]
[tool.isort]
profile = "black"
[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]

View File

@@ -0,0 +1,18 @@
# Slim Python 3.12 base image for the mail service.
FROM python:3.12-slim
# No .pyc files; unbuffered stdout so container logs appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
# NOTE(review): with WORKDIR /, the COPYs below land under /app, and
# `python -m app...` resolves via the cwd entry on sys.path. PYTHONPATH=/app
# would look for /app/app, which does not exist — confirm intent.
ENV PYTHONPATH=/app
WORKDIR /
# Packaging metadata for the pip install below.
COPY app/services/mail/pyproject.toml ./
COPY app/services/mail/README.md ./
# Shared code: core utilities and the common service base.
COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
# This service's own code.
COPY app/services/mail/ ./app/services/mail/
RUN pip install --upgrade pip && pip install --no-cache-dir .
# Scratch/persistence directory used by the services.
RUN mkdir -p /app/data
CMD ["python", "-m", "app.services.mail.main"]

View File

View File

@@ -0,0 +1,15 @@
import asyncio

# Fix: the other services import the shared base from app.services.common, and
# the image layout (code under /app, run as `python -m app.services.mail.main`)
# has no top-level "services" package — the old
# `from services.service_base_async import ...` would fail at import time.
from app.services.common.service_base_async import ServiceBaseAsync


async def produce(service: ServiceBaseAsync):
    """Enqueue a small batch of fake mail jobs each produce tick."""
    fake_mails = [{"to": "user@example.com", "subj": "Hi", "body": "Hello!"}]
    for mail in fake_mails:
        await service.enqueue(mail, "send-mail")


async def consume(service: ServiceBaseAsync, job: dict):
    """Simulate sending one mail (runtime log string intentionally unchanged)."""
    print(f"[MAIL] Gönderiliyor: {job}")
    await asyncio.sleep(0.1)


if __name__ == "__main__":
    asyncio.run(ServiceBaseAsync(produce, consume).run())

View File

@@ -0,0 +1,29 @@
import os
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync

# Env-configurable knobs.
PRODUCE_ENABLED = os.getenv("PRODUCE_ENABLED", "true").lower() == "true"  # gate for the produce side
PRODUCE_BATCH = int(os.getenv("PRODUCE_BATCH", "3"))  # jobs per produce tick — NOTE(review): currently unused
TASK_TYPE = os.getenv("TASK_TYPE", "db-task")  # task type (also embedded in the task_id)
CONSUME_SLEEP_SEC = float(os.getenv("CONSUME_SLEEP_SEC", "0.5"))  # simulated processing time (seconds)

# Fixed business ids: re-enqueueing the same ids every tick exercises JetStream dedup.
STATIC_IDS = ["2c47f1073a9d4f05aad6c15484894a74", "65827e3452b545d6845e050a503401f4", "5c663088f09d4062b4e567f47335fb1e"]


async def produce(service: ServiceBaseAsync):
    """Enqueue one deterministic task per static id (duplicates are skipped by dedup)."""
    if not PRODUCE_ENABLED:
        # Fix: PRODUCE_ENABLED existed but was never checked; honor it so
        # producing can actually be disabled via the environment.
        return
    for biz_id in STATIC_IDS:
        deterministic_task_id = f"{TASK_TYPE}:{biz_id}"
        payload = {"id": biz_id, "op": "sync", "source": "db-service"}
        await service.enqueue(payload, TASK_TYPE, task_id=deterministic_task_id)
    print(f"[DB] produce tick attempted ids={','.join(STATIC_IDS)}")


async def consume(service: ServiceBaseAsync, job: dict):
    """Simulate processing, then log the task id and its delivery attempts."""
    await asyncio.sleep(CONSUME_SLEEP_SEC)
    print(f"[DB] consumed task={job['task_id']} attempts={job.get('_attempts', 0)}")


if __name__ == "__main__":
    asyncio.run(ServiceBaseAsync(produce, consume).run())

View File

@@ -0,0 +1,35 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]
dependencies = [
"redis>=5.0.0",
"aiosqlite>=0.19.0",
]
[project.optional-dependencies]
dev = [
"pytest>=7.4",
"black>=23.0",
"isort>=5.12"
]
[tool.black]
line-length = 88
target-version = ["py311"]
[tool.isort]
profile = "black"
[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]

View File

@@ -0,0 +1,18 @@
# Slim Python 3.12 base image for the queue service.
FROM python:3.12-slim
# No .pyc files; unbuffered stdout so container logs appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
# NOTE(review): with WORKDIR /, the COPYs below land under /app, and
# `python -m app...` resolves via the cwd entry on sys.path. PYTHONPATH=/app
# would look for /app/app, which does not exist — confirm intent.
ENV PYTHONPATH=/app
WORKDIR /
# Packaging metadata for the pip install below.
COPY app/services/queue/pyproject.toml ./
COPY app/services/queue/README.md ./
# Shared code: core utilities and the common service base.
COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
# This service's own code.
COPY app/services/queue/ ./app/services/queue/
RUN pip install --upgrade pip && pip install --no-cache-dir .
# Scratch/persistence directory used by the services.
RUN mkdir -p /app/data
CMD ["python", "-m", "app.services.queue.main"]

View File

@@ -0,0 +1,17 @@
import asyncio

from app.services.common.service_base_async import ServiceBaseAsync

# Fix: dropped the unused `import uuid`.


async def produce(service: ServiceBaseAsync):
    """This service only reacts to queued jobs; keep the produce task alive but idle."""
    print("Queue Reader Service up and running.")  # fix: was an f-string with no placeholders
    while True:
        await asyncio.sleep(1)


async def consume(service: ServiceBaseAsync, job: dict):
    """Simulate a short unit of work for each received job."""
    await asyncio.sleep(0.1)
    print(f"Queue Sender Service up and running. Job: {job}")


if __name__ == "__main__":
    asyncio.run(ServiceBaseAsync(produce, consume).run())

View File

@@ -0,0 +1,35 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]
dependencies = [
"redis>=5.0.0",
"aiosqlite>=0.19.0",
]
[project.optional-dependencies]
dev = [
"pytest>=7.4",
"black>=23.0",
"isort>=5.12"
]
[tool.black]
line-length = 88
target-version = ["py311"]
[tool.isort]
profile = "black"
[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]

View File

@@ -0,0 +1,16 @@
import asyncio

# Fix: the shared base lives in app.services.common (as imported by the other
# services); a top-level "services" package does not exist in the image layout,
# so the old `from services.service_base_async import ...` would fail.
from app.services.common.service_base_async import ServiceBaseAsync


async def produce(service: ServiceBaseAsync):
    """Enqueue a small batch of fake maintenance jobs each produce tick."""
    fake_jobs = [{"action": "cleanup", "target": "old-tasks"}]
    for job in fake_jobs:
        await service.enqueue(job, "queue-maintenance")


async def consume(service: ServiceBaseAsync, job: dict):
    """Simulate handling one maintenance job (runtime log string unchanged)."""
    print(f"[QUEUE CONTROL] İşleme alındı: {job}")
    await asyncio.sleep(0.05)


if __name__ == "__main__":
    asyncio.run(ServiceBaseAsync(produce, consume).run())

View File

@@ -0,0 +1,18 @@
# Slim Python 3.12 base image for the test/seeder service.
FROM python:3.12-slim
# No .pyc files; unbuffered stdout so container logs appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
# NOTE(review): with WORKDIR /, the COPYs below land under /app, and
# `python -m app...` resolves via the cwd entry on sys.path. PYTHONPATH=/app
# would look for /app/app, which does not exist — confirm intent.
ENV PYTHONPATH=/app
WORKDIR /
# Packaging metadata for the pip install below.
COPY app/services/test/pyproject.toml ./
COPY app/services/test/README.md ./
# Shared code: core utilities and the common service base.
COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
# This service's own code.
COPY app/services/test/ ./app/services/test/
RUN pip install --upgrade pip && pip install --no-cache-dir .
# Scratch/persistence directory used by the services.
RUN mkdir -p /app/data
CMD ["python", "-m", "app.services.test.main"]

View File

View File

@@ -0,0 +1,71 @@
import os
import asyncio
import uuid
import json
import aiosqlite
import redis.asyncio as aioredis
from app.core.config import RedisConfig, Env
from app.core.utils import now_ms
# Local SQLite file used to mirror what the tester enqueued.
SQLITE_PATH = Env.SQLITE_PATH
# Redis Stream names (env-overridable), one TASKS/PUBLISH pair per service.
REDIS_STREAM_DATABASE_PUBLISH = os.getenv("REDIS_STREAM_DATABASE_PUBLISH", "ACCOUNT:SERVICES:DATABASE:PUBLISH")
REDIS_STREAM_DATABASE_TASKS = os.getenv("REDIS_STREAM_DATABASE_TASKS", "ACCOUNT:SERVICES:DATABASE:TASKS")
REDIS_STREAM_MAIL_PUBLISH = os.getenv("REDIS_STREAM_MAIL_PUBLISH", "ACCOUNT:SERVICES:MAIL:PUBLISH")
REDIS_STREAM_MAIL_TASKS = os.getenv("REDIS_STREAM_MAIL_TASKS", "ACCOUNT:SERVICES:MAIL:TASKS")
REDIS_STREAM_QUEUE_PUBLISH = os.getenv("REDIS_STREAM_QUEUE_PUBLISH", "ACCOUNT:SERVICES:QUEUE:PUBLISH")
REDIS_STREAM_QUEUE_TASKS = os.getenv("REDIS_STREAM_QUEUE_TASKS", "ACCOUNT:SERVICES:QUEUE:TASKS")
async def ensure_schema(sqlite_path: str):
    """Create the local `tasks` mirror table in SQLite if it does not exist yet."""
    async with aiosqlite.connect(sqlite_path) as db:
        await db.execute("""
        CREATE TABLE IF NOT EXISTS tasks(
        task_id TEXT PRIMARY KEY,
        queue TEXT NOT NULL,
        type TEXT NOT NULL,
        payload_json TEXT NOT NULL,
        created_at INTEGER NOT NULL,
        status TEXT DEFAULT 'pending',
        attempts INTEGER DEFAULT 0,
        last_error TEXT
        );
        """)
        await db.commit()
async def enqueue(r: aioredis.Redis, sqlite_path: str, stream: str, payload: dict, type_: str):
    """Publish one task envelope onto *stream* and mirror it into SQLite as 'pending'.

    NOTE(review): INSERT OR REPLACE resets status/attempts for an existing
    task_id — confirm that is intended when the same id is re-enqueued.
    """
    task_id = payload.get("task_id") or str(uuid.uuid4())
    # Envelope shape matches what the consuming services expect on the stream.
    task = {"task_id": task_id, "queue": stream, "type": type_, "payload": payload, "created_at": now_ms(), "_attempts": 0}
    await r.xadd(stream, {"data": json.dumps(task)})
    async with aiosqlite.connect(sqlite_path) as db:
        await db.execute("""INSERT OR REPLACE INTO tasks(task_id, queue, type, payload_json, created_at, status, attempts) VALUES(?,?,?,?,?,'pending',?)""",
            (task_id, stream, type_, json.dumps(payload), task["created_at"], 0))
        await db.commit()
async def push_db_mocks(r: aioredis.Redis, sqlite_path: str, n: int = 3):
    """Seed *n* synthetic database-sync tasks onto the DB tasks stream."""
    for _ in range(n):
        sync_payload = {"id": uuid.uuid4().hex, "op": "sync", "source": "tester"}
        await enqueue(r, sqlite_path, REDIS_STREAM_DATABASE_TASKS, sync_payload, "db-sync")
async def push_mail_mocks(r: aioredis.Redis, sqlite_path: str, n: int = 3):
    """Seed *n* synthetic outbound-mail tasks onto the MAIL tasks stream."""
    for idx in range(n):
        mail = {"to": f"user{idx}@example.com", "subj": "Hello", "body": "Hi!", "source": "tester"}
        await enqueue(r, sqlite_path, REDIS_STREAM_MAIL_TASKS, mail, "send-mail")
async def push_queue_mocks(r: aioredis.Redis, sqlite_path: str, n: int = 3):
    """Seed *n* synthetic maintenance tasks onto the QUEUE tasks stream."""
    for idx in range(n):
        job = {"action": "cleanup", "target": f"old-tasks-{idx}", "source": "tester"}
        await enqueue(r, sqlite_path, REDIS_STREAM_QUEUE_TASKS, job, "queue-maintenance")
async def main():
    """Seed all three task streams with mock jobs, then release the connection."""
    db_n, mail_n, queue_n = 3, 3, 3
    cfg = RedisConfig()
    r = aioredis.Redis(host=cfg.host, port=cfg.port, db=cfg.db, username=cfg.username, password=cfg.password)
    try:
        await ensure_schema(SQLITE_PATH)
        await push_db_mocks(r, SQLITE_PATH, db_n)
        await push_mail_mocks(r, SQLITE_PATH, mail_n)
        await push_queue_mocks(r, SQLITE_PATH, queue_n)
    finally:
        # Fix: the Redis connection was leaked. close() works across redis-py 5.x;
        # aclose() only exists from 5.0.1 — confirm the pin before switching.
        await r.close()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,35 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
[project]
name = "dual-queue-services"
version = "0.1.0"
description = "Async dual queue system with Redis Streams and SQLite persistence"
readme = "README.md"
requires-python = ">=3.11"
authors = [
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
]
dependencies = [
"redis>=5.0.0",
"aiosqlite>=0.19.0",
]
[project.optional-dependencies]
dev = [
"pytest>=7.4",
"black>=23.0",
"isort>=5.12"
]
[tool.black]
line-length = 88
target-version = ["py311"]
[tool.isort]
profile = "black"
[tool.setuptools.packages.find]
where = ["app"]
include = ["app*"]