task services added

This commit is contained in:
2025-08-15 22:30:21 +03:00
parent 456203f5cf
commit 9543d136aa
38 changed files with 1065 additions and 0 deletions

View File

@@ -0,0 +1,50 @@
import os
import redis
from typing import Optional
class Env:
    """Process configuration resolved from environment variables at import time."""

    # Redis connection coordinates.
    REDIS_HOST: str = os.environ.get("REDIS_HOST", "redis")
    REDIS_PORT: int = int(os.environ.get("REDIS_PORT", "6379"))
    REDIS_DB: int = int(os.environ.get("REDIS_DB", "0"))
    # Optional ACL credentials; None means unauthenticated.
    REDIS_USERNAME: Optional[str] = os.environ.get("REDIS_USERNAME")
    REDIS_PASSWORD: Optional[str] = os.environ.get("REDIS_PASSWORD")
    # Stream names; the defaults are deliberate "do not use" sentinels that
    # must be overridden per deployment.
    REDIS_STREAM_PUBLISH: str = os.environ.get("REDIS_STREAM_PUBLISH", "DEFAULT:REGISTER:DONTUSE")
    REDIS_STREAM_TASKS: str = os.environ.get("REDIS_STREAM_TASKS", "DEFAULT:REGISTER:DONTUSE")
    # Consumer tuning knobs.
    BATCH_SIZE: int = int(os.environ.get("BATCH_SIZE", "5"))
    MAX_RETRIES: int = int(os.environ.get("MAX_RETRIES", "3"))
    IDLE_RECLAIM_MS: int = int(os.environ.get("IDLE_RECLAIM_MS", "30000"))
    # Local SQLite bookkeeping database path.
    SQLITE_PATH: str = os.environ.get("SQLITE_PATH", "/app/data/queue.db")
class RedisConfig:
    """Redis connection + consumer settings, defaulting to Env values."""

    def __init__(
        self,
        host: str = Env.REDIS_HOST,
        port: int = Env.REDIS_PORT,
        db: int = Env.REDIS_DB,
        username: Optional[str] = Env.REDIS_USERNAME,
        password: Optional[str] = Env.REDIS_PASSWORD,
        batch_size: int = Env.BATCH_SIZE,
        max_retries: int = Env.MAX_RETRIES,
        idle_reclaim_ms: int = Env.IDLE_RECLAIM_MS,
    ):
        # Connection coordinates.
        self.host = host
        self.port = port
        self.db = db
        # Optional ACL credentials (None -> unauthenticated).
        self.username = username
        self.password = password
        # Consumer tuning.
        self.batch_size = batch_size
        self.max_retries = max_retries
        self.idle_reclaim_ms = idle_reclaim_ms

    def client(self) -> redis.Redis:
        """Build a new Redis client from this configuration.

        NOTE(review): a fresh client is created per call; callers are
        presumably expected to reuse it — confirm against call sites.
        """
        connection_kwargs = {
            "host": self.host,
            "port": self.port,
            "db": self.db,
            "username": self.username,
            "password": self.password,
        }
        return redis.Redis(**connection_kwargs)

View File

@@ -0,0 +1,49 @@
import os
from prometheus_client import Counter, start_http_server
# Guards against starting the Prometheus HTTP server more than once.
_METRICS_STARTED = False

NS = os.getenv("METRICS_NS", "servicestask")
SERVICE_NAME = os.getenv("SERVICE_NAME", "db-service")

# Every queue counter carries the same label set.
_LABELS = ("service", "queue", "type")


def _make_counter(suffix: str, doc: str) -> Counter:
    """Create a namespaced queue counter with the standard labels."""
    return Counter(f"{NS}_{suffix}", doc, labelnames=_LABELS)


QUEUE_ENQUEUED = _make_counter("queue_enqueued_total", "Enqueued tasks")
QUEUE_DUPLICATE = _make_counter("queue_duplicate_skipped_total", "Duplicate skipped")
QUEUE_DONE = _make_counter("queue_done_total", "Done tasks")
QUEUE_FAILED = _make_counter("queue_failed_total", "Failed tasks")
QUEUE_RETRY = _make_counter("queue_retry_total", "Retry attempts")
def start_server():
    """Start the Prometheus metrics HTTP server, idempotently per process.

    The port comes from METRICS_PORT (default 8000).
    """
    global _METRICS_STARTED
    if not _METRICS_STARTED:
        start_http_server(int(os.getenv("METRICS_PORT", "8000")))
        _METRICS_STARTED = True
def observe(status: str, queue: str, type_: str):
    """Increment the counter matching *status* for (service, queue, type).

    Unknown statuses are silently ignored; an empty/None type_ is
    recorded as "unknown".
    """
    dispatch = {
        "enqueued": QUEUE_ENQUEUED,
        "duplicate_skipped": QUEUE_DUPLICATE,
        "done": QUEUE_DONE,
        "failed": QUEUE_FAILED,
        "retry": QUEUE_RETRY,
    }
    counter = dispatch.get(status)
    if counter is not None:
        counter.labels(SERVICE_NAME, queue, type_ or "unknown").inc()

View File

@@ -0,0 +1,11 @@
# NATS server configuration for the main broker node.
server_name: "nats-main"
# Client connection port.
port: 4222
# HTTP monitoring endpoint.
http: 8222
# JetStream persistence: on-disk store with memory/file caps.
jetstream: {
store_dir: "/data/jetstream",
max_mem_store: 512MB,
max_file_store: 10GB
}

View File

@@ -0,0 +1,68 @@
import json
import aiosqlite
from typing import Any, Dict, List, Optional
class TasksRepoAsync:
    """
    Async SQLite repository for task bookkeeping.

    Persists each task seen on the queue with its processing status
    ('pending' / 'done' / 'failed' / 'retry'), attempt count, last error
    text and the last Redis stream message id observed for it.
    A fresh aiosqlite connection is opened per operation.
    """
    def __init__(self, db_path: str):
        # Filesystem path of the SQLite database file.
        self.db_path = db_path
    async def init(self) -> None:
        """Create the tasks table if missing and apply in-place migrations."""
        async with aiosqlite.connect(self.db_path) as db:
            # WAL + synchronous=NORMAL: better concurrent-read behavior at
            # a small durability cost — standard for a local work queue.
            await db.execute("PRAGMA journal_mode=WAL;")
            await db.execute("PRAGMA synchronous=NORMAL;")
            await db.execute("""
            CREATE TABLE IF NOT EXISTS tasks(
                task_id TEXT PRIMARY KEY,
                queue TEXT NOT NULL,
                type TEXT NOT NULL,
                payload_json TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                status TEXT DEFAULT 'pending',
                attempts INTEGER DEFAULT 0,
                last_error TEXT,
                last_msg_id TEXT
            );
            """)
            # Migration for databases created before last_msg_id existed.
            cols = await self._columns(db, "tasks")
            if "last_msg_id" not in cols:
                await db.execute("ALTER TABLE tasks ADD COLUMN last_msg_id TEXT;")
            await db.commit()
    async def insert_task(self, task: Dict[str, Any], last_msg_id: Optional[str]=None) -> None:
        """Upsert a task row (status reset to 'pending' on replace)."""
        async with aiosqlite.connect(self.db_path) as db:
            # INSERT OR REPLACE: re-enqueueing an existing task_id overwrites
            # the previous row entirely.
            await db.execute("""INSERT OR REPLACE INTO tasks (task_id, queue, type, payload_json, created_at, status, attempts, last_msg_id) VALUES(?,?,?,?,?,'pending',?,?)""",
                             (task["task_id"], task["queue"], task["type"], json.dumps(task["payload"]), task["created_at"], int(task.get("_attempts", 0)), last_msg_id))
            await db.commit()
    async def mark_done(self, task_id: str, attempts: int) -> None:
        """Mark a task finished; clears last_error."""
        await self._update(task_id, status="done", attempts=attempts, error=None)
    async def mark_failed(self, task_id: str, attempts: int, error: str) -> None:
        """Mark a task permanently failed with its final error text."""
        await self._update(task_id, status="failed", attempts=attempts, error=error)
    async def mark_retry(self, task_id: str, attempts: int, error: str, last_msg_id: str) -> None:
        """Record a retry attempt, including the stream message id being retried."""
        await self._update(task_id, status="retry", attempts=attempts, error=error, last_msg_id=last_msg_id)
    async def _update(self, task_id: str, *, status: str, attempts: Optional[int]=None, error: Optional[str]=None, last_msg_id: Optional[str]=None) -> None:
        """Build and run a partial UPDATE; status and last_error are always set."""
        # NOTE: last_error is written unconditionally, so error=None clears it.
        sets, params = ["status=?","last_error=?"], [status, error]
        if attempts is not None:
            sets.append("attempts=?"); params.append(int(attempts))
        if last_msg_id is not None:
            sets.append("last_msg_id=?"); params.append(last_msg_id)
        params.append(task_id)
        # Only column-assignment fragments are interpolated; values go via ?.
        sql = f"UPDATE tasks SET {', '.join(sets)} WHERE task_id=?"
        async with aiosqlite.connect(self.db_path) as db:
            await db.execute(sql, tuple(params))
            await db.commit()
    async def _columns(self, db: aiosqlite.Connection, table: str) -> List[str]:
        """Return the column names of *table* (internal names only — not
        parameterizable, so the f-string is safe here)."""
        cols: List[str] = []
        async with db.execute(f"PRAGMA table_info({table})") as cur:
            async for row in cur:
                # PRAGMA table_info rows: (cid, name, type, notnull, dflt, pk).
                cols.append(row[1])
        return cols

View File

@@ -0,0 +1,15 @@
import json
import time
from typing import Any, Dict
def now_ms() -> int:
    """Return the current Unix time as whole milliseconds."""
    seconds = time.time()
    return int(seconds * 1000)
def jd(obj: Dict[str, Any]) -> Dict[str, bytes]:
    """Serialize *obj* into the single-field mapping used for a Redis stream entry.

    The payload is stored as UTF-8 JSON under the "data" field. The field
    key is a str on write (redis-py encodes it); reads come back with a
    bytes key, which is why jl() looks up b"data".

    Fix: the return annotation previously claimed Dict[bytes, bytes], but
    the key "data" is a str.
    """
    return {"data": json.dumps(obj).encode("utf-8")}
def jl(fields: Dict[bytes, bytes]) -> Dict[str, Any]:
    """Decode the JSON payload stored under the b"data" field of a stream entry."""
    raw = fields[b"data"]
    return json.loads(raw.decode("utf-8"))