updated Services Task database
This commit is contained in:
0
ServicesTask/app/services/common/=6.4.0
Normal file
0
ServicesTask/app/services/common/=6.4.0
Normal file
14
ServicesTask/app/services/common/config.py
Normal file
14
ServicesTask/app/services/common/config.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import os
|
||||
|
||||
|
||||
class RedisConfig:
    """Redis connection settings sourced from environment variables.

    Each attribute falls back to a hard-coded default when the
    corresponding environment variable is unset.
    """

    # NOTE(review): defaults embed an internal IP and a placeholder
    # password -- confirm these are safe for the target deployment.
    HOST: str = os.getenv("REDIS_HOST", "10.10.2.15")
    PASSWORD: str = os.getenv("REDIS_PASSWORD", "your_strong_password_here")
    PORT: int = int(os.getenv("REDIS_PORT", 6379))
    DB: int = int(os.getenv("REDIS_DB", 0))

    @classmethod
    def as_dict(cls):
        """Return the settings as keyword arguments suitable for Redis().

        Returns:
            dict: ``host``, ``port``, ``password`` and ``db`` keys.
        """
        # Bug fix: use `cls` instead of the hard-coded class name so the
        # classmethod honours subclass overrides; PORT and DB are already
        # ints, so the redundant int() re-casts are dropped.
        return dict(host=cls.HOST, port=cls.PORT, password=cls.PASSWORD, db=cls.DB)
|
||||
|
||||
93
ServicesTask/app/services/common/models.py
Normal file
93
ServicesTask/app/services/common/models.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class User(BaseModel):
    """Account record for a system user."""

    id: int                # numeric identifier
    uu_id: str             # UUID-style string identifier
    user_tag: str
    user_type: str
    email: str
    phone_number: str
    related_company: str
    is_confirmed: bool     # record confirmation flag
    active: bool           # soft-delete / enabled flag
|
||||
|
||||
|
||||
class Person(BaseModel):
    """Personal details for an individual, optionally linked to a User."""

    id: int                # numeric identifier
    uu_id: str             # UUID-style string identifier
    firstname: str
    surname: str
    middle_name: Optional[str] = ""
    birthname: Optional[str] = ""
    # national_identity_id: str
    is_confirmed: bool     # record confirmation flag
    active: bool           # soft-delete / enabled flag
    user: Optional[User] = None  # linked account, if any
|
||||
|
||||
|
||||
class OccupantType(BaseModel):
    """Classification of an occupant (e.g. owner/tenant -- see occupant_code)."""

    id: int                # numeric identifier
    uu_id: str             # UUID-style string identifier
    occupant_code: str     # short code for the occupant category
    occupant_type: str     # human-readable category name
    is_confirmed: bool
    active: bool
    user_type_uu_id: Optional[str] = None  # related user-type UUID, if any
|
||||
|
||||
|
||||
class BuildPart(BaseModel):
    """A sub-unit of a building (floor/flat/section)."""

    id: int                # numeric identifier
    uu_id: str             # UUID-style string identifier
    part_no: str
    part_level: str
    part_code: str
    part_gross_size: float  # gross area; units not specified here -- TODO confirm
    part_net_size: float    # net area; units not specified here -- TODO confirm
    human_livable: bool
    build_id: int           # parent building numeric id
    build_uu_id: str        # parent building UUID
    is_confirmed: bool
    active: bool
    # Forward reference: BuildLivingSpace is declared later in this module,
    # hence the string annotation (resolved via update_forward_refs()).
    living_spaces: Optional[List['BuildLivingSpace']] = None
|
||||
|
||||
|
||||
class BuildLivingSpace(BaseModel):
    """Occupancy of a building part by a person over an agreement period."""

    id: int                 # numeric identifier
    uu_id: str              # UUID-style string identifier
    expiry_starts: str      # period start; string format not shown here -- TODO confirm
    expiry_ends: str        # period end; string format not shown here -- TODO confirm
    fix_value: float
    fix_percent: float
    agreement_no: str
    marketing_process: bool
    build_parts_id: int     # FK-style link to BuildPart.id
    build_parts_uu_id: str  # FK-style link to BuildPart.uu_id
    person_id: int          # FK-style link to Person.id
    person_uu_id: str       # FK-style link to Person.uu_id
    occupant_type_id: int   # FK-style link to OccupantType.id
    occupant_type_uu_id: str
    is_confirmed: bool
    active: bool
    person: Optional[Person] = None              # embedded related person
    occupant_type: Optional[OccupantType] = None  # embedded related type
|
||||
|
||||
|
||||
class BuildingCluster(BaseModel):
    """A building with its constituent parts."""

    id: int                 # numeric identifier
    uu_id: str              # UUID-style string identifier
    build_name: str
    build_no: str
    build_date: str         # string-typed date; format not shown here -- TODO confirm
    decision_period_date: str
    expiry_starts: str
    expiry_ends: str
    is_confirmed: bool
    active: bool
    # String annotation kept for symmetry with BuildPart.living_spaces;
    # resolved by the update_forward_refs() calls at module bottom.
    build_parts: List['BuildPart'] = []
|
||||
|
||||
|
||||
# Update forward references for models with circular dependencies:
# BuildPart.living_spaces names BuildLivingSpace before it is defined,
# and BuildingCluster.build_parts uses a string annotation too.
# NOTE(review): update_forward_refs() is the pydantic v1 API; under
# pydantic v2 this would be model_rebuild() -- confirm installed version.
BuildPart.update_forward_refs()
BuildingCluster.update_forward_refs()
|
||||
167
ServicesTask/app/services/common/redis_handler.py
Normal file
167
ServicesTask/app/services/common/redis_handler.py
Normal file
@@ -0,0 +1,167 @@
|
||||
import logging
|
||||
|
||||
from json import loads, dumps
|
||||
from contextlib import contextmanager
|
||||
from time import sleep
|
||||
from redis import Redis, RedisError, ConnectionError as RedisConnectionError
|
||||
|
||||
from config import RedisConfig
|
||||
|
||||
|
||||
logger = logging.getLogger('RedisHandler')
|
||||
|
||||
|
||||
@contextmanager
def safe_redis_operation(redis_client: Redis, operation_name: str = "Redis operation"):
    """
    Context manager that yields *redis_client* and logs any failure
    with a message specific to the error category, then re-raises.
    """
    try:
        yield redis_client
    except Exception as exc:
        # Classify the failure for the log line. Check the connection
        # error first: RedisConnectionError is a subclass of RedisError.
        if isinstance(exc, RedisConnectionError):
            logger.error(f"{operation_name} failed due to Redis connection error: {str(exc)}")
        elif isinstance(exc, RedisError):
            logger.error(f"{operation_name} failed due to Redis error: {str(exc)}")
        else:
            logger.error(f"{operation_name} failed with unexpected error: {str(exc)}")
        # Always propagate -- this wrapper only adds logging.
        raise
|
||||
|
||||
|
||||
class RedisHandler:

    """Singleton Redis handler class for centralized Redis operations.

    All service code shares one instance (and therefore one connection),
    obtained simply via ``RedisHandler()``.
    """

    _instance = None
    # Exception types callers may want to catch around handler calls.
    REDIS_EXCEPTIONS = (RedisConnectionError, RedisError)

    def __new__(cls):
        # Classic singleton: allocate once, then always hand back the
        # same object. The _initialized flag lets __init__ run only once.
        if cls._instance is None:
            cls._instance = super(RedisHandler, cls).__new__(cls)
            cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        # __init__ is invoked on every RedisHandler() call because of the
        # singleton __new__ above -- initialize only once.
        if self._initialized:
            return

        # Initialize Redis client with retry logic.
        self.redis_client = self._create_redis_client()
        self.redis_connected = self._check_redis_connection()
        self._initialized = True

    def _create_redis_client(self):
        """Create a Redis client, retrying the initial ping with backoff.

        Returns a connected client when possible; after exhausting the
        retries it still returns a fresh (possibly disconnected) client
        so the service can start and reconnect later.
        """
        max_retries = 5
        retry_delay = 5

        for attempt in range(max_retries):
            try:
                client = Redis(**RedisConfig.as_dict())
                client.ping()  # Test the connection
                logger.info("Redis connection established successfully")
                return client
            except (RedisConnectionError, RedisError) as e:
                if attempt < max_retries - 1:
                    logger.warning(f"Redis connection attempt {attempt + 1} failed: {str(e)}. Retrying in {retry_delay} seconds...")
                    sleep(retry_delay)
                    retry_delay *= 2  # Exponential backoff
                else:
                    logger.error(f"Failed to connect to Redis after {max_retries} attempts: {str(e)}")
                    # Continue with a new Redis client instance even if ping fails.
                    # This allows the service to start and retry connections later.
                    return Redis(**RedisConfig.as_dict())

    def _check_redis_connection(self) -> bool:
        """Return True if the server answers a PING, False otherwise."""
        try:
            self.ping()
            return True
        except Exception:
            # Any failure (refused, timeout, auth) means "not connected";
            # the unused exception binding from the old code is dropped.
            return False

    def ping(self):
        """Ping Redis server to check connection."""
        return self.redis_client.ping()

    def sadd(self, key: str, value):
        """Add a value to a Redis set."""
        return self.redis_client.sadd(key, value)

    def ismember(self, key: str, value):
        """Check if a value is a member of a Redis set."""
        return self.redis_client.sismember(key, value)

    def get(self, key: str):
        """Get a value from Redis by key."""
        return self.redis_client.get(key)

    def set(self, key: str, value):
        """Set a key-value pair in Redis."""
        return self.redis_client.set(key, value)

    def delete_value(self, key: str, value):
        """Remove field *value* from the JSON object stored at *key*.

        The stored string is decoded as JSON, the field dropped, and the
        result written back. Missing key or missing field are no-ops.
        """
        raw = self.get(key)
        if raw:
            stored: dict = loads(raw)
            # Bug fix: pop with a default so an absent field no longer
            # raises KeyError and aborts the rewrite.
            stored.pop(value, None)
            self.set(key, dumps(stored))

    def rpush(self, key: str, value):
        """Append a value to a Redis list."""
        return self.redis_client.rpush(key, value)

    def lindex(self, key: str, index: int):
        """Get an element from a Redis list by its index."""
        return self.redis_client.lindex(key, index)

    def spop(self, key: str, count=1):
        """Remove and return random members from a Redis set."""
        return self.redis_client.spop(key, count)

    def srem(self, key: str, value):
        """Remove a specific member from a Redis set."""
        return self.redis_client.srem(key, value)

    def ensure_connection(self) -> bool:
        """Check if Redis connection is alive and reconnect if needed.

        Returns True when connected (or reconnected), False otherwise.
        """
        if not self.redis_connected:
            try:
                self.redis_client = self._create_redis_client()
                self.redis_connected = self._check_redis_connection()
                if self.redis_connected:
                    logger.info("Redis connection re-established successfully")
                return self.redis_connected
            except Exception as e:
                logger.error(f"Failed to re-establish Redis connection: {str(e)}")
                return False
        return True

    @classmethod
    def handle_reconnection(cls, consecutive_errors=0, max_consecutive_errors=5):
        """
        Handle Redis reconnection with exponential backoff based on consecutive errors
        Args:
            consecutive_errors: Number of consecutive errors encountered
            max_consecutive_errors: Threshold for extended sleep time
        Returns:
            tuple: (RedisHandler instance or None on failure,
                    bool indicating if extended sleep is needed)
        """
        try:
            # Get a fresh instance (will reconnect internally via the
            # singleton -- same object, freshly re-checked connection).
            instance = cls()
            instance.redis_connected = instance._check_redis_connection()
            logger.info("Recreated Redis handler using singleton pattern")

            # Determine if extended sleep is needed
            need_extended_sleep = consecutive_errors >= max_consecutive_errors
            if need_extended_sleep:
                logger.warning(f"Hit {max_consecutive_errors} consecutive Redis errors, taking longer pause")
            return instance, need_extended_sleep
        except Exception as redis_retry_error:
            logger.error(f"Failed to recreate Redis handler: {str(redis_retry_error)}")
            return None, consecutive_errors >= max_consecutive_errors
|
||||
|
||||
@@ -4,34 +4,10 @@ import asyncio
|
||||
import fnmatch
|
||||
import aio_pika
|
||||
|
||||
from pydantic import BaseModel
|
||||
from core.utils import now_ms
|
||||
from contextvars import ContextVar
|
||||
from aio_pika.abc import AbstractIncomingMessage
|
||||
from typing import Any, Dict, Awaitable, Callable, Optional, List, NamedTuple
|
||||
|
||||
|
||||
class _MsgCtx(NamedTuple):
    # Per-message context captured while a consumer callback runs.
    msg: AbstractIncomingMessage  # the raw AMQP message being processed
    rk: str                       # routing key the message arrived on
    attempts: int                 # delivery attempt count so far


# Context variable holding the message currently being handled by the
# running task; None when no message is in flight. Lets helpers reach
# the current message without threading it through every call.
_MSG_CTX: ContextVar[_MsgCtx | None] = ContextVar("_MSG_CTX", default=None)
|
||||
|
||||
|
||||
class Meta(BaseModel):
    """Transport-level metadata for a queued job."""

    routing_key: str  # routing key the job travels under
    attempts: int     # delivery attempt count
    exchange: str     # exchange name used for publishing
||||
|
||||
|
||||
class Job(BaseModel):
    """Envelope for a task as serialized onto the message bus."""

    task_id: str      # unique id for the task
    type: str         # task/action discriminator
    payload: dict     # task-specific data
    created_at: int   # creation timestamp (milliseconds, per now_ms usage)
    # NOTE(review): pydantic treats underscore-prefixed names as private
    # attributes, not model fields -- `_meta` will not be populated from
    # input data nor serialized. Confirm this is intentional.
    _meta: Meta
|
||||
from typing import Any, Dict, Awaitable, Callable, Optional, List
|
||||
from app.services.types.task import _MsgCtx, _MSG_CTX
|
||||
from app.services.types.queue import Enqueue
|
||||
|
||||
|
||||
class ServiceBaseAsync:
|
||||
@@ -43,7 +19,7 @@ class ServiceBaseAsync:
|
||||
- Retry: TTL'li retry kuyruğu (RETRY_DELAY_MS), sonra main'e geri DLX
|
||||
- Max deneme üstünde DLQ: q.<service>.events.dlq
|
||||
- Handler map: routing key -> özel callback (pattern destekli)
|
||||
- Geriye uyumluluk: enqueue(payload, type_, routing_key=None, message_id=None)
|
||||
- Geriye uyumluluk: enqueue(payload, action, routing_key=None, message_id=None)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
@@ -115,15 +91,16 @@ class ServiceBaseAsync:
|
||||
await q_main.consume(self._on_message, no_ack=False)
|
||||
await asyncio.gather(self._produce_loop())
|
||||
|
||||
async def enqueue(self, task_id: str, payload: Dict[str, Any], type_: Optional[str] = None, routing_key: Optional[str] = None, message_id: Optional[str] = None) -> str:
|
||||
async def enqueue(self, enqueue: Enqueue) -> str:
|
||||
assert self.ex is not None
|
||||
payload.setdefault("task_id", task_id)
|
||||
payload: dict = enqueue.payload
|
||||
payload.setdefault("task_id", enqueue.task_id)
|
||||
payload.setdefault("source", self.service_name)
|
||||
body = json.dumps({"task_id": task_id, "type": type_, "payload": payload, "created_at": now_ms()}).encode()
|
||||
msg = aio_pika.Message(body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=message_id or task_id, headers={"x-attempts": 0})
|
||||
rk = routing_key or self.produce_key
|
||||
await self.ex.publish(msg, routing_key=rk)
|
||||
return task_id
|
||||
enqueue.payload = payload
|
||||
msg = aio_pika.Message(enqueue.body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=enqueue.message_id or enqueue.task_id, headers={"x-attempts": 0}, type=enqueue.action)
|
||||
routing_key = enqueue.routing_key or self.produce_key
|
||||
await self.ex.publish(msg, routing_key=routing_key)
|
||||
return enqueue.task_id
|
||||
|
||||
async def ack_current(self) -> None:
|
||||
ctx = _MSG_CTX.get()
|
||||
|
||||
Reference in New Issue
Block a user