redis handler implemented; Services Task has a Redis object

parent 4e6774a15b
commit 9dd8740171
@@ -1,6 +1,7 @@
 import sys
 import socket
 import logging
+
 from time import sleep
 from config import IsBankConfig
 from Depends.mail_handler import EmailReaderService, EmailServiceRunner
@@ -88,6 +88,13 @@ class BuildingCluster(BaseModel):
     build_parts: List['BuildPart'] = []


+
+class BuildRequirements(BaseModel):
+    building_count: int
+    living_space: int
+    build_parts: int
+
+
 # Update forward references for models with circular dependencies
 BuildPart.update_forward_refs()
 BuildingCluster.update_forward_refs()
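A quick sanity check of the new BuildRequirements model (a sketch, not part of the commit, assuming pydantic v1 as implied by update_forward_refs and .dict() elsewhere in this diff):

    req = BuildRequirements(building_count=3, living_space=42, build_parts=18)
    # survives the dict round-trip that the Redis JSON caching below relies on
    assert BuildRequirements(**req.dict()) == req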
@@ -4,8 +4,7 @@ from json import loads, dumps
 from contextlib import contextmanager
 from time import sleep
 from redis import Redis, RedisError, ConnectionError as RedisConnectionError
-from config import RedisConfig
-
+from .config import RedisConfig


 logger = logging.getLogger('RedisHandler')
@@ -43,35 +42,28 @@ class RedisHandler:
         return cls._instance

     def __init__(self):
-        # Initialize only once
         if self._initialized:
             return

-        # Initialize Redis client with retry logic
         self.redis_client = self._create_redis_client()
         self.redis_connected = self._check_redis_connection()
         self._initialized = True

     def _create_redis_client(self):
         """Create a Redis client with connection retry"""
-        max_retries = 5
-        retry_delay = 5
+        max_retries, retry_delay = 5, 5

         for attempt in range(max_retries):
             try:
                 client = Redis(**RedisConfig.as_dict())
-                client.ping()  # Test the connection
+                client.ping()
                 logger.info("Redis connection established successfully")
                 return client
             except (RedisConnectionError, RedisError) as e:
                 if attempt < max_retries - 1:
                     logger.warning(f"Redis connection attempt {attempt + 1} failed: {str(e)}. Retrying in {retry_delay} seconds...")
                     sleep(retry_delay)
-                    retry_delay *= 2  # Exponential backoff
+                    retry_delay *= 2
                 else:
                     logger.error(f"Failed to connect to Redis after {max_retries} attempts: {str(e)}")
-        # Continue with a new Redis client instance even if ping fails
-        # This allows the service to start and retry connections later
         return Redis(**RedisConfig.as_dict())

     def _check_redis_connection(self) -> bool:
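For reference, the retry schedule _create_redis_client encodes: the delay starts at retry_delay seconds and doubles after every failed attempt except the last (a distilled sketch, not part of the commit):

    max_retries, retry_delay = 5, 5
    delays = []
    for attempt in range(max_retries - 1):  # no sleep after the final attempt
        delays.append(retry_delay)
        retry_delay *= 2
    print(delays)  # [5, 10, 20, 40] -> roughly 75 seconds of waiting before falling through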
@@ -98,6 +90,17 @@ class RedisHandler:
         """Get a value from Redis by key"""
         return self.redis_client.get(key)

+    def get_json(self, key: str) -> dict:
+        """Get a JSON-encoded value from Redis by key and deserialize it"""
+        obj = self.redis_client.get(key)
+        if obj:
+            return loads(obj)
+        return None
+
+    def set_json(self, key: str, value):
+        """Serialize a value to JSON and store it under key"""
+        return self.redis_client.set(key, dumps(value))
+
     def set(self, key: str, value):
         """Set a key-value pair in Redis"""
         return self.redis_client.set(key, value)
@@ -151,12 +154,9 @@ class RedisHandler:
             tuple: (RedisHandler instance, bool indicating if extended sleep is needed)
         """
         try:
-            # Get a fresh instance (will reconnect internally)
             instance = cls()
             instance.redis_connected = instance._check_redis_connection()
             logger.info("Recreated Redis handler using singleton pattern")

-            # Determine if extended sleep is needed
             need_extended_sleep = consecutive_errors >= max_consecutive_errors
             if need_extended_sleep:
                 logger.warning(f"Hit {max_consecutive_errors} consecutive Redis errors, taking longer pause")
@@ -165,3 +165,8 @@ class RedisHandler:
             logger.error(f"Failed to recreate Redis handler: {str(redis_retry_error)}")
             return None, consecutive_errors >= max_consecutive_errors

+
+class RedisSaveModels:
+    COMMENT_BUILDING_CLUSTER = "COMMENT:PARSER:BUILDING:CLUSTER"
+    COMMENT_BUILDING_INFO = "COMMENT:PARSER:BUILDING:INFO"
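Usage of the new JSON helpers together with the RedisSaveModels keys (a sketch, assuming a Redis instance reachable via RedisConfig; get_json returns None for missing keys):

    handler = RedisHandler()
    handler.set_json(RedisSaveModels.COMMENT_BUILDING_INFO, {"building_count": 3, "living_space": 42, "build_parts": 18})
    assert handler.get_json(RedisSaveModels.COMMENT_BUILDING_INFO) == {"building_count": 3, "living_space": 42, "build_parts": 18}
    assert handler.get_json("no:such:key") is None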
@@ -6,8 +6,9 @@ import aio_pika

 from aio_pika.abc import AbstractIncomingMessage
 from typing import Any, Dict, Awaitable, Callable, Optional, List
-from app.services.types.task import _MsgCtx, _MSG_CTX
-from app.services.types.queue import Enqueue
+from services.types.task import _MsgCtx, _MSG_CTX
+from services.types.queue import Enqueue
+


 class ServiceBaseAsync:
@@ -14,6 +14,7 @@ COPY app/services/database/README.md ./
 COPY app/core ./app/core
 COPY app/services/common/ ./app/services/common/
 COPY app/services/database/ ./app/services/database/
+COPY app/services/types/ ./app/services/types/

 RUN apt-get update && apt-get install -y bash
 RUN mkdir -p /app/data
@@ -1,136 +1,181 @@
 import arrow

-from app.services.common.models import BuildingCluster, BuildPart, BuildLivingSpace, Person, User, OccupantType
-from app.services.database.prisma_client import PrismaService
-
-
-prisma_service = PrismaService()
-
-
-async def get_count_person_data_due_to_build():
-    today = arrow.now().to('GMT+3').datetime
-    async with prisma_service._asession() as db:
-        occupant_flat_owner = await db.occupant_types.find_first(query={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
-        occupant_tenant = await db.occupant_types.find_first(query={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
-        possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
-        building_count = await db.build.count(where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}})
-        build_parts_count = await db.build_parts.count(where={"active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}})
-        living_spaces_count = await db.build_living_space.count(
-            where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
-        )
-        return {"building_count": building_count, "living_space": living_spaces_count, "build_parts": build_parts_count}
-        # return {"building_count": 0, "living_space": 0, "build_parts": 0}
-
-
-async def get_all_person_data_due_to_build():
-    """
-    Get all person data due to build with comprehensive inner joins
-    Returns a dictionary of buildings clustered with their build parts, people, and living spaces
-    """
-    buildings_dict, today = {}, arrow.now().to('GMT+3').datetime
-    async with prisma_service._asession() as db:
-        occupant_flat_owner = await db.occupant_types.find_first(query={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
-        occupant_tenant = await db.occupant_types.find_first(query={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
-        possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
-        buildings = await db.build.find_many(where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}})
-        for build in buildings:
-            buildings_dict[str(build.id)] = BuildingCluster(
-                id=build.id,
-                uu_id=build.uu_id,
-                build_name=build.build_name,
-                build_no=build.build_no,
-                build_date=str(build.build_date),
-                decision_period_date=str(build.decision_period_date),
-                expiry_starts=str(build.expiry_starts),
-                expiry_ends=str(build.expiry_ends),
-                is_confirmed=build.is_confirmed,
-                active=build.active,
-                build_parts=[]
-            )
-            build_parts = db.build_parts(where={"build_id": build.id, "active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}})
-            for build_part in build_parts:
-                part_obj = BuildPart(
-                    id=build_part.id,
-                    uu_id=build_part.uu_id,
-                    part_no=build_part.part_no,
-                    part_level=build_part.part_level,
-                    part_code=build_part.part_code,
-                    part_gross_size=build_part.part_gross_size,
-                    part_net_size=build_part.part_net_size,
-                    human_livable=build_part.human_livable,
-                    build_id=build_part.build_id,
-                    build_uu_id=build_part.build_uu_id,
-                    is_confirmed=build_part.is_confirmed,
-                    active=build_part.active,
-                    living_spaces=[],
-                    build=None
-                )
-                living_spaces = db.build_living_space.find_many(
-                    include={"occupant_types": True, "people": {"include": {"users": True}}},
-                    query={"build_parts_id": build_part.id, "active": True, "is_confirmed": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
-                )
-                for living_space in living_spaces:
-                    person = living_space.people
-                    user = db.users.find_first(where={"person_id": person.id, "active": True, "is_confirmed": True})
-                    user_of_person = None
-                    if user:
-                        user_of_person = User(
-                            id=user.id,
-                            uu_id=user.uu_id,
-                            user_tag=user.user_tag,
-                            user_type=user.user_type,
-                            email=user.email,
-                            phone_number=user.phone_number,
-                            related_company=user.related_company,
-                            is_confirmed=user.is_confirmed,
-                            active=user.active
-                        )
-                    person_obj = Person(
-                        id=person.id,
-                        uu_id=person.uu_id,
-                        firstname=person.firstname,
-                        surname=person.surname,
-                        middle_name=person.middle_name,
-                        birthname=person.birthname,
-                        is_confirmed=person.is_confirmed,
-                        active=person.active,
-                        user=user_of_person
-                    )
-                    occupant_type = living_space.occupant_types
-                    occupant_type_obj = OccupantType(
-                        id=occupant_type.id,
-                        uu_id=occupant_type.uu_id,
-                        occupant_code=occupant_type.occupant_code,
-                        occupant_type=occupant_type.occupant_type,
-                        is_confirmed=occupant_type.is_confirmed,
-                        active=occupant_type.active,
-                        user_type_uu_id=occupant_type.user_type_uu_id
-                    )
-                    living_space_obj = BuildLivingSpace(
-                        id=living_space.id,
-                        uu_id=living_space.uu_id,
-                        expiry_starts=str(living_space.expiry_starts),
-                        expiry_ends=str(living_space.expiry_ends),
-                        fix_value=float(living_space.fix_value),
-                        fix_percent=float(living_space.fix_percent),
-                        agreement_no=living_space.agreement_no,
-                        marketing_process=living_space.marketing_process,
-                        build_parts_id=living_space.build_parts_id,
-                        build_parts_uu_id=living_space.build_parts_uu_id,
-                        person_id=living_space.person_id,
-                        person_uu_id=living_space.person_uu_id,
-                        occupant_type_id=living_space.occupant_type_id,
-                        occupant_type_uu_id=living_space.occupant_type_uu_id,
-                        is_confirmed=living_space.is_confirmed,
-                        active=living_space.active,
-                        person=person_obj,
-                        occupant_types=occupant_type_obj
-                    )
-                    part_obj.living_spaces.append(living_space_obj)
-            buildings_dict[str(build.id)].build_parts.append(part_obj)
-    return {i: v.dict(exclude_none=True) for i, v in buildings_dict.items()}
-
-
-def get_all_companies_data():
-    return prisma_service.find_many(table="companies", query={"active": True, "is_confirmed": True})
+from typing import Optional, Dict
+
+from services.common.redis_handler import RedisHandler, RedisSaveModels
+from services.common.models import BuildingCluster, BuildPart, BuildLivingSpace, Person, User, OccupantType, BuildRequirements
+from prisma_client import PrismaService
+
+
+class DefaultImportsToMemory:
+
+    def __init__(self, prisma_service: Optional[PrismaService] = None):
+        self.prisma_service = None
+        if prisma_service:
+            self.prisma_service = prisma_service
+        self.redis_handler = RedisHandler()
+        self.today = arrow.now().to('GMT+3').datetime
+
+    # Redis Actions
+    async def get_count_person_data_due_to_build_info(self) -> Optional[BuildRequirements]:
+        """Read the cached build requirement counts from Redis"""
+        data = self.redis_handler.get_json(RedisSaveModels.COMMENT_BUILDING_INFO)
+        return BuildRequirements(**data) if data else None
+
+    async def set_count_person_data_due_to_build_info(self, data: BuildRequirements):
+        """Cache the build requirement counts in Redis"""
+        return self.redis_handler.set_json(RedisSaveModels.COMMENT_BUILDING_INFO, data.dict())
+
+    async def get_count_person_data_due_to_build_data(self):
+        """Read the cached building clusters from Redis"""
+        data = self.redis_handler.get_json(RedisSaveModels.COMMENT_BUILDING_CLUSTER)
+        return {i: BuildingCluster(**v) for i, v in (data or {}).items()}
+
+    async def set_count_person_data_due_to_build_data(self, data: Dict[str, BuildingCluster]):
+        """Cache the building clusters in Redis"""
+        excluded_dict = {i: v.dict(exclude_none=True) for i, v in data.items()}
+        return self.redis_handler.set_json(RedisSaveModels.COMMENT_BUILDING_CLUSTER, excluded_dict)
+
+    # Database Actions
+    def check_if_database_is_available(self):
+        if not self.prisma_service:
+            raise ValueError("PrismaService is not initialized")
+
+    async def get_count_person_data_due_to_build(self) -> BuildRequirements:
+        """Get count of person data due to build with comprehensive inner joins"""
+        self.check_if_database_is_available()
+        async with self.prisma_service._asession() as db:
+            occupant_flat_owner = await db.occupant_types.find_first(where={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
+            occupant_tenant = await db.occupant_types.find_first(where={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
+            possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
+            building_count = await db.build.count(where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
+            build_parts_count = await db.build_parts.count(where={"active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
+            living_spaces_count = await db.build_living_space.count(
+                where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
+            )
+            return BuildRequirements(building_count=building_count, living_space=living_spaces_count, build_parts=build_parts_count)
+
+    async def retrieve_all_person_data_due_to_build(self) -> Dict[str, BuildingCluster]:
+        """
+        Get all person data due to build with comprehensive inner joins
+        Returns a dictionary of buildings clustered with their build parts, people, and living spaces
+        """
+        self.check_if_database_is_available()
+        buildings_dict = {}
+        async with self.prisma_service._asession() as db:
+            occupant_flat_owner = await db.occupant_types.find_first(where={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
+            occupant_tenant = await db.occupant_types.find_first(where={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
+            possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
+            buildings = await db.build.find_many(where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
+            for build in buildings:
+                buildings_dict[str(build.id)] = BuildingCluster(
+                    id=build.id, uu_id=build.uu_id, build_name=build.build_name, build_no=build.build_no, build_date=str(build.build_date),
+                    decision_period_date=str(build.decision_period_date), expiry_starts=str(build.expiry_starts), expiry_ends=str(build.expiry_ends),
+                    is_confirmed=build.is_confirmed, active=build.active, build_parts=[]
+                )
+                build_parts = await db.build_parts.find_many(where={"build_id": build.id, "active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
+                for build_part in build_parts:
+                    part_obj = BuildPart(
+                        id=build_part.id,
+                        uu_id=build_part.uu_id,
+                        part_no=build_part.part_no,
+                        part_level=build_part.part_level,
+                        part_code=build_part.part_code,
+                        part_gross_size=build_part.part_gross_size,
+                        part_net_size=build_part.part_net_size,
+                        human_livable=build_part.human_livable,
+                        build_id=build_part.build_id,
+                        build_uu_id=build_part.build_uu_id,
+                        is_confirmed=build_part.is_confirmed,
+                        active=build_part.active,
+                        living_spaces=[],
+                        build=None
+                    )
+                    living_spaces = await db.build_living_space.find_many(
+                        include={"occupant_types": True, "people": {"include": {"users": True}}},
+                        where={"build_parts_id": build_part.id, "active": True, "is_confirmed": True,
+                               "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
+                    )
+                    for living_space in living_spaces:
+                        person = living_space.people
+                        user = await db.users.find_first(where={"person_id": person.id, "active": True, "is_confirmed": True})
+                        user_of_person = None
+                        if user:
+                            user_of_person = User(
+                                id=user.id,
+                                uu_id=user.uu_id,
+                                user_tag=user.user_tag,
+                                user_type=user.user_type,
+                                email=user.email,
+                                phone_number=user.phone_number,
+                                related_company=user.related_company,
+                                is_confirmed=user.is_confirmed,
+                                active=user.active
+                            )
+                        person_obj = Person(
+                            id=person.id,
+                            uu_id=person.uu_id,
+                            firstname=person.firstname,
+                            surname=person.surname,
+                            middle_name=person.middle_name,
+                            birthname=person.birthname,
+                            is_confirmed=person.is_confirmed,
+                            active=person.active,
+                            user=user_of_person
+                        )
+                        occupant_type = living_space.occupant_types
+                        occupant_type_obj = OccupantType(
+                            id=occupant_type.id,
+                            uu_id=occupant_type.uu_id,
+                            occupant_code=occupant_type.occupant_code,
+                            occupant_type=occupant_type.occupant_type,
+                            is_confirmed=occupant_type.is_confirmed,
+                            active=occupant_type.active,
+                            user_type_uu_id=occupant_type.user_type_uu_id
+                        )
+                        living_space_obj = BuildLivingSpace(
+                            id=living_space.id,
+                            uu_id=living_space.uu_id,
+                            expiry_starts=str(living_space.expiry_starts),
+                            expiry_ends=str(living_space.expiry_ends),
+                            fix_value=float(living_space.fix_value),
+                            fix_percent=float(living_space.fix_percent),
+                            agreement_no=living_space.agreement_no,
+                            marketing_process=living_space.marketing_process,
+                            build_parts_id=living_space.build_parts_id,
+                            build_parts_uu_id=living_space.build_parts_uu_id,
+                            person_id=living_space.person_id,
+                            person_uu_id=living_space.person_uu_id,
+                            occupant_type_id=living_space.occupant_type_id,
+                            occupant_type_uu_id=living_space.occupant_type_uu_id,
+                            is_confirmed=living_space.is_confirmed,
+                            active=living_space.active,
+                            person=person_obj,
+                            occupant_types=occupant_type_obj
+                        )
+                        part_obj.living_spaces.append(living_space_obj)
+                    buildings_dict[str(build.id)].build_parts.append(part_obj)
+            return buildings_dict
+
+    async def retrieve_all_companies_data(self):
+        self.check_if_database_is_available()
+        async with self.prisma_service._asession() as db:
+            return await db.companies.find_many(where={"active": True, "is_confirmed": True})
+
+    async def renew_requirements(self):
+        self.check_if_database_is_available()
+
+        async def set_to_redis():
+            await self.set_count_person_data_due_to_build_info(count_person_data_due_to_build_info_db)
+            all_person_data = await self.retrieve_all_person_data_due_to_build()
+            await self.set_count_person_data_due_to_build_data(all_person_data)
+            return
+
+        count_person_data_due_to_build_info_db = await self.get_count_person_data_due_to_build()
+        count_person_data_due_to_build_info_memory = await self.get_count_person_data_due_to_build_info()
+        if not count_person_data_due_to_build_info_memory:
+            return await set_to_redis()
+
+        all_counts_in_memory = [count_person_data_due_to_build_info_memory.building_count, count_person_data_due_to_build_info_memory.living_space, count_person_data_due_to_build_info_memory.build_parts]
+        all_counts_in_db = [count_person_data_due_to_build_info_db.building_count, count_person_data_due_to_build_info_db.living_space, count_person_data_due_to_build_info_db.build_parts]
+        if all_counts_in_memory != all_counts_in_db:
+            return await set_to_redis()
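A minimal driver for the new DefaultImportsToMemory class (a sketch, assuming the database behind PrismaService and the Redis instance are both reachable with default settings); renew_requirements only rewrites the Redis cache when the cached counts are missing or drift from the database:

    import asyncio

    async def main():
        importer = DefaultImportsToMemory(PrismaService())
        await importer.renew_requirements()
        print(await importer.get_count_person_data_due_to_build_info())

    asyncio.run(main())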
@@ -1,9 +1,11 @@
 import os
 import asyncio

-from prisma_client import PrismaService
+from comment_requirements import DefaultImportsToMemory
 from services.common.service_base_async import ServiceBaseAsync
-from app.services.types.task import Job
+from services.types.task import Job

+from prisma_client import PrismaService
+

 PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
@@ -12,13 +14,20 @@ EVENT_TYPE = os.getenv("EVENT_TYPE", "db-event")
 PROCESS_SEC = 10

 prisma_service = PrismaService()
+is_db_pulled = False


 async def produce(svc: ServiceBaseAsync):
+    global is_db_pulled
+
+    # Get build info to memory
+    if not is_db_pulled:
+        await default_imports()
+        is_db_pulled = True
     async with prisma_service._asession() as db:
         # Routine Email Service
         routine_email_service_result = await db.account_records.find_many(
-            where={"is_email_send": False, "is_active": True, "is_confirmed": True, "is_deleted": False}, take=3, skip=0
+            where={"is_email_send": False, "active": True, "is_confirmed": True, "deleted": False}, take=3, skip=0
         )
         if not routine_email_service_result:
             await asyncio.sleep(PROCESS_SEC)
@@ -29,8 +38,6 @@ async def produce(svc: ServiceBaseAsync):
            generate_task__uuid += str(row["uu_id"])[:4]
        await svc.enqueue(task_id=generate_task__uuid, payload=routine_email_service_result, action="routine.email.send.service")
        # Get Build and Company Requirements
-
-
    await asyncio.sleep(PROCESS_SEC)
@@ -55,7 +62,10 @@ async def consume_default(svc: ServiceBaseAsync, job: dict):
         await svc.dlq_current(job_model, error="unsupported_routing_key")


+async def default_imports():
+    update_comment_requirements = DefaultImportsToMemory(prisma_service)
+    await update_comment_requirements.renew_requirements()
+
+
 if __name__ == "__main__":
     svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"parser.comment.publish": handle_comment_publish, "mail.service.publish": handle_routine_email_send_service_ack})
     asyncio.run(svc.run())
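The producer now performs a one-time warm-up before its normal loop; distilled, the gating pattern is (names as in the diff, body elided):

    is_db_pulled = False

    async def produce(svc):
        global is_db_pulled
        if not is_db_pulled:          # only the first produce() call pays the DB cost
            await default_imports()   # pulls build/company requirements into Redis
            is_db_pulled = True
        ...                           # regular queue work continues every cycle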
@@ -4,3 +4,4 @@ uvloop>=0.19.0
 prisma==0.9.1
 asyncio==3.4.3
 arrow>=1.3.0
+redis>=6.4.0
@@ -2,7 +2,7 @@ from json import dumps
 from typing import Any, Optional, Dict
 from pydantic import BaseModel

-from app.core.utils import now_ms
+from core.utils import now_ms


 class Enqueue(BaseModel):