black shift

berkay 2025-04-22 11:10:29 +03:00
parent d7f1da8de8
commit e5f88f2eb4
30 changed files with 671 additions and 521 deletions

View File

@@ -26,8 +26,8 @@ class CommonHeaders(BaseModel):
         # Extract operation_id from the route
         operation_id = None
-        if hasattr(request.scope.get('route'), 'operation_id'):
-            operation_id = request.scope.get('route').operation_id
+        if hasattr(request.scope.get("route"), "operation_id"):
+            operation_id = request.scope.get("route").operation_id
         return cls(
             language=language,
@@ -36,7 +36,7 @@ class CommonHeaders(BaseModel):
             token=token,
             request=request,
             response=response,
-            operation_id=operation_id
+            operation_id=operation_id,
         )

     def get_headers_dict(self):

View File

@@ -6,12 +6,17 @@ from fastapi.responses import RedirectResponse
 def create_events_if_any_cluster_set():
     import Events

     if not Events.__all__:
         return
-    router_cluster_stack: list[RouterCluster] = [getattr(Events, e, None) for e in Events.__all__]
+    router_cluster_stack: list[RouterCluster] = [
+        getattr(Events, e, None) for e in Events.__all__
+    ]
     for router_cluster in router_cluster_stack:
-        event_cluster_stack: list[EventCluster] = list(router_cluster.event_clusters.values())
+        event_cluster_stack: list[EventCluster] = list(
+            router_cluster.event_clusters.values()
+        )
         for event_cluster in event_cluster_stack:
             print(f"Creating event:", event_cluster.name)
             try:

View File

@@ -200,7 +200,6 @@ def init_applications_for_tenant(super_user: BuildLivingSpace, db_session=None)
             application_type="Dash",
             description="Individual Page for tenant account view",
         ),
     ]
-
     for list_of_created_app in list_of_created_apps:

View File

@@ -4,10 +4,8 @@ from fastapi import APIRouter
 def get_routes() -> list[APIRouter]:
     from .people.route import people_route
     from .user.route import user_route

-    return [
-        user_route,
-        people_route
-    ]
+    return [user_route, people_route]

 def get_safe_endpoint_urls() -> list[tuple[str, str]]:

View File

@@ -28,7 +28,9 @@ def user_list_route(
     endpoint_code = "5bc09312-d3f2-4f47-baba-17c928706da8"
     token = request.headers.get(api_config.ACCESS_TOKEN_TAG, None)
     token_object = TokenProvider.get_dict_from_redis(token=token)
-    event_key = TokenProvider.retrieve_event_codes(endpoint_code=endpoint_code, token=token_object)
+    event_key = TokenProvider.retrieve_event_codes(
+        endpoint_code=endpoint_code, token=token_object
+    )
     headers = {
         "language": language or "",
         "domain": domain or "",
@@ -36,7 +38,9 @@ def user_list_route(
         "tz": tz or "GMT+3",
         "token": token,
     }
-    event_cluster_matched = UserRouterCluster.get_event_cluster("UserList").match_event(event_key=event_key)
+    event_cluster_matched = UserRouterCluster.get_event_cluster("UserList").match_event(
+        event_key=event_key
+    )
     response.headers["X-Header"] = "Test Header GET"
     if runner_callable := event_cluster_matched.event_callable():
         return runner_callable
@@ -71,7 +75,9 @@ def user_create_route(
         "tz": tz or "GMT+3",
         "token": token,
     }
-    event_cluster_matched = UserRouterCluster.get_event_cluster("UserCreate").match_event(event_key=event_key)
+    event_cluster_matched = UserRouterCluster.get_event_cluster(
+        "UserCreate"
+    ).match_event(event_key=event_key)
     response.headers["X-Header"] = "Test Header POST"
     if runner_callable := event_cluster_matched.event_callable():
         return runner_callable
@@ -100,7 +106,9 @@ def user_update_route(request: Request, response: Response):
         "tz": tz or "GMT+3",
         "token": token,
     }
-    event_cluster_matched = UserRouterCluster.get_event_cluster("UserUpdate").match_event(event_key=event_key)
+    event_cluster_matched = UserRouterCluster.get_event_cluster(
+        "UserUpdate"
+    ).match_event(event_key=event_key)
     response.headers["X-Header"] = "Test Header POST"
     if runner_callable := event_cluster_matched.event_callable():
         return runner_callable

View File

@@ -2,31 +2,24 @@ from ApiControllers.abstracts.event_clusters import EventCluster, RouterCluster
 from .supers_events import (
     SupersPeopleCreateEvent,
     SupersPeopleUpdateEvent,
-    SupersPeopleListEvent
+    SupersPeopleListEvent,
 )

-PeopleRouterCluster = RouterCluster(
-    name="PeopleRouterCluster"
-)
+PeopleRouterCluster = RouterCluster(name="PeopleRouterCluster")

 PeopleEventClusterList = EventCluster(
-    name="PeopleList",
-    endpoint_uu_id="f102db46-031a-43e4-966a-dae6896f985b"
+    name="PeopleList", endpoint_uu_id="f102db46-031a-43e4-966a-dae6896f985b"
 )
 PeopleEventClusterList.add_event(SupersPeopleListEvent)

 PeopleEventClusterCreate = EventCluster(
-    name="PeopleCreate",
-    endpoint_uu_id="eb465fde-337f-4b81-94cf-28c6d4f2b1b6"
+    name="PeopleCreate", endpoint_uu_id="eb465fde-337f-4b81-94cf-28c6d4f2b1b6"
 )
 PeopleEventClusterCreate.add_event(SupersPeopleCreateEvent)

 PeopleEventClusterUpdate = EventCluster(
-    name="PeopleUpdate",
-    endpoint_uu_id="c9e5ba69-6915-43f5-8f9c-a5c2aa865b89"
+    name="PeopleUpdate", endpoint_uu_id="c9e5ba69-6915-43f5-8f9c-a5c2aa865b89"
 )
 PeopleEventClusterUpdate.add_event(SupersPeopleUpdateEvent)

 PeopleRouterCluster.set_event_cluster(PeopleEventClusterList)
 PeopleRouterCluster.set_event_cluster(PeopleEventClusterCreate)
 PeopleRouterCluster.set_event_cluster(PeopleEventClusterUpdate)

View File

@@ -5,25 +5,20 @@ from .supers_events import (
     SuperUsersUpdateEvent,
 )

-UserRouterCluster = RouterCluster(
-    name="UserRouterCluster"
-)
+UserRouterCluster = RouterCluster(name="UserRouterCluster")

 UserEventClusterList = EventCluster(
-    name="UserList",
-    endpoint_uu_id="5bc09312-d3f2-4f47-baba-17c928706da8"
+    name="UserList", endpoint_uu_id="5bc09312-d3f2-4f47-baba-17c928706da8"
 )
 UserEventClusterList.add_event(SuperUsersListEvent)

 UserEventClusterCreate = EventCluster(
-    name="UserCreate",
-    endpoint_uu_id="08d4b572-1584-47bb-aa42-8d068e5514e7"
+    name="UserCreate", endpoint_uu_id="08d4b572-1584-47bb-aa42-8d068e5514e7"
 )
 UserEventClusterCreate.add_event(SuperUsersCreateEvent)

 UserEventClusterUpdate = EventCluster(
-    name="UserUpdate",
-    endpoint_uu_id="b641236a-928d-4f19-a1d2-5edf611d1e56"
+    name="UserUpdate", endpoint_uu_id="b641236a-928d-4f19-a1d2-5edf611d1e56"
 )
 UserEventClusterUpdate.add_event(SuperUsersUpdateEvent)

View File

@@ -71,6 +71,7 @@ def supers_users_create_callable():
         },
     }

+
 SuperUsersCreateEvent.event_callable = supers_users_create_callable

 # Update endpoint
@@ -96,4 +97,5 @@ def supers_users_update_callable():
         },
     }

+
 SuperUsersUpdateEvent.event_callable = supers_users_update_callable

View File

@@ -1,10 +1,9 @@
-from .template.cluster import (
-    TemplateEventClusterSet
-)
+from .template.cluster import TemplateEventClusterSet

 __all__ = [
     "TemplateEventClusterSet",
 ]

 def retrieve_all_clusters():
     return [TemplateEventClusterSet]

View File

@@ -1,4 +1,7 @@
-from ApiServices.TemplateService.initializer.event_clusters import EventCluster, SetEventCluster
+from ApiServices.TemplateService.initializer.event_clusters import (
+    EventCluster,
+    SetEventCluster,
+)

 TemplateEventCluster = EventCluster(
@@ -12,4 +15,3 @@ OtherTemplateEventCluster = EventCluster(
 TemplateEventClusterSet = SetEventCluster()
 TemplateEventClusterSet.add_event_cluster(TemplateEventCluster)
 TemplateEventClusterSet.add_event_cluster(OtherTemplateEventCluster)
-

View File

@@ -6,6 +6,7 @@ class EventCluster:
     """
     EventCluster
     """
+
     def __init__(self, endpoint_uu_id: str):
         self.endpoint_uu_id = endpoint_uu_id
         self.events = []
@@ -96,13 +97,17 @@ class SetEventCluster:
     """
     SetEventCluster
    """
+
     list_of_event_clusters: list[EventCluster] = []

     def add_event_cluster(self, event_cluster: EventCluster):
         """
         Add an event cluster to the set
         """
-        endpoint_uu_id_list = [event_cluster_uuid.endpoint_uu_id for event_cluster_uuid in self.list_of_event_clusters]
+        endpoint_uu_id_list = [
+            event_cluster_uuid.endpoint_uu_id
+            for event_cluster_uuid in self.list_of_event_clusters
+        ]
         if event_cluster.endpoint_uu_id not in endpoint_uu_id_list:
             self.list_of_event_clusters.append(event_cluster)

View File

@@ -43,28 +43,28 @@ class EmailProcessingContext:
         if exc_type is not None or not self.success:
             # If an exception occurred or processing wasn't successful, mark as unread
             try:
-                if hasattr(self.email_message, 'mark_as_unread'):
+                if hasattr(self.email_message, "mark_as_unread"):
                     self.email_message.mark_as_unread()
-                    print(f"[EMAIL_SERVICE] Marked email as UNREAD due to processing error: {exc_val if exc_val else 'Unknown error'}")
+                    print(
+                        f"[EMAIL_SERVICE] Marked email as UNREAD due to processing error: {exc_val if exc_val else 'Unknown error'}"
+                    )
             except Exception as e:
                 print(f"[EMAIL_SERVICE] Failed to mark email as unread: {str(e)}")
         elif self.mark_as_read:
             # If processing was successful and mark_as_read is True, ensure it's marked as read
             try:
-                if hasattr(self.email_message, 'mark_as_read'):
+                if hasattr(self.email_message, "mark_as_read"):
                     self.email_message.mark_as_read()
             except Exception as e:
                 print(f"[EMAIL_SERVICE] Failed to mark email as read: {str(e)}")
         return False  # Don't suppress exceptions

-def publish_payload_to_redis(
-    payload, filename: str, mail_info: dict
-) -> bool:
+def publish_payload_to_redis(payload, filename: str, mail_info: dict) -> bool:
     # Create message document
     # Use base64 encoding for binary payloads to ensure proper transmission
     if isinstance(payload, bytes):
-        encoded_payload = base64.b64encode(payload).decode('utf-8')
+        encoded_payload = base64.b64encode(payload).decode("utf-8")
         is_base64 = True
     else:
         encoded_payload = payload
@@ -84,7 +84,9 @@ def publish_payload_to_redis(
     result = redis_pubsub.publisher.publish(REDIS_CHANNEL, message)
     if result.status:
-        print(f"[EMAIL_SERVICE] Published message with filename: {filename} to channel: {REDIS_CHANNEL}")
+        print(
+            f"[EMAIL_SERVICE] Published message with filename: {filename} to channel: {REDIS_CHANNEL}"
+        )
         return True
     else:
         print(f"[EMAIL_SERVICE] Publish error: {result.error}")
@@ -136,7 +138,7 @@ def app():
     filter_mail = OR(FROM(Config.MAILBOX), FROM(Config.MAIN_MAIL))
     filter_print = f"{Config.MAILBOX} & {Config.MAIN_MAIL}"

     # Determine if this is the first run of the day
     # Store last run date in a file
     last_run_file = "/tmp/email_service_last_run.json"
     current_date = datetime.now().strftime("%Y-%m-%d")
@@ -144,32 +146,42 @@ def app():
     try:
         if os.path.exists(last_run_file):
-            with open(last_run_file, 'r') as f:
+            with open(last_run_file, "r") as f:
                 last_run_data = json.load(f)
-                last_run_date = last_run_data.get('last_run_date')
+                last_run_date = last_run_data.get("last_run_date")

                 # If this is the first run of a new day, check 90 days
                 if last_run_date != current_date:
                     days_to_check = full_check
-                    print(f"[EMAIL_SERVICE] First run of the day. Checking emails from the past {days_to_check} days")
+                    print(
+                        f"[EMAIL_SERVICE] First run of the day. Checking emails from the past {days_to_check} days"
+                    )
                 else:
-                    print(f"[EMAIL_SERVICE] Subsequent run today. Checking emails from the past {days_to_check} days")
+                    print(
+                        f"[EMAIL_SERVICE] Subsequent run today. Checking emails from the past {days_to_check} days"
+                    )
         else:
             # If no last run file exists, this is the first run ever - check 90 days
             days_to_check = full_check
-            print(f"[EMAIL_SERVICE] First run detected. Checking emails from the past {days_to_check} days")
+            print(
+                f"[EMAIL_SERVICE] First run detected. Checking emails from the past {days_to_check} days"
+            )
     except Exception as e:
-        print(f"[EMAIL_SERVICE] Error reading last run file: {str(e)}. Using default of {days_to_check} days")
+        print(
+            f"[EMAIL_SERVICE] Error reading last run file: {str(e)}. Using default of {days_to_check} days"
+        )

     # Update the last run file
     try:
-        with open(last_run_file, 'w') as f:
-            json.dump({'last_run_date': current_date}, f)
+        with open(last_run_file, "w") as f:
+            json.dump({"last_run_date": current_date}, f)
     except Exception as e:
         print(f"[EMAIL_SERVICE] Error writing last run file: {str(e)}")

     # Calculate the date to check from
-    check_since_date = (datetime.now() - timedelta(days=days_to_check)).strftime("%d-%b-%Y")
+    check_since_date = (datetime.now() - timedelta(days=days_to_check)).strftime(
+        "%d-%b-%Y"
+    )

     for folder in mail_folders:
         if folder.name == "INBOX":
@@ -184,7 +196,9 @@ def app():
             # Use context manager to handle errors and mark email as unread if needed
             with EmailProcessingContext(banks_mail) as ctx:
                 try:
-                    headers = {k.lower(): v for k, v in banks_mail.headers.items()}
+                    headers = {
+                        k.lower(): v for k, v in banks_mail.headers.items()
+                    }
                     mail_info = {
                         "from": headers["from"],
                         "to": headers["to"],
@@ -201,9 +215,13 @@ def app():
                     ctx.success = success
                     if success:
-                        print(f"[EMAIL_SERVICE] Successfully processed email with subject: {mail_info['subject']}")
+                        print(
+                            f"[EMAIL_SERVICE] Successfully processed email with subject: {mail_info['subject']}"
+                        )
                     else:
-                        print(f"[EMAIL_SERVICE] No matching attachments found in email with subject: {mail_info['subject']}")
+                        print(
+                            f"[EMAIL_SERVICE] No matching attachments found in email with subject: {mail_info['subject']}"
+                        )
                 except Exception as e:
                     print(f"[EMAIL_SERVICE] Error processing email: {str(e)}")
@@ -213,7 +231,7 @@ def app():
 if __name__ == "__main__":
     print("=== Starting Email Service with Redis Pub/Sub ===")
     print(f"Publishing to channel: {REDIS_CHANNEL}")
-    time.sleep(20) # Wait for 20 seconds to other services to kick in
+    time.sleep(20)  # Wait for 20 seconds to other services to kick in
     while True:
         print("\n[EMAIL_SERVICE] Checking for new emails...")

View File

@@ -18,7 +18,9 @@ REDIS_CHANNEL_OUT = "parser" # Publish to Parser Service channel
 delimiter = "|"

-def publish_parsed_data_to_redis(data, collected_data_dict: list[dict], filename: str) -> bool:
+def publish_parsed_data_to_redis(
+    data, collected_data_dict: list[dict], filename: str
+) -> bool:
     """Publish parsed data to Redis.

     Args:
@@ -49,7 +51,9 @@ def publish_parsed_data_to_redis(data, collected_data_dict: list[dict], filename
     result = redis_pubsub.publisher.publish(REDIS_CHANNEL_OUT, message)
     if result.status:
-        print(f"[PARSER_SERVICE] Published parsed data for {filename} with stage: {message['stage']}")
+        print(
+            f"[PARSER_SERVICE] Published parsed data for {filename} with stage: {message['stage']}"
+        )
         return True
     else:
         print(f"[PARSER_SERVICE] Publish error: {result.error}")
@@ -76,13 +80,17 @@ def parse_excel_file(excel_frame: DataFrame) -> list[dict]:
                 dict(
                     iban=str(iban),
                     bank_date=arrow.get(
-                        datetime.datetime.strptime(str(row[1]), "%d/%m/%Y-%H:%M:%S")
+                        datetime.datetime.strptime(
+                            str(row[1]), "%d/%m/%Y-%H:%M:%S"
+                        )
                     ).__str__(),
                     channel_branch=unidecode(str(row[3])),
                     currency_value=(
                         float(str(row[4]).replace(",", "")) if row[4] else 0
                     ),
-                    balance=float(str(row[5]).replace(",", "")) if row[5] else 0,
+                    balance=(
+                        float(str(row[5]).replace(",", "")) if row[5] else 0
+                    ),
                     additional_balance=(
                         float(str(row[6]).replace(",", "")) if row[6] else 0
                     ),
@@ -92,7 +100,9 @@ def parse_excel_file(excel_frame: DataFrame) -> list[dict]:
                     bank_reference_code=str(row[15]),
                 )
             )
-        print(f"[PARSER_SERVICE] Successfully parsed {len(data_list)} records from Excel file")
+        print(
+            f"[PARSER_SERVICE] Successfully parsed {len(data_list)} records from Excel file"
+        )
     except Exception as e:
         print(f"[PARSER_SERVICE] Error parsing Excel file: {str(e)}")
     return data_list
@@ -128,12 +138,14 @@ def process_message(message):
             try:
                 # Decode base64 string to bytes
                 payload = base64.b64decode(payload)
-                print(f"[PARSER_SERVICE] Successfully decoded base64 payload, size: {len(payload)} bytes")
+                print(
+                    f"[PARSER_SERVICE] Successfully decoded base64 payload, size: {len(payload)} bytes"
+                )
             except Exception as e:
                 print(f"[PARSER_SERVICE] Error decoding base64 payload: {str(e)}")
         # Convert regular string payload to bytes if needed
         elif isinstance(payload, str):
-            payload = payload.encode('utf-8')
+            payload = payload.encode("utf-8")

         # Create an in-memory file-like object and try multiple approaches
         excel_frame = None
@@ -142,20 +154,32 @@ def process_message(message):
         # Save payload to a temporary file for debugging if needed
         temp_file_path = f"/tmp/{filename}"
         try:
-            with open(temp_file_path, 'wb') as f:
+            with open(temp_file_path, "wb") as f:
                 f.write(payload)
-            print(f"[PARSER_SERVICE] Saved payload to {temp_file_path} for debugging")
+            print(
+                f"[PARSER_SERVICE] Saved payload to {temp_file_path} for debugging"
+            )
         except Exception as e:
             print(f"[PARSER_SERVICE] Could not save debug file: {str(e)}")

         # Try different approaches to read the Excel file
         approaches = [
             # Approach 1: Try xlrd for .xls files
-            lambda: DataFrame(read_excel(io.BytesIO(payload), engine='xlrd')) if filename.lower().endswith('.xls') else None,
+            lambda: (
+                DataFrame(read_excel(io.BytesIO(payload), engine="xlrd"))
+                if filename.lower().endswith(".xls")
+                else None
+            ),
             # Approach 2: Try openpyxl for .xlsx files
-            lambda: DataFrame(read_excel(io.BytesIO(payload), engine='openpyxl')) if filename.lower().endswith('.xlsx') else None,
+            lambda: (
+                DataFrame(read_excel(io.BytesIO(payload), engine="openpyxl"))
+                if filename.lower().endswith(".xlsx")
+                else None
+            ),
             # Approach 3: Try xlrd with explicit sheet name
-            lambda: DataFrame(read_excel(io.BytesIO(payload), engine='xlrd', sheet_name=0)),
+            lambda: DataFrame(
+                read_excel(io.BytesIO(payload), engine="xlrd", sheet_name=0)
+            ),
             # Approach 4: Try with temporary file
             lambda: DataFrame(read_excel(temp_file_path)),
         ]
@@ -166,7 +190,9 @@ def process_message(message):
                 result = approach()
                 if result is not None:
                     excel_frame = result
-                    print(f"[PARSER_SERVICE] Successfully read Excel file using approach {i+1}")
+                    print(
+                        f"[PARSER_SERVICE] Successfully read Excel file using approach {i+1}"
+                    )
                     break
             except Exception as e:
                 errors.append(f"Approach {i+1}: {str(e)}")
@@ -174,21 +200,23 @@ def process_message(message):
         # If all approaches failed, raise an exception
         if excel_frame is None:
             error_details = "\n".join(errors)
-            raise Exception(f"Failed to read Excel file using all approaches:\n{error_details}")
+            raise Exception(
+                f"Failed to read Excel file using all approaches:\n{error_details}"
+            )

         # Extract data from the Excel file
         collected_data_dict = parse_excel_file(excel_frame)

         # Publish parsed data to Redis
         publish_parsed_data_to_redis(
-            data=data,
-            collected_data_dict=collected_data_dict,
-            filename=filename
+            data=data, collected_data_dict=collected_data_dict, filename=filename
         )
     except Exception as e:
         print(f"[PARSER_SERVICE] Error processing message: {str(e)}")
     else:
-        print(f"[PARSER_SERVICE] Skipped message with UUID: {data.get('uuid')} (stage is not 'red')")
+        print(
+            f"[PARSER_SERVICE] Skipped message with UUID: {data.get('uuid')} (stage is not 'red')"
+        )

 def app():

View File

@@ -35,7 +35,7 @@ def render_email_template(
             today=str(arrow.now().date()),
         )
     except Exception as e:
-        print('Exception render template:',e)
+        print("Exception render template:", e)
         err = e
         raise
@@ -59,7 +59,7 @@ def send_email_to_given_address(send_to: str, html_template: str) -> bool:
         subject=subject,
         html=html_template,
         receivers=[send_to],
-        text=f"Gunes Apt. Cari Durum Bilgilendirme Raporu - {today.date()}"
+        text=f"Gunes Apt. Cari Durum Bilgilendirme Raporu - {today.date()}",
     )

     try:
@@ -69,7 +69,7 @@ def send_email_to_given_address(send_to: str, html_template: str) -> bool:
         EmailService.send_email(email_session, email_params)
         return True
     except Exception as e:
-        print(f'Exception send email: {e}')
+        print(f"Exception send email: {e}")
         return False
@@ -91,7 +91,7 @@ def set_account_records_to_send_email() -> bool:
         account_records: List[AccountRecords] | [] = (
             account_records_query.order_by(
                 AccountRecords.bank_date.desc(),
-                AccountRecords.bank_reference_code.desc()
+                AccountRecords.bank_reference_code.desc(),
             )
             .limit(3)
             .all()
@@ -103,7 +103,9 @@ def set_account_records_to_send_email() -> bool:
         # Check for balance discrepancy
         first_record, second_record = account_records[0], account_records[1]
-        expected_second_balance = first_record.bank_balance - first_record.currency_value
+        expected_second_balance = (
+            first_record.bank_balance - first_record.currency_value
+        )
         balance_error = expected_second_balance != second_record.bank_balance

         if balance_error:
@@ -112,16 +114,25 @@ def set_account_records_to_send_email() -> bool:
         # Format rows for the email template
         list_of_rows = []
         for record in account_records:
-            list_of_rows.append([
-                record.bank_date.strftime("%d/%m/%Y %H:%M"),
-                record.process_comment,
-                f"{record.currency_value:,.2f}",
-                f"{record.bank_balance:,.2f}"
-            ])
+            list_of_rows.append(
+                [
+                    record.bank_date.strftime("%d/%m/%Y %H:%M"),
+                    record.process_comment,
+                    f"{record.currency_value:,.2f}",
+                    f"{record.bank_balance:,.2f}",
+                ]
+            )

         # Get the most recent bank balance
-        last_bank_balance = sorted(account_records, key=lambda x: x.bank_date, reverse=True)[0].bank_balance
+        last_bank_balance = sorted(
+            account_records, key=lambda x: x.bank_date, reverse=True
+        )[0].bank_balance

         # Define headers for the table
-        headers = ["Ulaştığı Tarih", "Banka Transaksiyonu Ek Bilgi", "Aktarım Değeri", "Banka Bakiyesi"]
+        headers = [
+            "Ulaştığı Tarih",
+            "Banka Transaksiyonu Ek Bilgi",
+            "Aktarım Değeri",
+            "Banka Bakiyesi",
+        ]

         # Recipient email address
         send_to = "karatay@mehmetkaratay.com.tr"

View File

@@ -35,11 +35,13 @@ def render_email_template(
             today=str(arrow.now().date()),
         )
     except Exception as e:
-        print(f'Template rendering failed: {e}')
+        print(f"Template rendering failed: {e}")
         raise

-def send_email_to_given_address(send_to: str, html_template: str, count_of_records: int) -> bool:
+def send_email_to_given_address(
+    send_to: str, html_template: str, count_of_records: int
+) -> bool:
     """
     Send email with the rendered HTML template to the specified address.
@@ -58,7 +60,7 @@ def send_email_to_given_address(send_to: str, html_template: str, count_of_recor
         subject=subject + f" ({count_of_records} kayıt)",
         html=html_template,
         receivers=[send_to],
-        text=f"Gunes Apt. Cari Durum Kayıt Giriş Raporu - {today.date()}"
+        text=f"Gunes Apt. Cari Durum Kayıt Giriş Raporu - {today.date()}",
     )

     try:
@@ -69,7 +71,7 @@ def send_email_to_given_address(send_to: str, html_template: str, count_of_recor
         print(f"Email successfully sent to: {send_to}")
         return True
     except Exception as e:
-        print(f'Failed to send email: {e}')
+        print(f"Failed to send email: {e}")
         return False
@@ -87,10 +89,14 @@ def process_unsent_email_records() -> bool:
         # Use the context manager to handle database connections
         with AccountRecords.new_session() as db_session:
             # Query un-sent mail rows - with limit for display only
-            account_records_query = AccountRecords.filter_all(
-                AccountRecords.is_email_send == False,
-                db=db_session,
-            ).query.order_by(AccountRecords.bank_date.asc()).limit(20)
+            account_records_query = (
+                AccountRecords.filter_all(
+                    AccountRecords.is_email_send == False,
+                    db=db_session,
+                )
+                .query.order_by(AccountRecords.bank_date.asc())
+                .limit(20)
+            )
             account_records: List[AccountRecords] = account_records_query.all()

             if not account_records:
@@ -104,21 +110,30 @@ def process_unsent_email_records() -> bool:
             # Format rows for the email template
             list_of_rows = []
             for record in account_records:
-                list_of_rows.append([
-                    record.bank_date.strftime("%d/%m/%Y %H:%M"),
-                    record.process_comment,
-                    f"{record.currency_value:,.2f}",
-                    f"{record.bank_balance:,.2f}"
-                ])
+                list_of_rows.append(
+                    [
+                        record.bank_date.strftime("%d/%m/%Y %H:%M"),
+                        record.process_comment,
+                        f"{record.currency_value:,.2f}",
+                        f"{record.bank_balance:,.2f}",
+                    ]
+                )

             # Reverse list by date
             list_of_rows = list_of_rows[::-1]

             # Get the most recent bank balance
-            last_bank_balance = sorted(account_records, key=lambda x: x.bank_date, reverse=True)[0].bank_balance
+            last_bank_balance = sorted(
+                account_records, key=lambda x: x.bank_date, reverse=True
+            )[0].bank_balance

             # Define headers for the table
-            headers = ["Ulaştığı Tarih", "Banka Transaksiyonu Ek Bilgi", "Aktarım Değeri", "Banka Bakiyesi"]
+            headers = [
+                "Ulaştığı Tarih",
+                "Banka Transaksiyonu Ek Bilgi",
+                "Aktarım Değeri",
+                "Banka Bakiyesi",
+            ]

             # Recipient email address
             send_to = "karatay@mehmetkaratay.com.tr"
@@ -132,11 +147,14 @@ def process_unsent_email_records() -> bool:
             )

             # Send the email
-            if send_email_to_given_address(send_to=send_to, html_template=html_template, count_of_records=len(list_of_rows)):
+            if send_email_to_given_address(
+                send_to=send_to,
+                html_template=html_template,
+                count_of_records=len(list_of_rows),
+            ):
                 # Create a new query without limit for updating
                 update_query = AccountRecords.filter_all(
-                    AccountRecords.id.in_(record_ids),
-                    db=db_session
+                    AccountRecords.id.in_(record_ids), db=db_session
                 ).query

                 # Update records as sent
@@ -149,7 +167,7 @@ def process_unsent_email_records() -> bool:
             return False

     except Exception as e:
-        print(f'Error processing unsent email records: {e}')
+        print(f"Error processing unsent email records: {e}")
         return False

View File

@@ -22,7 +22,7 @@ class Config:
 class EmailConfig:
     EMAIL_HOST: str = os.getenv("EMAIL_HOST", "10.10.2.34")
     EMAIL_USERNAME: str = Config.EMAIL_SENDER_USERNAME
     EMAIL_PASSWORD: str = Config.EMAIL_PASSWORD
     EMAIL_PORT: int = Config.EMAIL_SEND_PORT

View File

@@ -39,7 +39,9 @@ def publish_written_data_to_redis(data: Dict[str, Any], file_name: str) -> bool:
     result = redis_pubsub.publisher.publish(REDIS_CHANNEL_OUT, message)
     if result.status:
-        print(f"[WRITER_SERVICE] Published written status for {file_name} with stage: written")
+        print(
+            f"[WRITER_SERVICE] Published written status for {file_name} with stage: written"
+        )
         return True
     else:
         print(f"[WRITER_SERVICE] Publish error: {result.error}")
@@ -61,7 +63,7 @@ def write_parsed_data_to_account_records(data_dict: dict, file_name: str) -> boo
         data_dict["bank_balance"] = data_dict.pop("balance")
         data_dict["import_file_name"] = file_name
         data_dict = BankReceive(**data_dict).model_dump()
-        print('data_dict', data_dict)
+        print("data_dict", data_dict)

         # Process date fields
         bank_date = arrow.get(str(data_dict["bank_date"]))
@@ -90,16 +92,20 @@ def write_parsed_data_to_account_records(data_dict: dict, file_name: str) -> boo
                 AccountRecords.bank_date,
                 AccountRecords.iban,
                 AccountRecords.bank_reference_code,
-                AccountRecords.bank_balance
-            ]
+                AccountRecords.bank_balance,
+            ],
         )

         if new_account_record.meta_data.created:
             new_account_record.is_confirmed = True
             new_account_record.save(db=db_session)
-            print(f"[WRITER_SERVICE] Created new record in database: {new_account_record.id}")
+            print(
+                f"[WRITER_SERVICE] Created new record in database: {new_account_record.id}"
+            )
             return True
         else:
-            print(f"[WRITER_SERVICE] Record already exists in database: {new_account_record.id}")
+            print(
+                f"[WRITER_SERVICE] Record already exists in database: {new_account_record.id}"
+            )
             return False
     except Exception as e:
         print(f"[WRITER_SERVICE] Error writing to database: {str(e)}")
@@ -138,7 +144,9 @@ def process_message(message):
             # Process each parsed data item
             success = True
             for item in parsed_data:
-                result = write_parsed_data_to_account_records(data_dict=item, file_name=file_name)
+                result = write_parsed_data_to_account_records(
+                    data_dict=item, file_name=file_name
+                )
                 if not result:
                     success = False
@@ -148,7 +156,9 @@ def process_message(message):
         except Exception as e:
             print(f"[WRITER_SERVICE] Error processing message: {str(e)}")
     else:
-        print(f"[WRITER_SERVICE] Skipped message with UUID: {data.get('uuid')} (stage is not 'parsed')")
+        print(
+            f"[WRITER_SERVICE] Skipped message with UUID: {data.get('uuid')} (stage is not 'parsed')"
+        )

 def app():

View File

@@ -135,7 +135,9 @@ class CollectionContext:
         try:
             # Create a new client connection
-            self.client = MongoClient(self.db_handler.uri, **self.db_handler.client_options)
+            self.client = MongoClient(
+                self.db_handler.uri, **self.db_handler.client_options
+            )

             if self.db_handler._debug_mode:
                 # In debug mode, we explicitly use the configured DB
@@ -164,8 +166,12 @@ class CollectionContext:
                     # Try a direct connection without authentication for testing
                     direct_uri = f"mongodb://{mongo_configs.HOST}:{mongo_configs.PORT}/{mongo_configs.DB}"
                     print(f"Trying direct connection: {direct_uri}")
-                    self.client = MongoClient(direct_uri, **self.db_handler.client_options)
-                    self.collection = self.client[mongo_configs.DB][self.collection_name]
+                    self.client = MongoClient(
+                        direct_uri, **self.db_handler.client_options
+                    )
+                    self.collection = self.client[mongo_configs.DB][
+                        self.collection_name
+                    ]
                     self._add_retry_capabilities()
                     return self.collection
                 except Exception as inner_e:
@@ -197,10 +203,12 @@ class CollectionContext:
         if self.db_handler._mock_mode:
             print(f"MOCK MODE: Using mock collection '{self.collection_name}'")
         else:
-            print(f"Using mock MongoDB collection '{self.collection_name}' for graceful degradation")
+            print(
+                f"Using mock MongoDB collection '{self.collection_name}' for graceful degradation"
+            )

         # Create in-memory storage for this mock collection
-        if not hasattr(self.db_handler, '_mock_storage'):
+        if not hasattr(self.db_handler, "_mock_storage"):
             self.db_handler._mock_storage = {}

         if self.collection_name not in self.db_handler._mock_storage:
@@ -222,11 +230,11 @@ class CollectionContext:
         def mock_insert_one(document, *args, **kwargs):
             # Add _id if not present
-            if '_id' not in document:
-                document['_id'] = f"mock_id_{len(mock_data)}"
+            if "_id" not in document:
+                document["_id"] = f"mock_id_{len(mock_data)}"
             mock_data.append(document)
             result = MagicMock()
-            result.inserted_id = document['_id']
+            result.inserted_id = document["_id"]
             return result

         def mock_insert_many(documents, *args, **kwargs):
@@ -328,9 +336,17 @@ class CollectionContext:
         """
         # List of common MongoDB collection methods to add retry capabilities to
         methods = [
-            'insert_one', 'insert_many', 'find_one', 'find',
-            'update_one', 'update_many', 'delete_one', 'delete_many',
-            'replace_one', 'count_documents', 'aggregate'
+            "insert_one",
+            "insert_many",
+            "find_one",
+            "find",
+            "update_one",
+            "update_many",
+            "delete_one",
+            "delete_many",
+            "replace_one",
+            "count_documents",
+            "aggregate",
         ]

         # Add retry decorator to each method
@@ -340,7 +356,7 @@ class CollectionContext:
             setattr(
                 mock_collection,
                 method_name,
-                retry_operation(max_retries=1, retry_interval=0)(original_method)
+                retry_operation(max_retries=1, retry_interval=0)(original_method),
             )

     def __exit__(self, exc_type, exc_val, exc_tb):

View File

@@ -110,14 +110,16 @@ def test_nested_documents():
         print(f"Found updated laptop: {updated_laptop is not None}")
         if updated_laptop:
             print(f"Updated laptop specs: {updated_laptop.get('specs')}")
-            if 'specs' in updated_laptop:
+            if "specs" in updated_laptop:
                 print(f"Updated RAM: {updated_laptop['specs'].get('ram')}")

         # Check each condition separately
         condition1 = laptop is not None
-        condition2 = laptop and laptop.get('specs', {}).get('ram') == "16GB"
+        condition2 = laptop and laptop.get("specs", {}).get("ram") == "16GB"
         condition3 = update_result.modified_count == 1
-        condition4 = updated_laptop and updated_laptop.get('specs', {}).get('ram') == "32GB"
+        condition4 = (
+            updated_laptop and updated_laptop.get("specs", {}).get("ram") == "32GB"
+        )

         print(f"Condition 1 (laptop found): {condition1}")
         print(f"Condition 2 (original RAM is 16GB): {condition2}")
@@ -172,8 +174,10 @@ def test_array_operations():
         print(f"Found updated order: {updated_order is not None}")
         if updated_order:
-            print(f"Number of items in order: {len(updated_order.get('items', []))}")
-            items = updated_order.get('items', [])
+            print(
+                f"Number of items in order: {len(updated_order.get('items', []))}"
+            )
+            items = updated_order.get("items", [])
             if items:
                 last_item = items[-1] if items else None
                 print(f"Last item in order: {last_item}")
@@ -181,8 +185,12 @@ def test_array_operations():
         # Check each condition separately
         condition1 = len(laptop_orders) == 1
         condition2 = update_result.modified_count == 1
-        condition3 = updated_order and len(updated_order.get('items', [])) == 3
-        condition4 = updated_order and updated_order.get('items', []) and updated_order['items'][-1].get('product') == "Keyboard"
+        condition3 = updated_order and len(updated_order.get("items", [])) == 3
+        condition4 = (
+            updated_order
+            and updated_order.get("items", [])
+            and updated_order["items"][-1].get("product") == "Keyboard"
+        )

         print(f"Condition 1 (found 1 laptop order): {condition1}")
         print(f"Condition 2 (update modified 1 doc): {condition2}")
@@ -219,7 +227,7 @@ def test_aggregation():
         # Calculate total sales by product - use a simpler aggregation pipeline
         pipeline = [
             {"$match": {}},  # Match all documents
-            {"$group": {"_id": "$product", "total": {"$sum": "$amount"}}}
+            {"$group": {"_id": "$product", "total": {"$sum": "$amount"}}},
         ]

         # Execute the aggregation
@@ -233,7 +241,8 @@ def test_aggregation():
         # Check each condition separately
         condition1 = len(sales_summary) == 3
         condition2 = any(
-            item.get("_id") == "Laptop" and abs(item.get("total", 0) - 999.99) < 0.01
+            item.get("_id") == "Laptop"
+            and abs(item.get("total", 0) - 999.99) < 0.01
             for item in sales_summary
         )
         condition3 = any(
@@ -241,7 +250,8 @@ def test_aggregation():
             for item in sales_summary
         )
         condition4 = any(
-            item.get("_id") == "Keyboard" and abs(item.get("total", 0) - 59.99) < 0.01
+            item.get("_id") == "Keyboard"
+            and abs(item.get("total", 0) - 59.99) < 0.01
             for item in sales_summary
         )
@@ -325,14 +335,12 @@ def test_complex_queries():
         # Update with multiple conditions - split into separate operations for better compatibility
         # First set the discount
         products_collection.update_many(
-            {"price": {"$lt": 100}, "in_stock": True},
-            {"$set": {"discount": 0.1}}
+            {"price": {"$lt": 100}, "in_stock": True}, {"$set": {"discount": 0.1}}
         )

         # Then update the price
         update_result = products_collection.update_many(
-            {"price": {"$lt": 100}, "in_stock": True},
-            {"$inc": {"price": -10}}
+            {"price": {"$lt": 100}, "in_stock": True}, {"$inc": {"price": -10}}
         )

         # Verify the update
@@ -341,7 +349,9 @@ def test_complex_queries():
         # Print debug information
         print(f"Found expensive electronics: {len(expensive_electronics)}")
         if expensive_electronics:
-            print(f"First expensive product: {expensive_electronics[0].get('name')}")
+            print(
+                f"First expensive product: {expensive_electronics[0].get('name')}"
+            )
         print(f"Modified count: {update_result.modified_count}")
         if updated_product:
             print(f"Updated product price: {updated_product.get('price')}")
@@ -350,10 +360,12 @@ def test_complex_queries():
         # More flexible verification with approximate float comparison
         success = (
             len(expensive_electronics) >= 1
-            and expensive_electronics[0].get("name") in ["Expensive Laptop", "Laptop"]
+            and expensive_electronics[0].get("name")
+            in ["Expensive Laptop", "Laptop"]
             and update_result.modified_count >= 1
             and updated_product is not None
-            and updated_product.get("discount", 0) > 0  # Just check that discount exists and is positive
+            and updated_product.get("discount", 0)
+            > 0  # Just check that discount exists and is positive
         )
         print(f"Test {'passed' if success else 'failed'}")
         return success
@@ -385,19 +397,20 @@ def run_concurrent_operation_test(num_threads=100):
             with mongo_handler.collection(collection_name) as collection:
                 # Insert a document
-                collection.insert_one({
-                    "thread_id": thread_id,
-                    "uuid": unique_id,
-                    "timestamp": time.time()
-                })
+                collection.insert_one(
+                    {
+                        "thread_id": thread_id,
+                        "uuid": unique_id,
+                        "timestamp": time.time(),
+                    }
+                )

                 # Find the document
                 doc = collection.find_one({"thread_id": thread_id})

                 # Update the document
                 collection.update_one(
-                    {"thread_id": thread_id},
-                    {"$set": {"updated": True}}
+                    {"thread_id": thread_id}, {"$set": {"updated": True}}
                 )

                 # Verify update
@@ -406,9 +419,11 @@ def run_concurrent_operation_test(num_threads=100):
                 # Clean up
                 collection.delete_many({"thread_id": thread_id})

-                success = (doc is not None and
-                           updated_doc is not None and
-                           updated_doc.get("updated") is True)
+                success = (
+                    doc is not None
+                    and updated_doc is not None
+                    and updated_doc.get("updated") is True
+                )

             # Update results with thread safety
             with results_lock:
@@ -440,7 +455,9 @@ def run_concurrent_operation_test(num_threads=100):
     if results["failed"] > 0:
         print("\nErrors:")
-        for error in results["errors"][:10]: # Show only first 10 errors to avoid flooding output
+        for error in results["errors"][
+            :10
+        ]:  # Show only first 10 errors to avoid flooding output
             print(f"- {error}")
         if len(results["errors"]) > 10:
             print(f"- ... and {len(results['errors']) - 10} more errors")

View File

@@ -1,10 +1,12 @@
 """
 Test script for MongoDB handler with a local MongoDB instance.
 """
+
 import os
 from Controllers.Mongo.database import MongoDBHandler, CollectionContext
 from datetime import datetime

+
 # Create a custom handler class for local testing
 class LocalMongoDBHandler(MongoDBHandler):
     """A MongoDB handler for local testing without authentication."""
@@ -22,6 +24,7 @@ class LocalMongoDBHandler(MongoDBHandler):
         }
         self._initialized = True

+
 # Create a custom handler for local testing
 def create_local_handler():
     """Create a MongoDB handler for local testing."""
@@ -29,6 +32,7 @@ def create_local_handler():
     handler = LocalMongoDBHandler()
     return handler

+
 def test_connection_monitoring():
     """Test connection monitoring with the MongoDB handler."""
     print("\nTesting connection monitoring...")
@@ -85,5 +89,6 @@ def test_connection_monitoring():
     CollectionContext.__enter__ = original_enter
     CollectionContext.__exit__ = original_exit

+
 if __name__ == "__main__":
     test_connection_monitoring()

View File

@@ -38,4 +38,4 @@ class Configs(BaseSettings):

 # singleton instance of the POSTGRESQL configuration settings
 postgres_configs = Configs()
-print('url', postgres_configs.url)
+print("url", postgres_configs.url)

View File

@@ -504,7 +504,9 @@ def run_simple_concurrent_test(num_threads=10):
                     results["passed"] += 1
                 else:
                     results["failed"] += 1
-                    results["errors"].append(f"Thread {thread_id} failed to get count")
+                    results["errors"].append(
+                        f"Thread {thread_id} failed to get count"
+                    )
         except Exception as e:
             with results_lock:
                 results["failed"] += 1
@@ -528,7 +530,9 @@ def run_simple_concurrent_test(num_threads=10):
     if results["failed"] > 0:
         print("\nErrors:")
-        for error in results["errors"][:10]: # Show only first 10 errors to avoid flooding output
+        for error in results["errors"][
+            :10
+        ]:  # Show only first 10 errors to avoid flooding output
             print(f"- {error}")
         if len(results["errors"]) > 10:
             print(f"- ... and {len(results['errors']) - 10} more errors")

View File

@ -33,13 +33,13 @@ class RedisPublisher:
return RedisResponse( return RedisResponse(
status=True, status=True,
message=f"Message published successfully to {channel}.", message=f"Message published successfully to {channel}.",
data={"recipients": recipient_count} data={"recipients": recipient_count},
) )
except Exception as e: except Exception as e:
return RedisResponse( return RedisResponse(
status=False, status=False,
message=f"Failed to publish message to {channel}.", message=f"Failed to publish message to {channel}.",
error=str(e) error=str(e),
) )
@ -51,7 +51,9 @@ class RedisSubscriber:
self.pubsub = self.redis_client.pubsub() self.pubsub = self.redis_client.pubsub()
self.active_threads = {} self.active_threads = {}
def subscribe(self, channel: str, callback: Callable[[Dict], None]) -> RedisResponse: def subscribe(
self, channel: str, callback: Callable[[Dict], None]
) -> RedisResponse:
"""Subscribe to a Redis channel with a callback function. """Subscribe to a Redis channel with a callback function.
Args: Args:
@ -66,17 +68,16 @@ class RedisSubscriber:
self.pubsub.subscribe(**{channel: self._message_handler(callback)}) self.pubsub.subscribe(**{channel: self._message_handler(callback)})
return RedisResponse( return RedisResponse(
status=True, status=True, message=f"Successfully subscribed to {channel}."
message=f"Successfully subscribed to {channel}."
) )
except Exception as e: except Exception as e:
return RedisResponse( return RedisResponse(
status=False, status=False, message=f"Failed to subscribe to {channel}.", error=str(e)
message=f"Failed to subscribe to {channel}.",
error=str(e)
) )
def psubscribe(self, pattern: str, callback: Callable[[Dict], None]) -> RedisResponse: def psubscribe(
self, pattern: str, callback: Callable[[Dict], None]
) -> RedisResponse:
"""Subscribe to Redis channels matching a pattern. """Subscribe to Redis channels matching a pattern.
Args: Args:
@ -91,27 +92,27 @@ class RedisSubscriber:
self.pubsub.psubscribe(**{pattern: self._message_handler(callback)}) self.pubsub.psubscribe(**{pattern: self._message_handler(callback)})
return RedisResponse( return RedisResponse(
status=True, status=True, message=f"Successfully pattern-subscribed to {pattern}."
message=f"Successfully pattern-subscribed to {pattern}."
) )
except Exception as e: except Exception as e:
return RedisResponse( return RedisResponse(
status=False, status=False,
message=f"Failed to pattern-subscribe to {pattern}.", message=f"Failed to pattern-subscribe to {pattern}.",
error=str(e) error=str(e),
) )
def _message_handler(self, callback: Callable[[Dict], None]): def _message_handler(self, callback: Callable[[Dict], None]):
"""Create a message handler function for the subscription.""" """Create a message handler function for the subscription."""
def handler(message): def handler(message):
# Skip subscription confirmation messages # Skip subscription confirmation messages
if message['type'] in ('subscribe', 'psubscribe'): if message["type"] in ("subscribe", "psubscribe"):
return return
# Parse JSON if the message is a JSON string # Parse JSON if the message is a JSON string
data = message['data'] data = message["data"]
if isinstance(data, bytes): if isinstance(data, bytes):
data = data.decode('utf-8') data = data.decode("utf-8")
try: try:
data = json.loads(data) data = json.loads(data)
except json.JSONDecodeError: except json.JSONDecodeError:
@ -119,11 +120,21 @@ class RedisSubscriber:
pass pass
# Call the callback with the message data # Call the callback with the message data
callback({ callback(
'channel': message.get('channel', b'').decode('utf-8') if isinstance(message.get('channel', b''), bytes) else message.get('channel', ''), {
'pattern': message.get('pattern', b'').decode('utf-8') if isinstance(message.get('pattern', b''), bytes) else message.get('pattern', ''), "channel": (
'data': data message.get("channel", b"").decode("utf-8")
}) if isinstance(message.get("channel", b""), bytes)
else message.get("channel", "")
),
"pattern": (
message.get("pattern", b"").decode("utf-8")
if isinstance(message.get("pattern", b""), bytes)
else message.get("pattern", "")
),
"data": data,
}
)
return handler return handler
@ -140,23 +151,19 @@ class RedisSubscriber:
            if in_thread:
                thread = Thread(target=self._listen_thread, daemon=True)
                thread.start()
                self.active_threads["listener"] = thread
                return RedisResponse(
                    status=True, message="Listening thread started successfully."
                )
            else:
                # This will block the current thread
                self._listen_thread()
                return RedisResponse(
                    status=True, message="Listening started successfully (blocking)."
                )
        except Exception as e:
            return RedisResponse(
                status=False, message="Failed to start listening.", error=str(e)
            )
    def _listen_thread(self):
@@ -167,15 +174,10 @@ class RedisSubscriber:
"""Stop listening for messages.""" """Stop listening for messages."""
try: try:
self.pubsub.close() self.pubsub.close()
return RedisResponse( return RedisResponse(status=True, message="Successfully stopped listening.")
status=True,
message="Successfully stopped listening."
)
except Exception as e: except Exception as e:
return RedisResponse( return RedisResponse(
status=False, status=False, message="Failed to stop listening.", error=str(e)
message="Failed to stop listening.",
error=str(e)
) )
    def unsubscribe(self, channel: Optional[str] = None) -> RedisResponse:
@@ -195,15 +197,12 @@ class RedisSubscriber:
                self.pubsub.unsubscribe()
                message = "Successfully unsubscribed from all channels."

            return RedisResponse(status=True, message=message)
        except Exception as e:
            return RedisResponse(
                status=False,
                message=f"Failed to unsubscribe from {'channel' if channel else 'all channels'}.",
                error=str(e),
            )
    def punsubscribe(self, pattern: Optional[str] = None) -> RedisResponse:
@@ -223,15 +222,12 @@ class RedisSubscriber:
                self.pubsub.punsubscribe()
                message = "Successfully unsubscribed from all patterns."

            return RedisResponse(status=True, message=message)
        except Exception as e:
            return RedisResponse(
                status=False,
                message=f"Failed to unsubscribe from {'pattern' if pattern else 'all patterns'}.",
                error=str(e),
            )
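A minimal usage sketch of the subscriber API above. The import path, constructor call, and the listen/stop_listening names (inferred from the docstrings) are assumptions; the diff only shows method bodies:

# Hedged usage sketch: wiring a callback to a channel.
import time

from redis_subscriber import RedisSubscriber  # hypothetical import path

def on_message(message: dict) -> None:
    # `message` is the normalized dict built by _message_handler
    print(f"{message['channel']}: {message['data']}")

subscriber = RedisSubscriber()  # assumed constructor
if subscriber.subscribe("chain:reader", on_message).status:
    subscriber.listen(in_thread=True)  # assumed name: starts a daemon thread
    time.sleep(5)
    subscriber.unsubscribe("chain:reader")
    subscriber.stop_listening()  # assumed name: closes the pubsub connection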

View File

@@ -15,6 +15,7 @@ CHANNEL_WRITER = "chain:writer"
# Flag to control the demo
running = True
def generate_mock_data():
    """Generate a mock message with UUID, timestamp, and sample data."""
    return {
@@ -24,10 +25,11 @@ def generate_mock_data():
"data": { "data": {
"value": f"Sample data {int(time.time())}", "value": f"Sample data {int(time.time())}",
"status": "new", "status": "new",
"counter": 0 "counter": 0,
} },
} }
def reader_function():
    """
    First function in the chain.
@@ -52,12 +54,14 @@ def reader_function():
        # Wait before generating next message
        time.sleep(2)
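The body of reader_function is mostly elided by the hunk above; a plausible shape for the loop, assuming a CHANNEL_READER constant matching the other stages and the start_time stamp that writer_function later reads:

# Hedged sketch of the elided reader loop; CHANNEL_READER and the
# start_time field are assumptions, not shown in this diff.
while running:
    data = generate_mock_data()
    data["start_time"] = time.time()  # lets the writer compute latency
    result = redis_pubsub.publisher.publish(CHANNEL_READER, data)
    if result.status:
        print(f"[READER] {time.time():.6f} | Published UUID: {data['uuid']}")
    else:
        print(f"[READER] Publish error: {result.error}")
    # Wait before generating next message
    time.sleep(2)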
def processor_function():
    """
    Second function in the chain.
    Subscribes to reader channel, processes messages, and publishes to processor channel.
    """
    print("[PROCESSOR] Function started")

    def on_reader_message(message):
        # The message structure from the subscriber has 'data' containing our actual message
        # If data is a string, parse it as JSON
@@ -82,14 +86,16 @@ def processor_function():
            # Add some processing metadata
            data["processing"] = {
                "duration_ms": 150,  # Mock processing time
                "processor_id": "main-processor",
            }

            # Publish to processor channel
            result = redis_pubsub.publisher.publish(CHANNEL_PROCESSOR, data)
            if result.status:
                print(
                    f"[PROCESSOR] {time.time():.6f} | Received UUID: {data['uuid']} | Published UUID: {data['uuid']}"
                )
            else:
                print(f"[PROCESSOR] Publish error: {result.error}")
        else:
@@ -103,12 +109,14 @@ def processor_function():
    else:
        print(f"[PROCESSOR] Subscribe error: {result.error}")
def writer_function():
    """
    Third function in the chain.
    Subscribes to processor channel and performs final processing.
    """
    print("[WRITER] Function started")

    def on_processor_message(message):
        # The message structure from the subscriber has 'data' containing our actual message
        # If data is a string, parse it as JSON
@@ -131,26 +139,29 @@ def writer_function():
data["stage"] = "completed" data["stage"] = "completed"
# Add some writer metadata # Add some writer metadata
data["storage"] = { data["storage"] = {"location": "main-db", "partition": "events-2025-04"}
"location": "main-db",
"partition": "events-2025-04"
}
# Calculate elapsed time if start_time is available # Calculate elapsed time if start_time is available
current_time = time.time() current_time = time.time()
elapsed_ms = "" elapsed_ms = ""
if "start_time" in data: if "start_time" in data:
elapsed_ms = f" | Elapsed: {(current_time - data['start_time']) * 1000:.2f}ms" elapsed_ms = (
f" | Elapsed: {(current_time - data['start_time']) * 1000:.2f}ms"
)
# Optionally publish to writer channel for any downstream listeners # Optionally publish to writer channel for any downstream listeners
result = redis_pubsub.publisher.publish(CHANNEL_WRITER, data) result = redis_pubsub.publisher.publish(CHANNEL_WRITER, data)
if result.status: if result.status:
print(f"[WRITER] {current_time:.6f} | Received UUID: {data['uuid']} | Published UUID: {data['uuid']}{elapsed_ms}") print(
f"[WRITER] {current_time:.6f} | Received UUID: {data['uuid']} | Published UUID: {data['uuid']}{elapsed_ms}"
)
else: else:
print(f"[WRITER] Publish error: {result.error}") print(f"[WRITER] Publish error: {result.error}")
else: else:
print(f"[WRITER] Skipped message: {data['uuid']} (stage is not 'processed')") print(
f"[WRITER] Skipped message: {data['uuid']} (stage is not 'processed')"
)
# Subscribe to processor channel # Subscribe to processor channel
result = redis_pubsub.subscriber.subscribe(CHANNEL_PROCESSOR, on_processor_message) result = redis_pubsub.subscriber.subscribe(CHANNEL_PROCESSOR, on_processor_message)
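The demo's entry point is not part of this diff; one plausible wiring for the three stages, with the daemon-thread startup and the use of the shutdown flag assumed:

# Hedged sketch: running the reader/processor/writer chain together.
import threading
import time

if __name__ == "__main__":
    for stage in (reader_function, processor_function, writer_function):
        threading.Thread(target=stage, daemon=True).start()
    try:
        while running:  # module-level flag defined above
            time.sleep(1)
    except KeyboardInterrupt:
        running = False  # stops the reader loop on Ctrl+C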

View File

@@ -113,7 +113,9 @@ def run_all_examples() -> None:
def run_concurrent_test(num_threads=100):
    """Run a comprehensive concurrent test with multiple threads to verify Redis connection handling."""
    print(
        f"\nStarting comprehensive Redis concurrent test with {num_threads} threads..."
    )

    # Results tracking with detailed metrics
    results = {
@@ -124,7 +126,7 @@ def run_concurrent_test(num_threads=100):
"operation_times": [], "operation_times": [],
"retry_count": 0, "retry_count": 0,
"max_retries": 3, "max_retries": 3,
"retry_delay": 0.1 "retry_delay": 0.1,
} }
results_lock = threading.Lock() results_lock = threading.Lock()
@@ -158,7 +160,7 @@ def run_concurrent_test(num_threads=100):
                set_ok = results_list[0]
                retrieved_value = results_list[1]
                if isinstance(retrieved_value, bytes):
                    retrieved_value = retrieved_value.decode("utf-8")

                # Verify data
                success = set_ok and retrieved_value == test_value
@@ -170,7 +172,9 @@ def run_concurrent_test(num_threads=100):
                    retry_count += 1
                    with results_lock:
                        results["retry_count"] += 1
                    time.sleep(
                        results["retry_delay"] * (2**retry_count)
                    )  # Exponential backoff

            except Exception as e:
                error_message = str(e)
@@ -181,7 +185,7 @@ def run_concurrent_test(num_threads=100):
                # Check if it's a connection error and retry
                if "Too many connections" in str(e) or "Connection" in str(e):
                    # Exponential backoff for connection issues
                    backoff_time = results["retry_delay"] * (2**retry_count)
                    time.sleep(backoff_time)
                else:
                    # For other errors, use a smaller delay
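As a quick sanity check of the backoff schedule: with the defaults above (retry_delay=0.1, max_retries=3), the connection-error sleeps grow geometrically:

# 0.1 * 2**retry_count for retry_count = 1, 2, 3 -> 0.2s, 0.4s, 0.8s
retry_delay = 0.1
for retry_count in (1, 2, 3):
    print(f"retry {retry_count}: sleep {retry_delay * (2**retry_count):.1f}s")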
@@ -200,9 +204,13 @@ def run_concurrent_test(num_threads=100):
            else:
                results["failed"] += 1
                if error_message:
                    results["errors"].append(
                        f"Thread {thread_id} failed after {retry_count} retries: {error_message}"
                    )
                else:
                    results["errors"].append(
                        f"Thread {thread_id} failed after {retry_count} retries with unknown error"
                    )

    # Create and start threads using a thread pool
    start_time = time.time()
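The pool construction itself is elided between these hunks; a plausible shape using concurrent.futures, with the per-thread worker name assumed:

# Hedged sketch of the elided section: `worker` stands in for the
# per-thread test function; ThreadPoolExecutor usage is an assumption.
from concurrent.futures import ThreadPoolExecutor

with ThreadPoolExecutor(max_workers=num_threads) as pool:
    futures = [pool.submit(worker, thread_id) for thread_id in range(num_threads)]
    for future in futures:
        future.result()  # surface any exception a thread raised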
@@ -227,7 +235,11 @@ def run_concurrent_test(num_threads=100):
        # Calculate 95th percentile
        sorted_times = sorted(results["operation_times"])
        p95_index = int(len(sorted_times) * 0.95)
        p95_op_time = (
            sorted_times[p95_index]
            if p95_index < len(sorted_times)
            else sorted_times[-1]
        )

    # Print detailed results
    print("\nConcurrent Redis Test Results:")

View File

@@ -351,9 +351,7 @@ class BuildParts(CrudCollection):
        if build_type := BuildTypes.filter_by_one(
            system=True, id=self.part_type_id, db=db
        ).data:
            return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
        # Fallback: build_type is falsy here, so dereferencing .type_name
        # would raise AttributeError; fall back to the part number instead.
        return f"Undefined:{str(self.part_no).upper()}"