Compare commits
11 Commits
7452e05a92
...
development
| Author | SHA1 | Date | |
|---|---|---|---|
| 9dd8740171 | |||
| 4e6774a15b | |||
| e4f6afbc93 | |||
| 61529f7d94 | |||
| 9543d136aa | |||
| 456203f5cf | |||
| 82b1d4825b | |||
| 4ec9031005 | |||
| 7a5521648c | |||
| ca98adc338 | |||
| 405ba2e95d |
7
.gitignore
vendored
7
.gitignore
vendored
@@ -56,8 +56,9 @@ pids
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
env
|
||||
.env
|
||||
ServicesRunner/AccountRecordServices/Finder/Iban/.prisma-cache
|
||||
venv/
|
||||
**/.prisma-cache
|
||||
|
||||
venv/
|
||||
.vscode/
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.py[cod]
|
||||
|
||||
@@ -403,6 +403,7 @@ model account_records {
|
||||
accounting_receipt_number Int @default(0)
|
||||
status_id Int @default(0) @db.SmallInt
|
||||
approved_record Boolean @default(false)
|
||||
is_predicted Boolean @default(false)
|
||||
import_file_name String? @db.VarChar
|
||||
receive_debit Int?
|
||||
receive_debit_uu_id String? @db.VarChar
|
||||
|
||||
@@ -6,6 +6,7 @@ import { PrismaService } from '@/src/prisma.service';
|
||||
import { EventsService } from '@/src/navigator/events/events.service';
|
||||
import { PagesService } from '@/src/navigator/pages/pages.service';
|
||||
import { MenusService } from '@/src/navigator/menus/menu.service';
|
||||
import { includes } from 'zod';
|
||||
|
||||
@Injectable()
|
||||
export class SelectService {
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
*.db
|
||||
*.sqlite3
|
||||
*.log
|
||||
*.env
|
||||
venv/
|
||||
.env.*
|
||||
node_modules/
|
||||
.prisma/
|
||||
.prisma-cache/
|
||||
# NOTE(review): "ServicesRunnner" (triple "n") looks like a typo for "ServicesRunner" — confirm the directory name, otherwise this ignore rule never matches
ServicesRunnner/AccountRecordServices/Test/venv/
|
||||
@@ -0,0 +1,22 @@
|
||||
# Runtime image for the Finder/Comment service.
FROM python:3.12-slim

# Unbuffered stdout/stderr; no .pyc files written inside the container.
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
# Virtualenv is created lazily by entrypoint.sh at this path.
ENV VIRTUAL_ENV=/opt/venv
ENV PRISMA_SCHEMA_PATH=/app/Depends/schema.prisma
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
ENV PYTHONPATH=/app

# gcc to build wheels; curl also installed (purpose not visible here);
# apt lists removed to keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends gcc curl && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Shared Depends package plus this service's own code.
COPY ServicesRunner/Depends/ /app/Depends/
COPY ServicesRunner/AccountRecordServices/Finder/Comment /app/

COPY ServicesRunner/requirements.txt /app/requirements.txt
COPY ServicesRunner/AccountRecordServices/Finder/Comment/entrypoint.sh /entrypoint.sh

RUN chmod +x /entrypoint.sh

# entrypoint.sh bootstraps the venv and execs the service.
CMD ["/entrypoint.sh"]
|
||||
194
ServicesRunner/AccountRecordServices/Finder/Accounts/app.py
Normal file
194
ServicesRunner/AccountRecordServices/Finder/Accounts/app.py
Normal file
@@ -0,0 +1,194 @@
|
||||
import time
|
||||
import arrow
|
||||
import pprint
|
||||
|
||||
from json import dumps, loads
|
||||
|
||||
from decimal import Decimal
|
||||
from pydantic import BaseModel
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from Depends.prisma_client import PrismaService
|
||||
from Depends.service_handler import ProcessCommentFinderService
|
||||
from Depends.config import ConfigServices, MailSendModel, RedisMailSender, Status, RedisTaskObject, FinderComment
|
||||
|
||||
|
||||
class BankReceive(BaseModel):
    """One bank-statement row shaped for insertion into `account_records`.

    Populated from a `FinderComment` in
    `write_account_records_row_from_finder_comment`.
    """

    import_file_name: str            # source statement file of the row
    iban: str
    bank_date: datetime
    channel_branch: str
    currency: Optional[str] = "TL"   # defaulted to TL by the caller as well
    currency_value: Decimal          # transaction amount
    bank_balance: Decimal
    additional_balance: Decimal
    process_name: str
    process_type: str
    process_comment: str
    bank_reference_code: str
    # Fix: the caller also passes build/decision-book identifiers
    # (build_id, build_uu_id, decision_book_id, decision_book_uu_id). They
    # were not declared, so pydantic silently dropped them and `.dict()`
    # never carried them into the created DB row. Declared as optional to
    # remain backward compatible with existing constructor calls.
    build_id: Optional[int] = None
    build_uu_id: Optional[str] = None
    decision_book_id: Optional[int] = None
    decision_book_uu_id: Optional[str] = None
    # bank_date split into calendar components (caller fills them from the
    # same datetime: weekday()/month/day/year).
    bank_date_w: int                 # weekday, 0 = Monday per datetime.weekday()
    bank_date_m: int
    bank_date_d: int
    bank_date_y: int
|
||||
|
||||
|
||||
def check_task_belong_to_this_service(task: RedisTaskObject):
    """Return True when *task* should be processed by this worker.

    Requires the task to be addressed to the IBAN-finder service prefix,
    to have `completed` set, to not yet have `is_completed` set, and to
    carry a non-empty payload.
    """
    addressed_here = task.service == ConfigServices.SERVICE_PREFIX_FINDER_IBAN
    return bool(addressed_here and task.completed and not task.is_completed and task.data)
|
||||
|
||||
|
||||
def write_account_records_row_from_finder_comment(finder_comments: list[FinderComment], prisma_service: PrismaService, saved_list_of_account_records: dict):
    """Insert one `account_records` row per finder comment, skipping duplicates.

    Args:
        finder_comments: parsed bank-statement rows from the comment finder.
        prisma_service: DB access wrapper.
        saved_list_of_account_records: accumulator mapping build_id -> list of
            newly created rows; mutated in place and also returned.

    Returns:
        The updated accumulator dict.
    """
    # Columns returned by both the duplicate lookup and the create call.
    # Fix: `build_id` is now part of the selection — the grouping code below
    # reads created_account_record['build_id'], which raised KeyError when
    # the column was absent from the returned row.
    row_select = {"id": True, "iban": True, "bank_reference_code": True, "bank_date": True, "bank_balance": True, "build_id": True}
    for finder_comment in list(finder_comments):
        # arrow .replace(tzinfo=...) relabels the timestamp as GMT+3 without
        # shifting the clock value (assumed statement-local time — confirm).
        bank_date = arrow.get(finder_comment.bank_date).replace(tzinfo='GMT+3').datetime
        bank_receive_record = BankReceive(
            import_file_name=finder_comment.filename, iban=finder_comment.iban, bank_date=bank_date, channel_branch=finder_comment.channel_branch, currency="TL", currency_value=finder_comment.currency_value,
            bank_balance=finder_comment.balance, additional_balance=finder_comment.additional_balance, process_name=finder_comment.process_name, process_type=finder_comment.process_type,
            process_comment=finder_comment.process_comment, bank_reference_code=finder_comment.bank_reference_code, build_id=finder_comment.build_id, build_uu_id=finder_comment.build_uu_id,
            decision_book_id=finder_comment.decision_book_id, decision_book_uu_id=finder_comment.decision_book_uu_id, bank_date_w=bank_date.weekday(), bank_date_m=bank_date.month,
            bank_date_d=bank_date.day, bank_date_y=bank_date.year
        )
        # A row counts as a duplicate when these five identifying fields match.
        account_record_found = prisma_service.find_first(
            table="account_records",
            query={"iban": bank_receive_record.iban, "bank_reference_code": bank_receive_record.bank_reference_code,
                   "bank_date": bank_receive_record.bank_date, "bank_balance": bank_receive_record.bank_balance,
                   "currency_value": bank_receive_record.currency_value},
            select=row_select,
        )
        if not account_record_found:
            created_account_record = prisma_service.create(table="account_records", data=bank_receive_record.dict(), select=row_select)
            # Group new rows per building for the later mail report
            # (setdefault replaces the manual keys()-membership branch).
            saved_list_of_account_records.setdefault(created_account_record['build_id'], []).append(created_account_record)
    return saved_list_of_account_records
|
||||
|
||||
|
||||
def enclose_task_and_send_mail_to_build_manager(prisma_service: PrismaService, saved_list_of_account_records: dict, process_comment_finder_service: ProcessCommentFinderService, task: RedisTaskObject):
    """Queue one status-report mail per building that got new account records.

    For each build_id in the accumulator, resolves the building's active
    manager (occupant code "BU-MNG") down to a user, builds a MailSendModel
    with the new rows, and pushes the whole batch to the mail-sender Redis
    key. No-op when the accumulator is empty.
    """
    if not saved_list_of_account_records:
        return
    list_of_new_set, today = [], arrow.now().to('GMT+3').datetime
    # The manager occupant type does not depend on the building — fetch it
    # once instead of re-querying on every loop iteration (hoisted).
    build_manager_occupant_type = prisma_service.find_first(table="occupant_types", query={"occupant_code": "BU-MNG", "is_confirmed": True, "active": True})
    for build_id, saved_list_of_account_record in saved_list_of_account_records.items():
        # The manager's living-space assignment must be valid today.
        living_space = prisma_service.find_first(
            table="build_living_space", query={
                "build_id": build_id, "occupant_type_id": build_manager_occupant_type['id'], "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}}
        )
        build = prisma_service.find_first(table="builds", query={"id": build_id})
        person = prisma_service.find_first(table="people", query={"id": living_space['person_id']})
        user = prisma_service.find_first(table="users", query={"person_id": person['id']})
        send_object = MailSendModel(
            # Fix: find_first results are subscripted everywhere else in this
            # module (living_space['person_id'], person['id'], build['name']);
            # `user.email` would raise AttributeError on a mapping.
            receivers=[user['email']], data=saved_list_of_account_record, template_name=ConfigServices.TEMPLATE_ACCOUNT_RECORDS,
            subject=f"{build['name']} Cari Durum Bilgilendirme Raporu - {today.strftime('%d/%m/%Y %H:%M')}",
        )
        set_mail_object = RedisMailSender(
            task=task, data=send_object, service=ConfigServices.SERVICE_PREFIX_MAIL_SENDER, status=Status.PENDING, completed=False, created_at=today.strftime('%Y-%m-%d %H:%M:%S')
        )
        list_of_new_set.append(set_mail_object)
    if list_of_new_set:
        # Hand the whole batch to the mail-sender service in one Redis write.
        process_comment_finder_service.service_retriever.redis_client.set(ConfigServices.SERVICE_PREFIX_MAIL_SENDER, dumps(
            {"type": "mail_sender", "data": list_of_new_set, "count": len(list_of_new_set), "created_at": today.strftime('%Y-%m-%d %H:%M:%S')}
        ))
|
||||
|
||||
|
||||
# Worker entry point: poll Redis every 5 seconds, persist finder-comment rows
# into account_records, then queue the per-building report mails.
if __name__ == "__main__":

    prisma_service = PrismaService()
    process_comment_finder_service = ProcessCommentFinderService()

    print("Process Comment service started")
    try:
        print("Process Comment service started sleeping for 5 seconds")
        while True:
            time.sleep(5)  # fixed polling interval
            # Fresh accumulator per polling cycle: build_id -> created rows.
            saved_list_of_account_records = dict()
            tasks = process_comment_finder_service.fetch_all_tasks()
            for task in tasks:
                if not check_task_belong_to_this_service(task):
                    continue
                write_account_records_row_from_finder_comment(
                    finder_comments=task.data.FinderComment, prisma_service=prisma_service, saved_list_of_account_records=saved_list_of_account_records
                )
                # NOTE(review): save_task_object_for_comment_parsing is not
                # defined in the visible part of this module — confirm it is
                # defined/imported elsewhere, otherwise this raises NameError
                # on the first matching task.
                save_task_object_for_comment_parsing(task=task, process_comment_finder_service=process_comment_finder_service)
                # Finalize and remove the task before mailing the report.
                process_comment_finder_service.update_task_status(task_uuid=task.task, is_completed=True, status=Status.COMPLETED)
                process_comment_finder_service.delete_task(task_uuid=task.task)
                enclose_task_and_send_mail_to_build_manager(
                    prisma_service=prisma_service, saved_list_of_account_records=saved_list_of_account_records, process_comment_finder_service=process_comment_finder_service, task=task
                )
    except Exception as e:
        # Propagate after cleanup; the finally block still disconnects.
        raise
    finally:
        prisma_service.disconnect()
|
||||
|
||||
|
||||
def fix_account_records_bank_date(prisma_service: PrismaService, bank_receive_record: BankReceive):
    """Backfill `bank_date` on an existing account_records row.

    Finds the row by its non-date identifying fields (iban, reference code,
    balance, amount) and rewrites its `bank_date` from *bank_receive_record*.
    When no row matches, the record is only dumped to stdout for inspection.
    """
    lookup_query = {
        "iban": bank_receive_record.iban,
        "bank_reference_code": bank_receive_record.bank_reference_code,
        "bank_balance": bank_receive_record.bank_balance,
        "currency_value": bank_receive_record.currency_value,
    }
    lookup_select = {
        "id": True, "iban": True, "bank_reference_code": True, "bank_date": True,
        "bank_balance": True, "currency_value": True, "process_comment": True
    }
    matched_row = prisma_service.find_first(table="account_records", query=lookup_query, select=lookup_select)
    if matched_row:
        prisma_service.update(
            table="account_records", where={"id": matched_row['id']}, data={"bank_date": bank_receive_record.bank_date},
        )
    else:
        pprint.pprint({"not_found_bank_receive_record": bank_receive_record})
|
||||
|
||||
|
||||
def commented_out_code():
    """Dead code parked here for reference — never call this function.

    The body references names (`finder_comment`, `bank_receive_record`,
    `prisma_service`) that are not defined in this scope, so executing it
    raises NameError immediately. Kept byte-for-byte; consider deleting.
    """
    account_record_found = None

    # NOTE(review): `finder_comment` is undefined in this scope.
    old_bank_date=arrow.get(finder_comment.bank_date).datetime

    if not account_record_found:
        # Fallback lookup using the unshifted (old) bank date.
        account_record_found_with_old_date = prisma_service.find_first(
            table="account_records",
            query={
                "iban": bank_receive_record.iban, "bank_reference_code": bank_receive_record.bank_reference_code,
                "bank_date": old_bank_date, "bank_balance": bank_receive_record.bank_balance,
            },
        )
        if account_record_found_with_old_date:
            prisma_service.update(
                table="account_records", where={"id": account_record_found_with_old_date.id}, data={"bank_date": bank_receive_record.bank_date},
            )
    if account_record_found:
        # Debug dump comparing the DB row with the incoming record.
        print('-' * 150)
        pprint.pprint(
            {
                "account_record_found": dict(account_record_found),
                "bank_receive_record": bank_receive_record.dict(),
                "bank_receive_record.bank_date": bank_receive_record.bank_date,
                "account_record_found.bank_date": account_record_found["bank_date"],
            }
        )
        print('-' * 150)
    return
|
||||
@@ -0,0 +1,19 @@
|
||||
#!/bin/sh
# Container entrypoint: bootstrap the virtualenv on first run, ensure the
# Prisma query-engine binary is present, then start the service.

VENV_PATH="/opt/venv"
REQUIREMENTS_PATH="/app/requirements.txt"
SCHEMA_PATH="/app/Depends/schema.prisma"
PRISMA_BINARY_PATH="/root/.cache/prisma-python/binaries"

# First run only: venv python missing -> create the env, install deps and
# generate the Prisma client from the schema.
if [ ! -x "$VENV_PATH/bin/python" ]; then
    python -m venv "$VENV_PATH"
    "$VENV_PATH/bin/pip" install pip --upgrade
    "$VENV_PATH/bin/pip" install --no-cache-dir -r "$REQUIREMENTS_PATH"
    "$VENV_PATH/bin/prisma" generate --schema "$SCHEMA_PATH"
fi

# Fetch the query-engine binary if it is not already cached.
# NOTE(review): the name pins debian/openssl-3.0.x — confirm it still matches
# the base image if the Dockerfile changes.
if ! find "$PRISMA_BINARY_PATH" -type f -name "prisma-query-engine-debian-openssl-3.0.x" | grep -q .; then
    "$VENV_PATH/bin/prisma" py fetch
fi

# Replace the shell with the service process; -u keeps output unbuffered.
exec "$VENV_PATH/bin/python" -u app.py
|
||||
@@ -1,74 +1,108 @@
|
||||
import uvloop
|
||||
import asyncio
|
||||
import sys
|
||||
import signal
|
||||
import time
|
||||
import arrow
|
||||
|
||||
from pydantic import BaseModel
|
||||
from datetime import datetime
|
||||
from Depends.prisma_client import prisma_client, disconnect_prisma
|
||||
|
||||
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
from Depends.prisma_client import PrismaService
|
||||
from Depends.service_handler import IbanFinderService
|
||||
from Depends.config import ConfigServices, Status, FinderIban, RedisTaskObject
|
||||
|
||||
|
||||
# sys.stdout.reconfigure(line_buffering=True) # alternatif: python -u veya PYTHONUNBUFFERED=1
|
||||
class IbanRecord(BaseModel):
    """Typed view of a `build_ibans` row selected in extract_build_iban_from_task."""
    id: int
    uu_id: str
    iban: str
    build_id: int
    build_uu_id: str
    expiry_starts: datetime  # validity window start
    expiry_ends: datetime    # validity window end
|
||||
|
||||
|
||||
async def tick():
|
||||
start = time.time()
|
||||
print(f"[{datetime.now()}] Attempting database query...")
|
||||
async with prisma_client() as db:
|
||||
rows = await db.account_records.find_many(
|
||||
take=5, skip=0, order=[{"bank_date": "desc"}]
|
||||
)
|
||||
print(f"[{datetime.now()}] Query completed in {time.time()-start:.2f}s")
|
||||
|
||||
for i, r in enumerate(rows):
|
||||
# Dilersen burada formatı değiştir
|
||||
print(f" Row: {i} | id={r.id} bank_date={r.bank_date} currency_value={r.currency_value}")
|
||||
print("-" * 80)
|
||||
class DecisionBookRecord(BaseModel):
    """Typed view of a `build_decision_book` row selected in extract_decision_book_from_task."""
    id: int
    uu_id: str
    build_id: int
    build_uu_id: str
    expiry_starts: datetime  # validity window start
    expiry_ends: datetime    # validity window end
|
||||
|
||||
|
||||
async def service():
|
||||
print(f"[{datetime.now()}] IBAN Finder service starting")
|
||||
try:
|
||||
iteration = 0
|
||||
while True:
|
||||
iteration += 1
|
||||
print(f"\n[{datetime.now()}] Loop iteration {iteration}")
|
||||
try:
|
||||
await tick()
|
||||
except Exception as e:
|
||||
print(f"[{datetime.now()}] Error in service tick: {e}")
|
||||
await asyncio.sleep(1) # bloklamayan bekleme
|
||||
finally:
|
||||
# Her durumda DB'yi temiz kapat
|
||||
await disconnect_prisma()
|
||||
print(f"[{datetime.now()}] Cleaning up database connection...")
|
||||
def check_task_belong_to_this_service(task: RedisTaskObject):
    """Return True when *task* should be handled by this IBAN-finder worker.

    Accepts tasks addressed to the mail-parser prefix (presumably this worker
    consumes the mail parser's completed output — NOTE(review): confirm the
    prefix is intentional), that are marked completed and carry a payload.
    Unlike the Accounts variant, `is_completed` is not checked here.
    """
    if not task.service == ConfigServices.SERVICE_PREFIX_MAIL_PARSER:
        return False
    if not task.completed:
        return False
    if not task.data:
        return False
    return True
|
||||
|
||||
|
||||
async def _graceful_shutdown(sig: signal.Signals):
|
||||
print(f"\n[{datetime.now()}] Shutting down due to signal: {sig.name}")
|
||||
# Burada istersen tüm pending task'leri iptal edebilirsin:
|
||||
# for t in asyncio.all_tasks():
|
||||
# if t is not asyncio.current_task():
|
||||
# t.cancel()
|
||||
await disconnect_prisma()
|
||||
def extract_build_iban_from_task(task: RedisTaskObject, finder_iban: FinderIban, write_object: dict) -> tuple[bool, dict]:
    """Resolve the building owning `finder_iban.iban` at the transaction date.

    Looks up an active, confirmed, non-deleted `build_ibans` row whose
    validity window contains the bank date; on a hit copies build_id and
    build_uu_id into *write_object*.

    Returns:
        (found, write_object) — write_object is also mutated in place.

    Notes:
        - `task` is unused in the body — kept for the caller's signature;
          NOTE(review): confirm it can be dropped.
        - Uses the module-level `prisma_service` created under `__main__`
          rather than taking it as a parameter.
    """
    bank_date = arrow.get(finder_iban.bank_date).datetime
    iban_record_db = prisma_service.find_first(
        table="build_ibans",
        query={
            "active": True, "deleted": False, "is_confirmed": True, "iban": finder_iban.iban,
            "expiry_starts": {"lte": bank_date}, "expiry_ends": {"gte": bank_date},
        },
        # NOTE(review): select values are None here, True elsewhere — confirm
        # PrismaService treats both as "include this column".
        select={"id": None, "uu_id": None, "iban": None, "build_id": None, "build_uu_id": None, "expiry_starts": None, "expiry_ends": None}
    )
    if iban_record_db:
        iban_record = IbanRecord(**iban_record_db)
        write_object["build_id"] = iban_record.build_id
        write_object["build_uu_id"] = iban_record.build_uu_id
        return True, write_object
    return False, write_object
|
||||
|
||||
|
||||
def _install_signal_handlers(loop: asyncio.AbstractEventLoop):
|
||||
# Linux/Unix: SIGINT (Ctrl+C) ve SIGTERM (docker stop) için kibar kapanış
|
||||
for s in (signal.SIGINT, signal.SIGTERM):
|
||||
loop.add_signal_handler(s, lambda s=s: asyncio.create_task(_graceful_shutdown(s)))
|
||||
def extract_decision_book_from_task(write_object: dict) -> tuple[bool, dict]:
    """Attach the active decision book for write_object's building.

    Finds the active, confirmed `build_decision_book` row for
    write_object["build_id"] whose validity window contains the bank date;
    on a hit writes build_decision_book_id/uu_id into *write_object*.

    Returns:
        (found, write_object) — write_object is also mutated in place.

    Note: uses the module-level `prisma_service` created under `__main__`
    rather than receiving it as a parameter.
    """
    bank_date = arrow.get(write_object["bank_date"]).datetime
    decision_book_record_db = prisma_service.find_first(
        table="build_decision_book",
        query={
            "active": True, "deleted": False, "is_confirmed": True, "build_id": write_object["build_id"],
            "expiry_starts": {"lte": bank_date}, "expiry_ends": {"gte": bank_date},
        },
        # NOTE(review): None select values — see extract_build_iban_from_task.
        select={"id": None, "uu_id": None, "build_id": None, "build_uu_id": None, "expiry_starts": None, "expiry_ends": None}
    )
    if decision_book_record_db:
        decision_book_record = DecisionBookRecord(**decision_book_record_db)
        write_object["build_decision_book_id"] = decision_book_record.id
        write_object["build_decision_book_uu_id"] = decision_book_record.uu_id
        return True, write_object
    return False, write_object
|
||||
|
||||
|
||||
async def main():
|
||||
loop = asyncio.get_running_loop()
|
||||
try:
|
||||
_install_signal_handlers(loop)
|
||||
except NotImplementedError:
|
||||
# (Gerekirse Windows vs., ama sen Linux/Docker kullanıyorsun)
|
||||
pass
|
||||
await service()
|
||||
|
||||
if __name__ == "__main__":
|
||||
# uvloop policy zaten yukarıda set edildi; burada normal asyncio.run kullanıyoruz
|
||||
asyncio.run(main())
|
||||
|
||||
prisma_service = PrismaService()
|
||||
iban_finder_service = IbanFinderService()
|
||||
print("Find Build Iban service started")
|
||||
try:
|
||||
print("Find Build Iban service started sleeping for 5 seconds")
|
||||
while True:
|
||||
time.sleep(5)
|
||||
tasks = iban_finder_service.fetch_all_tasks()
|
||||
for task in tasks:
|
||||
if not check_task_belong_to_this_service(task):
|
||||
continue
|
||||
if list(task.data.FinderIban):
|
||||
finder_iban_list = []
|
||||
for finder_iban in list(task.data.FinderIban):
|
||||
write_object = finder_iban.dict()
|
||||
is_build_found, is_decision_book_found = False, False
|
||||
is_build_found, write_object = extract_build_iban_from_task(task, finder_iban, write_object)
|
||||
if is_build_found:
|
||||
is_decision_book_found, write_object = extract_decision_book_from_task(write_object)
|
||||
if is_build_found or is_decision_book_found:
|
||||
finder_iban_list.append(write_object)
|
||||
if finder_iban_list:
|
||||
iban_finder_service.update_service_data(task.task, ConfigServices.SERVICE_PREFIX_FINDER_COMMENT, finder_iban_list)
|
||||
iban_finder_service.change_service(task.task, ConfigServices.SERVICE_PREFIX_FINDER_IBAN, Status.COMPLETED, True)
|
||||
continue
|
||||
iban_finder_service.change_service(task.task, ConfigServices.SERVICE_PREFIX_FINDER_IBAN, Status.FAILED, True)
|
||||
except Exception as e:
|
||||
raise
|
||||
finally:
|
||||
prisma_service.disconnect()
|
||||
|
||||
@@ -7,6 +7,7 @@ PRISMA_BINARY_PATH="/root/.cache/prisma-python/binaries"
|
||||
|
||||
if [ ! -x "$VENV_PATH/bin/python" ]; then
|
||||
python -m venv "$VENV_PATH"
|
||||
"$VENV_PATH/bin/pip" install pip --upgrade
|
||||
"$VENV_PATH/bin/pip" install --no-cache-dir -r "$REQUIREMENTS_PATH"
|
||||
"$VENV_PATH/bin/prisma" generate --schema "$SCHEMA_PATH"
|
||||
fi
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
*.db
|
||||
*.sqlite3
|
||||
*.log
|
||||
*.env
|
||||
venv/
|
||||
.env.*
|
||||
node_modules/
|
||||
.prisma/
|
||||
.prisma-cache/
|
||||
# NOTE(review): "ServicesRunnner" (triple "n") looks like a typo for "ServicesRunner" — confirm the directory name, otherwise this ignore rule never matches
ServicesRunnner/AccountRecordServices/Test/venv/
|
||||
@@ -0,0 +1,22 @@
|
||||
# Runtime image for the Parser/Comment service (same layout as the other
# AccountRecordServices images).
FROM python:3.12-slim

# Unbuffered stdout/stderr; no .pyc files written inside the container.
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
# Virtualenv is created lazily by entrypoint.sh at this path.
ENV VIRTUAL_ENV=/opt/venv
ENV PRISMA_SCHEMA_PATH=/app/Depends/schema.prisma
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
ENV PYTHONPATH=/app

# gcc to build wheels; curl also installed (purpose not visible here);
# apt lists removed to keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends gcc curl && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Shared Depends package plus this service's own code.
COPY ServicesRunner/Depends/ /app/Depends/
COPY ServicesRunner/AccountRecordServices/Finder/Parser/Comment /app/

COPY ServicesRunner/requirements.txt /app/requirements.txt
COPY ServicesRunner/AccountRecordServices/Finder/Parser/Comment/entrypoint.sh /entrypoint.sh

RUN chmod +x /entrypoint.sh

# entrypoint.sh bootstraps the venv and execs the service.
CMD ["/entrypoint.sh"]
|
||||
@@ -0,0 +1,243 @@
|
||||
import time
|
||||
import arrow
|
||||
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
from matchers import ParsedComment, Parser
|
||||
from models import BuildingCluster, BuildPart, BuildLivingSpace, Person, User, OccupantType
|
||||
|
||||
from Depends.prisma_client import PrismaService
|
||||
from Depends.config import ConfigServices, RedisTaskObject
|
||||
from Depends.service_handler import ProcessCommentParserService
|
||||
|
||||
|
||||
def check_task_belong_to_this_service(task: RedisTaskObject):
    """Return True when *task* is an unfinished comment-parser task with a payload.

    The task must be addressed to the comment-parser prefix, have `completed`
    set, not yet have `is_completed` set, and carry non-empty data.
    """
    addressed_here = task.service == ConfigServices.TASK_COMMENT_PARSER
    return bool(addressed_here and task.completed and not task.is_completed and task.data)
|
||||
|
||||
|
||||
def get_all_person_data_due_to_build(prisma_service: PrismaService):
    """
    Get all person data due to build with comprehensive inner joins
    Returns a dictionary of buildings clustered with their build parts, people, and living spaces

    Shape: {str(build_id): BuildingCluster.dict(exclude_none=True)} where each
    cluster nests its livable build parts, each part its current living spaces,
    and each living space its person (with user, when one exists) and occupant
    type. Only occupants with codes FL-OWN / FL-TEN are included.
    """
    buildings_dict, today = {}, arrow.now().to('GMT+3').datetime
    # Only flat owners (FL-OWN) and tenants (FL-TEN) count as possible money senders.
    occupant_flat_owner = prisma_service.find_first(table="occupant_types", query={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
    occupant_tenant = prisma_service.find_first(table="occupant_types", query={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
    possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
    # Active buildings whose validity window contains "today".
    buildings = prisma_service.find_many(table="build", query={"active": True, "is_confirmed": True,"expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}})
    for build in buildings:
        # One cluster per building, keyed by stringified id.
        buildings_dict[str(build.id)] = BuildingCluster(
            id=build.id,
            uu_id=build.uu_id,
            build_name=build.build_name,
            build_no=build.build_no,
            build_date=str(build.build_date),
            decision_period_date=str(build.decision_period_date),
            expiry_starts=str(build.expiry_starts),
            expiry_ends=str(build.expiry_ends),
            is_confirmed=build.is_confirmed,
            active=build.active,
            build_parts=[]
        )
        # Only human-livable parts that are currently valid.
        build_parts = prisma_service.find_many(table="build_parts", query={"build_id": build.id, "active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}})
        for build_part in build_parts:
            part_obj = BuildPart(
                id=build_part.id,
                uu_id=build_part.uu_id,
                part_no=build_part.part_no,
                part_level=build_part.part_level,
                part_code=build_part.part_code,
                part_gross_size=build_part.part_gross_size,
                part_net_size=build_part.part_net_size,
                human_livable=build_part.human_livable,
                build_id=build_part.build_id,
                build_uu_id=build_part.build_uu_id,
                is_confirmed=build_part.is_confirmed,
                active=build_part.active,
                living_spaces=[],
                build=None
            )
            # Current occupancies of this part, restricted to money-sender types.
            living_spaces = prisma_service.find_many(
                table="build_living_space", include={"occupant_types": True, "people": {"include": {"users": True}}},
                query={"build_parts_id": build_part.id, "active": True, "is_confirmed": True, "expiry_starts": {"lte": today}, "expiry_ends": {"gte": today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
            )
            for living_space in living_spaces:
                person = living_space.people
                # NOTE(review): users are already included via the query above;
                # this extra per-person lookup may be redundant — confirm.
                user = prisma_service.find_first(table="users", query={"person_id": person.id, "active": True, "is_confirmed": True})
                user_of_person = None
                if user:
                    user_of_person = User(
                        id=user.id,
                        uu_id=user.uu_id,
                        user_tag=user.user_tag,
                        user_type=user.user_type,
                        email=user.email,
                        phone_number=user.phone_number,
                        related_company=user.related_company,
                        is_confirmed=user.is_confirmed,
                        active=user.active
                    )
                person_obj = Person(
                    id=person.id,
                    uu_id=person.uu_id,
                    firstname=person.firstname,
                    surname=person.surname,
                    middle_name=person.middle_name,
                    birthname=person.birthname,
                    is_confirmed=person.is_confirmed,
                    active=person.active,
                    user=user_of_person
                )
                occupant_type = living_space.occupant_types
                occupant_type_obj = OccupantType(
                    id=occupant_type.id,
                    uu_id=occupant_type.uu_id,
                    occupant_code=occupant_type.occupant_code,
                    occupant_type=occupant_type.occupant_type,
                    is_confirmed=occupant_type.is_confirmed,
                    active=occupant_type.active,
                    user_type_uu_id=occupant_type.user_type_uu_id
                )
                living_space_obj = BuildLivingSpace(
                    id=living_space.id,
                    uu_id=living_space.uu_id,
                    expiry_starts=str(living_space.expiry_starts),
                    expiry_ends=str(living_space.expiry_ends),
                    fix_value=float(living_space.fix_value),
                    fix_percent=float(living_space.fix_percent),
                    agreement_no=living_space.agreement_no,
                    marketing_process=living_space.marketing_process,
                    build_parts_id=living_space.build_parts_id,
                    build_parts_uu_id=living_space.build_parts_uu_id,
                    person_id=living_space.person_id,
                    person_uu_id=living_space.person_uu_id,
                    occupant_type_id=living_space.occupant_type_id,
                    occupant_type_uu_id=living_space.occupant_type_uu_id,
                    is_confirmed=living_space.is_confirmed,
                    active=living_space.active,
                    person=person_obj,
                    occupant_types=occupant_type_obj
                )
                part_obj.living_spaces.append(living_space_obj)
            buildings_dict[str(build.id)].build_parts.append(part_obj)
    # Serialize every cluster, dropping None fields.
    return {i: v.dict(exclude_none=True) for i, v in buildings_dict.items()}
|
||||
|
||||
|
||||
def get_all_companies_data(prisma_service: PrismaService):
    """Return every active, confirmed row of the `companies` table."""
    company_filter = {"active": True, "is_confirmed": True}
    return prisma_service.find_many(table="companies", query=company_filter)
|
||||
|
||||
|
||||
def get_all_person_data_due_to_account_record(prisma_service: PrismaService):
    """Return (arriving, debt) unapproved, not-yet-predicted account records.

    Arriving rows have a positive currency_value; debt rows a negative one.
    """
    base_filter = {"is_predicted": False, "active": True, "is_confirmed": True, "approved_record": False}
    arriving_rows = prisma_service.find_many(table="account_records", query={**base_filter, "currency_value": {"gt": 0}})
    debt_rows = prisma_service.find_many(table="account_records", query={**base_filter, "currency_value": {"lt": 0}})
    return arriving_rows, debt_rows
|
||||
|
||||
|
||||
def check_if_any_account_record_added(prisma_service: PrismaService):
    """Return True when at least one unapproved, not-yet-predicted account record exists."""
    pending_filter = {"is_predicted": False, "active": True, "is_confirmed": True, "approved_record": False}
    return prisma_service.find_first(table="account_records", query=pending_filter) is not None
|
||||
|
||||
|
||||
def check_if_any_building_added(prisma_service: PrismaService, build_id_list: list[str | int]):
    """Return True when an active, confirmed building exists outside *build_id_list*."""
    known_build_ids = [int(build_id) for build_id in build_id_list]
    unseen_building = prisma_service.find_first(
        table="build",
        query={"active": True, "is_confirmed": True, "id": {"not": {"in": known_build_ids}}},
    )
    return unseen_building is not None
|
||||
|
||||
|
||||
def update_account_record_set_is_predict_true(prisma_service: PrismaService, account_record_id: int):
    """Mark one `account_records` row as predicted.

    Fix: every other `prisma_service.update` call in this codebase passes the
    row filter as `where=` (see update_account_records and
    fix_account_records_bank_date); this one passed `query=`, inconsistent
    with that signature.
    """
    return prisma_service.update(table="account_records", where={"id": account_record_id}, data={"is_predicted": True})
|
||||
|
||||
|
||||
def update_account_records(prisma_service: PrismaService, parsed_record: ParsedComment, collect_possible_parts_dict: dict[str, list[dict]]):
    """Attach the predicted sender and part (and optional payment type) to a record.

    Args:
        parsed_record: parser output carrying candidate people / payment types.
        collect_possible_parts_dict: person-id (str) -> candidate build parts.

    Returns:
        The updated row, or None when no person was matched or the matched
        person has no candidate build part.
    """
    payment_type_result = None
    if not parsed_record.people:
        return
    person = parsed_record.people[0]  # first candidate is used
    # Map the textual payment type to the dropdown enum rows.
    if parsed_record.payment_types:
        if parsed_record.payment_types[0] == "aidat":
            payment_type_result = prisma_service.find_first(table="api_enum_dropdown", query={"key":"BDT-D"})
        elif parsed_record.payment_types[0] == "tadilat":
            payment_type_result = prisma_service.find_first(table="api_enum_dropdown", query={"key":"BDT-R"})
    # Fix: the unguarded collect_possible_parts_dict[str(person.id)][0] raised
    # KeyError/IndexError when the matched person had no candidate part,
    # crashing the caller; skip the record instead.
    candidate_parts = collect_possible_parts_dict.get(str(person.id)) or []
    if not candidate_parts:
        return
    build_parts_id = candidate_parts[0]["id"]
    build_parts_uu_id = candidate_parts[0]["uu_id"]
    add_dict = {
        "build_parts": {"connect": {"id": int(build_parts_id)}}, "build_parts_uu_id": str(build_parts_uu_id),
        "people_account_records_send_person_idTopeople": {"connect": {"id": int(person.id)}}, "send_person_uu_id": str(person.uu_id), "is_predicted": True
    }
    if payment_type_result:
        add_dict["api_enum_dropdown_account_records_payment_result_typeToapi_enum_dropdown"] = {"connect": {"id": int(payment_type_result.id)}}
        add_dict["payment_result_type_uu_id"] = str(payment_type_result.uu_id)
    return prisma_service.update(table="account_records", where={"id": int(parsed_record.account_record_id)}, data=add_dict)
|
||||
|
||||
|
||||
def set_prediction_to_redis(process_comment_parser_service: ProcessCommentParserService, parsed_record: ParsedComment, possible: list[dict], db: "PrismaService | None" = None):
    """Store an ambiguous prediction in redis and mark the record as predicted.

    Adds this record's parse result (including all candidate build parts in
    *possible*) to the shared predict-account-record hash, then flags the
    source row so it is not re-processed.

    Parameters:
        process_comment_parser_service: redis-backed service holding the hash.
        parsed_record: the parse result for one account record.
        possible: candidate build-part dicts for the matched person.
        db: optional PrismaService; defaults to the module-level instance.
    """
    if db is None:
        # Backward-compatible fallback: the original implementation silently
        # relied on the module-level ``prisma_service`` created in __main__.
        db = prisma_service
    predict_account_records = process_comment_parser_service.get_predict_account_record()
    predict_account_records[str(parsed_record.account_record_id)] = {
        "account_record_id": parsed_record.account_record_id,
        "build_id": parsed_record.build_id,
        "payment_type": parsed_record.payment_types,
        "months": parsed_record.months,
        "years": parsed_record.years,
        "parts": parsed_record.parts,
        "predictions": possible,
    }
    process_comment_parser_service.set_predict_account_record(predict_account_record=predict_account_records)
    update_account_record_set_is_predict_true(prisma_service=db, account_record_id=parsed_record.account_record_id)
    return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("Process Comment Parser service started")
|
||||
renew = False
|
||||
prisma_service = PrismaService()
|
||||
process_comment_parser_service = ProcessCommentParserService()
|
||||
search_people = get_all_person_data_due_to_build(prisma_service)
|
||||
process_comment_parser_service.set_task_requirements(search_people)
|
||||
arriving_account_records, debt_account_records = get_all_person_data_due_to_account_record(prisma_service)
|
||||
try:
|
||||
while True:
|
||||
if not check_if_any_account_record_added(prisma_service) or renew:
|
||||
arriving_account_records, debt_account_records = get_all_person_data_due_to_account_record(prisma_service)
|
||||
renew = False
|
||||
print("Process Comment Parser service started sleeping for 5 seconds")
|
||||
tasks_dict: dict[str, BuildingCluster] = process_comment_parser_service.get_task_requirements()
|
||||
task_requirements: dict[str, BuildingCluster] = {idx: BuildingCluster(**value) for idx, value in tasks_dict.items()}
|
||||
if not check_if_any_building_added(prisma_service, list(task_requirements.keys())):
|
||||
search_people = get_all_person_data_due_to_build(prisma_service)
|
||||
process_comment_parser_service.set_task_requirements(search_people)
|
||||
parser = Parser(account_records=arriving_account_records, task_requirements=task_requirements)
|
||||
parsed_records = parser.parse()
|
||||
for parsed_record in parsed_records:
|
||||
collect_possible_parts_dict = {}
|
||||
if not parsed_record.people:
|
||||
continue
|
||||
for person in parsed_record.people:
|
||||
build_id = parsed_record.build_id
|
||||
person_id = person.id
|
||||
building = task_requirements[str(build_id)]
|
||||
for build_part in building.build_parts:
|
||||
for living_space in build_part.living_spaces:
|
||||
if str(living_space.person_id) == str(person_id):
|
||||
if str(person_id) in collect_possible_parts_dict:
|
||||
collect_possible_parts_dict[str(person_id)] = [*collect_possible_parts_dict[str(person_id)], build_part.dict()]
|
||||
else:
|
||||
collect_possible_parts_dict[str(person_id)] = [build_part.dict()]
|
||||
if len(collect_possible_parts_dict.keys()) == 1:
|
||||
for key, possible in collect_possible_parts_dict.items():
|
||||
if len(possible) == 1:
|
||||
update_account_records(prisma_service=prisma_service, parsed_record=parsed_record, collect_possible_parts_dict=collect_possible_parts_dict)
|
||||
else:
|
||||
set_prediction_to_redis(process_comment_parser_service=process_comment_parser_service, parsed_record=parsed_record, possible=possible)
|
||||
renew = True
|
||||
time.sleep(5)
|
||||
except Exception as e:
|
||||
print(f"Process Comment Parser service error: {str(e)}")
|
||||
raise e
|
||||
finally:
|
||||
prisma_service.disconnect()
|
||||
@@ -0,0 +1,19 @@
|
||||
#!/bin/sh
# Container entrypoint: provision the virtualenv and the Prisma engines on
# first run (they live on paths that may be volume-persisted), then start
# the application as PID 1.

# Fail fast on any error or unset variable; without this a failed pip
# install would still fall through and exec a broken application.
set -eu

VENV_PATH="/opt/venv"
REQUIREMENTS_PATH="/app/requirements.txt"
SCHEMA_PATH="/app/Depends/schema.prisma"
PRISMA_BINARY_PATH="/root/.cache/prisma-python/binaries"

# First run: create the venv, install dependencies, generate the Prisma client.
if [ ! -x "$VENV_PATH/bin/python" ]; then
    python -m venv "$VENV_PATH"
    "$VENV_PATH/bin/pip" install pip --upgrade
    "$VENV_PATH/bin/pip" install --no-cache-dir -r "$REQUIREMENTS_PATH"
    "$VENV_PATH/bin/prisma" generate --schema "$SCHEMA_PATH"
fi

# Fetch the Prisma query engine binary if it is not cached yet.
# (The pipeline's exit status is grep's, so set -e is not tripped by find.)
if ! find "$PRISMA_BINARY_PATH" -type f -name "prisma-query-engine-debian-openssl-3.0.x" 2>/dev/null | grep -q .; then
    "$VENV_PATH/bin/prisma" py fetch
fi

# Replace the shell with the app process so it receives container signals.
exec "$VENV_PATH/bin/python" -u app.py
|
||||
@@ -0,0 +1,604 @@
|
||||
import pprint
import re
from datetime import datetime
from json import dumps, loads

import arrow
from unidecode import unidecode

from models import BuildingCluster, Person
|
||||
|
||||
|
||||
turkish_months = ["OCAK", "ŞUBAT", "MART", "NİSAN", "MAYIS", "HAZİRAN", "TEMMUZ", "AĞUSTOS", "EYLÜL", "EKİM", "KASIM", "ARALIK"]
|
||||
turkish_months_abbr = {
|
||||
"OCA": "OCAK", "SUB": "ŞUBAT", "ŞUB": "ŞUBAT", "MAR": "MART", "NIS": "NİSAN", "MAY": "MAYIS", "HAZ": "HAZİRAN", "HZR": "HAZİRAN",
|
||||
"TEM": "TEMMUZ", "AGU": "AĞUSTOS", "AGT": "AĞUSTOS", "EYL": "EYLÜL", "EKI": "EKİM", "KAS": "KASIM", "ARA": "ARALIK", "AGUSTOS": "AĞUSTOS"
|
||||
}
|
||||
month_to_number_dict = {
|
||||
"ocak": 1, "şubat": 2, "mart": 3, "nisan": 4, "mayıs": 5, "haziran": 6, "temmuz": 7, "ağustos": 8, "eylül": 9, "ekim": 10, "kasım": 11, "aralık": 12,
|
||||
"ocak": 1, "subat": 2, "mart": 3, "nisan": 4, "mayis": 5, "haziran": 6, "temmuz": 7, "agustos": 8, "eylul": 9, "ekim": 10, "kasim": 11, "aralik": 12
|
||||
}
|
||||
start_year = 1950
|
||||
current_year = arrow.now().year
|
||||
|
||||
|
||||
class ParsedComment:
    """Mutable result object for one bank-comment parse.

    Each CommentParser extraction pass appends what it found here; the
    remaining (unconsumed) text ends up in ``comment``.
    """

    def __init__(self, account_record_id: int, org_comment: str, build_id: int) -> None:
        self.account_record_id: int = account_record_id
        self.org_comment: str = org_comment
        self.build_id: int = build_id
        # Leftover comment text after all extraction passes; None until parsed.
        # (Original annotated this plain ``str`` while initialising with None.)
        self.comment: str | None = None
        # Person models matched in the comment. (Original said list[dict],
        # but the parser appends Person instances.)
        self.people: list = []
        # Apartment numbers as captured strings. (Original said list[dict].)
        self.parts: list[str] = []
        # {'name': <month name>, 'number': <1-12 or None>} per occurrence.
        # (Original said list[str].)
        self.months: list[dict] = []
        # Four-digit years as ints. (Original said list[str].)
        self.years: list[int] = []
        # Recognised payment categories: "aidat" / "tadilat" / "yakit".
        self.payment_types: list[str] = []

    def set_people(self, people: list) -> None:
        """Replace the matched-people list wholesale."""
        self.people = people

    def set_parts(self, parts: list) -> None:
        """Replace the apartment-number list wholesale."""
        self.parts = parts

    def set_months(self, months: list) -> None:
        """Replace the extracted-months list wholesale."""
        self.months = months

    def set_years(self, years: list) -> None:
        """Replace the extracted-years list wholesale."""
        self.years = years

    def set_payment_types(self, payment_types: list) -> None:
        """Replace the payment-type list wholesale."""
        self.payment_types = payment_types
|
||||
|
||||
|
||||
class ParserHelpers:
    """Shared text-normalisation helpers for the comment parsers."""

    # Turkish uppercase letters folded to their Turkish lowercase forms before
    # ASCII folding; plain str.upper/lower mishandles dotted/dotless I.
    _TURKISH_CASEFOLD = str.maketrans({
        'İ': 'i', 'I': 'ı', 'Ş': 'ş', 'Ğ': 'ğ', 'Ü': 'ü', 'Ö': 'ö', 'Ç': 'ç',
    })

    @staticmethod
    def normalize_text(text: str) -> str:
        """Lowercase *text* Turkish-aware, then fold it to plain ASCII."""
        folded = text.translate(ParserHelpers._TURKISH_CASEFOLD)
        return unidecode(folded).lower()
|
||||
|
||||
|
||||
class ParserRequirements(ParserHelpers):
    """Builds the compiled regex patterns used to find a person's name inside
    free-text bank comments, covering original/normalised spellings, name-part
    orderings, and initials."""

    @staticmethod
    def create_pattern(parts, formats, separators=None):
        """Build regex source strings from name parts.

        Parameters:
            parts: dict mapping part name ('firstname', 'surname', ...) to a
                dict of variants {'orig'|'norm'|'init': str}.
            formats: list of formats; each format is a list of
                (variant, part name) tuples to be joined in order.
            separators: joiner strings tried between parts (default: [""]).

        Returns a list of word-bounded regex source strings.

        (Fix: this was a plain function in the class body — it worked only
        because it was always accessed via ``cls``; now a proper staticmethod.)
        """
        if separators is None:
            separators = [""]
        patterns = []
        for fmt in formats:
            for sep in separators:
                pattern_parts = []
                for part_type, part_name in fmt:
                    # Skip parts the person does not have (e.g. no middle name).
                    if part_name in parts and part_type in parts[part_name]:
                        pattern_parts.append(re.escape(parts[part_name][part_type]))
                if pattern_parts:
                    patterns.append(r"\b" + sep.join(pattern_parts) + r"\b")
        return patterns

    @classmethod
    def generate_dictonary_of_patterns(cls, person: Person):
        """Return {person_id: [compiled regexes]} for *person*.

        Empty dict when the person lacks a first name or surname.
        TODO(review): original note — consider dropping middle_name in favour
        of a 'firstname + any word + surname' pattern.
        """
        patterns_dict = {}
        person_patterns = set()
        firstname = person.firstname.strip() if person.firstname else ""
        birthname = person.birthname.strip() if person.birthname else ""
        middle_name = person.middle_name.strip() if person.middle_name else ""
        surname = person.surname.strip() if person.surname else ""
        if not firstname or not surname:
            return patterns_dict
        # Each part carries its original spelling, ASCII-normalised form,
        # and normalised initial letter.
        name_parts = {
            'firstname': {'orig': firstname, 'norm': cls.normalize_text(firstname) if firstname else "", 'init': cls.normalize_text(firstname)[0] if firstname else ""},
            'surname': {'orig': surname, 'norm': cls.normalize_text(surname) if surname else "", 'init': cls.normalize_text(surname)[0] if surname else ""}
        }
        if middle_name:
            name_parts['middle_name'] = {'orig': middle_name, 'norm': cls.normalize_text(middle_name) if middle_name else "", 'init': cls.normalize_text(middle_name)[0] if middle_name else ""}
        # Only track a birth name that actually differs from the surname.
        if birthname and cls.normalize_text(birthname) != cls.normalize_text(surname):
            name_parts['birthname'] = {'orig': birthname, 'norm': cls.normalize_text(birthname), 'init': cls.normalize_text(birthname)[0] if birthname else ""}
        # Base patterns: first+last in both orders; with a middle name, a
        # strict first+middle+last ordering replaces them.
        name_formats = [[('orig', 'firstname'), ('orig', 'surname')], [('norm', 'firstname'), ('norm', 'surname')], [('orig', 'surname'), ('orig', 'firstname')], [('norm', 'surname'), ('norm', 'firstname')]]
        if 'middle_name' in name_parts:
            name_formats = [[('orig', 'firstname'), ('orig', 'middle_name'), ('orig', 'surname')], [('norm', 'firstname'), ('norm', 'middle_name'), ('norm', 'surname')]]
        person_patterns.update(cls.create_pattern(name_parts, name_formats, [" ", ""]))
        if 'middle_name' in name_parts:
            # Partial pairs involving the middle name.
            middle_name_formats = [[('orig', 'firstname'), ('orig', 'middle_name')], [('norm', 'firstname'), ('norm', 'middle_name')], [('orig', 'middle_name'), ('orig', 'surname')], [('norm', 'middle_name'), ('norm', 'surname')],]
            person_patterns.update(cls.create_pattern(name_parts, middle_name_formats, [" ", ""]))
        if 'birthname' in name_parts and name_parts['surname']['orig'] != name_parts['birthname']['orig']:
            birthname_formats = [
                [('orig', 'firstname'), ('orig', 'birthname')], [('norm', 'firstname'), ('norm', 'birthname')],
                [('orig', 'birthname'), ('orig', 'firstname')], [('norm', 'birthname'), ('norm', 'firstname')]
            ]
            person_patterns.update(cls.create_pattern(name_parts, birthname_formats, [" ", ""]))
        # Initials, with and without dots/spaces between them.
        initial_formats = [[('init', 'firstname'), ('init', 'middle_name'), ('init', 'surname')], [('init', 'firstname'), ('init', 'surname')]]
        person_patterns.update(cls.create_pattern(name_parts, initial_formats, ["", ".", " ", ". "]))
        if 'middle_name' in name_parts:
            triple_initial_formats = [[('init', 'firstname'), ('init', 'middle_name'), ('init', 'surname')]]
            person_patterns.update(cls.create_pattern(name_parts, triple_initial_formats, ["", ".", " ", ". "]))
        compiled_patterns = [re.compile(pattern, re.IGNORECASE) for pattern in person_patterns]
        patterns_dict[str(person.id)] = compiled_patterns
        return patterns_dict
|
||||
|
||||
|
||||
class CommentParser(ParserHelpers):
    """Extracts people, apartment numbers, months, years and payment types
    from one account record's free-text bank comment.

    Each extract_* pass records its findings on ``self.parsed_comment`` and
    erases the matched text from ``self.comment`` so later passes see less noise.
    """

    def __init__(self, account_record, people_regex_dict: dict, people_dict: dict) -> None:
        # account_record: prisma account_records row (reads .id, .build_id, .process_comment).
        self.original_comment: str = account_record.process_comment
        # Work on a noise-stripped copy; the raw comment is kept for reporting.
        self.comment: str = self.clean_text(account_record.process_comment)
        # build_id -> person_id -> {person_id: [compiled regexes]} (from ParserRequirements).
        self.people_regex_dict: dict = people_regex_dict
        # build_id -> person_id -> Person
        self.people: dict = people_dict
        self.account_record_id: str = str(account_record.id)
        self.build_id: str = str(account_record.build_id)
        self.parsed_comment: ParsedComment = ParsedComment(account_record_id=self.account_record_id, org_comment=self.original_comment, build_id=self.build_id)

    @staticmethod
    def clean_text_apartment_number(text: str, match):
        # Drop the matched apartment-number phrase and collapse leftover whitespace.
        clean_text = text.replace(match.group(0), '').strip()
        clean_text = re.sub(r'\s+', ' ', clean_text).strip()
        return clean_text

    @staticmethod
    def clean_text(text: str) -> str:
        """Normalise a raw bank comment: drop 8+ digit runs (IBANs, reference
        numbers), turn separator punctuation into spaces, collapse whitespace."""
        text = str(text)
        text = re.sub(r'\d{8,}', ' ', text)
        # text = re.sub(r'\b[A-Za-z0-9]*?[0-9]+[A-Za-z0-9]*?[A-Za-z]+[A-Za-z0-9]*\b|\b[A-Za-z0-9]*?[A-Za-z]+[A-Za-z0-9]*?[0-9]+[A-Za-z0-9]*\b', ' ', text)
        text = text.replace("/", " ")
        text = text.replace("_", " ")
        text_remove_underscore = text.replace("-", " ").replace("+", " ")
        text_remove_asterisk = text_remove_underscore.replace("*", " ")
        text_remove_comma = text_remove_asterisk.replace(",", " ")
        text_remove_dots = text_remove_comma.replace(".", " ")
        text_remove_dots = re.sub(r'\s+', ' ', text_remove_dots)
        text_remove_dots = text_remove_dots.strip()
        return text_remove_dots

    def get_people_regex_by_build_id(self) -> dict:
        """
        Get people regex by build id (empty dict for an unknown building).
        """
        return self.people_regex_dict.get(self.build_id, {})

    def get_person(self, person_id: str) -> Person | None:
        # NOTE(review): raises KeyError when the build_id itself is missing
        # from people_dict; only the person lookup is defaulted — confirm intended.
        return self.people[str(self.build_id)].get(person_id, None)

    def parse_comment(self) -> ParsedComment:
        """
        Run every extraction pass in order and return the populated ParsedComment.
        Whatever text survives all passes is stored on parsed_comment.comment.
        """
        self.extract_person_name_with_regex()
        self.extract_build_parts_info()
        self.extract_months()
        self.extract_years()
        self.extract_payment_type()
        self.comment = self.comment.strip()
        self.parsed_comment.comment = self.comment
        return self.parsed_comment

    # NOTE(review): missing ``self``/@staticmethod — would break if called on an
    # instance; appears unused within this file.
    def get_text_initials(matched_text: str):
        return [unidecode(word.strip())[0].upper() for word in matched_text.split() if word.strip()]

    def extract_person_name_with_regex(self):
        # Match occupants' name patterns against the normalised comment, record
        # each matched Person once, and blank the matched words out of the text.
        all_matches, found_dict = [], {}
        build_regex = self.get_people_regex_by_build_id()
        for person_id, patterns in build_regex.items():
            person_matches = []
            person = self.get_person(str(person_id))
            if not person:
                continue
            firstname_norm = str(self.normalize_text(person.firstname)).strip() if person.firstname else ""
            # middle_name_norm = str(self.normalize_text(person.middle_name)).strip() if person.middle_name else ""
            surname_norm = str(self.normalize_text(person.surname)).strip() if person.surname else ""
            birthname_norm = str(self.normalize_text(person.birthname)).strip() if person.birthname else ""
            text_norm = str(self.normalize_text(self.comment))
            for pattern in patterns[str(person_id)]:
                for match in pattern.finditer(text_norm):
                    start, end = match.span()
                    # NOTE(review): spans found in the normalised text are applied to
                    # the raw comment — assumes normalize_text is length-preserving;
                    # unidecode can expand some characters. TODO confirm.
                    matched_text: str = self.comment[start:end]
                    matched_text_norm = self.normalize_text(matched_text)
                    is_valid_match = False
                    # Reject single-word hits; require first name plus a family name.
                    if len(matched_text_norm.split()) <= 1:
                        is_valid_match = False
                    else:
                        has_firstname = firstname_norm and firstname_norm in matched_text_norm
                        has_surname = surname_norm and surname_norm in matched_text_norm
                        has_birthname = birthname_norm and birthname_norm in matched_text_norm
                        if (has_firstname and has_surname) or (has_firstname and has_birthname):
                            is_valid_match = True
                    if is_valid_match:
                        person_matches.append({'matched_text': matched_text, 'start': start, 'end': end})
            if person_matches:
                # Prefer longest matches, then keep only non-overlapping spans.
                person_matches.sort(key=lambda x: len(x['matched_text']), reverse=True)
                non_overlapping_matches = []
                for match in person_matches:
                    overlaps = False
                    for existing_match in non_overlapping_matches:
                        if (match['start'] < existing_match['end'] and match['end'] > existing_match['start']):
                            overlaps = True
                            break
                    if not overlaps:
                        non_overlapping_matches.append(match)
                if non_overlapping_matches:
                    found_dict["name_match"] = person
                    all_matches.extend([(match, person) for match in non_overlapping_matches])
        if all_matches:
            # Process right-to-left so earlier spans stay valid while editing.
            all_matches.sort(key=lambda x: x[0]['start'], reverse=True)
            for match, person in all_matches:
                matched_text: str = match['matched_text']
                matched_words = matched_text.split()
                for word in matched_words:
                    word_norm = str(self.normalize_text(word)).strip()
                    if not word_norm:
                        continue
                    text_norm = self.normalize_text(self.comment)
                    # Register each person at most once.
                    if not any([person_com for person_com in self.parsed_comment.people if str(person_com.id) == str(person.id)]):
                        self.parsed_comment.people.append(person)
                    # Blank out every standalone occurrence of the matched word.
                    for word_match in re.finditer(rf'\b{re.escape(word_norm)}\b', text_norm, re.IGNORECASE):
                        start, end = word_match.span()
                        self.comment = self.comment[:start] + ' ' * (end - start) + self.comment[end:]
            self.comment = re.sub(r'\s+', ' ', self.comment).strip()

    def extract_build_parts_info(self):
        """
        Extracts the apartment number from its various Turkish spellings with a
        single combined regex.
        On a match:
            - appends the number to self.parsed_comment.parts
            - removes the phrase from the text (via clean_text_apartment_number)
        """
        COMBINED_APT_PATTERN = re.compile(
            r"""
            \b(?:
                (?P<n1>\d+)\s*nolu\s*dair\w*          # 2 nolu daire / 3 nolu dairenin
              | (?P<n2>\d+)\s*no\s*lu\s*dair\w*       # 12 No lu daire
              | (?P<n3>\d+)nolu\s*dair\w*             # 11nolu daire / 2NOLU DAIRE
              | (?P<n4>\d+)\s*numaral[ıi]\s*dai\w*    # 9 numaralı dai/daire
              | dair[eé]?\s*no\.?\s*(?P<n5>\d+)       # Daire No 12 / Daire No. 12
              | \bd\s*[:\-]?\s*(?P<n6>\d+)            # D:10 / D-10
              | \bno\b(?!\s*lu)\s*[:\-]?\s*(?P<n7>\d+) # NO:11 / NO :3 (nolu hariç)
              | dair[eé]?\s*(?P<n8>\d+)               # daire 3
              | (?P<n9>\d+)\s*numara                  # 9 NUMARA
              | \bno\s*/\s*(?P<n10>\d+)               # NO/11
              | /(?P<n11>\d+)                         # /11
            )\b
            """,
            re.IGNORECASE | re.VERBOSE
        )
        m = COMBINED_APT_PATTERN.search(self.comment)
        if not m:
            return
        # Exactly one alternative matched; take its captured number.
        for g in m.groups():
            if g:
                apartment_number = g
                break
        self.parsed_comment.parts.append(apartment_number)
        self.comment = self.clean_text_apartment_number(self.comment, m)
        return

    def extract_months(self):
        """
        Extract Turkish month names and abbreviations from the process comment.
        Appends {'name': ..., 'number': ...} per occurrence and removes the
        matched text.
        """
        original_text = self.comment
        working_text = original_text
        # Pass 1: full month names, searched in the original text.
        for month in turkish_months:
            pattern = re.compile(r'\b' + re.escape(month) + r'\b', re.IGNORECASE)
            for match in pattern.finditer(original_text):
                matched_text = match.group(0)
                normalized_month = self.normalize_text(month)
                month_number = None
                if month.lower() in month_to_number_dict:
                    month_number = month_to_number_dict[month.lower()]
                elif normalized_month in month_to_number_dict:
                    month_number = month_to_number_dict[normalized_month]
                month_info = {'name': month, 'number': month_number}
                self.parsed_comment.months.append(month_info)
                # NOTE(review): removes the first occurrence in working_text, which
                # may not be the matched position — confirm acceptable.
                working_text = working_text.replace(matched_text, '', 1)

        # Pass 2: abbreviations, searched in what pass 1 left behind.
        for abbr, full_month in turkish_months_abbr.items():
            pattern = re.compile(r'\b' + re.escape(abbr) + r'\b', re.IGNORECASE)
            for match in pattern.finditer(working_text):
                matched_text = match.group(0)
                normalized_month = self.normalize_text(full_month)
                month_number = None
                if full_month.lower() in month_to_number_dict:
                    month_number = month_to_number_dict[full_month.lower()]
                elif normalized_month in month_to_number_dict:
                    month_number = month_to_number_dict[normalized_month]
                month_info = {'name': full_month, 'number': month_number}
                self.parsed_comment.months.append(month_info)
                working_text = working_text.replace(matched_text, '', 1)
        self.comment = working_text

    def extract_years(self):
        """
        Extract years (start_year..current_year) from the process comment and
        remove each matched year from the text.
        """
        original_text = self.comment
        working_text = original_text
        for year in range(start_year, current_year + 1):
            pattern = re.compile(r'\b' + str(year) + r'\b', re.IGNORECASE)
            for match in pattern.finditer(original_text):
                matched_text = match.group(0)
                if str(matched_text).isdigit():
                    self.parsed_comment.years.append(int(matched_text))
                working_text = working_text.replace(matched_text, '', 1)
        self.comment = working_text

    def extract_payment_type(self):
        """
        Extract payment type from the process comment : aidat, AİD, aidatı, TADİLAT, YAKIT, yakıt, yakit
        Records each category at most once and removes the matched keywords.
        """
        original_text = self.comment
        working_text = original_text
        # Category -> spelling variants as they appear in comments.
        payment_keywords = {
            'aidat': ['aidat', 'aİd', 'aid', 'aidatı', 'aidati'],
            'tadilat': ['tadilat', 'tadİlat', 'tadilatı'],
            'yakit': ['yakit', 'yakıt', 'yakıtı', 'yakiti']
        }
        for payment_type, keywords in payment_keywords.items():
            for keyword in keywords:
                pattern = re.compile(r'\b' + keyword + r'\b', re.IGNORECASE)
                for match in pattern.finditer(original_text):
                    matched_text = match.group(0)
                    if payment_type not in self.parsed_comment.payment_types:
                        self.parsed_comment.payment_types.append(payment_type)
                    working_text = working_text.replace(matched_text, '', 1)
        self.comment = working_text
|
||||
|
||||
|
||||
class Parser:
    """Runs CommentParser over a batch of account records using precompiled
    per-building, per-person name patterns."""

    def __init__(self, account_records: list, task_requirements: dict[str, BuildingCluster]) -> None:
        """
        Initialize parser with account records and task requirements.

        Parameters:
            account_records: prisma account_records rows to parse.
            task_requirements: build_id -> BuildingCluster with parts/occupants.
        """
        self.account_records: list = account_records
        self.task_requirements: dict[str, BuildingCluster] = task_requirements
        # build_id -> person_id -> Person.
        # (Fix: original annotation said dict[str, Person] but the values are
        # nested per-person dicts.)
        self.people_dict: dict[str, dict[str, Person]] = {}
        self.people_regex_dict: dict = self.prepare_people_regex_dict()
        self.parsed_records: list[ParsedComment] = []

    def prepare_people_regex_dict(self):
        """Collect every occupant per building (side effect: fills
        ``self.people_dict``) and precompile their name patterns.

        Returns build_id -> person_id -> {person_id: [compiled regexes]}.
        """
        regex_pattern_dict = {}
        for build_id, build_cluster in self.task_requirements.items():
            for build_part in build_cluster.build_parts:
                for living_space in build_part.living_spaces:
                    person: Person = living_space.person
                    # setdefault keeps the first Person seen for a given id.
                    self.people_dict.setdefault(str(build_id), {}).setdefault(str(person.id), person)
        for build_id, people in self.people_dict.items():
            for person_id, person in people.items():
                regex_pattern_dict.setdefault(str(build_id), {})[str(person_id)] = ParserRequirements.generate_dictonary_of_patterns(person)
        return regex_pattern_dict

    def parse(self):
        """Parse every account record that has a build_id; returns the
        accumulated list of ParsedComment results."""
        for account_record in self.account_records:
            if not account_record.build_id:
                continue
            comment_parser = CommentParser(account_record=account_record, people_regex_dict=self.people_regex_dict, people_dict=self.people_dict)
            self.parsed_records.append(comment_parser.parse_comment())
        return self.parsed_records
|
||||
|
||||
|
||||
def commented_code():
    """Dead-code container parking the previous implementation.

    NOTE(review): the nested ``main`` references names that are not in scope
    here (generate_dictonary_of_patterns, clean_text, extract_*, AccountRecords,
    AccountRecordsPredict) — it would raise NameError if ever called. This
    function only keeps the legacy code importable without executing it;
    consider deleting it once the class-based rewrite above is trusted.
    """
    def main(account_records, people):
        # Legacy flow: regex-extract fields per record, then upsert each
        # extracted field into AccountRecordsPredict.
        list_of_regex_patterns = generate_dictonary_of_patterns(people=people)
        dicts_found, dicts_not_found, count_extracted = dict(), dict(), 0
        for account_record in account_records:
            account_record_id = str(account_record["id"])
            found_dict = {}
            process_comment_iteration = clean_text(text=account_record["process_comment"])
            found_dict, cleaned_process_comment = extract_person_name_with_regex(found_dict=found_dict, process_comment=process_comment_iteration, patterns_dict=list_of_regex_patterns, people=people)
            found_dict, cleaned_process_comment = extract_build_parts_info(found_dict=found_dict, process_comment=cleaned_process_comment)
            found_dict, cleaned_process_comment = extract_months(found_dict=found_dict, process_comment=cleaned_process_comment)
            found_dict, cleaned_process_comment = extract_year(found_dict=found_dict, process_comment=cleaned_process_comment)
            found_dict, cleaned_process_comment = extract_payment_type(found_dict=found_dict, process_comment=cleaned_process_comment)
            if found_dict:
                dicts_found[str(account_record_id)] = found_dict
            else:
                dicts_not_found[str(account_record_id)] = account_record_id

        for id_, item in dicts_found.items():
            months_are_valid = bool(item.get("months", []))
            years_are_valid = bool(item.get("years", []))
            payment_types_are_valid = bool(item.get("payment_types", []))
            apartment_number_are_valid = bool(item.get("apartment_number", []))
            person_name_are_valid = bool(item.get("name_match", []))
            account_record_to_save = AccountRecords.query.filter_by(id=int(id_)).first()
            save_dict = dict(account_records_id=account_record_to_save.id, account_records_uu_id=str(account_record_to_save.uu_id), prediction_model="regex", treshold=1, is_first_prediction=False)
            update_dict = dict(prediction_model="regex", treshold=1, is_first_prediction=False)
            if any([months_are_valid, years_are_valid, payment_types_are_valid, apartment_number_are_valid, person_name_are_valid]):
                count_extracted += 1
            # One create-or-update block per extracted field.
            if months_are_valid:
                print(f"months: {item['months']}")
                data_to_save = dumps({"data": item['months']})
                prediction_result = AccountRecordsPredict.query.filter_by(account_records_id=account_record_to_save.id, prediction_field="months", prediction_model="regex").first()
                if not prediction_result:
                    created_account_prediction = AccountRecordsPredict.create(**save_dict, prediction_field="months", prediction_result=data_to_save)
                    created_account_prediction.save()
                else:
                    prediction_result.update(**update_dict, prediction_result=data_to_save)
                    prediction_result.save()
            if years_are_valid:
                print(f"years: {item['years']}")
                data_to_save = dumps({"data": item['years']})
                prediction_result = AccountRecordsPredict.query.filter_by(account_records_id=account_record_to_save.id, prediction_field="years", prediction_model="regex").first()
                if not prediction_result:
                    created_account_prediction = AccountRecordsPredict.create(**save_dict, prediction_field="years", prediction_result=data_to_save)
                    created_account_prediction.save()
                else:
                    prediction_result.update(**update_dict, prediction_result=data_to_save)
                    prediction_result.save()
            if payment_types_are_valid:
                print(f"payment_types: {item['payment_types']}")
                data_to_save = dumps({"data": item['payment_types']})
                prediction_result = AccountRecordsPredict.query.filter_by(account_records_id=account_record_to_save.id, prediction_field="payment_types", prediction_model="regex").first()
                if not prediction_result:
                    created_account_prediction = AccountRecordsPredict.create(**save_dict, prediction_field="payment_types", prediction_result=data_to_save)
                    created_account_prediction.save()
                else:
                    prediction_result.update(**update_dict, prediction_result=data_to_save)
                    prediction_result.save()
            if apartment_number_are_valid:
                print(f"apartment_number: {item['apartment_number']}")
                prediction_result = AccountRecordsPredict.query.filter_by(account_records_id=account_record_to_save.id, prediction_field="apartment_number", prediction_model="regex").first()
                if not prediction_result:
                    created_account_prediction = AccountRecordsPredict.create(**save_dict, prediction_field="apartment_number", prediction_result=item['apartment_number'])
                    created_account_prediction.save()
                else:
                    prediction_result.update(**update_dict, prediction_result=item['apartment_number'])
                    prediction_result.save()
            if person_name_are_valid:
                print(f"person_name: {item['name_match']}")
                data_to_save = dumps({"data": item['name_match']})
                prediction_result = AccountRecordsPredict.query.filter_by(account_records_id=account_record_to_save.id, prediction_field="person_name", prediction_model="regex").first()
                if not prediction_result:
                    created_account_prediction = AccountRecordsPredict.create(**save_dict, prediction_field="person_name", prediction_result=data_to_save)
                    created_account_prediction.save()
                else:
                    prediction_result.update(**update_dict, prediction_result=data_to_save)
                    prediction_result.save()

        print("\n===== SUMMARY =====")
        print(f"extracted data total : {count_extracted}")
        print(f"not extracted data total : {len(account_records) - count_extracted}")
        print(f"Total account records processed : {len(account_records)}")
||||
|
||||
|
||||
|
||||
# def extract_build_parts_info(self):
|
||||
# """
|
||||
# Regex of parts such as :
|
||||
# 2 nolu daire
|
||||
# 9 NUMARALI DAI
|
||||
# daire 3
|
||||
# 3 nolu dairenin
|
||||
# 11nolu daire
|
||||
# Daire No 12
|
||||
# 2NOLU DAIRE
|
||||
# 12 No lu daire
|
||||
# D:10
|
||||
# NO:11
|
||||
# NO :3
|
||||
# """
|
||||
# apartment_number = None
|
||||
# pattern1 = re.compile(r'(\d+)\s*nolu\s*daire', re.IGNORECASE)
|
||||
# match = pattern1.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern4 = re.compile(r'(\d+)\s*nolu\s*daire\w*', re.IGNORECASE)
|
||||
# match = pattern4.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern5 = re.compile(r'(\d+)nolu\s*daire', re.IGNORECASE)
|
||||
# match = pattern5.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern7 = re.compile(r'(\d+)nolu\s*daire', re.IGNORECASE)
|
||||
# match = pattern7.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern8 = re.compile(r'(\d+)\s*no\s*lu\s*daire', re.IGNORECASE)
|
||||
# match = pattern8.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern6 = re.compile(r'daire\s*no\s*(\d+)', re.IGNORECASE)
|
||||
# match = pattern6.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern2 = re.compile(r'(\d+)\s*numarali\s*dai', re.IGNORECASE)
|
||||
# match = pattern2.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern3 = re.compile(r'daire\s*(\d+)', re.IGNORECASE)
|
||||
# match = pattern3.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern9 = re.compile(r'd\s*:\s*(\d+)', re.IGNORECASE)
|
||||
# match = pattern9.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# pattern10 = re.compile(r'no\s*:\s*(\d+)', re.IGNORECASE)
|
||||
# match = pattern10.search(self.comment)
|
||||
# if match:
|
||||
# apartment_number = match.group(1)
|
||||
# self.parsed_comment.parts.append(apartment_number)
|
||||
# self.comment = self.clean_text_apartment_number(self.comment, match)
|
||||
# return
|
||||
# # return found_dict, self.comment
|
||||
|
||||
|
||||
# if __name__ == "__main__":
|
||||
|
||||
# people_query = sqlalchemy_text("""
|
||||
# SELECT DISTINCT ON (p.id) p.firstname, p.middle_name, p.surname, p.birthname, bl.id
|
||||
# FROM public.people as p
|
||||
# INNER JOIN public.build_living_space as bl ON bl.person_id = p.id
|
||||
# INNER JOIN public.build_parts as bp ON bp.id = bl.build_parts_id
|
||||
# INNER JOIN public.build as b ON b.id = bp.build_id
|
||||
# WHERE b.id = 1
|
||||
# ORDER BY p.id
|
||||
# """)
|
||||
|
||||
# people_raw = session.execute(people_query).all()
|
||||
# remove_duplicate = list()
|
||||
# clean_people_list = list()
|
||||
# for person in people_raw:
|
||||
# merged_name = f"{person[0]} {person[1]} {person[2]} {person[3]}"
|
||||
# if merged_name not in remove_duplicate:
|
||||
# clean_people_list.append(person)
|
||||
# remove_duplicate.append(merged_name)
|
||||
|
||||
# people = [{"firstname": p[0], "middle_name": p[1], "surname": p[2], "birthname": p[3], 'id': p[4]} for p in clean_people_list]
|
||||
# query_account_records = sqlalchemy_text("""
|
||||
# SELECT a.id, a.iban, a.bank_date, a.process_comment FROM public.account_records as a where currency_value > 0
|
||||
# """) # and bank_date::date >= '2020-01-01'
|
||||
# account_records = session.execute(query_account_records).all()
|
||||
# account_records = [{"id": ar[0], "iban": ar[1], "bank_date": ar[2], "process_comment": ar[3]} for ar in account_records]
|
||||
|
||||
# try:
|
||||
# main(session=session, account_records=account_records, people=people)
|
||||
# except Exception as e:
|
||||
# print(f"{e}")
|
||||
|
||||
# session.close()
|
||||
# session_factory.remove()
|
||||
@@ -0,0 +1,93 @@
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class User(BaseModel):
    """Pydantic model describing a user account record."""

    id: int            # numeric database identifier
    uu_id: str         # external UUID string
    user_tag: str
    user_type: str
    email: str
    phone_number: str
    related_company: str
    is_confirmed: bool  # confirmation flag carried alongside the record
    active: bool        # activity flag carried alongside the record
|
||||
|
||||
|
||||
class Person(BaseModel):
    """Pydantic model describing a person; may embed the linked ``User``."""

    id: int
    uu_id: str
    firstname: str
    surname: str
    middle_name: Optional[str] = ""   # optional; defaults to empty string
    birthname: Optional[str] = ""     # optional; defaults to empty string
    # national_identity_id: str       # intentionally disabled in the source
    is_confirmed: bool
    active: bool
    user: Optional[User] = None       # linked account, when supplied by the producer
|
||||
|
||||
|
||||
class OccupantType(BaseModel):
    """Pydantic model describing an occupant-type lookup record."""

    id: int
    uu_id: str
    occupant_code: str
    occupant_type: str
    is_confirmed: bool
    active: bool
    user_type_uu_id: Optional[str] = None  # optional link to a user-type record
|
||||
|
||||
|
||||
class BuildPart(BaseModel):
    """Pydantic model describing one part (unit) of a building."""

    id: int
    uu_id: str
    part_no: str
    part_level: str
    part_code: str
    part_gross_size: float
    part_net_size: float
    human_livable: bool        # whether the part can be lived in
    build_id: int              # owning building (numeric id)
    build_uu_id: str           # owning building (UUID)
    is_confirmed: bool
    active: bool
    # Forward reference: BuildLivingSpace is defined later in this module and
    # is resolved by update_forward_refs() at the bottom of the file.
    living_spaces: Optional[List['BuildLivingSpace']] = None
|
||||
|
||||
|
||||
class BuildLivingSpace(BaseModel):
    """Pydantic model linking a person to a build part for a tenancy period."""

    id: int
    uu_id: str
    expiry_starts: str         # period start (string-encoded date)
    expiry_ends: str           # period end (string-encoded date)
    fix_value: float
    fix_percent: float
    agreement_no: str
    marketing_process: bool
    build_parts_id: int        # linked BuildPart (numeric id)
    build_parts_uu_id: str     # linked BuildPart (UUID)
    person_id: int             # linked Person (numeric id)
    person_uu_id: str          # linked Person (UUID)
    occupant_type_id: int      # linked OccupantType (numeric id)
    occupant_type_uu_id: str   # linked OccupantType (UUID)
    is_confirmed: bool
    active: bool
    person: Optional[Person] = None                # embedded relation, when loaded
    occupant_type: Optional[OccupantType] = None   # embedded relation, when loaded
|
||||
|
||||
|
||||
class BuildingCluster(BaseModel):
    """Pydantic model describing a building together with its parts."""

    id: int
    uu_id: str
    build_name: str
    build_no: str
    build_date: str             # string-encoded date
    decision_period_date: str   # string-encoded date
    expiry_starts: str
    expiry_ends: str
    is_confirmed: bool
    active: bool
    # NOTE: pydantic copies field defaults per instance, so the shared
    # mutable-default pitfall does not apply to this empty list.
    build_parts: List['BuildPart'] = []
|
||||
|
||||
|
||||
# Update forward references for models with circular dependencies:
# BuildPart.living_spaces references BuildLivingSpace (defined after it) and
# BuildingCluster.build_parts references BuildPart by string name, so the
# string annotations must be resolved once all classes exist.
BuildPart.update_forward_refs()
BuildingCluster.update_forward_refs()
|
||||
@@ -152,12 +152,11 @@ if __name__ == "__main__":
|
||||
|
||||
# Process each task
|
||||
for active_task in all_tasks:
|
||||
if active_task.service == ConfigServices.SERVICE_PREFIX_MAIL_PARSER and active_task.completed:
|
||||
# logger.info(f"Task {active_task.task} is already processed.")
|
||||
if active_task.service == ConfigServices.SERVICE_PREFIX_MAIL_READER and active_task.completed:
|
||||
logger.info(f"Processing task {active_task.task}")
|
||||
parser.process_task(active_task)
|
||||
else:
|
||||
continue
|
||||
|
||||
logger.info(f"Processing task {active_task.task}")
|
||||
parser.process_task(active_task)
|
||||
else:
|
||||
logger.info("No tasks found to process")
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import sys
|
||||
import socket
|
||||
import logging
|
||||
|
||||
from time import sleep
|
||||
from config import IsBankConfig
|
||||
from Depends.mail_handler import EmailReaderService, EmailServiceRunner
|
||||
@@ -37,14 +38,14 @@ def initialize_service():
|
||||
# Create singleton instances directly
|
||||
logger.info("Creating Redis handler singleton")
|
||||
redis_handler = MailReaderService()
|
||||
|
||||
|
||||
logger.info("Creating EmailReaderService")
|
||||
email_service = EmailReaderService(IsBankConfig())
|
||||
|
||||
|
||||
# Initialize email service and connect
|
||||
logger.info("Connecting to email service")
|
||||
email_service.login_and_connect()
|
||||
|
||||
|
||||
# Create email service runner with the singletons
|
||||
logger.info("Creating EmailServiceRunner")
|
||||
runner = EmailServiceRunner(redis_handler=redis_handler, email_service=email_service)
|
||||
@@ -66,104 +67,71 @@ def initialize_service():
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
logger.info("Starting IsBank Email Service")
|
||||
print(f"Starting Service Mail Reader.")
|
||||
|
||||
# Initialize service
|
||||
runner = initialize_service()
|
||||
|
||||
# Configurable parameters
|
||||
normal_sleep_time = 10 # seconds between normal operations
|
||||
error_sleep_time = 30 # seconds to wait after an error before retrying
|
||||
max_consecutive_errors = 5 # maximum number of consecutive errors before longer pause
|
||||
extended_error_sleep = 120 # seconds to wait after hitting max consecutive errors
|
||||
consecutive_errors = 0
|
||||
|
||||
# Main service loop
|
||||
consecutive_errors, normal_sleep_time, error_sleep_time = 0, 10, 30
|
||||
max_consecutive_errors, extended_error_sleep = 5, 120
|
||||
|
||||
while True:
|
||||
try:
|
||||
# Main processing
|
||||
print("Fetching and setting mails...")
|
||||
runner.fetch_and_set_mails()
|
||||
|
||||
# Reset error counter on success
|
||||
if consecutive_errors > 0:
|
||||
logger.info(f"Service recovered after {consecutive_errors} consecutive errors")
|
||||
consecutive_errors = 0
|
||||
|
||||
# Normal operation sleep
|
||||
sleep(normal_sleep_time)
|
||||
|
||||
except MailReaderService.REDIS_EXCEPTIONS as e:
|
||||
# Redis-specific errors
|
||||
consecutive_errors += 1
|
||||
logger.error(f"Redis error (attempt {consecutive_errors}): {str(e)}")
|
||||
|
||||
# Use centralized reconnection handler from RedisHandler
|
||||
redis_handler, need_extended_sleep = MailReaderService.handle_reconnection(
|
||||
consecutive_errors=consecutive_errors, max_consecutive_errors=max_consecutive_errors
|
||||
)
|
||||
|
||||
if redis_handler:
|
||||
# Update runner's redis handler with the new instance
|
||||
runner.redis_handler = redis_handler
|
||||
runner.redis_connected = False # Will trigger reconnection on next cycle
|
||||
|
||||
# Sleep based on error count
|
||||
runner.redis_connected = False
|
||||
if need_extended_sleep:
|
||||
sleep(extended_error_sleep)
|
||||
else:
|
||||
sleep(error_sleep_time)
|
||||
|
||||
|
||||
except socket.error as e:
|
||||
# Email connection errors
|
||||
consecutive_errors += 1
|
||||
logger.error(f"Email connection error (attempt {consecutive_errors}): {str(e)}")
|
||||
|
||||
# Try to re-establish email connection
|
||||
try:
|
||||
logger.info("Attempting to re-establish email connection...")
|
||||
# Create new email service directly
|
||||
email_service = EmailReaderService(IsBankConfig())
|
||||
email_service.login_and_connect()
|
||||
|
||||
# Create new runner with existing Redis handler and new email service
|
||||
redis_handler = runner.redis_handler # Preserve existing Redis handler
|
||||
redis_handler = runner.redis_handler
|
||||
runner = EmailServiceRunner(redis_handler=redis_handler, email_service=email_service)
|
||||
logger.info("Successfully re-established email connection")
|
||||
except Exception as email_retry_error:
|
||||
logger.error(f"Failed to re-establish email connection: {str(email_retry_error)}")
|
||||
|
||||
# Determine sleep time based on consecutive errors
|
||||
|
||||
if consecutive_errors >= max_consecutive_errors:
|
||||
logger.warning(f"Hit {max_consecutive_errors} consecutive email errors, taking longer pause")
|
||||
sleep(extended_error_sleep)
|
||||
else:
|
||||
sleep(error_sleep_time)
|
||||
|
||||
|
||||
except Exception as e:
|
||||
# Any other unexpected errors
|
||||
consecutive_errors += 1
|
||||
logger.error(f"Unexpected error (attempt {consecutive_errors}): {str(e)}")
|
||||
|
||||
# For any other error, try to reinitialize everything after some delay
|
||||
if consecutive_errors >= max_consecutive_errors:
|
||||
logger.warning(f"Hit {max_consecutive_errors} consecutive errors, reinitializing service")
|
||||
try:
|
||||
# Try to clean up existing connections
|
||||
try:
|
||||
runner.drop()
|
||||
except Exception as cleanup_error:
|
||||
logger.warning(f"Error during cleanup: {str(cleanup_error)}")
|
||||
|
||||
# Reinitialize the service directly
|
||||
redis_handler = MailReaderService()
|
||||
email_service = EmailReaderService(IsBankConfig())
|
||||
email_service.login_and_connect()
|
||||
runner = EmailServiceRunner(redis_handler=redis_handler, email_service=email_service)
|
||||
|
||||
if runner:
|
||||
logger.info("Successfully reinitialized email service runner")
|
||||
consecutive_errors = 0 # Reset counter after reinitialization
|
||||
consecutive_errors = 0
|
||||
else:
|
||||
logger.error("Failed to reinitialize email service runner")
|
||||
except Exception as reinit_error:
|
||||
@@ -171,6 +139,5 @@ if __name__ == "__main__":
|
||||
|
||||
sleep(extended_error_sleep)
|
||||
else:
|
||||
# For fewer consecutive errors, just retry the current runner
|
||||
print(f"Error: {str(e)}")
|
||||
sleep(error_sleep_time)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import os
|
||||
|
||||
from re import TEMPLATE
|
||||
from pydantic import BaseModel
|
||||
from typing import Any, List, Optional, Union
|
||||
|
||||
@@ -29,20 +29,34 @@ class MailParser(BaseModel):
|
||||
charset: str
|
||||
data: str
|
||||
|
||||
|
||||
class FinderIban(BaseModel):
|
||||
...
|
||||
|
||||
filename: str
|
||||
iban: str
|
||||
bank_date: str
|
||||
channel_branch: str
|
||||
currency_value: float
|
||||
balance: float
|
||||
additional_balance: float
|
||||
process_name: str
|
||||
process_type: str
|
||||
process_comment: str
|
||||
bank_reference_code: str
|
||||
|
||||
|
||||
class FinderComment(BaseModel):
|
||||
...
|
||||
class FinderComment(FinderIban):
|
||||
|
||||
build_id: Optional[int] = None
|
||||
build_uu_id: Optional[str] = None
|
||||
decision_book_id: Optional[int] = None
|
||||
decision_book_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class RedisData(BaseModel):
|
||||
MailReader: MailReader
|
||||
MailParser: List[MailParser]
|
||||
FinderIban: FinderIban | Any
|
||||
FinderComment: FinderComment | Any
|
||||
FinderIban: List[FinderIban]
|
||||
FinderComment: List[FinderComment]
|
||||
|
||||
|
||||
class Status:
|
||||
@@ -61,6 +75,24 @@ class RedisTaskObject(BaseModel):
|
||||
created_at: str
|
||||
is_completed: bool
|
||||
|
||||
|
||||
class MailSendModel(BaseModel):
|
||||
receivers: List[str]
|
||||
subject: str
|
||||
template_name: str
|
||||
data: dict
|
||||
|
||||
|
||||
class RedisMailSender(BaseModel):
|
||||
task: RedisTaskObject
|
||||
data: MailSendModel
|
||||
completed: bool
|
||||
service: str
|
||||
status: str
|
||||
created_at: str
|
||||
completed: bool
|
||||
|
||||
|
||||
class EmailConfig:
|
||||
|
||||
HOST: str = os.getenv("EMAIL_HOST", "10.10.2.34")
|
||||
@@ -105,12 +137,19 @@ class ConfigServices:
|
||||
TASK_MAILID_INDEX_PREFIX: str = "BANK:SERVICES:TASK:MAILID"
|
||||
TASK_UUID_INDEX_PREFIX: str = "BANK:SERVICES:TASK:UUID"
|
||||
TASK_SEEN_PREFIX: str = "BANK:SERVICES:TASK:SEEN"
|
||||
TASK_DELETED_PREFIX: str = "BANK:SERVICES:TASK:DELETED"
|
||||
TASK_COMMENT_PARSER: str = "BANK:SERVICES:TASK:COMMENT:PARSER"
|
||||
TASK_PREDICT_RESULT: str = "BANK:SERVICES:TASK:COMMENT:RESULT"
|
||||
|
||||
SERVICE_PREFIX_MAIL_READER: str = "MailReader"
|
||||
SERVICE_PREFIX_MAIL_PARSER: str = "MailParser"
|
||||
SERVICE_PREFIX_FINDER_IBAN: str = "FinderIban"
|
||||
SERVICE_PREFIX_FINDER_COMMENT: str = "FinderComment"
|
||||
SERVICE_PREFIX_MAIL_SENDER: str = "MailSender"
|
||||
|
||||
|
||||
TEMPLATE_ACCOUNT_RECORDS: str = "template_accounts.html"
|
||||
|
||||
|
||||
paramsRedisData = Union[MailReader, MailParser, FinderIban, FinderComment]
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ from email.parser import BytesParser
|
||||
from imaplib import IMAP4_SSL, IMAP4
|
||||
|
||||
from Depends.redis_handlers import RedisHandler
|
||||
from Depends.config import EmailConfig, MailReaderMainConfig, MailReader, MailParser, RedisData
|
||||
from Depends.config import ConfigServices, EmailConfig, MailReaderMainConfig, MailReader, MailParser, RedisData, Status
|
||||
from Depends.service_handler import MailReaderService
|
||||
|
||||
# Configure logging
|
||||
@@ -328,41 +328,70 @@ class EmailReaderService:
|
||||
raise
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def move_to_folder(self, uid: Union[str, bytes], folder: str):
    """
    Move message to folder with retry mechanism.

    Args:
        uid: Email UID (str or bytes; str is encoded to bytes for imaplib)
        folder: Destination folder

    Returns:
        bool: True when the server acknowledged the move, False otherwise.
    """
    try:
        log_uid = uid
        if isinstance(uid, bytes):
            log_uid = uid.decode('utf-8', errors='replace')
        elif isinstance(uid, str):
            # imaplib UID commands expect bytes
            uid = uid.encode('utf-8')
        logger.info(f"Moving email {log_uid} to {folder} folder")
        # Bug fix: check the IMAP response instead of assuming success.
        # IMAP4.uid returns (typ, data); anything other than 'OK' is a failure.
        typ, _ = self.mail.uid('MOVE', uid, folder)
        if typ != 'OK':
            logger.error(f"IMAP MOVE of {log_uid} to {folder} returned {typ}")
            return False
        self.commit()
        return True
    except Exception as e:
        logger.error(f"Failed to move email to folder: {str(e)}")
        return False
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def copy_to_folder(self, uid: Union[str, bytes], folder: str):
    """
    Copy message to folder with retry mechanism.

    Args:
        uid: Email UID (str or bytes; str is encoded to bytes for imaplib)
        folder: Destination folder

    Returns:
        bool: True when the server acknowledged the copy, False otherwise.
    """
    try:
        log_uid = uid
        if isinstance(uid, bytes):
            log_uid = uid.decode('utf-8', errors='replace')
        elif isinstance(uid, str):
            # imaplib UID commands expect bytes
            uid = uid.encode('utf-8')
        logger.info(f"Copying email {log_uid} to {folder} folder")
        # Bug fix: check the IMAP response instead of assuming success.
        # IMAP4.uid returns (typ, data); anything other than 'OK' is a failure.
        typ, _ = self.mail.uid('COPY', uid, folder)
        if typ != 'OK':
            logger.error(f"IMAP COPY of {log_uid} to {folder} returned {typ}")
            return False
        self.commit()
        return True
    except Exception as e:
        logger.error(f"Failed to copy email to folder: {str(e)}")
        return False
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def mark_no_attachment(self, uid: Union[str, bytes]):
    """
    Move message to the configured no-attachment folder with retry mechanism.

    Args:
        uid: Email UID (str or bytes; type handling is delegated to
            ``move_to_folder``, which also logs and swallows failures)
    """
    self.move_to_folder(uid, self.config.NO_ATTACHMENT_FOLDER)
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def mark_completed(self, uid: Union[str, bytes]):
    """
    Move message to the configured completed folder with retry mechanism.

    Args:
        uid: Email UID (str or bytes; type handling is delegated to
            ``move_to_folder``, which also logs and swallows failures)
    """
    self.move_to_folder(uid, self.config.COMPLETED_FOLDER)
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
|
||||
def delete(self, uid):
|
||||
@@ -426,7 +455,6 @@ class EmailServiceRunner:
|
||||
redis_handler: Redis handler for Redis operations
|
||||
email_service: Email service for email operations
|
||||
"""
|
||||
# Use MailReaderService singleton for Redis operations
|
||||
self.redis_handler = redis_handler
|
||||
self.email_service = email_service
|
||||
self.mails = None
|
||||
@@ -499,7 +527,6 @@ class EmailServiceRunner:
|
||||
if not getattr(mail, 'id', None):
|
||||
logger.warning("Skipping email with no ID")
|
||||
continue
|
||||
|
||||
mail_id = mail.id.decode('utf-8')
|
||||
|
||||
# check mail has .pdf extension
|
||||
@@ -515,7 +542,7 @@ class EmailServiceRunner:
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing email {mail_id}: {str(e)}")
|
||||
continue
|
||||
|
||||
|
||||
try:
|
||||
self.email_service.commit()
|
||||
except Exception as e:
|
||||
@@ -545,26 +572,18 @@ class EmailServiceRunner:
|
||||
"""
|
||||
try:
|
||||
mail_to_dict = mail.to_dict()
|
||||
result = self.redis_handler.process_mail(
|
||||
task_uuid, self.counter = self.redis_handler.process_mail(
|
||||
mail_id=mail_id, mail_data=mail_to_dict, service_prefix=self.email_service.config.SERVICE_PREFIX, counter=self.counter
|
||||
)
|
||||
if result['status'] == 'success':
|
||||
if result['action'] == 'stored_new_mail':
|
||||
self.counter = result.get('counter', self.counter)
|
||||
logger.info(f"Successfully processed new email {mail_id}")
|
||||
elif result['action'] == 'checked_existing_mail':
|
||||
if result.get('is_completed', False):
|
||||
logger.info(f"Marking completed email {mail_id}")
|
||||
self.email_service.mark_completed(mail_id)
|
||||
elif result['status'] == 'error':
|
||||
if result['action'] == 'id_mismatch':
|
||||
logger.error(f"Mail ID mismatch: {mail_id} != {result.get('stored_id')}")
|
||||
raise ValueError("Mail id does not match with id from Redis")
|
||||
else:
|
||||
logger.error(f"Email Service Runner Error processing mail {mail_id}: {result.get('error', 'Unknown error')}")
|
||||
raise Exception(result.get('error', 'Unknown error during mail processing'))
|
||||
if task_uuid:
|
||||
self.redis_handler.change_service(
|
||||
task_uuid=task_uuid, service_name=ConfigServices.SERVICE_PREFIX_MAIL_READER, status=Status.COMPLETED, completed=True
|
||||
)
|
||||
else:
|
||||
logger.warning(f"Unexpected result status: {result['status']} for mail {mail_id}")
|
||||
if self.redis_handler.check_mail_is_ready_to_delete(mail_id):
|
||||
self.email_service.mark_completed(mail_id)
|
||||
self.redis_handler.pop_mail(mail_id)
|
||||
|
||||
except MailReaderService.REDIS_EXCEPTIONS as e:
|
||||
logger.error(f"Redis error while processing mail {mail_id}: {str(e)}")
|
||||
self.redis_connected = False
|
||||
|
||||
@@ -1,49 +1,294 @@
|
||||
from prisma import Prisma
|
||||
import asyncio
|
||||
import time
|
||||
import logging
|
||||
import uvloop
|
||||
import threading
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional, AsyncGenerator, Protocol, Any
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import AsyncGenerator, Optional
|
||||
from prisma import Prisma
|
||||
|
||||
# Singleton pattern for Prisma client
|
||||
_prisma_client: Optional[Prisma] = None
|
||||
|
||||
async def get_prisma_client() -> Prisma:
|
||||
"""
|
||||
Get or initialize the Prisma client singleton.
|
||||
|
||||
Returns:
|
||||
Prisma: The initialized Prisma client instance
|
||||
"""
|
||||
global _prisma_client
|
||||
|
||||
if _prisma_client is None:
|
||||
_prisma_client = Prisma()
|
||||
await _prisma_client.connect()
|
||||
|
||||
return _prisma_client
|
||||
logger = logging.getLogger("prisma-service")
|
||||
|
||||
@asynccontextmanager
|
||||
async def prisma_client() -> AsyncGenerator[Prisma, None]:
|
||||
"""
|
||||
Context manager for Prisma client operations.
|
||||
|
||||
Yields:
|
||||
Prisma: The initialized Prisma client instance
|
||||
|
||||
Example:
|
||||
```python
|
||||
async with prisma_client() as db:
|
||||
users = await db.user.find_many()
|
||||
```
|
||||
"""
|
||||
client = await get_prisma_client()
|
||||
try:
|
||||
yield client
|
||||
except Exception as e:
|
||||
print(f"Database operation error: {e}")
|
||||
raise
|
||||
|
||||
async def disconnect_prisma():
|
||||
"""Disconnect the Prisma client when shutting down the application."""
|
||||
global _prisma_client
|
||||
if _prisma_client is not None:
|
||||
await _prisma_client.disconnect()
|
||||
_prisma_client = None
|
||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
|
||||
logging.getLogger("httpx").setLevel(logging.WARNING)
|
||||
logging.getLogger("httpcore").setLevel(logging.WARNING)
|
||||
|
||||
|
||||
class BaseModelClient(Protocol):
    """Structural type for a Prisma model delegate (the object returned by
    ``getattr(db, table)``).

    Declares only the subset of the Prisma client surface that PrismaService
    uses; it exists purely so the dynamic table lookup can be type-annotated.
    """

    async def find_many(self, **kwargs) -> list[Any]: ...
    async def find_first(self, **kwargs) -> Any: ...
    async def find_first_or_raise(self, **kwargs) -> Any: ...
    async def find_unique(self, **kwargs) -> Any: ...
    async def find_unique_or_raise(self, **kwargs) -> Any: ...
    async def create(self, **kwargs) -> Any: ...
    async def update(self, **kwargs) -> Any: ...
    async def delete(self, **kwargs) -> Any: ...
    async def delete_many(self, **kwargs) -> Any: ...
|
||||
|
||||
|
||||
class PrismaService:
    """Synchronous facade over the async Prisma client.

    A dedicated daemon thread runs a private asyncio event loop (uvloop
    policy).  Each public method submits the matching async helper to that
    loop via ``run_coroutine_threadsafe`` and blocks on the result, so the
    service is usable from plain synchronous code.

    Fixes over the previous version:
    - ``_lock`` was a coroutine method, yet ``_connect``/``_disconnect`` did
      ``async with self._lock`` (TypeError at runtime); it is now a real
      ``asyncio.Lock``.
    - ``_run`` raised its guard RuntimeError inside the same ``try`` whose
      ``except RuntimeError`` then ran the coroutine anyway, defeating the
      guard entirely.
    - ``delete``/``delete_many`` forwarded ``select=`` to async helpers that
      do not accept it (TypeError).
    - ``select`` projection is normalised through ``_project`` (the old code
      iterated models as bare pairs in some wrappers and called ``.items()``
      in others).
    """

    def __init__(self) -> None:
        self._loop: Optional[asyncio.AbstractEventLoop] = None
        self._thread: Optional[threading.Thread] = None
        self._client: Optional[Prisma] = None
        # Real asyncio lock guarding client connect/disconnect.
        self._lock: asyncio.Lock = asyncio.Lock()
        self._start_loop_thread()

    # ------------------------------------------------------------------
    # Event-loop plumbing
    # ------------------------------------------------------------------

    def _loop_runner(self) -> None:
        """Thread target: create and run the service's private event loop."""
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)
        try:
            self._loop.run_forever()
        finally:
            self._loop.close()

    def _start_loop_thread(self) -> None:
        """Start the daemon loop thread and wait until the loop object exists."""
        t = threading.Thread(target=self._loop_runner, name="PrismaLoop", daemon=True)
        t.start()
        self._thread = t
        # Bounded busy-wait: the thread assigns self._loop almost immediately.
        while self._loop is None:
            time.sleep(0.005)

    def _submit(self, coro):
        """Run *coro* on the background loop and block for its result.

        Raises:
            RuntimeError: if the background loop is not running.
        """
        if self._loop is None or not self._loop.is_running():
            raise RuntimeError("PrismaService event loop is not running.")
        fut = asyncio.run_coroutine_threadsafe(coro, self._loop)
        return fut.result()

    def _run(self, coro):
        """Drive *coro* on a fresh uvloop runner; refuse inside a running loop.

        Bug fix: the guard RuntimeError is now raised OUTSIDE the try block,
        so it can no longer be swallowed by its own ``except RuntimeError``.
        """
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No loop running in this thread -> safe to drive the coroutine.
            asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
            with asyncio.Runner() as runner:
                return runner.run(coro)
        raise RuntimeError("Async run is not allowed. Use sync methods instead.")

    # ------------------------------------------------------------------
    # Connection management
    # ------------------------------------------------------------------

    async def _aconnect(self) -> Prisma:
        """Connect (once) and return the shared Prisma client."""
        if self._client is not None:
            return self._client
        async with self._lock:  # serialize the first connect
            if self._client is None:
                logger.info("Connecting Prisma client...")
                client = Prisma()
                await client.connect()
                self._client = client
                logger.info("Prisma client connected.")
        return self._client

    async def _adisconnect(self) -> None:
        """Disconnect and forget the client, if connected."""
        async with self._lock:
            if self._client is not None:
                logger.info("Disconnecting Prisma client...")
                try:
                    await self._client.disconnect()
                finally:
                    self._client = None
                logger.info("Prisma client disconnected.")

    # Backwards-compatible aliases for the former duplicate helpers.
    _connect = _aconnect
    _disconnect = _adisconnect

    @asynccontextmanager
    async def _asession(self) -> AsyncGenerator[Prisma, None]:
        """Yield a connected client; log and re-raise on operation failure."""
        client = await self._aconnect()
        try:
            yield client
        except Exception:
            logger.exception("Database operation error")
            raise

    _session = _asession

    # ------------------------------------------------------------------
    # Shared helpers
    # ------------------------------------------------------------------

    @staticmethod
    def _model(db: Prisma, table: str) -> BaseModelClient:
        """Resolve the model delegate for *table* or raise ValueError."""
        delegate = getattr(db, table, None)
        if not delegate:
            raise ValueError(f"Table {table} not found")
        return delegate

    @staticmethod
    def _project(record: Any, select: dict) -> dict:
        """Project *record* (Prisma/pydantic model or dict) onto *select* keys."""
        data = record if isinstance(record, dict) else dict(record)
        return {k: v for k, v in data.items() if k in select}

    # ------------------------------------------------------------------
    # Async CRUD (run on the background loop)
    # ------------------------------------------------------------------

    async def _a_find_many(self, table: str, query: Optional[dict] = None, take: int = None, skip: int = None,
                           order: Optional[list[dict]] = None, select: Optional[dict] = None, include: Optional[dict] = None
                           ) -> list[dict]:
        # NOTE: `select` is accepted for signature compatibility; projection
        # is applied in the synchronous wrapper.
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.find_many(where=query, take=take, skip=skip, order=order or [], include=include)

    async def _a_find_first(self, table: str, query: Optional[dict] = None, order: Optional[list[dict]] = None,
                            include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.find_first(where=query, order=order or [], include=include)

    async def _a_find_first_or_throw(self, table: str, query: Optional[dict] = None, order: Optional[list[dict]] = None,
                                     include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.find_first_or_raise(where=query, order=order or [], include=include)

    async def _a_create(self, table: str, data: dict, include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.create(data=data, include=include)

    async def _a_update(self, table: str, where: dict, data: dict, include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.update(where=where, data=data, include=include)

    async def _a_delete(self, table: str, where: dict, include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.delete(where=where, include=include)

    async def _a_delete_many(self, table: str, where: dict, include: Optional[dict] = None):
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.delete_many(where=where, include=include)

    async def _a_find_unique(self, table: str, query: dict, include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.find_unique(where=query, include=include)

    async def _a_find_unique_or_throw(self, table: str, query: dict, include: Optional[dict] = None) -> Any:
        async with self._asession() as db:
            model = self._model(db, table)
            return await model.find_unique_or_raise(where=query, include=include)

    # ------------------------------------------------------------------
    # Synchronous public API
    # ------------------------------------------------------------------

    def find_unique_or_throw(self, table: str, query: dict, select: Optional[dict] = None, include: Optional[dict] = None):
        """Find one record by unique key; raise if missing; project on *select*."""
        result = self._submit(self._a_find_unique_or_throw(table=table, query=query, include=include))
        if select:
            result = self._project(result, select)
        return result

    def find_unique(self, table: str, query: dict, select: Optional[dict] = None, include: Optional[dict] = None):
        """Find one record by unique key or None; project on *select*."""
        result = self._submit(self._a_find_unique(table=table, query=query, include=include))
        if select and result:
            result = self._project(result, select)
        return result

    def find_many(
        self, table: str, query: Optional[dict] = None, take: int = None, skip: int = None,
        order: Optional[list[dict]] = None, select: Optional[dict] = None, include: Optional[dict] = None
    ):
        """List records matching *query*; project each on *select* if given."""
        result = self._submit(self._a_find_many(table=table, query=query, take=take, skip=skip, order=order, include=include))
        if select and result:
            result = [self._project(item, select) for item in result]
        return result

    def create(self, table: str, data: dict, select: Optional[dict] = None, include: Optional[dict] = None):
        """Create a record; project the created record on *select* if given."""
        result = self._submit(self._a_create(table=table, data=data, include=include))
        if select and result:
            result = self._project(result, select)
        return result

    def find_first_or_throw(self, table: str, query: Optional[dict] = None,
                            order: Optional[list[dict]] = None, select: Optional[dict] = None, include: Optional[dict] = None
                            ):
        """Find the first match; raise if missing; project on *select*."""
        result = self._submit(self._a_find_first_or_throw(table=table, query=query, order=order, include=include))
        if select and result:
            result = self._project(result, select)
        return result

    def find_first(self, table: str, query: Optional[dict] = None, select: Optional[dict] = None, order: Optional[list[dict]] = None, include: Optional[dict] = None):
        """Find the first match or None; project on *select*."""
        result = self._submit(self._a_find_first(table=table, query=query, order=order, include=include))
        if select and result:
            result = self._project(result, select)
        return result

    def update(self, table: str, where: dict, data: dict, select: Optional[dict] = None, include: Optional[dict] = None):
        """Update the record matching *where*; project on *select*."""
        result = self._submit(self._a_update(table=table, where=where, data=data, include=include))
        if select and result:
            result = self._project(result, select)
        return result

    def delete(self, table: str, where: dict, select: Optional[dict] = None, include: Optional[dict] = None):
        """Delete the record matching *where*; project on *select*.

        Bug fix: ``select`` is no longer forwarded to ``_a_delete`` (which
        does not accept it); projection happens here instead.
        """
        result = self._submit(self._a_delete(table=table, where=where, include=include))
        if select and result:
            result = self._project(result, select)
        return result

    def delete_many(self, table: str, where: dict, select: Optional[dict] = None, include: Optional[dict] = None):
        """Delete all records matching *where*.

        Bug fix: ``select`` is no longer forwarded to ``_a_delete_many``.
        Projection is applied only when the driver returns a list of records
        (Prisma's delete_many typically returns a count).
        """
        result = self._submit(self._a_delete_many(table=table, where=where, include=include))
        if select and isinstance(result, list):
            result = [self._project(item, select) for item in result]
        return result

    def disconnect(self) -> None:
        """Disconnect the client, stop the background loop and join its thread."""
        try:
            self._submit(self._adisconnect())
        finally:
            if self._loop and self._loop.is_running():
                self._loop.call_soon_threadsafe(self._loop.stop)
            if self._thread and self._thread.is_alive():
                self._thread.join(timeout=2.0)
            self._loop = None
            self._thread = None
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import logging
|
||||
|
||||
from json import loads, dumps
|
||||
from contextlib import contextmanager
|
||||
from time import sleep
|
||||
from redis import Redis, RedisError, ConnectionError as RedisConnectionError
|
||||
@@ -90,6 +91,10 @@ class RedisHandler:
|
||||
def sadd(self, key: str, value):
|
||||
"""Add a value to a Redis set"""
|
||||
return self.redis_client.sadd(key, value)
|
||||
|
||||
def ismember(self, key: str, value):
|
||||
"""Check if a value is a member of a Redis set"""
|
||||
return self.redis_client.sismember(key, value)
|
||||
|
||||
def get(self, key: str):
|
||||
"""Get a value from Redis by key"""
|
||||
@@ -98,7 +103,15 @@ class RedisHandler:
|
||||
def set(self, key: str, value):
|
||||
"""Set a key-value pair in Redis"""
|
||||
return self.redis_client.set(key, value)
|
||||
|
||||
|
||||
def delete_value(self, key: str, value):
|
||||
"""Delete a value from a Redis value by finding key"""
|
||||
get_redis = self.get(key)
|
||||
if get_redis:
|
||||
get_redis: dict = loads(get_redis)
|
||||
get_redis.pop(value)
|
||||
self.set(key, dumps(get_redis))
|
||||
|
||||
def rpush(self, key: str, value):
|
||||
"""Append a value to a Redis list"""
|
||||
return self.redis_client.rpush(key, value)
|
||||
@@ -107,9 +120,13 @@ class RedisHandler:
|
||||
"""Get an element from a Redis list by its index"""
|
||||
return self.redis_client.lindex(key, index)
|
||||
|
||||
def spop(self, key: str, value):
|
||||
"""Remove and return a random member from a Redis set"""
|
||||
return self.redis_client.spop(key, value)
|
||||
def spop(self, key: str, count=1):
|
||||
"""Remove and return random members from a Redis set"""
|
||||
return self.redis_client.spop(key, count)
|
||||
|
||||
def srem(self, key: str, value):
|
||||
"""Remove a specific member from a Redis set"""
|
||||
return self.redis_client.srem(key, value)
|
||||
|
||||
def get_all_tasks(self):
|
||||
"""Get all keys matching the task prefix pattern"""
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
generator client {
|
||||
provider = "prisma-client-py"
|
||||
// binaryTargets = ["native", "linux-musl-openssl-3.0.x"]
|
||||
enable_experimental_decimal = true
|
||||
recursive_type_depth = -1
|
||||
interface = "asyncio"
|
||||
@@ -406,6 +405,7 @@ model account_records {
|
||||
accounting_receipt_number Int @default(0)
|
||||
status_id Int @default(0) @db.SmallInt
|
||||
approved_record Boolean @default(false)
|
||||
is_predicted Boolean @default(false)
|
||||
import_file_name String? @db.VarChar
|
||||
receive_debit Int?
|
||||
receive_debit_uu_id String? @db.VarChar
|
||||
|
||||
12
ServicesRunner/Depends/schema.txt
Normal file
12
ServicesRunner/Depends/schema.txt
Normal file
@@ -0,0 +1,12 @@
|
||||
generator client {
|
||||
provider = "prisma-client-py"
|
||||
// binaryTargets = ["native", "linux-musl-openssl-3.0.x"]
|
||||
enable_experimental_decimal = true
|
||||
recursive_type_depth = -1
|
||||
interface = "asyncio"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
provider = "postgresql"
|
||||
url = "postgresql://postgres:password@10.10.2.14:5432/postgres?schema=public"
|
||||
}
|
||||
@@ -1,12 +1,15 @@
|
||||
import logging
|
||||
|
||||
from time import sleep
|
||||
from json import loads, dumps
|
||||
from uuid import uuid4
|
||||
from datetime import datetime
|
||||
|
||||
from Depends.config import Status, ConfigServices, RedisTaskObject, RedisData
|
||||
from Depends.redis_handlers import RedisHandler
|
||||
from redis import Redis
|
||||
from redis.exceptions import WatchError, ResponseError
|
||||
|
||||
# Configure logging
|
||||
logger = logging.getLogger('Service Task Retriever')
|
||||
|
||||
|
||||
@@ -15,6 +18,7 @@ class ServiceTaskRetriever:
|
||||
Class for retrieving and updating Redis task objects by UUID or mail ID.
|
||||
Provides direct access to task objects and service-specific data without iteration.
|
||||
"""
|
||||
SENTINEL = "__DEL__SENTINEL__"
|
||||
|
||||
def __init__(self, redis_handler=None):
|
||||
"""
|
||||
@@ -28,7 +32,7 @@ class ServiceTaskRetriever:
|
||||
else:
|
||||
self.redis_handler = RedisHandler()
|
||||
|
||||
self.redis_client = self.redis_handler.redis_client
|
||||
self.redis_client: Redis = self.redis_handler.redis_client
|
||||
self.redis_prefix = ConfigServices.MAIN_TASK_PREFIX
|
||||
self.mailid_index_key = ConfigServices.TASK_MAILID_INDEX_PREFIX
|
||||
self.uuid_index_key = ConfigServices.TASK_UUID_INDEX_PREFIX
|
||||
@@ -89,13 +93,13 @@ class ServiceTaskRetriever:
|
||||
task_uuid: UUID of the task
|
||||
index: Index of the task
|
||||
"""
|
||||
already_dict = self.redis_handler.get(self.mailid_index_key)
|
||||
already_dict = self.redis_handler.get(self.uuid_index_key)
|
||||
if already_dict:
|
||||
already_dict = loads(already_dict)
|
||||
already_dict[str(task_uuid)] = index
|
||||
self.redis_handler.set(self.mailid_index_key, dumps(already_dict))
|
||||
self.redis_handler.set(self.uuid_index_key, dumps(already_dict))
|
||||
else:
|
||||
self.redis_handler.set(self.mailid_index_key, dumps({str(task_uuid): index}))
|
||||
self.redis_handler.set(self.uuid_index_key, dumps({str(task_uuid): index}))
|
||||
|
||||
def set_index_mail_id(self, mail_id: str, index: int):
|
||||
"""
|
||||
@@ -135,6 +139,65 @@ class ServiceTaskRetriever:
|
||||
if get_index_by_mail_id := self.get_index_by_mail_id(mail_id):
|
||||
self.set_index_uuid(task_uuid, get_index_by_mail_id)
|
||||
|
||||
def delete_task(self, task_uuid: str, max_retries: int = 20, base_sleep: float = 0.01):
|
||||
"""
|
||||
Delete a task object by its UUID
|
||||
Args:
|
||||
task_uuid: UUID of the task
|
||||
max_retries: Maximum number of retries
|
||||
"""
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
with self.redis_client.pipeline() as pipe:
|
||||
pipe.watch(ConfigServices.MAIN_TASK_PREFIX, ConfigServices.TASK_UUID_INDEX_PREFIX, ConfigServices.TASK_MAILID_INDEX_PREFIX)
|
||||
raw_uuid = pipe.get(ConfigServices.TASK_UUID_INDEX_PREFIX)
|
||||
raw_mail = pipe.get(ConfigServices.TASK_MAILID_INDEX_PREFIX)
|
||||
llen = pipe.llen(ConfigServices.MAIN_TASK_PREFIX)
|
||||
if not llen:
|
||||
pipe.unwatch()
|
||||
return False
|
||||
uuid_map = loads(raw_uuid.decode()) if raw_uuid else {}
|
||||
mail_map = loads(raw_mail.decode()) if raw_mail else {}
|
||||
if task_uuid not in uuid_map:
|
||||
pipe.unwatch()
|
||||
return False
|
||||
index = int(uuid_map[task_uuid])
|
||||
if index < 0:
|
||||
index = int(llen) + index
|
||||
if index < 0 or index >= int(llen):
|
||||
pipe.unwatch()
|
||||
return False
|
||||
uuid_key_to_del = next((k for k, v in uuid_map.items() if int(v) == index), None)
|
||||
mail_key_to_del = next((k for k, v in mail_map.items() if int(v) == index), None)
|
||||
dup_uuid_count = sum(1 for v in uuid_map.values() if int(v) == index)
|
||||
dup_mail_count = sum(1 for v in mail_map.values() if int(v) == index)
|
||||
if dup_uuid_count > 1:
|
||||
pass
|
||||
if dup_mail_count > 1:
|
||||
pass
|
||||
if uuid_key_to_del is not None:
|
||||
uuid_map.pop(uuid_key_to_del, None)
|
||||
if mail_key_to_del is not None:
|
||||
mail_map.pop(mail_key_to_del, None)
|
||||
for k, v in list(uuid_map.items()):
|
||||
if int(v) > index: uuid_map[k] = int(v) - 1
|
||||
for k, v in list(mail_map.items()):
|
||||
if int(v) > index: mail_map[k] = int(v) - 1
|
||||
sentinel = f"__DEL__{uuid4()}__"
|
||||
pipe.multi()
|
||||
pipe.lset(ConfigServices.MAIN_TASK_PREFIX, index, sentinel)
|
||||
pipe.lrem(ConfigServices.MAIN_TASK_PREFIX, 1, sentinel)
|
||||
pipe.set(ConfigServices.TASK_UUID_INDEX_PREFIX, dumps(uuid_map))
|
||||
pipe.set(ConfigServices.TASK_MAILID_INDEX_PREFIX, dumps(mail_map))
|
||||
pipe.execute()
|
||||
mail_key_to_del = int(mail_key_to_del)
|
||||
self.redis_client.sadd(ConfigServices.TASK_DELETED_PREFIX, mail_key_to_del)
|
||||
return True
|
||||
except (WatchError, ResponseError):
|
||||
sleep(base_sleep * (1.5 ** attempt))
|
||||
continue
|
||||
return False
|
||||
|
||||
def get_task_by_uuid(self, task_uuid: str) -> RedisTaskObject:
|
||||
"""
|
||||
Get a task object directly by its UUID without iteration
|
||||
@@ -248,7 +311,7 @@ class ServiceTaskRetriever:
|
||||
self._validate_service_name(service_name)
|
||||
|
||||
# Create new RedisData with proper defaults for all services
|
||||
data_dict = {'MailReader': None, 'MailParser': [], 'FinderIban': {}, 'FinderComment': {}}
|
||||
data_dict = {'MailReader': None, 'MailParser': [], 'FinderIban': [], 'FinderComment': []}
|
||||
# Set the actual service data
|
||||
data_dict['MailReader'] = mail_reader
|
||||
data_dict['MailParser'] = mail_parser
|
||||
@@ -258,17 +321,11 @@ class ServiceTaskRetriever:
|
||||
|
||||
# Create new task object
|
||||
write_object = RedisTaskObject(
|
||||
task=task_uuid,
|
||||
data=redis_data,
|
||||
completed=False,
|
||||
service=service_name,
|
||||
status=Status.COMPLETED,
|
||||
created_at=datetime.now().isoformat(),
|
||||
is_completed=False
|
||||
task=task_uuid, data=redis_data, completed=False, service=service_name, status=Status.COMPLETED, created_at=datetime.now().isoformat(), is_completed=False
|
||||
)
|
||||
|
||||
# Convert to dict for serialization
|
||||
write_object = write_object.model_dump()
|
||||
write_object = write_object.dict()
|
||||
|
||||
# Push new task to Redis list
|
||||
redis_write_ = self.redis_client.rpush(self.redis_prefix, dumps(write_object))
|
||||
@@ -428,7 +485,6 @@ class ServiceTaskRetriever:
|
||||
if not self.redis_client.lset(self.redis_prefix, int(index_value), dumps(task_object_dict)):
|
||||
raise ValueError(f"Failed to write updated task data for UUID {task_uuid}")
|
||||
return True
|
||||
|
||||
|
||||
def update_task_status(self, task_uuid: str, is_completed: bool = True, status: str = Status.COMPLETED) -> bool:
|
||||
"""
|
||||
@@ -549,15 +605,6 @@ class MailReaderService:
|
||||
self.service_retriever = ServiceTaskRetriever(self.redis_handler)
|
||||
self._initialized = True
|
||||
|
||||
def ensure_connection(self):
|
||||
"""
|
||||
Ensure Redis connection is established
|
||||
|
||||
Returns:
|
||||
bool: True if connection is established, False otherwise
|
||||
"""
|
||||
return self.redis_handler.ensure_connection()
|
||||
|
||||
def get_task_by_uuid(self, task_uuid: str) -> RedisTaskObject:
|
||||
"""
|
||||
Get a task object by its UUID
|
||||
@@ -655,6 +702,23 @@ class MailReaderService:
|
||||
"""
|
||||
return self.service_retriever.update_task_status(task_uuid, is_completed, status)
|
||||
|
||||
def change_service(self, task_uuid: str, service_name: str, status: str = Status.COMPLETED, completed: bool = False):
|
||||
"""
|
||||
Change the service of a task by UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task to update
|
||||
service_name: Name of the service to update
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the task is not found
|
||||
ValueError: If the update fails
|
||||
"""
|
||||
return self.service_retriever.update_task_service(task_uuid, service_name, status, completed)
|
||||
|
||||
def process_mail(self, mail_id: str, mail_data: dict, service_prefix: str, counter: int) -> dict:
|
||||
"""
|
||||
Process mail data and store it in Redis
|
||||
@@ -675,20 +739,42 @@ class MailReaderService:
|
||||
attachments = mail_without_attachments.pop('attachments', [])
|
||||
create_task = dict(task_uuid=task_uuid, service_name=service_prefix, mail_reader=mail_without_attachments, mail_parser=attachments)
|
||||
self.service_retriever.create_task_with_uuid(**create_task)
|
||||
return {'status': 'success', 'action': 'stored_new_mail', 'counter': counter}
|
||||
else:
|
||||
try:
|
||||
task = self.service_retriever.get_task_by_mail_id(mail_id)
|
||||
if task is None and task.data and task.data.MailReader:
|
||||
stored_id = task.data.MailReader.id
|
||||
if stored_id != mail_id:
|
||||
return {'status': 'error', 'action': 'id_mismatch', 'stored_id': stored_id}
|
||||
return {'status': 'success', 'action': 'checked_existing_mail', 'is_completed': task.is_completed if task else False}
|
||||
except FileNotFoundError:
|
||||
return {'status': 'error', 'action': 'not_found', 'error': f'Mail with ID {mail_id} not found in index'}
|
||||
return task_uuid, counter
|
||||
except Exception as e:
|
||||
logger.error(f"Mail Reader Service Error processing mail {mail_id}: {str(e)}")
|
||||
return {'status': 'error', 'action': 'exception', 'error': str(e)}
|
||||
return None, counter
|
||||
|
||||
def pop_mail(self, mail_id: str):
|
||||
"""
|
||||
Pop a mail from Redis
|
||||
|
||||
Args:
|
||||
mail_id: ID of the mail to pop
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the mail is not found
|
||||
ValueError: If the pop fails
|
||||
"""
|
||||
try:
|
||||
if self.redis_handler.ismember(f'{ConfigServices.TASK_SEEN_PREFIX}', int(mail_id)):
|
||||
self.redis_handler.srem(f'{ConfigServices.TASK_SEEN_PREFIX}', int(mail_id))
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Mail Reader Service Error popping mail {int(mail_id)}: {str(e)}")
|
||||
return False
|
||||
|
||||
def check_mail_is_ready_to_delete(self, mail_id: str):
|
||||
try:
|
||||
if self.redis_handler.ismember(f'{ConfigServices.TASK_DELETED_PREFIX}', int(mail_id)):
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Mail Reader Service Error checking mail {int(mail_id)}: {str(e)}")
|
||||
return False
|
||||
|
||||
|
||||
class MailParserService:
|
||||
@@ -745,6 +831,166 @@ class MailParserService:
|
||||
def fetch_all_tasks(self) -> list[RedisTaskObject]:
|
||||
return self.service_retriever.fetch_all_tasks()
|
||||
|
||||
def get_task_by_uuid(self, task_uuid: str) -> RedisTaskObject:
|
||||
"""
|
||||
Get a task object by its UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task to retrieve
|
||||
|
||||
Returns:
|
||||
RedisTaskObject: The task object if found
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the UUID index or task is not found
|
||||
"""
|
||||
return self.service_retriever.get_task_by_uuid(task_uuid)
|
||||
|
||||
def get_service_data_by_uuid(self, task_uuid: str, service_name: str):
|
||||
"""
|
||||
Get service-specific data from a task by UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task
|
||||
service_name: Name of the service to extract data for
|
||||
|
||||
Returns:
|
||||
Any: Service-specific data if found
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the task or service data is not found
|
||||
"""
|
||||
return self.service_retriever.get_service_data_by_uuid(task_uuid, service_name)
|
||||
|
||||
def update_service_data(self, task_uuid: str, service_name: str, service_data: dict) -> bool:
|
||||
"""
|
||||
Update service-specific data in a task by UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task to update
|
||||
service_name: Name of the service data to update
|
||||
service_data: New service data
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the task is not found
|
||||
ValueError: If the update fails or service name is invalid
|
||||
"""
|
||||
return self.service_retriever.update_service_data(task_uuid, service_name, service_data)
|
||||
|
||||
def change_service(self, task_uuid: str, service_name: str, status: str = Status.COMPLETED, completed: bool = False) -> bool:
|
||||
"""
|
||||
Update the service of a task by UUID
|
||||
"""
|
||||
return self.service_retriever.update_task_service(task_uuid, service_name, status, completed)
|
||||
|
||||
def update_task_status(self, task_uuid: str, is_completed: bool = True, status: str = Status.COMPLETED) -> bool:
|
||||
"""
|
||||
Update the status of a task by UUID
|
||||
"""
|
||||
return self.service_retriever.update_task_status(task_uuid, is_completed, status)
|
||||
|
||||
|
||||
class IbanFinderService:
|
||||
"""
|
||||
Iban Finder Service
|
||||
"""
|
||||
|
||||
# Singleton instance
|
||||
_instance = None
|
||||
REDIS_EXCEPTIONS = RedisHandler.REDIS_EXCEPTIONS
|
||||
|
||||
def __init__(self):
|
||||
if hasattr(self, '_initialized') and self._initialized:
|
||||
return
|
||||
self.service_retriever = ServiceTaskRetriever()
|
||||
self._initialized = True
|
||||
|
||||
def fetch_all_tasks(self) -> list[RedisTaskObject]:
|
||||
return self.service_retriever.fetch_all_tasks()
|
||||
|
||||
def get_task_by_uuid(self, task_uuid: str) -> RedisTaskObject:
|
||||
"""
|
||||
Get a task object by its UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task to retrieve
|
||||
|
||||
Returns:
|
||||
RedisTaskObject: The task object if found
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the UUID index or task is not found
|
||||
"""
|
||||
return self.service_retriever.get_task_by_uuid(task_uuid)
|
||||
|
||||
def get_service_data_by_uuid(self, task_uuid: str, service_name: str):
|
||||
"""
|
||||
Get service-specific data from a task by UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task
|
||||
service_name: Name of the service to extract data for
|
||||
|
||||
Returns:
|
||||
Any: Service-specific data if found
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the task or service data is not found
|
||||
"""
|
||||
return self.service_retriever.get_service_data_by_uuid(task_uuid, service_name)
|
||||
|
||||
def update_service_data(self, task_uuid: str, service_name: str, service_data: dict) -> bool:
|
||||
"""
|
||||
Update service-specific data in a task by UUID
|
||||
|
||||
Args:
|
||||
task_uuid: UUID of the task to update
|
||||
service_name: Name of the service data to update
|
||||
service_data: New service data
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the task is not found
|
||||
ValueError: If the update fails or service name is invalid
|
||||
"""
|
||||
return self.service_retriever.update_service_data(task_uuid, service_name, service_data)
|
||||
|
||||
def change_service(self, task_uuid: str, service_name: str, status: str = Status.COMPLETED, completed: bool = False) -> bool:
|
||||
"""
|
||||
Update the service of a task by UUID
|
||||
"""
|
||||
return self.service_retriever.update_task_service(task_uuid, service_name, status, completed)
|
||||
|
||||
def update_task_status(self, task_uuid: str, is_completed: bool = True, status: str = Status.COMPLETED) -> bool:
|
||||
"""
|
||||
Update the status of a task by UUID
|
||||
"""
|
||||
return self.service_retriever.update_task_status(task_uuid, is_completed, status)
|
||||
|
||||
|
||||
class ProcessCommentFinderService:
|
||||
"""
|
||||
Process Comment Finder Service
|
||||
"""
|
||||
|
||||
# Singleton instance
|
||||
_instance = None
|
||||
REDIS_EXCEPTIONS = RedisHandler.REDIS_EXCEPTIONS
|
||||
|
||||
def __init__(self):
|
||||
if hasattr(self, '_initialized') and self._initialized:
|
||||
return
|
||||
self.service_retriever = ServiceTaskRetriever()
|
||||
self._initialized = True
|
||||
|
||||
def fetch_all_tasks(self) -> list[RedisTaskObject]:
|
||||
return self.service_retriever.fetch_all_tasks()
|
||||
|
||||
def ensure_connection(self):
|
||||
"""
|
||||
Ensure Redis connection is established
|
||||
@@ -832,4 +1078,60 @@ class MailParserService:
|
||||
"""
|
||||
Update the status of a task by UUID
|
||||
"""
|
||||
return self.service_retriever.update_task_status(task_uuid, is_completed, status)
|
||||
return self.service_retriever.update_task_status(task_uuid, is_completed, status)
|
||||
|
||||
def delete_task(self, task_uuid: str, max_retries: int = 5):
|
||||
"""
|
||||
Delete a task object by its UUID
|
||||
"""
|
||||
return self.service_retriever.delete_task(task_uuid, max_retries)
|
||||
|
||||
|
||||
class ProcessCommentParserService:
|
||||
"""
|
||||
Class for processing comment parser tasks
|
||||
"""
|
||||
|
||||
instance = None
|
||||
REDIS_EXCEPTIONS = RedisHandler.REDIS_EXCEPTIONS
|
||||
|
||||
def __init__(self):
|
||||
if hasattr(self, '_initialized') and self._initialized:
|
||||
return
|
||||
self.service_retriever: ServiceTaskRetriever = ServiceTaskRetriever()
|
||||
self._initialized = True
|
||||
|
||||
def fetch_all_tasks(self) -> list[RedisTaskObject]:
|
||||
"""
|
||||
Get all tasks from Redis
|
||||
|
||||
Returns:
|
||||
list: List of task objects
|
||||
"""
|
||||
return self.service_retriever.fetch_all_tasks_parser()
|
||||
|
||||
def get_task_requirements(self) -> dict:
|
||||
"""
|
||||
Get task requirements from Redis
|
||||
Returns:
|
||||
dict: Task requirements if found
|
||||
"""
|
||||
if task_object := self.service_retriever.redis_handler.get(f'{ConfigServices.TASK_COMMENT_PARSER}'):
|
||||
return loads(task_object)
|
||||
return None
|
||||
|
||||
def set_task_requirements(self, task_object: RedisTaskObject):
|
||||
"""
|
||||
Set task requirements in Redis
|
||||
"""
|
||||
return self.service_retriever.redis_handler.set(f'{ConfigServices.TASK_COMMENT_PARSER}', dumps(task_object))
|
||||
|
||||
|
||||
def set_predict_account_record(self, predict_account_record: dict):
|
||||
return self.service_retriever.redis_handler.set(f'{ConfigServices.TASK_PREDICT_RESULT}', dumps(predict_account_record))
|
||||
|
||||
|
||||
def get_predict_account_record(self):
|
||||
if predict_account_record := self.service_retriever.redis_handler.get(f'{ConfigServices.TASK_PREDICT_RESULT}'):
|
||||
return loads(predict_account_record)
|
||||
return None
|
||||
54
ServicesRunner/Depends/template_accounts.html
Normal file
54
ServicesRunner/Depends/template_accounts.html
Normal file
@@ -0,0 +1,54 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Gelen Banka Kayıtları</title>
|
||||
<style>
|
||||
body {
|
||||
font-family: Arial, sans-serif;
|
||||
margin: 20px;
|
||||
}
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
table, th, td {
|
||||
border: 1px solid black;
|
||||
}
|
||||
th, td {
|
||||
padding: 10px;
|
||||
text-align: left;
|
||||
}
|
||||
th {
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Günaydın, Admin</h1>
|
||||
<br>
|
||||
<p>Banka Kayıtları : {{today}} </p>
|
||||
<p><b>Son Bakiye : {{bank_balance}} </b></p>
|
||||
<p><b>{{"Status : İkinci Bakiye Hatalı" if balance_error else "Status :OK"}}</b></p>
|
||||
<table border="1">
|
||||
<thead>
|
||||
<tr>
|
||||
{% for header in headers %}
|
||||
<th>{{ header }}</th>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in rows %}
|
||||
<tr>
|
||||
{% for cell in row %}
|
||||
<td>{{ cell }}</td>
|
||||
{% endfor %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<p>Teşekkür ederiz,<br>Evyos Yönetim<br>Saygılarımızla</p>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,4 +1,6 @@
|
||||
prisma==0.9.1
|
||||
python-dotenv==1.0.0
|
||||
asyncio==3.4.3
|
||||
uvloop>=0.19
|
||||
uvloop>=0.19
|
||||
redis>=6.4.0
|
||||
unidecode>=1.3.8
|
||||
arrow>=1.3.0
|
||||
1
ServicesTask/.python-version
Normal file
1
ServicesTask/.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.12
|
||||
0
ServicesTask/README.md
Normal file
0
ServicesTask/README.md
Normal file
50
ServicesTask/app/core/config.py
Normal file
50
ServicesTask/app/core/config.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import os
|
||||
import redis
|
||||
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class Env:
|
||||
REDIS_HOST: str = os.getenv("REDIS_HOST", "redis")
|
||||
REDIS_PORT: int = int(os.getenv("REDIS_PORT", "6379"))
|
||||
REDIS_DB: int = int(os.getenv("REDIS_DB", "0"))
|
||||
REDIS_USERNAME: Optional[str] = os.getenv("REDIS_USERNAME")
|
||||
REDIS_PASSWORD: Optional[str] = os.getenv("REDIS_PASSWORD")
|
||||
REDIS_STREAM_PUBLISH: str = os.getenv("REDIS_STREAM_PUBLISH", "DEFAULT:REGISTER:DONTUSE")
|
||||
REDIS_STREAM_TASKS: str = os.getenv("REDIS_STREAM_TASKS", "DEFAULT:REGISTER:DONTUSE")
|
||||
|
||||
BATCH_SIZE: int = int(os.getenv("BATCH_SIZE", "5"))
|
||||
MAX_RETRIES: int = int(os.getenv("MAX_RETRIES", "3"))
|
||||
IDLE_RECLAIM_MS: int = int(os.getenv("IDLE_RECLAIM_MS", "30000"))
|
||||
|
||||
SQLITE_PATH: str = os.getenv("SQLITE_PATH", "/app/data/queue.db")
|
||||
|
||||
class RedisConfig:
|
||||
def __init__(
|
||||
self,
|
||||
host: str = Env.REDIS_HOST,
|
||||
port: int = Env.REDIS_PORT,
|
||||
db: int = Env.REDIS_DB,
|
||||
username: Optional[str] = Env.REDIS_USERNAME,
|
||||
password: Optional[str] = Env.REDIS_PASSWORD,
|
||||
batch_size: int = Env.BATCH_SIZE,
|
||||
max_retries: int = Env.MAX_RETRIES,
|
||||
idle_reclaim_ms: int = Env.IDLE_RECLAIM_MS,
|
||||
):
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.db = db
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.batch_size = batch_size
|
||||
self.max_retries = max_retries
|
||||
self.idle_reclaim_ms = idle_reclaim_ms
|
||||
|
||||
def client(self) -> redis.Redis:
|
||||
return redis.Redis(
|
||||
host=self.host,
|
||||
port=self.port,
|
||||
db=self.db,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
)
|
||||
49
ServicesTask/app/core/metrics.py
Normal file
49
ServicesTask/app/core/metrics.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import os
|
||||
from prometheus_client import Counter, start_http_server
|
||||
|
||||
_METRICS_STARTED = False
|
||||
|
||||
NS = os.getenv("METRICS_NS", "servicestask")
|
||||
SERVICE_NAME = os.getenv("SERVICE_NAME", "db-service")
|
||||
|
||||
QUEUE_ENQUEUED = Counter(
|
||||
f"{NS}_queue_enqueued_total", "Enqueued tasks",
|
||||
labelnames=("service","queue","type")
|
||||
)
|
||||
QUEUE_DUPLICATE = Counter(
|
||||
f"{NS}_queue_duplicate_skipped_total", "Duplicate skipped",
|
||||
labelnames=("service","queue","type")
|
||||
)
|
||||
QUEUE_DONE = Counter(
|
||||
f"{NS}_queue_done_total", "Done tasks",
|
||||
labelnames=("service","queue","type")
|
||||
)
|
||||
QUEUE_FAILED = Counter(
|
||||
f"{NS}_queue_failed_total", "Failed tasks",
|
||||
labelnames=("service","queue","type")
|
||||
)
|
||||
QUEUE_RETRY = Counter(
|
||||
f"{NS}_queue_retry_total", "Retry attempts",
|
||||
labelnames=("service","queue","type")
|
||||
)
|
||||
|
||||
def start_server():
|
||||
global _METRICS_STARTED
|
||||
if _METRICS_STARTED:
|
||||
return
|
||||
port = int(os.getenv("METRICS_PORT", "8000"))
|
||||
start_http_server(port)
|
||||
_METRICS_STARTED = True
|
||||
|
||||
def observe(status: str, queue: str, type_: str):
|
||||
labels = (SERVICE_NAME, queue, type_ or "unknown")
|
||||
if status == "enqueued":
|
||||
QUEUE_ENQUEUED.labels(*labels).inc()
|
||||
elif status == "duplicate_skipped":
|
||||
QUEUE_DUPLICATE.labels(*labels).inc()
|
||||
elif status == "done":
|
||||
QUEUE_DONE.labels(*labels).inc()
|
||||
elif status == "failed":
|
||||
QUEUE_FAILED.labels(*labels).inc()
|
||||
elif status == "retry":
|
||||
QUEUE_RETRY.labels(*labels).inc()
|
||||
11
ServicesTask/app/core/nats.conf
Normal file
11
ServicesTask/app/core/nats.conf
Normal file
@@ -0,0 +1,11 @@
|
||||
server_name: "nats-main"
|
||||
|
||||
port: 4222
|
||||
http: 8222
|
||||
|
||||
jetstream: {
|
||||
store_dir: "/data/jetstream",
|
||||
max_mem_store: 512MB,
|
||||
max_file_store: 10GB
|
||||
}
|
||||
|
||||
68
ServicesTask/app/core/sqlite_tasks.py
Normal file
68
ServicesTask/app/core/sqlite_tasks.py
Normal file
@@ -0,0 +1,68 @@
|
||||
import json
|
||||
import aiosqlite
|
||||
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
|
||||
class TasksRepoAsync:
|
||||
"""
|
||||
SQLITE Task Manager
|
||||
"""
|
||||
def __init__(self, db_path: str):
|
||||
self.db_path = db_path
|
||||
|
||||
async def init(self) -> None:
|
||||
async with aiosqlite.connect(self.db_path) as db:
|
||||
await db.execute("PRAGMA journal_mode=WAL;")
|
||||
await db.execute("PRAGMA synchronous=NORMAL;")
|
||||
await db.execute("""
|
||||
CREATE TABLE IF NOT EXISTS tasks(
|
||||
task_id TEXT PRIMARY KEY,
|
||||
queue TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
payload_json TEXT NOT NULL,
|
||||
created_at INTEGER NOT NULL,
|
||||
status TEXT DEFAULT 'pending',
|
||||
attempts INTEGER DEFAULT 0,
|
||||
last_error TEXT,
|
||||
last_msg_id TEXT
|
||||
);
|
||||
""")
|
||||
cols = await self._columns(db, "tasks")
|
||||
if "last_msg_id" not in cols:
|
||||
await db.execute("ALTER TABLE tasks ADD COLUMN last_msg_id TEXT;")
|
||||
await db.commit()
|
||||
|
||||
async def insert_task(self, task: Dict[str, Any], last_msg_id: Optional[str]=None) -> None:
|
||||
async with aiosqlite.connect(self.db_path) as db:
|
||||
await db.execute("""INSERT OR REPLACE INTO tasks (task_id, queue, type, payload_json, created_at, status, attempts, last_msg_id) VALUES(?,?,?,?,?,'pending',?,?)""",
|
||||
(task["task_id"], task["queue"], task["type"], json.dumps(task["payload"]), task["created_at"], int(task.get("_attempts", 0)), last_msg_id))
|
||||
await db.commit()
|
||||
|
||||
async def mark_done(self, task_id: str, attempts: int) -> None:
|
||||
await self._update(task_id, status="done", attempts=attempts, error=None)
|
||||
|
||||
async def mark_failed(self, task_id: str, attempts: int, error: str) -> None:
|
||||
await self._update(task_id, status="failed", attempts=attempts, error=error)
|
||||
|
||||
async def mark_retry(self, task_id: str, attempts: int, error: str, last_msg_id: str) -> None:
|
||||
await self._update(task_id, status="retry", attempts=attempts, error=error, last_msg_id=last_msg_id)
|
||||
|
||||
async def _update(self, task_id: str, *, status: str, attempts: Optional[int]=None, error: Optional[str]=None, last_msg_id: Optional[str]=None) -> None:
|
||||
sets, params = ["status=?","last_error=?"], [status, error]
|
||||
if attempts is not None:
|
||||
sets.append("attempts=?"); params.append(int(attempts))
|
||||
if last_msg_id is not None:
|
||||
sets.append("last_msg_id=?"); params.append(last_msg_id)
|
||||
params.append(task_id)
|
||||
sql = f"UPDATE tasks SET {', '.join(sets)} WHERE task_id=?"
|
||||
async with aiosqlite.connect(self.db_path) as db:
|
||||
await db.execute(sql, tuple(params))
|
||||
await db.commit()
|
||||
|
||||
async def _columns(self, db: aiosqlite.Connection, table: str) -> List[str]:
|
||||
cols: List[str] = []
|
||||
async with db.execute(f"PRAGMA table_info({table})") as cur:
|
||||
async for row in cur:
|
||||
cols.append(row[1])
|
||||
return cols
|
||||
15
ServicesTask/app/core/utils.py
Normal file
15
ServicesTask/app/core/utils.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import json
|
||||
import time
|
||||
|
||||
from typing import Any, Dict
|
||||
|
||||
def now_ms() -> int:
|
||||
return int(time.time() * 1000)
|
||||
|
||||
def jd(obj: Dict[str, Any]) -> Dict[bytes, bytes]:
|
||||
"""JSON to Redis fields."""
|
||||
return {"data": json.dumps(obj).encode("utf-8")}
|
||||
|
||||
def jl(fields: Dict[bytes, bytes]) -> Dict[str, Any]:
|
||||
"""Redis fields to JSON."""
|
||||
return json.loads(fields[b"data"].decode("utf-8"))
|
||||
71
ServicesTask/app/queue/dual_queue.py
Normal file
71
ServicesTask/app/queue/dual_queue.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import uuid
|
||||
import time
|
||||
import random
|
||||
from typing import Any, Dict, Optional, Callable
|
||||
|
||||
from core.utils import now_ms, jd, jl
|
||||
from core.config import RedisConfig, Env
|
||||
from storage.sqlite_queue import SqliteQueue
|
||||
|
||||
class DualQueueProducer:
    """Writes every job both to a Redis Stream and to SQLite (dual durability)."""

    def __init__(self, stream: str, sqlite_db: SqliteQueue, cfg: RedisConfig):
        self.stream = stream
        self.sqlite = sqlite_db
        self.cfg = cfg
        self.r = cfg.client()

    def enqueue(self, payload: Dict[str, Any], type_: str) -> str:
        """Publish *payload* as a task on the stream and mirror it to SQLite.

        A task_id already present in the payload is reused; otherwise a new
        UUID4 is generated. Returns the task id.
        """
        task_id = payload.get("task_id") or str(uuid.uuid4())
        task = {
            "task_id": task_id,
            "queue": self.stream,
            "type": type_,
            "payload": payload,
            "created_at": now_ms(),
            "_attempts": 0,
        }
        self.r.xadd(self.stream, jd(task))
        self.sqlite.add_task(task)
        return task_id
|
||||
|
||||
class DualQueueConsumer:
    """
    Consumes the stream via a Redis consumer group; retry / failed outcomes
    are mirrored into SQLite.
    (XAUTOCLAIM could be added later; this is the basic flow.)
    """

    def __init__(self, stream: str, sqlite_db: SqliteQueue, cfg: RedisConfig,
                 group: Optional[str] = None, consumer_name: Optional[str] = None):
        self.stream = stream
        self.sqlite = sqlite_db
        self.cfg = cfg
        self.group = group or f"g:{stream}"
        self.consumer = consumer_name or f"w-{random.randint(1000,9999)}"
        self.r = cfg.client()
        try:
            # Idempotent group creation: an existing group raises BUSYGROUP,
            # which we deliberately swallow.
            self.r.xgroup_create(self.stream, self.group, id="$", mkstream=True)
        except Exception:
            pass

    def run(self, process_fn: Callable[[Dict[str, Any]], None]) -> None:
        """Blocking consume loop: process each entry, ACK it, and record the
        outcome (done / retry / failed) in SQLite. Retries are re-published to
        the stream with an incremented attempt counter after a short backoff."""
        print(f"[{self.consumer}] listening {self.stream} …")
        while True:
            batch = self.r.xreadgroup(self.group, self.consumer, {self.stream: ">"},
                                      count=self.cfg.batch_size, block=5000)
            if not batch:
                continue
            for _, entries in batch:
                for msg_id, fields in entries:
                    job = jl(fields)
                    task_id = job["task_id"]
                    attempts = int(job.get("_attempts", 0))
                    try:
                        process_fn(job)
                        self.r.xack(self.stream, self.group, msg_id)
                        self.sqlite.update_task(task_id, status="done", attempts=attempts)
                    except Exception as exc:
                        attempts += 1
                        if attempts > self.cfg.max_retries:
                            # Exhausted: ACK so it never redelivers, mark failed.
                            self.r.xack(self.stream, self.group, msg_id)
                            self.sqlite.update_task(task_id, status="failed", error=str(exc), attempts=attempts)
                        else:
                            # ACK the original, then re-publish a copy with the
                            # bumped attempt count after an exponential backoff
                            # (capped at 3s). NOTE(review): sleep blocks the
                            # whole batch — confirm this is acceptable.
                            self.r.xack(self.stream, self.group, msg_id)
                            job["_attempts"] = attempts
                            time.sleep(min(0.2 * (2 ** (attempts - 1)), 3.0))
                            self.r.xadd(self.stream, jd(job))
                            self.sqlite.update_task(task_id, status="retry", error=str(exc), attempts=attempts)
||||
0
ServicesTask/app/services/common/__init__.py
Normal file
0
ServicesTask/app/services/common/__init__.py
Normal file
14
ServicesTask/app/services/common/config.py
Normal file
14
ServicesTask/app/services/common/config.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import os
|
||||
|
||||
|
||||
class RedisConfig:
    """Redis connection settings, read from environment variables at import time."""

    HOST: str = os.getenv("REDIS_HOST", "10.10.2.15")
    PASSWORD: str = os.getenv("REDIS_PASSWORD", "your_strong_password_here")
    PORT: int = int(os.getenv("REDIS_PORT", 6379))
    DB: int = int(os.getenv("REDIS_DB", 0))

    @classmethod
    def as_dict(cls):
        """Return keyword arguments suitable for ``redis.Redis(**...)``.

        Fixed to use ``cls`` instead of the hard-coded class name so that
        subclasses overriding HOST/PORT/PASSWORD/DB are respected.
        The int() casts guard against a subclass assigning string values.
        """
        return dict(host=cls.HOST, port=int(cls.PORT), password=cls.PASSWORD, db=int(cls.DB))
|
||||
100
ServicesTask/app/services/common/models.py
Normal file
100
ServicesTask/app/services/common/models.py
Normal file
@@ -0,0 +1,100 @@
|
||||
from typing import Optional, List
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class User(BaseModel):
    """User account record (built from `users` rows; see comment_requirements)."""
    id: int
    uu_id: str
    user_tag: str
    user_type: str
    email: str
    phone_number: str
    related_company: str
    is_confirmed: bool
    active: bool
|
||||
|
||||
class Person(BaseModel):
    """Natural person; optionally linked to a User account via `user`."""
    id: int
    uu_id: str
    firstname: str
    surname: str
    middle_name: Optional[str] = ""
    birthname: Optional[str] = ""
    # national_identity_id: str
    is_confirmed: bool
    active: bool
    # Active/confirmed user account for this person, when one exists.
    user: Optional[User] = None
||||
|
||||
|
||||
class OccupantType(BaseModel):
    """Occupancy role of a person in a building part (e.g. codes FL-OWN, FL-TEN)."""
    id: int
    uu_id: str
    occupant_code: str
    occupant_type: str
    is_confirmed: bool
    active: bool
    user_type_uu_id: Optional[str] = None
||||
|
||||
|
||||
class BuildPart(BaseModel):
    """A physical unit of a building (flat/section) with its living-space history.

    Uses a forward reference to BuildLivingSpace (defined below); resolved by
    update_forward_refs() at module bottom.
    """
    id: int
    uu_id: str
    part_no: str
    part_level: str
    part_code: str
    part_gross_size: float
    part_net_size: float
    human_livable: bool
    build_id: int
    build_uu_id: str
    is_confirmed: bool
    active: bool
    living_spaces: Optional[List['BuildLivingSpace']] = None
||||
|
||||
|
||||
class BuildLivingSpace(BaseModel):
    """Occupancy agreement binding a person (with a role) to a build part for a period."""
    id: int
    uu_id: str
    # Validity window of the agreement (stringified datetimes).
    expiry_starts: str
    expiry_ends: str
    fix_value: float
    fix_percent: float
    agreement_no: str
    marketing_process: bool
    build_parts_id: int
    build_parts_uu_id: str
    person_id: int
    person_uu_id: str
    occupant_type_id: int
    occupant_type_uu_id: str
    is_confirmed: bool
    active: bool
    person: Optional[Person] = None
    occupant_type: Optional[OccupantType] = None
|
||||
|
||||
class BuildingCluster(BaseModel):
    """A building together with its parts (and, transitively, their living spaces)."""
    id: int
    uu_id: str
    build_name: str
    build_no: str
    build_date: str
    decision_period_date: str
    expiry_starts: str
    expiry_ends: str
    is_confirmed: bool
    active: bool
    build_parts: List['BuildPart'] = []
||||
|
||||
|
||||
class BuildRequirements(BaseModel):
    """Aggregate counts used to detect when the cached building data is stale."""

    building_count: int
    living_space: int
    build_parts: int
||||
|
||||
# Update forward references for models with circular dependencies
# (BuildPart -> BuildLivingSpace, BuildingCluster -> BuildPart).
# NOTE(review): update_forward_refs() is the pydantic v1 API — confirm the
# project pins pydantic<2 (v2 renamed this to model_rebuild()).
BuildPart.update_forward_refs()
BuildingCluster.update_forward_refs()
||||
172
ServicesTask/app/services/common/redis_handler.py
Normal file
172
ServicesTask/app/services/common/redis_handler.py
Normal file
@@ -0,0 +1,172 @@
|
||||
import logging
|
||||
|
||||
from json import loads, dumps
|
||||
from contextlib import contextmanager
|
||||
from time import sleep
|
||||
from redis import Redis, RedisError, ConnectionError as RedisConnectionError
|
||||
from .config import RedisConfig
|
||||
|
||||
|
||||
logger = logging.getLogger('RedisHandler')
|
||||
|
||||
|
||||
@contextmanager
def safe_redis_operation(redis_client: Redis, operation_name: str = "Redis operation"):
    """
    Context manager for safely executing Redis operations with error handling.

    Logs the failure (tagged with *operation_name*, distinguishing connection
    errors, other Redis errors, and unexpected errors) and always re-raises —
    callers still see the original exception.
    """
    try:
        yield redis_client
    except RedisConnectionError as e:
        logger.error(f"{operation_name} failed due to Redis connection error: {str(e)}")
        raise
    except RedisError as e:
        logger.error(f"{operation_name} failed due to Redis error: {str(e)}")
        raise
    except Exception as e:
        logger.error(f"{operation_name} failed with unexpected error: {str(e)}")
        raise
|
||||
|
||||
class RedisHandler:

    """Singleton Redis handler class for centralized Redis operations.

    The first instantiation creates and pings a shared client; later
    ``RedisHandler()`` calls return the same instance without reconnecting.
    """

    _instance = None
    REDIS_EXCEPTIONS = (RedisConnectionError, RedisError)

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super(RedisHandler, cls).__new__(cls)
            cls._instance._initialized = False
        return cls._instance

    def __init__(self):
        # Singleton guard: skip re-initialization on repeated construction.
        if self._initialized:
            return
        self.redis_client = self._create_redis_client()
        self.redis_connected = self._check_redis_connection()
        self._initialized = True

    def _create_redis_client(self):
        """Create a Redis client with connection retry (exponential backoff).

        After the final failed attempt an unverified client is returned so the
        process can keep running and reconnect later via ensure_connection().
        """
        max_retries, retry_delay = 5, 5
        for attempt in range(max_retries):
            try:
                client = Redis(**RedisConfig.as_dict())
                client.ping()
                logger.info("Redis connection established successfully")
                return client
            except (RedisConnectionError, RedisError) as e:
                if attempt < max_retries - 1:
                    logger.warning(f"Redis connection attempt {attempt + 1} failed: {str(e)}. Retrying in {retry_delay} seconds...")
                    sleep(retry_delay)
                    retry_delay *= 2
                else:
                    logger.error(f"Failed to connect to Redis after {max_retries} attempts: {str(e)}")
                    return Redis(**RedisConfig.as_dict())

    def _check_redis_connection(self) -> bool:
        """Check if Redis connection is alive (PING succeeds)."""
        try:
            self.ping()
            return True
        except Exception:  # fix: exception variable was captured but unused
            return False

    def ping(self):
        """Ping Redis server to check connection"""
        return self.redis_client.ping()

    def sadd(self, key: str, value):
        """Add a value to a Redis set"""
        return self.redis_client.sadd(key, value)

    def ismember(self, key: str, value):
        """Check if a value is a member of a Redis set"""
        return self.redis_client.sismember(key, value)

    def get(self, key: str):
        """Get a value from Redis by key"""
        return self.redis_client.get(key)

    def get_json(self, key: str) -> dict:
        """Get a JSON value from Redis by key; returns None when the key is absent."""
        obj = self.redis_client.get(key)
        if obj:
            return loads(obj)
        return None

    def set_json(self, key: str, value):
        """Set a key to the JSON serialization of *value*."""
        return self.redis_client.set(key, dumps(value))

    def set(self, key: str, value):
        """Set a key-value pair in Redis"""
        return self.redis_client.set(key, value)

    def delete_value(self, key: str, value):
        """Remove field *value* from the JSON dict stored at *key*, if present."""
        get_redis = self.get(key)
        if get_redis:
            get_redis: dict = loads(get_redis)
            # fix: pop with default so a missing field no longer raises KeyError
            get_redis.pop(value, None)
            self.set(key, dumps(get_redis))

    def rpush(self, key: str, value):
        """Append a value to a Redis list"""
        return self.redis_client.rpush(key, value)

    def lindex(self, key: str, index: int):
        """Get an element from a Redis list by its index"""
        return self.redis_client.lindex(key, index)

    def spop(self, key: str, count=1):
        """Remove and return random members from a Redis set"""
        return self.redis_client.spop(key, count)

    def srem(self, key: str, value):
        """Remove a specific member from a Redis set"""
        return self.redis_client.srem(key, value)

    def ensure_connection(self) -> bool:
        """Check if Redis connection is alive and reconnect if needed"""
        if not self.redis_connected:
            try:
                self.redis_client = self._create_redis_client()
                self.redis_connected = self._check_redis_connection()
                if self.redis_connected:
                    logger.info("Redis connection re-established successfully")
                return self.redis_connected
            except Exception as e:
                logger.error(f"Failed to re-establish Redis connection: {str(e)}")
                return False
        return True

    @classmethod
    def handle_reconnection(cls, consecutive_errors=0, max_consecutive_errors=5):
        """
        Handle Redis reconnection with exponential backoff based on consecutive errors

        Args:
            consecutive_errors: Number of consecutive errors encountered
            max_consecutive_errors: Threshold for extended sleep time

        Returns:
            tuple: (RedisHandler instance, bool indicating if extended sleep is needed)
        """
        try:
            instance = cls()
            instance.redis_connected = instance._check_redis_connection()
            logger.info("Recreated Redis handler using singleton pattern")
            need_extended_sleep = consecutive_errors >= max_consecutive_errors
            if need_extended_sleep:
                logger.warning(f"Hit {max_consecutive_errors} consecutive Redis errors, taking longer pause")
            return instance, need_extended_sleep
        except Exception as redis_retry_error:
            logger.error(f"Failed to recreate Redis handler: {str(redis_retry_error)}")
            return None, consecutive_errors >= max_consecutive_errors
|
||||
|
||||
class RedisSaveModels:
    """Redis key names under which parsed building data is cached."""

    # Full cluster payload: buildings -> parts -> living spaces (JSON dict).
    COMMENT_BUILDING_CLUSTER = "COMMENT:PARSER:BUILDING:CLUSTER"
    # Aggregate counts (BuildRequirements) used to detect cache staleness.
    COMMENT_BUILDING_INFO = "COMMENT:PARSER:BUILDING:INFO"
||||
200
ServicesTask/app/services/common/service_base_async.py
Normal file
200
ServicesTask/app/services/common/service_base_async.py
Normal file
@@ -0,0 +1,200 @@
|
||||
import os
|
||||
import json
|
||||
import asyncio
|
||||
import fnmatch
|
||||
import aio_pika
|
||||
|
||||
from aio_pika.abc import AbstractIncomingMessage
|
||||
from typing import Any, Dict, Awaitable, Callable, Optional, List
|
||||
|
||||
from services.types.task import _MsgCtx, _MSG_CTX
|
||||
from services.types.queue import Enqueue
|
||||
|
||||
|
||||
class ServiceBaseAsync:
    """
    RabbitMQ-based async service skeleton.
    - Topic exchange: EXCHANGE_EVENTS (default: app.events)
    - Multiple consume bindings: CONSUME_BINDINGS="parser.publish,mail.publish"
    - Does not consume its own output: payload.source == SERVICE_NAME -> ACK & skip
    - Retry: TTL'd retry queue (RETRY_DELAY_MS), then dead-lettered back to main
    - Above max attempts -> DLQ: q.<service>.events.dlq
    - Handler map: routing key -> dedicated callback (pattern supported)
    - Backward compatibility: enqueue(payload, action, routing_key=None, message_id=None)
    """

    def __init__(
        self,
        produce_fn: Callable[["ServiceBaseAsync"], Awaitable[None]],
        consume_fn: Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]],
        handlers: Optional[Dict[str, Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]]]] = None,
    ):
        # Identity and broker configuration — everything overridable via env.
        self.service_name = os.getenv("SERVICE_NAME", "db-service")
        self.amqp_url = os.getenv("RABBITMQ_URL", "amqp://guest:guest@localhost/")
        self.exchange_name = os.getenv("EXCHANGE_EVENTS", "app.events")
        self.produce_key: str = os.getenv("PRODUCE_KEY", f"{self.service_name}.publish")

        # Comma-separated routing-key patterns this service consumes.
        raw = os.getenv("CONSUME_BINDINGS", "")
        self.consume_bindings: List[str] = [s.strip() for s in raw.split(",") if s.strip()]
        base = self.service_name.replace("/", "_")

        # Queue names are derived from the (sanitized) service name.
        self.queue_main = f"q.{base}.events"
        self.queue_retry = f"{self.queue_main}.retry"
        self.queue_dlq = f"{self.queue_main}.dlq"
        self.retry_delay_ms = int(os.getenv("RETRY_DELAY_MS", "5000"))
        self.max_retries = int(os.getenv("MAX_RETRIES", "3"))
        self.prefetch = int(os.getenv("PREFETCH", "5"))
        self.ignore_self = os.getenv("IGNORE_SELF_PRODUCED", "true").lower() == "true"

        self.produce_fn = produce_fn
        self.consume_fn = consume_fn

        # Routing-key -> callback map; falls back to consume_fn (see _resolve_handler).
        self.handlers = handlers or {}
        self.conn: Optional[aio_pika.RobustConnection] = None
        self.chan: Optional[aio_pika.RobustChannel] = None
        self.ex: Optional[aio_pika.Exchange] = None

    async def _connect_with_retry(self, max_wait: int = 300):
        """Connect to RabbitMQ with exponential backoff (1s doubling, capped at 10s),
        giving up after roughly *max_wait* seconds by re-raising the last error."""
        delay = 1
        deadline = asyncio.get_event_loop().time() + (max_wait or 10**9)
        last_err = None
        while True:
            try:
                conn = await aio_pika.connect_robust(self.amqp_url, client_properties={"connection_name": self.service_name}, timeout=10)
                print(f"[amqp] connected: {self.amqp_url} : {self.service_name} : {self.exchange_name} : {str(self.consume_bindings)}")
                return conn
            except Exception as e:
                last_err = e
                now = asyncio.get_event_loop().time()
                if now + delay > deadline:
                    raise last_err
                await asyncio.sleep(delay)
                delay = min(delay * 2, 10)

    async def run(self):
        """Declare the main/retry/dlx topology, bind queues, start consuming and producing.

        Topology: main queue dead-letters into <exchange>.retry; the retry queue
        has a TTL and dead-letters back into the main exchange; the DLQ catches
        everything published to <exchange>.dlx.
        """
        self.conn = await self._connect_with_retry()
        self.chan = await self.conn.channel()
        await self.chan.set_qos(prefetch_count=self.prefetch)
        self.ex = await self.chan.declare_exchange(self.exchange_name, aio_pika.ExchangeType.TOPIC, durable=True)
        self.ex_retry = await self.chan.declare_exchange(f"{self.exchange_name}.retry", aio_pika.ExchangeType.TOPIC, durable=True)
        self.ex_dlx = await self.chan.declare_exchange(f"{self.exchange_name}.dlx", aio_pika.ExchangeType.TOPIC, durable=True)
        args_main = {"x-dead-letter-exchange": f"{self.exchange_name}.retry", "x-queue-mode": "lazy"}
        q_main = await self.chan.declare_queue(self.queue_main, durable=True, arguments=args_main)
        # Messages expiring in the retry queue are dead-lettered back to the main exchange.
        args_retry = {"x-message-ttl": self.retry_delay_ms, "x-dead-letter-exchange": self.exchange_name}
        q_retry = await self.chan.declare_queue(self.queue_retry, durable=True, arguments=args_retry)
        q_dlq = await self.chan.declare_queue(self.queue_dlq, durable=True)
        await q_dlq.bind(self.ex_dlx, routing_key="#")
        if not self.consume_bindings:
            print("[warn] No CONSUME_BINDINGS configured; only producing.")
        for rk in (self.consume_bindings or []):
            await q_main.bind(self.ex, routing_key=rk)
            await q_retry.bind(self.ex_retry, routing_key=rk)
        await q_main.consume(self._on_message, no_ack=False)
        await asyncio.gather(self._produce_loop())

    async def enqueue(self, enqueue: Enqueue) -> str:
        """Publish an Enqueue to the topic exchange; returns its task_id.

        task_id and source are back-filled into the payload (setdefault) so
        consumers can skip self-produced messages.
        """
        assert self.ex is not None
        payload: dict = enqueue.payload
        payload.setdefault("task_id", enqueue.task_id)
        payload.setdefault("source", self.service_name)
        enqueue.payload = payload
        msg = aio_pika.Message(enqueue.body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=enqueue.message_id or enqueue.task_id, headers={"x-attempts": 0}, type=enqueue.action)
        routing_key = enqueue.routing_key or self.produce_key
        await self.ex.publish(msg, routing_key=routing_key)
        return enqueue.task_id

    async def ack_current(self) -> None:
        # ACK the message currently bound to this task's context, if any.
        ctx = _MSG_CTX.get()
        if ctx and ctx.msg:
            await ctx.msg.ack()

    async def nack_current(self, requeue: bool = False) -> None:
        # NACK the current message; requeue=False lets it dead-letter to retry.
        ctx = _MSG_CTX.get()
        if ctx and ctx.msg:
            await ctx.msg.nack(requeue=requeue)

    async def retry_current(self, job: dict, attempts: int | None = None) -> None:
        """Copy the current message to the retry queue and ACK the original."""
        ctx = _MSG_CTX.get()
        if not (ctx and ctx.msg):
            return
        att = attempts if attempts is not None else (ctx.attempts + 1)
        await self._publish_retry(ctx.msg, job, att)
        await ctx.msg.ack()

    async def dlq_current(self, job: dict, error: str | None = None) -> None:
        """Send the current message to the DLQ and ACK the original."""
        ctx = _MSG_CTX.get()
        if not (ctx and ctx.msg):
            return
        await self._publish_dlq(ctx.msg, job, error=error)
        await ctx.msg.ack()

    def register_handler(self, pattern: str, fn: Callable[["ServiceBaseAsync", Dict[str, Any]], Awaitable[None]]):
        # Register a callback for a routing-key pattern (fnmatch-style).
        self.handlers[pattern] = fn

    async def _produce_loop(self):
        # Drive the user-supplied producer forever; errors are logged and the
        # loop backs off 2s before retrying.
        while True:
            try:
                await self.produce_fn(self)
            except Exception as e:
                print(f"[produce] ERROR: {e}")
            await asyncio.sleep(2)

    async def _on_message(self, msg: AbstractIncomingMessage):
        """Consume one message: skip self-produced, dispatch to the matching
        handler, and on failure publish to retry (or to DLQ once max_retries
        is reached), always ACKing the original."""
        async with msg.process(ignore_processed=True, requeue=False):
            try:
                job = json.loads(msg.body.decode())
            except Exception:
                # Undecodable body: fall through with an empty job (self-skip
                # check and handler still run).
                job = {"payload": {}, "task_id": None}
            src = (job.get("payload") or {}).get("source")
            if self.ignore_self and src == self.service_name:
                return
            attempts = 0
            try:
                attempts = int(msg.headers.get("x-attempts", 0))
            except Exception:
                pass
            handler = self._resolve_handler(msg.routing_key) or self.consume_fn
            # Expose delivery metadata to the handler under job["_meta"].
            meta = job.setdefault("_meta", {})
            meta["routing_key"] = msg.routing_key
            meta["attempts"] = attempts
            meta["exchange"] = self.exchange_name
            ctx_token = _MSG_CTX.set(_MsgCtx(msg=msg, rk=msg.routing_key, attempts=attempts))
            try:
                await handler(self, job)
            except Exception as e:
                if attempts + 1 >= self.max_retries:
                    await self._publish_dlq(msg, job, error=str(e))
                    await msg.ack()
                else:
                    await self._publish_retry(msg, job, attempts + 1)
                    await msg.ack()
            finally:
                _MSG_CTX.reset(ctx_token)

    def _resolve_handler(self, routing_key: str):
        # Exact match first, then fnmatch patterns; None -> caller falls back
        # to the default consume_fn.
        if routing_key in self.handlers:
            return self.handlers[routing_key]
        for pat, fn in self.handlers.items():
            if fnmatch.fnmatch(routing_key, pat):
                return fn
        return None

    async def _publish_retry(self, msg: AbstractIncomingMessage, job: Dict[str, Any], attempts: int):
        # Re-publish the job to the retry exchange with the bumped attempt count.
        chan = self.chan; assert chan is not None
        retry_ex = await chan.get_exchange(f"{self.exchange_name}.retry")
        rk = msg.routing_key
        body = json.dumps(job).encode()
        m = aio_pika.Message(body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=msg.message_id, headers={"x-attempts": attempts})
        await retry_ex.publish(m, routing_key=rk)

    async def _publish_dlq(self, msg: AbstractIncomingMessage, job: Dict[str, Any], error: Optional[str] = None):
        # Publish the job (annotated with _error when given) to the DLX.
        chan = self.chan; assert chan is not None
        dlx_ex = await chan.get_exchange(f"{self.exchange_name}.dlx")
        body_obj = dict(job)
        if error:
            body_obj.setdefault("_error", str(error))
        body = json.dumps(body_obj).encode()
        m = aio_pika.Message(body, delivery_mode=aio_pika.DeliveryMode.PERSISTENT, message_id=msg.message_id, headers={"x-attempts": msg.headers.get("x-attempts", 0)})
        await dlx_ex.publish(m, routing_key=msg.routing_key)
||||
23
ServicesTask/app/services/database/Dockerfile
Normal file
23
ServicesTask/app/services/database/Dockerfile
Normal file
@@ -0,0 +1,23 @@
|
||||
FROM python:3.11-slim

# Runtime environment: venv on PATH, Prisma schema location, /app on the import path.
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV VIRTUAL_ENV=/opt/venv
ENV PRISMA_SCHEMA_PATH=/app/services/database/schema.prisma
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
ENV PYTHONPATH=/app

WORKDIR /

COPY app/services/database/README.md ./

COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/database/ ./app/services/database/
COPY app/services/types/ ./app/services/types/

# Consistent with the sibling Dockerfile: --no-install-recommends and apt-cache
# cleanup keep the layer small; single RUN avoids extra layers.
RUN apt-get update && apt-get install -y --no-install-recommends bash \
    && rm -rf /var/lib/apt/lists/* \
    && mkdir -p /app/data \
    && chmod +x /app/services/database/entrypoint.sh

CMD ["bash", "/app/services/database/entrypoint.sh"]
||||
0
ServicesTask/app/services/database/README.md
Normal file
0
ServicesTask/app/services/database/README.md
Normal file
0
ServicesTask/app/services/database/__init__.py
Normal file
0
ServicesTask/app/services/database/__init__.py
Normal file
181
ServicesTask/app/services/database/comment_requirements.py
Normal file
181
ServicesTask/app/services/database/comment_requirements.py
Normal file
@@ -0,0 +1,181 @@
|
||||
import arrow
|
||||
|
||||
from typing import Optional, Dict
|
||||
|
||||
from services.common.redis_handler import RedisHandler, RedisSaveModels
|
||||
from services.common.models import BuildingCluster, BuildPart, BuildLivingSpace, Person, User, OccupantType, BuildRequirements
|
||||
from prisma_client import PrismaService
|
||||
|
||||
|
||||
class DefaultImportsToMemory:
    """Caches building / person data from the database into Redis and refreshes it
    when the aggregate counts in the database diverge from the cached ones."""

    def __init__(self, prisma_service: Optional[PrismaService] = None):
        # Database access is optional; Redis-only callers may omit it
        # (DB methods then raise via check_if_database_is_available).
        self.prisma_service = prisma_service
        self.redis_handler = RedisHandler()
        self.today = arrow.now().to('GMT+3').datetime

    # Redis Actions
    async def get_count_person_data_due_to_build_info(self) -> Optional[BuildRequirements]:
        """Read the cached aggregate counts; returns None on a cold cache.

        Fixed: the raw dict from Redis is rebuilt into BuildRequirements so
        callers (renew_requirements) can use attribute access.
        """
        data = self.redis_handler.get_json(RedisSaveModels.COMMENT_BUILDING_INFO)
        if not data:
            return None
        return BuildRequirements(**data)

    async def set_count_person_data_due_to_build_info(self, data: BuildRequirements):
        """Cache the aggregate counts."""
        return self.redis_handler.set_json(RedisSaveModels.COMMENT_BUILDING_INFO, data.dict())

    async def get_count_person_data_due_to_build_data(self) -> Dict[str, BuildingCluster]:
        """Read the cached building clusters keyed by build id.

        Fixed: a cold cache (None from Redis) now returns {} instead of
        raising AttributeError on .items().
        """
        data = self.redis_handler.get_json(RedisSaveModels.COMMENT_BUILDING_CLUSTER)
        if not data:
            return {}
        return {i: BuildingCluster(**v) for i, v in data.items()}

    async def set_count_person_data_due_to_build_data(self, data: Dict[str, BuildingCluster]):
        """Cache the building clusters (None fields excluded to keep the payload small)."""
        excluded_dict = {i: v.dict(exclude_none=True) for i, v in data.items()}
        return self.redis_handler.set_json(RedisSaveModels.COMMENT_BUILDING_CLUSTER, excluded_dict)

    # Database Actions
    def check_if_database_is_available(self):
        # Guard for all DB-backed methods.
        if not self.prisma_service:
            raise ValueError("PrismaService is not initialized")

    async def get_count_person_data_due_to_build(self) -> BuildRequirements:
        """Count active/confirmed buildings, livable parts and living spaces
        (owner/tenant occupants only) whose validity window includes today."""
        self.check_if_database_is_available()
        async with self.prisma_service._asession() as db:
            occupant_flat_owner = await db.occupant_types.find_first(where={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
            occupant_tenant = await db.occupant_types.find_first(where={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
            possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
            building_count = await db.build.count(where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
            build_parts_count = await db.build_parts.count(where={"active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
            living_spaces_count = await db.build_living_space.count(
                where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
            )
            return BuildRequirements(building_count=building_count, living_space=living_spaces_count, build_parts=build_parts_count)

    async def retrieve_all_person_data_due_to_build(self) -> Dict[str, BuildingCluster]:
        """
        Get all person data due to build with comprehensive inner joins.

        Returns a dictionary of buildings clustered with their build parts,
        people, and living spaces.
        NOTE(review): issues one query per part/person (N+1); acceptable for a
        background refresh, revisit if buildings grow large.
        """
        self.check_if_database_is_available()
        buildings_dict = {}
        async with self.prisma_service._asession() as db:
            occupant_flat_owner = await db.occupant_types.find_first(where={"occupant_code": "FL-OWN", "active": True, "is_confirmed": True}, include={"user_types": True})
            occupant_tenant = await db.occupant_types.find_first(where={"occupant_code": "FL-TEN", "active": True, "is_confirmed": True}, include={"user_types": True})
            possible_money_sender_occupants = [occupant_flat_owner.id, occupant_tenant.id]
            buildings = await db.build.find_many(where={"active": True, "is_confirmed": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
            for build in buildings:
                buildings_dict[str(build.id)] = BuildingCluster(
                    id=build.id, uu_id=build.uu_id, build_name=build.build_name, build_no=build.build_no, build_date=str(build.build_date),
                    decision_period_date=str(build.decision_period_date), expiry_starts=str(build.expiry_starts), expiry_ends=str(build.expiry_ends),
                    is_confirmed=build.is_confirmed, active=build.active, build_parts=[]
                )
                build_parts = await db.build_parts.find_many(where={"build_id": build.id, "active": True, "is_confirmed": True, "human_livable": True, "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}})
                for build_part in build_parts:
                    # fix: dropped `build=None` — BuildPart declares no such field.
                    part_obj = BuildPart(
                        id=build_part.id,
                        uu_id=build_part.uu_id,
                        part_no=build_part.part_no,
                        part_level=build_part.part_level,
                        part_code=build_part.part_code,
                        part_gross_size=build_part.part_gross_size,
                        part_net_size=build_part.part_net_size,
                        human_livable=build_part.human_livable,
                        build_id=build_part.build_id,
                        build_uu_id=build_part.build_uu_id,
                        is_confirmed=build_part.is_confirmed,
                        active=build_part.active,
                        living_spaces=[],
                    )
                    living_spaces = await db.build_living_space.find_many(
                        include={"occupant_types": True, "people": {"include": {"users": True}}},
                        where={"build_parts_id": build_part.id, "active": True, "is_confirmed": True,
                               "expiry_starts": {"lte": self.today}, "expiry_ends": {"gte": self.today}, "occupant_type_id": {"in": possible_money_sender_occupants}},
                    )
                    for living_space in living_spaces:
                        person = living_space.people
                        user = await db.users.find_first(where={"person_id": person.id, "active": True, "is_confirmed": True})
                        user_of_person = None
                        if user:
                            user_of_person = User(
                                id=user.id,
                                uu_id=user.uu_id,
                                user_tag=user.user_tag,
                                user_type=user.user_type,
                                email=user.email,
                                phone_number=user.phone_number,
                                related_company=user.related_company,
                                is_confirmed=user.is_confirmed,
                                active=user.active
                            )
                        person_obj = Person(
                            id=person.id,
                            uu_id=person.uu_id,
                            firstname=person.firstname,
                            surname=person.surname,
                            middle_name=person.middle_name,
                            birthname=person.birthname,
                            is_confirmed=person.is_confirmed,
                            active=person.active,
                            user=user_of_person
                        )
                        occupant_type = living_space.occupant_types
                        occupant_type_obj = OccupantType(
                            id=occupant_type.id,
                            uu_id=occupant_type.uu_id,
                            occupant_code=occupant_type.occupant_code,
                            occupant_type=occupant_type.occupant_type,
                            is_confirmed=occupant_type.is_confirmed,
                            active=occupant_type.active,
                            user_type_uu_id=occupant_type.user_type_uu_id
                        )
                        living_space_obj = BuildLivingSpace(
                            id=living_space.id,
                            uu_id=living_space.uu_id,
                            expiry_starts=str(living_space.expiry_starts),
                            expiry_ends=str(living_space.expiry_ends),
                            fix_value=float(living_space.fix_value),
                            fix_percent=float(living_space.fix_percent),
                            agreement_no=living_space.agreement_no,
                            marketing_process=living_space.marketing_process,
                            build_parts_id=living_space.build_parts_id,
                            build_parts_uu_id=living_space.build_parts_uu_id,
                            person_id=living_space.person_id,
                            person_uu_id=living_space.person_uu_id,
                            occupant_type_id=living_space.occupant_type_id,
                            occupant_type_uu_id=living_space.occupant_type_uu_id,
                            is_confirmed=living_space.is_confirmed,
                            active=living_space.active,
                            person=person_obj,
                            # fix: field is `occupant_type`; the previous
                            # `occupant_types=` kwarg was silently dropped by
                            # pydantic, leaving the field None in the cache.
                            occupant_type=occupant_type_obj
                        )
                        part_obj.living_spaces.append(living_space_obj)
                    buildings_dict[str(build.id)].build_parts.append(part_obj)
        return buildings_dict

    async def retrieve_all_companies_data(self):
        """Return all active, confirmed companies (fix: find_many is now awaited
        instead of returning a bare coroutine)."""
        self.check_if_database_is_available()
        async with self.prisma_service._asession() as db:
            return await db.companies.find_many(where={"active": True, "is_confirmed": True})

    async def renew_requirements(self):
        """Refresh the Redis cache when it is empty or its aggregate counts
        no longer match the database."""
        self.check_if_database_is_available()

        async def set_to_redis():
            # Write the fresh counts plus the full cluster payload.
            await self.set_count_person_data_due_to_build_info(count_person_data_due_to_build_info_db)
            all_person_data = await self.retrieve_all_person_data_due_to_build()
            await self.set_count_person_data_due_to_build_data(all_person_data)
            return

        count_person_data_due_to_build_info_db = await self.get_count_person_data_due_to_build()
        count_person_data_due_to_build_info_memory = await self.get_count_person_data_due_to_build_info()
        if not count_person_data_due_to_build_info_memory:
            return await set_to_redis()

        all_counts_in_memory = [count_person_data_due_to_build_info_memory.building_count, count_person_data_due_to_build_info_memory.living_space, count_person_data_due_to_build_info_memory.build_parts]
        all_counts_in_db = [count_person_data_due_to_build_info_db.building_count, count_person_data_due_to_build_info_db.living_space, count_person_data_due_to_build_info_db.build_parts]
        if not all_counts_in_memory == all_counts_in_db:
            return await set_to_redis()
||||
21
ServicesTask/app/services/database/entrypoint.sh
Normal file
21
ServicesTask/app/services/database/entrypoint.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/bin/bash
# Bootstrap the database service: build the virtualenv and generate the
# prisma client on first run, make sure the query engine binary exists,
# then hand control to the service process.

venv_dir="/opt/venv"
requirements_file="/app/services/database/requirements.txt"
schema_file="/app/services/database/schema.prisma"
binary_cache="/root/.cache/prisma-python/binaries"

# First run only: the venv python is missing, so build everything.
if [ ! -x "$venv_dir/bin/python" ]; then
    python -m venv "$venv_dir"
    . "$venv_dir/bin/activate"
    "$venv_dir/bin/pip" install pip --upgrade
    "$venv_dir/bin/pip" install -r "$requirements_file"
    "$venv_dir/bin/prisma" generate --schema "$schema_file"
fi

# Fetch the prisma query engine if it is absent from the binary cache.
if ! find "$binary_cache" -type f -name "prisma-query-engine-debian-openssl-3.0.x" 2>/dev/null | grep -q .; then
    "$venv_dir/bin/pip" install prisma
    "$venv_dir/bin/prisma" py fetch
fi

# exec replaces the shell so the service runs as PID 1 and receives signals.
exec "$venv_dir/bin/python" -u /app/services/database/main.py
|
||||
71
ServicesTask/app/services/database/main.py
Normal file
71
ServicesTask/app/services/database/main.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import os
|
||||
import asyncio
|
||||
|
||||
from comment_requirements import DefaultImportsToMemory
|
||||
from services.common.service_base_async import ServiceBaseAsync
|
||||
from services.types.task import Job
|
||||
|
||||
from prisma_client import PrismaService
|
||||
|
||||
|
||||
# Producer tuning knobs, overridable from the environment.
# NOTE(review): PRODUCE_BURST / PRODUCE_ONCE / EVENT_TYPE are read here but not
# referenced by the visible code in this module — confirm they are still used.
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-event")
# Pacing interval (seconds) used between producer/consumer iterations.
PROCESS_SEC = 10

# Shared Prisma wrapper; it runs its own event-loop thread (see PrismaService).
prisma_service = PrismaService()
# Set to True after default_imports() has warmed the in-memory caches once.
is_db_pulled = False
|
||||
|
||||
|
||||
async def produce(svc: ServiceBaseAsync):
    """Poll unsent account records and enqueue them for the mail service."""
    global is_db_pulled

    # One-time warm-up of the in-memory requirements before producing work.
    if not is_db_pulled:
        await default_imports()
        is_db_pulled = True

    async with prisma_service._asession() as db:
        # Routine Email Service: pick up a small batch of unsent, live records.
        pending = await db.account_records.find_many(
            where={"is_email_send": False, "active": True, "is_confirmed": True, "deleted": False},
            take=3,
            skip=0,
        )
        if not pending:
            await asyncio.sleep(PROCESS_SEC)
            return

        rows: list = prisma_service.to_dict(
            pending,
            select={"id": True, "uu_id": True, "iban": True, "bank_reference_code": True, "bank_date": True, "bank_balance": True},
        )
        # Task id: the first four characters of every row's uu_id, concatenated.
        task_key = "".join(str(row["uu_id"])[:4] for row in rows)
        await svc.enqueue(task_id=task_key, payload=rows, action="routine.email.send.service")
        # Get Build and Company Requirements
        await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_comment_publish(svc: ServiceBaseAsync, job: dict):
    """Consume a parsed-comment job: pace, acknowledge, and log it."""
    parsed = Job(**job)
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("handle_comment_publish Database Consumer from comment:", parsed.task_id)
|
||||
|
||||
|
||||
async def handle_routine_email_send_service_ack(svc: ServiceBaseAsync, job: dict):
    """Acknowledge completion notices coming back from the mail sender."""
    ack_job = Job(**job)
    await svc.ack_current()
    print("handle_routine_email_send_service_ack Database Consumer from routine.email.send.service:", ack_job.task_id)
|
||||
|
||||
|
||||
async def consume_default(svc: ServiceBaseAsync, job: dict):
    """Fallback consumer: jobs without a dedicated handler go to the DLQ."""
    unrouted = Job(**job)
    await asyncio.sleep(PROCESS_SEC)
    print("consume_default Database Consumer default (DLQ):", unrouted.task_id)
    await svc.dlq_current(unrouted, error="unsupported_routing_key")
|
||||
|
||||
|
||||
async def default_imports():
    """Warm the in-memory caches from the database via DefaultImportsToMemory."""
    importer = DefaultImportsToMemory(prisma_service)
    await importer.renew_requirements()
|
||||
|
||||
if __name__ == "__main__":
    # NOTE(review): produce() enqueues with action "routine.email.send.service"
    # while the ack handler is registered under "mail.service.publish" —
    # confirm the mail service replies on that routing key.
    handlers = {
        "parser.comment.publish": handle_comment_publish,
        "mail.service.publish": handle_routine_email_send_service_ack,
    }
    svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers=handlers)
    asyncio.run(svc.run())
|
||||
189
ServicesTask/app/services/database/prisma_client.py
Normal file
189
ServicesTask/app/services/database/prisma_client.py
Normal file
@@ -0,0 +1,189 @@
|
||||
import asyncio
|
||||
import time
|
||||
import logging
|
||||
import uvloop
|
||||
import threading
|
||||
import datetime
|
||||
import uuid
|
||||
|
||||
from typing import Optional, AsyncGenerator, Any, TypeVar, Union
|
||||
from contextlib import asynccontextmanager
|
||||
from prisma import Prisma
|
||||
from prisma.client import _PrismaModel
|
||||
|
||||
|
||||
# Type variable bound to Prisma's model base, for typed helper signatures.
# NOTE(review): not referenced by the visible code below — confirm it is used.
_PrismaModelT = TypeVar('_PrismaModelT', bound='_PrismaModel')

# Module-wide logger for client lifecycle messages.
logger = logging.getLogger("prisma-service")

# Root logging setup; quiet the chatty HTTP client libraries Prisma uses.
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)
|
||||
|
||||
|
||||
class PrismaService:
    """Shared Prisma client with a dedicated event-loop thread.

    Async callers use ``_asession``/``_session`` directly; synchronous callers
    submit coroutines to the background loop via ``_submit`` and tear the
    service down with ``disconnect()``.
    """

    def __init__(self) -> None:
        # Lock serialising lazy connect/disconnect (_connect/_disconnect).
        self._lock = asyncio.Lock()
        self._loop: Optional[asyncio.AbstractEventLoop] = None
        self._thread: Optional[threading.Thread] = None
        self._client: Optional[Prisma] = None
        # Scratch attributes; not used by the methods in this class.
        self.result: Optional[Any] = None
        self.select: Optional[dict] = None
        self._start_loop_thread()

    # BUG FIX: a dead ``async def _lock`` helper was removed — the instance
    # attribute ``self._lock`` (an asyncio.Lock set in __init__) shadowed it on
    # every instance, so the method was unreachable and only confused readers.

    def _loop_runner(self) -> None:
        """Thread target: run a private uvloop event loop until stopped."""
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
        self._loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self._loop)
        try:
            self._loop.run_forever()
        finally:
            self._loop.close()

    def _start_loop_thread(self) -> None:
        """Start the daemon loop thread and wait until the loop object exists."""
        t = threading.Thread(target=self._loop_runner, name="PrismaLoop", daemon=True)
        t.start()
        self._thread = t
        # Busy-wait for _loop_runner to publish the loop (a few ms at most).
        while self._loop is None:
            time.sleep(0.005)

    def _submit(self, coro):
        """Run *coro* on the background loop from another thread; block for its result."""
        if self._loop is None or not self._loop.is_running():
            raise RuntimeError("PrismaService event loop is not running.")
        fut = asyncio.run_coroutine_threadsafe(coro, self._loop)
        return fut.result()

    async def _aconnect(self) -> Prisma:
        """Lazily connect without locking (single-caller loop-thread path)."""
        if self._client is not None:
            return self._client
        logger.info("Connecting Prisma client...")
        client = Prisma()
        await client.connect()
        self._client = client
        logger.info("Prisma client connected.")
        return self._client

    async def _adisconnect(self) -> None:
        """Disconnect and always clear the cached client, even on failure."""
        if self._client is not None:
            logger.info("Disconnecting Prisma client...")
            try:
                await self._client.disconnect()
            finally:
                self._client = None
            logger.info("Prisma client disconnected.")

    @asynccontextmanager
    async def _asession(self) -> AsyncGenerator[Prisma, None]:
        """Async session handing out the shared (lazily connected) client."""
        yield await self._aconnect()

    async def _connect(self) -> Prisma:
        """Lock-protected lazy connect (double-checked to avoid reconnects)."""
        if self._client is not None:
            return self._client
        async with self._lock:
            if self._client is None:
                logger.info("Connecting Prisma client...")
                client = Prisma()
                await client.connect()
                self._client = client
                logger.info("Prisma client connected.")
            return self._client

    async def _disconnect(self) -> None:
        """Lock-protected disconnect; clears the cached client even on failure."""
        async with self._lock:
            if self._client is not None:
                try:
                    logger.info("Disconnecting Prisma client...")
                    await self._client.disconnect()
                    logger.info("Prisma client disconnected.")
                finally:
                    self._client = None

    @staticmethod
    def to_dict(result: Union[list, Any], select: dict = None):
        """Project Prisma rows into plain JSON-safe dicts.

        Args:
            result: a single row or a list of rows; each row must be iterable
                as (field, value) pairs (Prisma/pydantic models are).
            select: mapping of field names to keep; other fields are dropped.

        Returns:
            A dict (or list of dicts) with datetimes, UUIDs and unknown types
            rendered as ``str`` while int/float/bool values are preserved.
        """
        # Guard: without a select mapping nothing can be projected.
        if select is None:
            select = {}

        def _coerce(value):
            # BUG FIX: the original if-chain attached its ``else`` only to the
            # last ``isinstance`` test, so every non-bool value was overwritten
            # with str(value) — ints and floats came back as strings. bool is
            # tested before int because bool is a subclass of int.
            if isinstance(value, datetime.datetime):
                return str(value)
            if isinstance(value, uuid.UUID):
                return str(value)
            if isinstance(value, bool):
                return bool(value)
            if isinstance(value, int):
                return int(value)
            if isinstance(value, float):
                return float(value)
            return str(value)

        if isinstance(result, list):
            return [{k: _coerce(v) for k, v in row if k in select} for row in result]
        return {k: _coerce(v) for k, v in result if k in select}

    @asynccontextmanager
    async def _session(self) -> AsyncGenerator[Prisma, None]:
        """Async session via the lock-protected connect; logs and re-raises errors."""
        client = await self._connect()
        try:
            yield client
        except Exception:
            logger.exception("Database operation error")
            raise

    def _run(self, coro):
        """Run *coro* to completion from synchronous code.

        Raises:
            RuntimeError: when called from inside a running event loop.
        """
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No running loop in this thread: safe to spin up a fresh runner.
            asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
            with asyncio.Runner() as runner:
                return runner.run(coro)
        # BUG FIX: the original raised this inside its own ``try`` and then
        # caught the exception itself, so the guard never fired and a Runner
        # was started from within a live event loop.
        raise RuntimeError("Async run is not allowed. Use sync methods instead.")

    def disconnect(self) -> None:
        """Synchronous teardown: disconnect the client, stop the loop, join the thread."""
        try:
            self._submit(self._adisconnect())
        finally:
            if self._loop and self._loop.is_running():
                self._loop.call_soon_threadsafe(self._loop.stop)
            if self._thread and self._thread.is_alive():
                self._thread.join(timeout=2.0)
            self._loop = None
            self._thread = None
|
||||
41
ServicesTask/app/services/database/pyproject.toml
Normal file
41
ServicesTask/app/services/database/pyproject.toml
Normal file
@@ -0,0 +1,41 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11,<4.0"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0",
|
||||
"prisma==0.9.1",
|
||||
"asyncio==3.4.3",
|
||||
"arrow>=1.3.0",
|
||||
"redis>=6.4.0"
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
|
||||
7
ServicesTask/app/services/database/requirements.txt
Normal file
7
ServicesTask/app/services/database/requirements.txt
Normal file
@@ -0,0 +1,7 @@
|
||||
aio-pika>=9.4.1
|
||||
prometheus-client>=0.20.0
|
||||
uvloop>=0.19.0
|
||||
prisma==0.9.1
|
||||
# asyncio is part of the Python standard library; do not install the obsolete PyPI "asyncio==3.4.3" backport
|
||||
arrow>=1.3.0
|
||||
redis>=6.4.0
|
||||
3661
ServicesTask/app/services/database/schema.prisma
Normal file
3661
ServicesTask/app/services/database/schema.prisma
Normal file
File diff suppressed because it is too large
Load Diff
19
ServicesTask/app/services/mail/Dockerfile
Normal file
19
ServicesTask/app/services/mail/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.12-slim

# Unbuffered output and no .pyc files inside the container.
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
ENV PYTHONPATH=/app

# NOTE(review): the working directory is the filesystem root; the COPY
# destinations below rely on that.
WORKDIR /

COPY app/services/mail/pyproject.toml ./
COPY app/services/mail/README.md ./

# Application sources: shared core/common code plus the mail service itself.
COPY app/core ./app/core
COPY app/services/common/ ./app/services/common/
COPY app/services/mail/ ./app/services/mail/
COPY app/services/types/ ./app/services/types/

# Install the project; dependencies come from pyproject.toml.
RUN pip install --upgrade pip && pip install --no-cache-dir .
RUN mkdir -p /app/data

CMD ["python", "-m", "app.services.mail.main"]
|
||||
19
ServicesTask/app/services/mail/IsBank/params.py
Normal file
19
ServicesTask/app/services/mail/IsBank/params.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import os
|
||||
|
||||
from ..config import ConfigServices
|
||||
|
||||
class IsBankConfig:
    """Concrete mail-reader configuration for the IsBank statement mailbox."""

    # Sender address whose messages this reader processes.
    MAILBOX: str = os.getenv("MAILBOX", "bilgilendirme@ileti.isbank.com.tr")
    # Account filter; format appears to be "branch-account" — TODO confirm.
    AUTHORIZE_IBAN: str = os.getenv("AUTHORIZE_IBAN", "4245-0093333")
    # IMAP folders used to file processed messages.
    NO_ATTACHMENT_FOLDER: str = "NoAttachment"
    COMPLETED_FOLDER: str = "Completed"
    SERVICE_NAME: str = "IsBankEmailService"
    BANK_NAME: str = "IsBank"
    # Shared Redis key prefixes and pipeline stage names (see ConfigServices).
    TASK_DATA_PREFIX: str = ConfigServices.MAIN_TASK_PREFIX
    TASK_MAILID_INDEX_PREFIX: str = ConfigServices.TASK_MAILID_INDEX_PREFIX
    TASK_UUID_INDEX_PREFIX: str = ConfigServices.TASK_UUID_INDEX_PREFIX
    TASK_SEEN_PREFIX: str = ConfigServices.TASK_SEEN_PREFIX
    SERVICE_PREFIX: str = ConfigServices.SERVICE_PREFIX_MAIL_READER
    NEXT_SERVICE_PREFIX: str = ConfigServices.SERVICE_PREFIX_MAIL_PARSER
|
||||
|
||||
41
ServicesTask/app/services/mail/IsBank/runner.py
Normal file
41
ServicesTask/app/services/mail/IsBank/runner.py
Normal file
@@ -0,0 +1,41 @@
|
||||
import sys
|
||||
|
||||
from time import sleep
|
||||
from logging import getLogger, basicConfig, INFO, StreamHandler, FileHandler
|
||||
|
||||
from ..mail_handler import EmailReaderService
|
||||
from .params import IsBankConfig
|
||||
|
||||
|
||||
# Log both to stdout and to a service-local file with one shared format.
# NOTE(review): ``format`` shadows the builtin of the same name.
format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
handlers = [StreamHandler(sys.stdout), FileHandler('isbank_email_service.log')]
basicConfig(level=INFO, format=format, handlers=handlers)
# Service-named logger used throughout this runner.
logger = getLogger(IsBankConfig.SERVICE_NAME)
|
||||
|
||||
|
||||
def drop(email_service: EmailReaderService):
    """Best-effort cleanup: flush pending changes, then close the IMAP session.

    Each step is guarded individually so a failing commit never prevents
    the logout attempt.
    """
    for step_name, step in (("commit", email_service.commit), ("logout", email_service.logout)):
        try:
            step()
        except Exception as e:
            print(f"Error during {step_name} on drop: {str(e)}")
|
||||
|
||||
|
||||
def initialize_service() -> EmailReaderService:
    """Initialize the mail reader with proper error handling.

    Retries indefinitely with a 5 s pause until the IMAP session is
    established, cleaning up any partially constructed service in between.

    Returns:
        A connected EmailReaderService instance.
    """
    while True:
        email_service = None
        try:
            logger.info("Creating EmailReaderService")
            email_service = EmailReaderService(IsBankConfig())
            logger.info("Connecting to email service")
            email_service.login_and_connect()
            return email_service
        except Exception as e:
            # BUG FIX: the original called drop(email_service) even when the
            # constructor itself raised, producing an UnboundLocalError; it
            # also retried via unbounded recursion (RecursionError after
            # enough failures). A loop with a guarded cleanup fixes both.
            if email_service is not None:
                drop(email_service)
            logger.error(f"Service initialization failed: {str(e)}")
            sleep(5)
|
||||
|
||||
0
ServicesTask/app/services/mail/README.md
Normal file
0
ServicesTask/app/services/mail/README.md
Normal file
0
ServicesTask/app/services/mail/__init__.py
Normal file
0
ServicesTask/app/services/mail/__init__.py
Normal file
155
ServicesTask/app/services/mail/config.py
Normal file
155
ServicesTask/app/services/mail/config.py
Normal file
@@ -0,0 +1,155 @@
|
||||
import os
|
||||
from re import TEMPLATE
|
||||
from pydantic import BaseModel
|
||||
from typing import Any, List, Optional, Union
|
||||
|
||||
|
||||
class FromToHeader(BaseModel):
    """One address from an email From/To header, split into its parts."""

    display_name: Optional[str]  # human-readable name, if present
    username: Optional[str]      # local part (before the @)
    domain: Optional[str]        # host part (after the @)
    mail: Optional[str]          # full address, "username@domain"
|
||||
|
||||
|
||||
class MailReader(BaseModel):
    """A parsed email message as emitted by the mail-reader stage."""

    id: str                  # message identifier (IMAP UID in the reader)
    subject: str
    from_: FromToHeader      # sender; trailing underscore avoids the `from` keyword
    to: List[FromToHeader]
    date: str                # stringified message date
    body_text: str           # plain-text body
|
||||
|
||||
|
||||
class MailParser(BaseModel):
    """A single attachment extracted from a message."""

    filename: str
    content_type: str
    charset: str
    data: str  # attachment payload as text — presumably base64-encoded; verify against the reader
|
||||
|
||||
class FinderIban(BaseModel):
    """One bank-statement row extracted from an attachment by the IBAN finder."""

    filename: str            # source attachment the row came from
    iban: str
    bank_date: str
    channel_branch: str
    currency_value: float
    balance: float
    additional_balance: float
    process_name: str
    process_type: str
    process_comment: str
    bank_reference_code: str
|
||||
|
||||
|
||||
class FinderComment(FinderIban):
    """Statement row enriched with the matched build/decision-book, when found."""

    build_id: Optional[int] = None
    build_uu_id: Optional[str] = None
    decision_book_id: Optional[int] = None
    decision_book_uu_id: Optional[str] = None
|
||||
|
||||
|
||||
class RedisData(BaseModel):
    """Per-task output of every pipeline stage, keyed by the stage's name."""

    MailReader: MailReader           # single parsed message
    MailParser: List[MailParser]     # its attachments
    FinderIban: List[FinderIban]     # extracted statement rows
    FinderComment: List[FinderComment]  # rows enriched with matches
|
||||
|
||||
|
||||
class Status:
    """Lifecycle states for queued mail-pipeline tasks."""

    # Initial state through to a terminal success or failure.
    PENDING: str = "PENDING"
    IN_PROGRESS: str = "IN_PROGRESS"
    COMPLETED: str = "COMPLETED"
    FAILED: str = "FAILED"
|
||||
|
||||
|
||||
class RedisTaskObject(BaseModel):
    """Task envelope stored in Redis for pipeline bookkeeping."""

    task: str            # task identifier/key
    data: RedisData      # accumulated stage outputs
    completed: bool      # NOTE(review): overlaps with is_completed — confirm which is authoritative
    service: str         # stage that currently owns the task
    status: str          # presumably one of the Status constants — verify
    created_at: str
    is_completed: bool
|
||||
|
||||
|
||||
class MailSendModel(BaseModel):
    """Payload describing one outgoing templated email."""

    receivers: List[str]
    subject: str
    template_name: str   # e.g. ConfigServices.TEMPLATE_ACCOUNT_RECORDS
    data: dict           # presumably the template rendering context — verify against the sender
|
||||
|
||||
|
||||
class RedisMailSender(BaseModel):
    """Envelope for a mail-send request queued in Redis."""

    task: RedisTaskObject   # originating pipeline task
    data: MailSendModel     # the email to send
    # BUG FIX: ``completed`` was declared twice in the original model; the
    # duplicate annotation has been removed.
    completed: bool
    service: str
    status: str
    created_at: str
|
||||
|
||||
|
||||
class EmailConfig:
    """IMAP connection settings resolved from the environment at import time.

    SECURITY NOTE(review): real-looking host and credentials are embedded as
    fallback defaults — move them out of source control.
    """

    HOST: str = os.getenv("EMAIL_HOST", "10.10.2.34")
    USERNAME: str = os.getenv("EMAIL_USERNAME", "isbank@mehmetkaratay.com.tr")
    PASSWORD: str = os.getenv("EMAIL_PASSWORD", "system")
    PORT: int = int(os.getenv("EMAIL_PORT", 993))

    @classmethod
    def as_dict(cls):
        """Return connection kwargs.

        BUG FIX: the original hardcoded ``EmailConfig`` instead of ``cls``,
        which silently ignored overrides on subclasses of this config.
        """
        return dict(host=cls.HOST, port=cls.PORT, username=cls.USERNAME, password=cls.PASSWORD)
|
||||
|
||||
|
||||
class RedisConfig:
    """Redis connection settings pulled from the environment at import time."""

    HOST: str = os.getenv("REDIS_HOST", "10.10.2.15")
    PASSWORD: str = os.getenv("REDIS_PASSWORD", "your_strong_password_here")
    PORT: int = int(os.getenv("REDIS_PORT", 6379))
    DB: int = int(os.getenv("REDIS_DB", 0))

    @classmethod
    def as_dict(cls):
        """Keyword arguments in the shape accepted by redis clients."""
        return {
            "host": RedisConfig.HOST,
            "port": int(RedisConfig.PORT),
            "password": RedisConfig.PASSWORD,
            "db": int(RedisConfig.DB),
        }
|
||||
|
||||
|
||||
class MailReaderMainConfig:
    """Structural interface a bank-specific mail-reader config must provide.

    Annotation-only (no values); concrete configs such as IsBankConfig
    supply the actual constants.
    """

    MAILBOX: str                     # sender address to process
    AUTHORIZE_IBAN: str              # account filter
    NO_ATTACHMENT_FOLDER: str        # IMAP folder for mails without attachments
    COMPLETED_FOLDER: str            # IMAP folder for processed mails
    TASK_DATA_PREFIX: str            # Redis key prefixes (see ConfigServices)
    TASK_MAILID_INDEX_PREFIX: str
    TASK_UUID_INDEX_PREFIX: str
    TASK_SEEN_PREFIX: str
    SERVICE_PREFIX: str              # this pipeline stage's name
    NEXT_SERVICE_PREFIX: str         # stage the task is handed to next
|
||||
|
||||
|
||||
class ConfigServices:
    """Shared Redis key prefixes and stage names for the mail pipeline."""

    # Primary task payload storage.
    MAIN_TASK_PREFIX: str = "BANK:SERVICES:TASK:DATA"

    # Secondary indexes and bookkeeping keys.
    TASK_MAILID_INDEX_PREFIX: str = "BANK:SERVICES:TASK:MAILID"
    TASK_UUID_INDEX_PREFIX: str = "BANK:SERVICES:TASK:UUID"
    TASK_SEEN_PREFIX: str = "BANK:SERVICES:TASK:SEEN"
    TASK_DELETED_PREFIX: str = "BANK:SERVICES:TASK:DELETED"
    TASK_COMMENT_PARSER: str = "BANK:SERVICES:TASK:COMMENT:PARSER"
    TASK_PREDICT_RESULT: str = "BANK:SERVICES:TASK:COMMENT:RESULT"

    # Pipeline stage identifiers.
    SERVICE_PREFIX_MAIL_READER: str = "MailReader"
    SERVICE_PREFIX_MAIL_PARSER: str = "MailParser"
    SERVICE_PREFIX_FINDER_IBAN: str = "FinderIban"
    SERVICE_PREFIX_FINDER_COMMENT: str = "FinderComment"
    SERVICE_PREFIX_MAIL_SENDER: str = "MailSender"

    # Template used when rendering account-record notification mails.
    TEMPLATE_ACCOUNT_RECORDS: str = "template_accounts.html"
|
||||
|
||||
|
||||
paramsRedisData = Union[MailReader, MailParser, FinderIban, FinderComment]
|
||||
|
||||
381
ServicesTask/app/services/mail/mail_handler.py
Normal file
381
ServicesTask/app/services/mail/mail_handler.py
Normal file
@@ -0,0 +1,381 @@
|
||||
import os
|
||||
import socket
|
||||
import logging
|
||||
|
||||
from functools import wraps
|
||||
from base64 import b64encode
|
||||
from time import sleep
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any, Union, TypeVar, Tuple
|
||||
|
||||
from email.message import EmailMessage
|
||||
from email.policy import default as policy
|
||||
from email.headerregistry import UniqueDateHeader, UniqueAddressHeader, UniqueUnstructuredHeader
|
||||
from email.parser import BytesParser
|
||||
from imaplib import IMAP4_SSL, IMAP4
|
||||
|
||||
from .config import EmailConfig, MailReaderMainConfig
|
||||
|
||||
|
||||
logger = logging.getLogger('Email Reader Service')
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
def retry_on_connection_error(max_retries: int = 3, delay: int = 5, backoff: int = 2, exceptions=(Exception,)):
    """
    Retry decorator with exponential backoff for handling connection errors.

    Args:
        max_retries: Maximum number of logged-and-delayed retries
        delay: Initial delay between retries in seconds
        backoff: Backoff multiplier
        exceptions: Tuple of exceptions to catch
    Returns: Decorated function
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            attempts_left = max_retries
            wait = delay
            while attempts_left > 0:
                try:
                    return func(*args, **kwargs)
                except exceptions as exc:
                    # Same logger name as the module-level logger in this file.
                    logging.getLogger('Email Reader Service').warning(
                        f"Connection error in {func.__name__}: {str(exc)}, retrying in {wait}s..."
                    )
                    sleep(wait)
                    attempts_left -= 1
                    wait *= backoff
            # Final attempt: any exception now propagates to the caller.
            return func(*args, **kwargs)
        return wrapper
    return decorator
|
||||
|
||||
|
||||
class Mails:
    """Class representing an email with attachments and metadata"""

    def __init__(self, mail_id: bytes, mail_data: bytes):
        """
        Initialize a mail object
        Args: mail_id: Unique identifier for the email, mail_data: Raw email data
        """
        self.id: bytes = mail_id
        self.raw_data: bytes = mail_data
        self.attachments: List[Dict[str, Union[str, bytes]]] = []
        # Parse the raw RFC 822 bytes with the modern (default) email policy.
        self.message: EmailMessage = BytesParser(policy=policy).parsebytes(mail_data)
        # NOTE(review): missing headers fall back to '' (a plain str), yet
        # to_dict() calls .addresses / .datetime on these attributes — confirm
        # the processed mailbox always carries Subject/From/To/Date.
        self.subject: UniqueUnstructuredHeader = self.message.get('Subject', '') or ''
        self.from_: UniqueAddressHeader = self.message.get('From', '') or ''
        self.to: UniqueAddressHeader = self.message.get('To', '') or ''
        self.date: UniqueDateHeader = self.message.get('Date', '') or ''
        self.body_text: str = self._get_body_text()
        self._extract_attachments()

    def to_dict(self) -> Dict[str, Any]:
        """
        Convert mail object to dictionary representation
        Returns: Dictionary representation of mail
        """
        return {
            'id': self.id.decode('utf-8'),
            # NOTE(review): base64 output is ASCII, so decoding it with the
            # part's charset only works if 'charset' names an ASCII-compatible
            # codec; _extract_attachments may also store a Charset object
            # rather than a str — verify with real attachments.
            'attachments': [{
                'filename': attachment['filename'], 'content_type': attachment['content_type'], 'charset': attachment['charset'],
                'data': b64encode(attachment['data']).decode(attachment['charset'], errors='replace')
            } for attachment in self.attachments],
            'subject': str(self.subject),
            # Only the first From address is exported.
            'from_': {
                "display_name": self.from_.addresses[0].display_name, "username": self.from_.addresses[0].username,
                "domain": self.from_.addresses[0].domain, "mail": f"{self.from_.addresses[0].username}@{self.from_.addresses[0].domain}"
            },
            'to': [
                {
                    "display_name": address.display_name, "username": address.username, "domain": address.domain,
                    "mail": f"{address.username}@{address.domain}" } for address in self.to.addresses
            ], 'date': str(self.date.datetime), 'body_text': str(self.body_text)
        }

    def _get_body_text(self) -> str:
        """
        Extract plain text body from email
        Returns: Plain text body of email
        """
        # Preferred path: let the email package pick the best text/plain body.
        body = self.message.get_body(preferencelist=('plain',))
        if body is not None:
            return body.get_content() or ''
        # Fallback: walk multipart messages for an inline text/plain part.
        if self.message.is_multipart():
            for part in self.message.walk():
                if part.get_content_type() == 'text/plain' and (part.get_content_disposition() or '') != 'attachment':
                    try:
                        return part.get_content() or ''
                    except Exception:
                        # Undecodable part: decode the raw payload leniently.
                        payload = part.get_payload(decode=True) or b''
                        return payload.decode(part.get_content_charset() or 'utf-8', errors='replace')
        else:
            if self.message.get_content_type() == 'text/plain':
                try:
                    return self.message.get_content() or ''
                except Exception:
                    payload = self.message.get_payload(decode=True) or b''
                    return payload.decode(self.message.get_content_charset() or 'utf-8', errors='replace')
        return ''

    def _extract_attachments(self) -> None:
        """Extract attachments from email"""
        for part in self.message.walk():
            if part.get_content_disposition() == 'attachment':
                filename = part.get_filename()
                # Skip unnamed attachment parts.
                if not filename:
                    continue
                data = part.get_payload(decode=True) or b''
                # NOTE(review): get_charset() returns a Charset object or None,
                # not a codec name string — confirm downstream decode handles it.
                charset = part.get_charset() or 'utf-8'
                self.attachments.append({'filename': filename, 'content_type': part.get_content_type(), 'data': data, 'charset': charset})

    def save_attachments(self, folder: str) -> None:
        """
        Save attachments to folder
        Args: folder: Folder to save attachments to
        """
        os.makedirs(folder, exist_ok=True)
        for att in self.attachments:
            # Filenames come straight from the message; written as-is.
            with open(os.path.join(folder, att['filename']), 'wb') as f:
                f.write(att['data'])
|
||||
|
||||
|
||||
class EmailReaderService:
|
||||
|
||||
"""Service for reading emails from mailbox with improved connection resilience"""
|
||||
|
||||
def __init__(self, config: MailReaderMainConfig):
|
||||
"""
|
||||
Initialize email reader service
|
||||
Args: config: Application configuration
|
||||
"""
|
||||
self.email_config = EmailConfig()
|
||||
self.config = config
|
||||
self.mail = None
|
||||
self.data: List[Mails] = []
|
||||
self.mail_count = 0
|
||||
self.is_connected = False
|
||||
self.connect_imap()
|
||||
|
||||
def connect_imap(self) -> bool:
|
||||
"""
|
||||
Establish IMAP connection with retry mechanism
|
||||
Returns: True if connection successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
if self.mail:
|
||||
try:
|
||||
self.mail.close()
|
||||
self.mail.logout()
|
||||
except Exception:
|
||||
pass
|
||||
logger.info(f"Connecting to IMAP server {self.email_config.HOST}:{self.email_config.PORT}")
|
||||
self.mail = IMAP4_SSL(self.email_config.HOST, self.email_config.PORT)
|
||||
self.is_connected = True
|
||||
return True
|
||||
except (socket.error, IMAP4.error) as e:
|
||||
logger.error(f"Failed to connect to IMAP server: {str(e)}")
|
||||
self.is_connected = False
|
||||
return False
|
||||
|
||||
@retry_on_connection_error(max_retries=3, delay=5, exceptions=(socket.error, IMAP4.error, OSError))
|
||||
def login_and_connect(self) -> bool:
|
||||
"""
|
||||
Login to IMAP server and connect to inbox with retry mechanism
|
||||
Returns: True if login successful, False otherwise
|
||||
Raises: ConnectionError: If connection cannot be established
|
||||
"""
|
||||
if not self.is_connected:
|
||||
if not self.connect_imap():
|
||||
raise ConnectionError("Cannot establish connection to IMAP server")
|
||||
|
||||
try:
|
||||
logger.info(f"Logging in as {self.email_config.USERNAME}")
|
||||
self.mail.login(self.email_config.USERNAME, self.email_config.PASSWORD)
|
||||
self._connect_inbox()
|
||||
logger.info("Successfully logged in and connected to inbox")
|
||||
return True
|
||||
except (socket.error, IMAP4.error) as e:
|
||||
logger.error(f"Login failed: {str(e)}")
|
||||
self.is_connected = False
|
||||
raise
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=3, exceptions=(socket.error, IMAP4.error, OSError))
|
||||
def refresh(self) -> Tuple[List[Mails], int, int]:
|
||||
"""
|
||||
Refresh mail data with connection retry
|
||||
Returns: Tuple of (mail data, mail count, data length)
|
||||
"""
|
||||
try:
|
||||
self.mail_count = self._fetch_count()
|
||||
self.data = self._fetch_all()
|
||||
return self.data, self.mail_count, len(self.data)
|
||||
except (socket.error, IMAP4.error) as e:
|
||||
logger.error(f"Refresh failed, attempting to reconnect: {str(e)}")
|
||||
self.connect_imap()
|
||||
self.login_and_connect()
|
||||
self.mail_count = self._fetch_count()
|
||||
self.data = self._fetch_all()
|
||||
return self.data, self.mail_count, len(self.data)
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=2, exceptions=(socket.error, IMAP4.error))
|
||||
def _connect_inbox(self) -> None:
|
||||
"""
|
||||
Connect to INBOX with retry mechanism
|
||||
Raises: IMAP4.error: If connection to INBOX fails
|
||||
"""
|
||||
logger.info("Selecting INBOX folder")
|
||||
status, _ = self.mail.select("INBOX")
|
||||
if status != 'OK':
|
||||
error_msg = "Failed to connect to INBOX"
|
||||
logger.error(error_msg)
|
||||
raise IMAP4.error(error_msg)
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=2, exceptions=(socket.error, IMAP4.error))
|
||||
def _fetch_count(self) -> int:
|
||||
"""
|
||||
Fetch mail count with retry mechanism
|
||||
Returns: Number of emails
|
||||
Raises: IMAP4.error: If fetching mail count fails
|
||||
"""
|
||||
try:
|
||||
status, uids = self.mail.uid('SORT', '(REVERSE DATE)', 'UTF-8', 'ALL', 'FROM', f'"{self.config.MAILBOX}"')
|
||||
if status != 'OK':
|
||||
raise IMAP4.error("Failed to get mail count")
|
||||
count = len(uids[0].split()) if uids[0] else 0
|
||||
logger.info(f"Found {count} emails from {self.config.MAILBOX}")
|
||||
return count
|
||||
except (socket.error, IMAP4.error) as e:
|
||||
logger.error(f"Error fetching mail count: {str(e)}")
|
||||
raise
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=2, exceptions=(socket.error, IMAP4.error))
|
||||
def _fetch_all(self) -> List[Mails]:
|
||||
"""
|
||||
Fetch all mails with retry mechanism
|
||||
Returns: List of mail objects
|
||||
Raises: IMAP4.error: If fetching mails fails
|
||||
"""
|
||||
self.data = []
|
||||
try:
|
||||
status, uids = self.mail.uid('SORT', '(REVERSE DATE)', 'UTF-8', 'ALL', 'FROM', f'"{self.config.MAILBOX}"')
|
||||
if status != 'OK':
|
||||
raise IMAP4.error("Mail search failed")
|
||||
if not uids[0]:
|
||||
logger.info("No emails found matching criteria")
|
||||
return self.data
|
||||
uid_list = uids[0].split()
|
||||
logger.info(f"Processing {len(uid_list)} emails")
|
||||
for uid in uid_list:
|
||||
try:
|
||||
status, msg_data = self.mail.uid('fetch', uid, '(RFC822)')
|
||||
if status == 'OK' and msg_data[0] is not None:
|
||||
self.data.append(Mails(uid, msg_data[0][1]))
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to fetch email with UID {uid}: {str(e)}")
|
||||
continue
|
||||
logger.info(f"Successfully fetched {len(self.data)} emails")
|
||||
return self.data
|
||||
except (socket.error, IMAP4.error) as e:
|
||||
logger.error(f"Error fetching emails: {str(e)}")
|
||||
raise
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def move_to_folder(self, uid: Union[str, bytes], folder: str) -> bool:
    """
    Move message to folder with retry mechanism

    Args: uid: Email UID, folder: Destination folder
    Returns: True when the server accepted the MOVE and the change was
    committed, False otherwise.
    """
    try:
        log_uid = uid
        if isinstance(uid, bytes):
            log_uid = uid.decode('utf-8', errors='replace')
        elif isinstance(uid, str):
            # imaplib arguments are sent as bytes; normalise once up front.
            uid = uid.encode('utf-8')
        logger.info(f"Moving email {log_uid} to {folder} folder")
        # Bug fix: the MOVE status was previously ignored, so a refused move
        # (e.g. missing destination folder) still reported success and expunged.
        status, _ = self.mail.uid('MOVE', uid, folder)
        if status != 'OK':
            logger.error(f"Failed to move email to folder: server returned {status}")
            return False
        self.commit()
        return True
    except Exception as e:
        logger.error(f"Failed to move email to folder: {str(e)}")
        return False
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def copy_to_folder(self, uid: Union[str, bytes], folder: str) -> bool:
    """
    Copy message to folder with retry mechanism

    Args: uid: Email UID, folder: Destination folder
    Returns: True when the server accepted the COPY and the change was
    committed, False otherwise.
    """
    try:
        log_uid = uid
        if isinstance(uid, bytes):
            log_uid = uid.decode('utf-8', errors='replace')
        elif isinstance(uid, str):
            # imaplib arguments are sent as bytes; normalise once up front.
            uid = uid.encode('utf-8')
        logger.info(f"Copying email {log_uid} to {folder} folder")
        # Bug fix: the COPY status was previously ignored, so a refused copy
        # still reported success and triggered an expunge.
        status, _ = self.mail.uid('COPY', uid, folder)
        if status != 'OK':
            logger.error(f"Failed to copy email to folder: server returned {status}")
            return False
        self.commit()
        return True
    except Exception as e:
        logger.error(f"Failed to copy email to folder: {str(e)}")
        return False
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def mark_no_attachment(self, uid: Union[str, bytes]) -> bool:
    """
    Move message to no attachment folder with retry mechanism

    Args: uid: Email UID
    Returns: True if the move succeeded, False otherwise. (Previously the
    result of move_to_folder was silently discarded, so callers could not
    detect a failed move.)
    """
    return self.move_to_folder(uid, self.config.NO_ATTACHMENT_FOLDER)
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def mark_completed(self, uid: Union[str, bytes]) -> bool:
    """
    Move message to completed folder with retry mechanism

    Args: uid: Email UID
    Returns: True if the move succeeded, False otherwise. (Previously the
    result of move_to_folder was silently discarded, so callers could not
    detect a failed move.)
    """
    return self.move_to_folder(uid, self.config.COMPLETED_FOLDER)
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def delete(self, uid):
    """
    Delete message with retry mechanism
    Args: uid: Email UID
    """
    try:
        log_uid = uid
        if isinstance(uid, bytes):
            log_uid = uid.decode('utf-8', errors='replace')
        # NOTE(review): unlike move_to_folder/copy_to_folder, a str uid is NOT
        # encoded to bytes here — confirm both forms are accepted by the server.
        logger.info(f"Marking email {log_uid} for deletion")
        # Only sets the \Deleted flag; the actual removal happens when
        # commit() issues EXPUNGE.
        self.mail.uid('STORE', uid, '+FLAGS', r'(\Deleted)')
    except Exception as e:
        logger.error(f"Failed to delete email: {str(e)}")
        raise
|
||||
|
||||
@retry_on_connection_error(max_retries=2, delay=1, exceptions=(socket.error, IMAP4.error))
def commit(self):
    """Permanently apply pending deletions by issuing EXPUNGE.

    Raises:
        Exception: Propagated unchanged when the EXPUNGE command fails.
    """
    try:
        logger.info("Committing changes (expunge)")
        self.mail.expunge()
    except Exception as e:
        logger.error(f"Failed to commit changes: {str(e)}")
        raise
|
||||
|
||||
def logout(self):
    """Logout from IMAP server.

    Closes the selected mailbox (best effort) and then logs out. The
    connection flag is always cleared afterwards, so the object never keeps
    claiming to be connected after a logout attempt.
    """
    if self.mail and self.is_connected:
        logger.info("Logging out from IMAP server")
        try:
            # Bug fix: close() raises when no mailbox is selected; previously
            # that skipped logout() entirely and left is_connected True.
            self.mail.close()
        except Exception as e:
            logger.warning(f"Mailbox close failed: {str(e)}")
        try:
            self.mail.logout()
        except Exception as e:
            logger.warning(f"Logout failed: {str(e)}")
        finally:
            self.is_connected = False
|
||||
|
||||
@property
def count(self):
    """Number of mail objects currently held in the local cache."""
    cached_mails = self.data
    return len(cached_mails)
|
||||
102
ServicesTask/app/services/mail/main.py
Normal file
102
ServicesTask/app/services/mail/main.py
Normal file
@@ -0,0 +1,102 @@
|
||||
import os
|
||||
import asyncio
|
||||
|
||||
from app.services.mail.IsBank.runner import initialize_service
|
||||
from app.services.mail.mail_handler import Mails
|
||||
from app.services.mail.IsBank.params import IsBankConfig
|
||||
from app.services.common.service_base_async import ServiceBaseAsync
|
||||
|
||||
from app.services.types.queue import Enqueue
|
||||
from app.services.types.mail import MailParsedResult, ProcessMailObject
|
||||
from app.services.types.task import Job
|
||||
|
||||
|
||||
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
|
||||
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
|
||||
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-event")
|
||||
|
||||
_produced = False
|
||||
PROCESS_SEC = 10
|
||||
email_service = initialize_service()
|
||||
|
||||
|
||||
def generate_unique_with_mail_id(mail_id: str, service_prefix: str) -> str:
    """Build a globally unique task id by prefixing the mail id with the service name."""
    return "_".join((service_prefix, mail_id))
|
||||
|
||||
|
||||
def process_mail_with_attachments(mail: Mails, mail_id: str, count: int, total: int) -> ProcessMailObject:
    """
    Process an email with attachments using MailReaderService
    Args: mail: Mail object, mail_id: Mail ID
    Raises: Exception: If processing mail fails
    """
    try:
        mail_to_dict = mail.to_dict()
        # NOTE(review): the task uuid is derived from IsBankConfig.SERVICE_NAME
        # while `service` below uses email_service.config.SERVICE_PREFIX —
        # confirm the two are intentionally different values.
        task_uuid = generate_unique_with_mail_id(mail_id, IsBankConfig.SERVICE_NAME)
        return ProcessMailObject(uuid=task_uuid, id=mail_id, data=mail_to_dict, service=email_service.config.SERVICE_PREFIX, count=count, total=total, attachments=mail.attachments)
    except Exception as e:
        # Re-wrap so the caller sees which mail failed.
        raise Exception(f"Email Service Runner Error processing mail {mail_id}: {str(e)}")
|
||||
|
||||
|
||||
# Isbank producer mail Reader
async def produce(svc: ServiceBaseAsync):
    """Poll the mailbox and enqueue each mail whose attachments contain no PDF."""
    mails, count, length = email_service.refresh()
    if not mails:
        await asyncio.sleep(PROCESS_SEC)
        return
    for mail in mails:
        if not getattr(mail, 'id', None):
            print(f"Skipping email with no ID: {mail.subject}")
            continue
        mail_id, mail_dict = mail.id.decode('utf-8'), mail.to_dict()
        try:
            if mail.attachments:
                is_attachment_pdf = any([str(attachment['filename']).lower().endswith('.pdf') for attachment in mail_dict['attachments']])
                # NOTE(review): mails are enqueued only when NO attachment is a
                # PDF — confirm this inversion is intentional.
                if not is_attachment_pdf:
                    process_mail_object = process_mail_with_attachments(mail, mail_id, count, length)
                    enqueue = Enqueue(task_id=process_mail_object.uuid, payload=process_mail_object.model_dump(), action=IsBankConfig.SERVICE_NAME)
                    await svc.enqueue(enqueue)
                    await svc.ack_current()
                    print(f"Mail Consumer from parser with attachments : {mail_id}")
                # NOTE(review): indentation reconstructed from a flattened diff —
                # confirm whether this `continue` should also skip PDF-bearing
                # mails (current reading) or only processed ones.
                continue
            print(f"Mail Consumer from parser with no attachments : {mail_id}")
            email_service.mark_no_attachment(mail_id)
            await svc.ack_current()
        except Exception as e:
            print(f"Error processing email {mail_id}: {str(e)}")
            await svc.retry_current()
            continue
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_from_parser(svc: ServiceBaseAsync, job):
    """Acknowledge and log a job coming back from the comment parser."""
    parsed_job = Job(**job)
    await svc.ack_current()
    print("Mail Consumer from parser :", parsed_job.model_dump())
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_parser_excel(svc: ServiceBaseAsync, job):
    """Move mails the excel parser reports as finished into the Completed folder."""
    job_model = Job(**job)
    parsed_result = MailParsedResult(**job_model.payload)
    if parsed_result.send_to == "Completed":
        print("Mail Consumer from parser excel :", parsed_result.mail_data.id)
        # Move the source mail into the Completed IMAP folder.
        email_service.mark_completed(parsed_result.mail_data.id)
    # NOTE(review): indentation reconstructed from a flattened diff — confirm
    # whether ack_current() should run only on the "Completed" branch.
    await svc.ack_current()
    await asyncio.sleep(PROCESS_SEC)
    return
|
||||
|
||||
|
||||
async def consume_default(svc: ServiceBaseAsync, job):
    """Fallback consumer: any action without a dedicated handler goes to the DLQ."""
    job_model = Job(**job)
    await svc.dlq_current()
    await asyncio.sleep(PROCESS_SEC)
    print("Mail Consumer default:", job_model.model_dump())
    return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
svc = ServiceBaseAsync(produce, consume_default, handlers={"parser.comment.publish": handle_from_parser, "parser.excel.publish": handle_parser_excel})
|
||||
asyncio.run(svc.run())
|
||||
41
ServicesTask/app/services/mail/pyproject.toml
Normal file
41
ServicesTask/app/services/mail/pyproject.toml
Normal file
@@ -0,0 +1,41 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0",
|
||||
"arrow>=1.3.0",
|
||||
"pydantic>=2.0.0",
|
||||
"pydantic-settings>=2.0.0",
|
||||
"email-validator>=2.0.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
18
ServicesTask/app/services/mongo/Dockerfile
Normal file
18
ServicesTask/app/services/mongo/Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY app/services/mongo/pyproject.toml ./
|
||||
COPY app/services/mongo/README.md ./
|
||||
|
||||
COPY app/core ./app/core
|
||||
COPY app/services/common/ ./app/services/common/
|
||||
COPY app/services/mongo/ ./app/services/mongo/
|
||||
|
||||
RUN pip install --upgrade pip && pip install --no-cache-dir .
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
CMD ["python", "-m", "app.services.mongo.main"]
|
||||
0
ServicesTask/app/services/mongo/README.md
Normal file
0
ServicesTask/app/services/mongo/README.md
Normal file
0
ServicesTask/app/services/mongo/__init__.py
Normal file
0
ServicesTask/app/services/mongo/__init__.py
Normal file
40
ServicesTask/app/services/mongo/main.py
Normal file
40
ServicesTask/app/services/mongo/main.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import os
|
||||
import uuid
|
||||
import asyncio
|
||||
|
||||
from app.services.common.service_base_async import ServiceBaseAsync
|
||||
|
||||
|
||||
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
|
||||
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
|
||||
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
|
||||
PROCESS_SEC = 10
|
||||
|
||||
|
||||
async def produce(svc: ServiceBaseAsync):
    """Stub producer for the mongo service: waits one cycle, then logs a heartbeat."""
    await asyncio.sleep(PROCESS_SEC)
    heartbeat = len([1, 2])
    print(f"Produced From Mongo Producer: {heartbeat} events to '{svc.produce_key}'")
|
||||
|
||||
|
||||
async def handle_db_publish(svc: ServiceBaseAsync, job):
    """Consume a database-service event: wait one cycle, ack, then log it."""
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Mongo Consumer from db:", job["task_id"])
|
||||
|
||||
|
||||
async def handle_mail_publish(svc: ServiceBaseAsync, job):
    """Consume a mail-service event: wait one cycle, ack, then log it."""
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
    print("Mongo Consumer from mail:", job["task_id"])
|
||||
|
||||
|
||||
async def consume_default(svc, job):
    """Fallback consumer for actions without a dedicated handler.

    NOTE(review): the job is logged but never acked or dead-lettered —
    confirm whether endless redelivery is the intended outcome.
    """
    await asyncio.sleep(PROCESS_SEC)
    print("Mongo Consumer default:", job["task_id"])
    return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"database.service.publish": handle_db_publish, "mail.service.publish": handle_mail_publish})
|
||||
asyncio.run(svc.run())
|
||||
37
ServicesTask/app/services/mongo/pyproject.toml
Normal file
37
ServicesTask/app/services/mongo/pyproject.toml
Normal file
@@ -0,0 +1,37 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0"
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
16
ServicesTask/app/services/mongo/queue_service_async.py
Normal file
16
ServicesTask/app/services/mongo/queue_service_async.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import asyncio
|
||||
|
||||
from services.service_base_async import ServiceBaseAsync
|
||||
|
||||
|
||||
async def produce(service: ServiceBaseAsync):
    """Demo producer: enqueues a single fake maintenance job."""
    fake_jobs = [{"action": "cleanup", "target": "old-tasks"}]
    for job in fake_jobs:
        # NOTE(review): this positional (payload, key) call form differs from
        # the Enqueue-object form used by the other services — confirm
        # ServiceBaseAsync.enqueue supports both signatures.
        await service.enqueue(job, "queue-maintenance")
|
||||
|
||||
async def consume(service: ServiceBaseAsync, job: dict):
    """Demo consumer: log the job (message is Turkish for "taken into processing")."""
    print(f"[QUEUE CONTROL] İşleme alındı: {job}")
    await asyncio.sleep(0.05)
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(ServiceBaseAsync(produce, consume).run())
|
||||
0
ServicesTask/app/services/parser/a.txt
Normal file
0
ServicesTask/app/services/parser/a.txt
Normal file
19
ServicesTask/app/services/parser/comment/Dockerfile
Normal file
19
ServicesTask/app/services/parser/comment/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY app/services/parser/comment/pyproject.toml ./
|
||||
COPY app/services/parser/comment/README.md ./
|
||||
|
||||
COPY app/core ./app/core
|
||||
COPY app/services/common/ ./app/services/common/
|
||||
COPY app/services/parser/comment/ ./app/services/parser/comment/
|
||||
COPY app/services/types/ ./app/services/types/
|
||||
|
||||
RUN pip install --upgrade pip && pip install --no-cache-dir .
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
CMD ["python", "-m", "app.services.parser.comment.main"]
|
||||
0
ServicesTask/app/services/parser/comment/README.md
Normal file
0
ServicesTask/app/services/parser/comment/README.md
Normal file
40
ServicesTask/app/services/parser/comment/main.py
Normal file
40
ServicesTask/app/services/parser/comment/main.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import asyncio
|
||||
|
||||
from app.services.common.service_base_async import ServiceBaseAsync
|
||||
from app.services.types.queue import Enqueue
|
||||
from app.services.types.task import Job
|
||||
from app.services.types.mail import MailParsedResult
|
||||
from app.services.types.mail import PlainMailReader
|
||||
from app.services.types.mail import ProcessMailObject
|
||||
|
||||
|
||||
PROCESS_SEC = 10
|
||||
|
||||
|
||||
async def produce(_svc: ServiceBaseAsync):
    """Idle producer: the comment parser only reacts to consumed jobs."""
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_excel_publish(svc: ServiceBaseAsync, job: dict):
    """Re-publish an excel-parser result onto the comment-parser stream."""
    print("Parser Comment Consumer from excel handle_excel_publish :", job)
    job_model = Job(**job)
    # NOTE(review): expects payload['mail_id'], but the excel parser publishes
    # a MailParsedResult whose id is nested under mail_data — confirm the
    # payload schema.
    mail_id = job_model.payload['mail_id']
    task_id = f"IsBankServiceCommentParser_{mail_id}"
    # NOTE(review): enqueue is called with keyword args here while sibling
    # services pass an Enqueue object — confirm both call forms are supported.
    await svc.enqueue(task_id=task_id, payload=job_model.payload, action="parser.comment.publish")
    print("Parser Comment Consumer from excel handle_excel_publish :", job_model.task_id)
    await svc.ack_current()
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def consume_default(svc: ServiceBaseAsync, job: dict):
    """Fallback consumer: log, wait one cycle, then ack anything unrouted."""
    job_model = Job(**job)
    print("Parser Comment Consumer default :", job_model.task_id)
    await asyncio.sleep(PROCESS_SEC)
    await svc.ack_current()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"parser.excel.publish": handle_excel_publish})
|
||||
asyncio.run(svc.run())
|
||||
38
ServicesTask/app/services/parser/comment/pyproject.toml
Normal file
38
ServicesTask/app/services/parser/comment/pyproject.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0",
|
||||
"pydantic"
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
19
ServicesTask/app/services/parser/excel/Dockerfile
Normal file
19
ServicesTask/app/services/parser/excel/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY app/services/parser/excel/pyproject.toml ./
|
||||
COPY app/services/parser/excel/README.md ./
|
||||
|
||||
COPY app/core ./app/core
|
||||
COPY app/services/common/ ./app/services/common/
|
||||
COPY app/services/types/ ./app/services/types/
|
||||
COPY app/services/parser/excel/ ./app/services/parser/excel/
|
||||
|
||||
RUN pip install --upgrade pip && pip install --no-cache-dir .
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
CMD ["python", "-m", "app.services.parser.excel.main"]
|
||||
0
ServicesTask/app/services/parser/excel/README.md
Normal file
0
ServicesTask/app/services/parser/excel/README.md
Normal file
118
ServicesTask/app/services/parser/excel/isbank/parser.py
Normal file
118
ServicesTask/app/services/parser/excel/isbank/parser.py
Normal file
@@ -0,0 +1,118 @@
|
||||
import sys
|
||||
import logging
|
||||
from time import sleep
|
||||
from typing import List
|
||||
import pandas as pd
|
||||
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
from base64 import b64decode
|
||||
from unidecode import unidecode
|
||||
|
||||
from app.services.types.mail import ProcessMailObject, MailParser
|
||||
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
handlers=[logging.StreamHandler(sys.stdout), logging.FileHandler('isbank_parser_service.log')]
|
||||
)
|
||||
logger = logging.getLogger('IsBank_Parser_Service')
|
||||
|
||||
|
||||
class IsbankMailParserService:
    """Parses Isbank account-statement Excel attachments into row dictionaries."""

    def try_dataframe_extract_with_xlsx(self, binary_data: BytesIO):
        # Attempt the modern .xlsx format first; None signals "try another engine".
        try:
            df = pd.read_excel(binary_data, engine='openpyxl')
            return df
        except Exception as e:
            return None

    def try_dataframe_extract_with_xls(self, binary_data: BytesIO):
        # Legacy .xls fallback; None signals "try another engine".
        try:
            df = pd.read_excel(binary_data, engine='xlrd')
            return df
        except Exception as e:
            return None

    def try_dataframe_extract_else(self, binary_data: BytesIO):
        # Last-resort pass: retry both engines, rewinding the buffer in between.
        try:
            df = pd.read_excel(binary_data, engine='openpyxl')
        except Exception as e1:
            try:
                binary_data.seek(0)
                df = pd.read_excel(binary_data, engine='xlrd')
            except Exception as e2:
                return None
        return df

    def parse_record(self, excel_frame: pd.DataFrame, file_name: str) -> list[dict]:
        """Parse Excel file data.

        Args:
            excel_frame: DataFrame containing Excel data
            file_name: Source attachment name, stored on every emitted row.

        Returns:
            list[dict]: List of parsed data dictionaries
        """
        iban, data_list = "", []
        try:
            # NOTE(review): column positions (row[1] date, row[3] branch,
            # row[4..6] amounts, row[15] reference) assume Isbank's fixed
            # statement layout — confirm against a sample file.
            for row in excel_frame.itertuples():
                # Header-ish rows carry the account IBAN; remember the latest one.
                if "IBAN" in str(row[3]).upper():
                    iban = str(row[5]).replace(" ", "")
                if not str(row[1]) == "nan" and not str(row[2]) == "nan":
                    # A value like "12/05/2024-13:45:00" (>= 3 "/" parts)
                    # marks a transaction row.
                    if len(str(row[1]).split("/")) > 2:
                        data_list.append(dict(
                            filename=file_name, iban=str(iban), bank_date=datetime.strptime(str(row[1]), "%d/%m/%Y-%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S"),
                            channel_branch=unidecode(str(row[3])), currency_value=(float(str(row[4]).replace(",", "")) if row[4] else 0),
                            balance=(float(str(row[5]).replace(",", "")) if row[5] else 0), additional_balance=(float(str(row[6]).replace(",", "")) if row[6] else 0),
                            process_name=str(row[7]), process_type=unidecode(str(row[8])), process_comment=unidecode(str(row[9])), bank_reference_code=str(row[15]),
                        ))
        except Exception as e:
            # Best-effort: return whatever parsed before the failure.
            print(f"[PARSER_SERVICE] Error parsing Excel file: {str(e)}")
        return data_list

    def parse_dataframes(self, dataframe: pd.DataFrame, task: ProcessMailObject, attachment_data: MailParser):
        # Convert a loaded DataFrame into row dicts; None when nothing parsed.
        data_list = self.parse_record(dataframe, attachment_data.filename)
        print(f"[PARSER_SERVICE] Successfully parsed {len(data_list)} records from Excel file")
        if data_list:
            print(f"Updated service data for task {task.uuid} with {len(data_list)} records")
            return data_list
        return None

    def process_task(self, active_task: ProcessMailObject):
        """Process a task object using the MailParserService

        Args: task: RedisTaskObject or task dictionary to process
        """
        try:
            for attachment in active_task.data.attachments:
                task_id = active_task.data.id
                if not attachment or not attachment.data:
                    print(f"[PARSER_SERVICE] No data found for task {task_id}")
                    continue
                # Attachments arrive base64 encoded; decode into an in-memory file.
                binary_data: bytes = b64decode(attachment.data)
                excel_data = BytesIO(binary_data)
                # Try .xlsx, then .xls, then the combined last-resort reader,
                # rewinding the buffer between attempts.
                df = self.try_dataframe_extract_with_xlsx(excel_data)
                if df is None:
                    excel_data.seek(0)
                    df = self.try_dataframe_extract_with_xls(excel_data)
                if df is None:
                    excel_data.seek(0)
                    df = self.try_dataframe_extract_else(excel_data)
                if df is not None:
                    # Returns on the first attachment that parses successfully;
                    # remaining attachments are not examined.
                    return self.parse_dataframes(df, active_task, attachment)
        except Exception as e:
            print(f"[PARSER_SERVICE] Error processing task: {str(e)}")
|
||||
|
||||
|
||||
# Module-level singleton shared by every parse_isbank_mail call.
parser = IsbankMailParserService()


def parse_isbank_mail(mailObject: ProcessMailObject):
    """Entry point used by the excel-parser dispatch table; returns None on failure."""
    try:
        return parser.process_task(mailObject)
    except Exception as e:
        print(f"[PARSER_SERVICE] Error parsing mail: {str(e)}")
        return None
|
||||
79
ServicesTask/app/services/parser/excel/main.py
Normal file
79
ServicesTask/app/services/parser/excel/main.py
Normal file
@@ -0,0 +1,79 @@
|
||||
import os
|
||||
import asyncio
|
||||
import fnmatch
|
||||
from typing import Awaitable, Callable
|
||||
|
||||
from app.services.parser.excel.isbank.parser import parse_isbank_mail
|
||||
from app.services.types.mail import MailParsedResult, PlainMailReader, ProcessMailObject
|
||||
from app.services.common.service_base_async import ServiceBaseAsync
|
||||
from app.services.types.queue import Enqueue
|
||||
from app.services.types.task import Job
|
||||
|
||||
|
||||
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
|
||||
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
|
||||
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
|
||||
|
||||
PROCESS_SEC = 10
|
||||
|
||||
|
||||
bank_mail_dict = {
|
||||
"ileti.isbank.com.tr": parse_isbank_mail
|
||||
}
|
||||
|
||||
|
||||
def grab_fn_callable(domain: str):
    """Resolve the parser function registered for a sender domain.

    Matches *domain* against the glob patterns in ``bank_mail_dict``.

    Args:
        domain: Sender domain of the incoming mail (e.g. "ileti.isbank.com.tr").

    Returns:
        The registered parser callable, or None when no pattern matches.
    """
    # Fix: the original return annotation promised a non-optional
    # Callable[[ServiceBaseAsync, dict], Awaitable[None]], but None is a valid
    # (and checked-for) result, and the registered values take a single
    # ProcessMailObject — the annotation was wrong on both counts.
    for pattern, parser_fn in bank_mail_dict.items():
        if fnmatch.fnmatch(domain, pattern):
            return parser_fn
    return None
|
||||
|
||||
|
||||
async def produce(svc: ServiceBaseAsync):
    """No-op producer: the excel parser is purely consumer-driven."""
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_from_parser(svc: ServiceBaseAsync, job: dict):
    """Ack-and-drop handler; the payload is validated but otherwise unused."""
    job_model = Job(**job)
    await svc.ack_current()
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_from_mail_parser(svc: ServiceBaseAsync, job: dict):
    """Parse an incoming bank mail's excel attachments and forward the result."""
    job_model = Job(**job)
    process_mail_object = ProcessMailObject(**job_model.payload)
    mail_id = process_mail_object.data.id
    task_id = f"IsBankServiceExcelParser_{mail_id}"
    await asyncio.sleep(PROCESS_SEC)
    # Dispatch on the sender's domain; unknown domains go to the DLQ.
    # NOTE(review): dlq_current is called here with (job, error=...) but with
    # no arguments elsewhere — confirm the ServiceBaseAsync signature.
    function_handler = grab_fn_callable(process_mail_object.data.from_.domain)
    if not function_handler:
        await svc.dlq_current(job, error="Invalid domain")
        return
    parsed_data = function_handler(process_mail_object)
    if not parsed_data:
        # Nothing parsed: report straight back to the mail service.
        # NOTE(review): send_to is "Completed" on BOTH branches — confirm the
        # empty-parse branch should not carry a distinct status.
        plain_mail_data = PlainMailReader(**process_mail_object.data.model_dump())
        parsed_result = MailParsedResult(task_id=task_id, mail_data=plain_mail_data.model_dump(), send_to="Completed", data=parsed_data)
        print("Parser Excel Consumer from mail handle_from_mail :", parsed_result)
        enqueue = Enqueue(task_id=task_id, payload=parsed_result.model_dump(), action="mail.service.publish")
        await svc.enqueue(enqueue)
        await svc.ack_current()
    else:
        # Parsed rows continue to the comment parser for classification.
        plain_mail_data = PlainMailReader(**process_mail_object.data.model_dump())
        parsed_result = MailParsedResult(task_id=task_id, mail_data=plain_mail_data.model_dump(), send_to="Completed", data=parsed_data)
        enqueue = Enqueue(task_id=task_id, payload=parsed_result.model_dump(), action="parser.comment.publish")
        await svc.enqueue(enqueue)
        await svc.ack_current()
        print("Parser Excel Consumer from mail handle_from_mail :", task_id)
|
||||
|
||||
|
||||
async def consume_default(svc: ServiceBaseAsync, job: dict):
    """Fallback consumer: validate, ack, and silently drop unrouted jobs."""

    job_model = Job(**job)
    await svc.ack_current()
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"parser.mail.publish": handle_from_mail_parser})
|
||||
asyncio.run(svc.run())
|
||||
43
ServicesTask/app/services/parser/excel/pyproject.toml
Normal file
43
ServicesTask/app/services/parser/excel/pyproject.toml
Normal file
@@ -0,0 +1,43 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0",
|
||||
"pydantic",
|
||||
"arrow>=1.3.0",
|
||||
"pandas>=2.2.3",
|
||||
"unidecode>=1.3.8",
|
||||
"xlrd>=2.0.1",
|
||||
"openpyxl>=3.1.2",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
19
ServicesTask/app/services/parser/mail/Dockerfile
Normal file
19
ServicesTask/app/services/parser/mail/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY app/services/parser/mail/pyproject.toml ./
|
||||
COPY app/services/parser/mail/README.md ./
|
||||
|
||||
COPY app/core ./app/core
|
||||
COPY app/services/common/ ./app/services/common/
|
||||
COPY app/services/types/ ./app/services/types/
|
||||
COPY app/services/parser/mail/ ./app/services/parser/mail/
|
||||
|
||||
RUN pip install --upgrade pip && pip install --no-cache-dir .
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
CMD ["python", "-m", "app.services.parser.mail.main"]
|
||||
0
ServicesTask/app/services/parser/mail/README.md
Normal file
0
ServicesTask/app/services/parser/mail/README.md
Normal file
52
ServicesTask/app/services/parser/mail/main.py
Normal file
52
ServicesTask/app/services/parser/mail/main.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import os
|
||||
import asyncio
|
||||
import fnmatch
|
||||
|
||||
from app.services.types.queue import Enqueue
|
||||
from app.services.common.service_base_async import ServiceBaseAsync
|
||||
from app.services.types.task import Job
|
||||
from app.services.types.mail import ProcessMailObject
|
||||
|
||||
|
||||
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
|
||||
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
|
||||
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
|
||||
PROCESS_SEC = 10
|
||||
|
||||
|
||||
async def produce(svc: ServiceBaseAsync):
    """Idle producer; this service only reacts to consumed jobs."""
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_mail_publish(svc: ServiceBaseAsync, job: dict):
    """Forward a mail-service job to the excel parser under a new task id.

    Original note: check which bank the mail is from and which building it
    targets — that classification is not implemented here yet.
    """
    job_model = Job(**job)
    process_mail_object = ProcessMailObject(**job_model.payload)
    mail_id = process_mail_object.data.id
    task_id = f"IsBankServiceMailParser_{mail_id}"
    enqueue = Enqueue(task_id=task_id, payload=process_mail_object.model_dump(), action="parser.excel.publish")
    await svc.enqueue(enqueue)
    print(f"Parser Mail Consumer parsed handle_mail_publish : {enqueue.task_id}")
    await svc.ack_current()
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def handle_mongo_publish(svc: ServiceBaseAsync, job: dict):
    """Ack-and-log handler for jobs published by the mongo service."""
    print('job', job)
    job_model = Job(**job)
    await svc.ack_current()
    print("Parser Mail Consumer default handle_mongo_publish :", job_model.task_id)
    await asyncio.sleep(PROCESS_SEC)
|
||||
|
||||
|
||||
async def consume_default(svc: ServiceBaseAsync, job: dict):
    """Fallback consumer for unrouted jobs.

    NOTE(review): the job is logged but never acked or dead-lettered —
    confirm whether redelivery is the intended outcome.
    """
    job_model = Job(**job)
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Mail Consumer default consume_default :", job_model.task_id)
    return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
svc = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers={"mail.service.publish": handle_mail_publish})
|
||||
asyncio.run(svc.run())
|
||||
38
ServicesTask/app/services/parser/mail/pyproject.toml
Normal file
38
ServicesTask/app/services/parser/mail/pyproject.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0",
|
||||
"pydantic"
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
18
ServicesTask/app/services/parser/payment/Dockerfile
Normal file
18
ServicesTask/app/services/parser/payment/Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY app/services/parser/payment/pyproject.toml ./
|
||||
COPY app/services/parser/payment/README.md ./
|
||||
|
||||
COPY app/core ./app/core
|
||||
COPY app/services/common/ ./app/services/common/
|
||||
COPY app/services/parser/payment/ ./app/services/parser/payment/
|
||||
|
||||
RUN pip install --upgrade pip && pip install --no-cache-dir .
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
CMD ["python", "-m", "app.services.parser.payment.main"]
|
||||
0
ServicesTask/app/services/parser/payment/README.md
Normal file
0
ServicesTask/app/services/parser/payment/README.md
Normal file
43
ServicesTask/app/services/parser/payment/main.py
Normal file
43
ServicesTask/app/services/parser/payment/main.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import os
|
||||
import asyncio
|
||||
|
||||
from app.services.common.service_base_async import ServiceBaseAsync
|
||||
|
||||
|
||||
PRODUCE_BURST = int(os.getenv("PRODUCE_BURST", "10"))
|
||||
PRODUCE_ONCE = os.getenv("PRODUCE_ONCE", "true").lower() == "true"
|
||||
EVENT_TYPE = os.getenv("EVENT_TYPE", "db-mongo")
|
||||
|
||||
PROCESS_SEC = 10
|
||||
|
||||
|
||||
async def produce(svc: ServiceBaseAsync):
    """Stub producer: wait, then log a fake two-event publish to our key."""
    await asyncio.sleep(PROCESS_SEC)
    mock_events = [1, 2]  # placeholder batch; nothing is actually published
    print(f"Parser Payment Producer produced {len(mock_events)} events to '{svc.produce_key}'")
async def handle_from_parser(svc: ServiceBaseAsync, job):
    """Handle events routed from the comment parser; log then ack."""
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Payment Consumer from parser:", job)
    await svc.ack_current()
async def handle_from_mail(svc: ServiceBaseAsync, job):
    """Handle events routed from the mail service; log then ack."""
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Payment Consumer from mail:", job)
    await svc.ack_current()
async def consume_default(svc: ServiceBaseAsync, job):
    """Fallback consumer for unrouted events; log then ack."""
    await asyncio.sleep(PROCESS_SEC)
    print("Parser Payment Consumer default:", job)
    await svc.ack_current()
if __name__ == "__main__":
    # Wire the payment-parser service: one handler per upstream publisher,
    # everything else falls through to consume_default.
    bindings = {
        "parser.publish": handle_from_parser,
        "mail.publish": handle_from_mail,
    }
    service = ServiceBaseAsync(produce_fn=produce, consume_fn=consume_default, handlers=bindings)
    asyncio.run(service.run())
37
ServicesTask/app/services/parser/payment/pyproject.toml
Normal file
37
ServicesTask/app/services/parser/payment/pyproject.toml
Normal file
@@ -0,0 +1,37 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"aio-pika>=9.4.1",
|
||||
"prometheus-client>=0.20.0",
|
||||
"uvloop>=0.19.0"
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
18
ServicesTask/app/services/test/Dockerfile
Normal file
18
ServicesTask/app/services/test/Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 PYTHONUNBUFFERED=1
|
||||
ENV PYTHONPATH=/app
|
||||
|
||||
WORKDIR /
|
||||
|
||||
COPY app/services/test/pyproject.toml ./
|
||||
COPY app/services/test/README.md ./
|
||||
|
||||
COPY app/core ./app/core
|
||||
COPY app/services/common/ ./app/services/common/
|
||||
COPY app/services/test/ ./app/services/test/
|
||||
|
||||
RUN pip install --upgrade pip && pip install --no-cache-dir .
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
CMD ["python", "-m", "app.services.test.main"]
|
||||
0
ServicesTask/app/services/test/README.md
Normal file
0
ServicesTask/app/services/test/README.md
Normal file
71
ServicesTask/app/services/test/main.py
Normal file
71
ServicesTask/app/services/test/main.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import os
|
||||
import asyncio
|
||||
import uuid
|
||||
import json
|
||||
import aiosqlite
|
||||
import redis.asyncio as aioredis
|
||||
|
||||
from app.core.config import RedisConfig, Env
|
||||
from app.core.utils import now_ms
|
||||
|
||||
|
||||
SQLITE_PATH = Env.SQLITE_PATH
|
||||
REDIS_STREAM_DATABASE_PUBLISH = os.getenv("REDIS_STREAM_DATABASE_PUBLISH", "ACCOUNT:SERVICES:DATABASE:PUBLISH")
|
||||
REDIS_STREAM_DATABASE_TASKS = os.getenv("REDIS_STREAM_DATABASE_TASKS", "ACCOUNT:SERVICES:DATABASE:TASKS")
|
||||
REDIS_STREAM_MAIL_PUBLISH = os.getenv("REDIS_STREAM_MAIL_PUBLISH", "ACCOUNT:SERVICES:MAIL:PUBLISH")
|
||||
REDIS_STREAM_MAIL_TASKS = os.getenv("REDIS_STREAM_MAIL_TASKS", "ACCOUNT:SERVICES:MAIL:TASKS")
|
||||
REDIS_STREAM_QUEUE_PUBLISH = os.getenv("REDIS_STREAM_QUEUE_PUBLISH", "ACCOUNT:SERVICES:QUEUE:PUBLISH")
|
||||
REDIS_STREAM_QUEUE_TASKS = os.getenv("REDIS_STREAM_QUEUE_TASKS", "ACCOUNT:SERVICES:QUEUE:TASKS")
|
||||
|
||||
|
||||
async def ensure_schema(sqlite_path: str):
    """Create the local `tasks` mirror table in SQLite if it does not exist."""
    ddl = """
        CREATE TABLE IF NOT EXISTS tasks(
            task_id TEXT PRIMARY KEY,
            queue TEXT NOT NULL,
            type TEXT NOT NULL,
            payload_json TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            status TEXT DEFAULT 'pending',
            attempts INTEGER DEFAULT 0,
            last_error TEXT
        );
    """
    async with aiosqlite.connect(sqlite_path) as db:
        await db.execute(ddl)
        await db.commit()
async def enqueue(r: aioredis.Redis, sqlite_path: str, stream: str, payload: dict, type_: str):
    """Publish a task envelope to a Redis stream and mirror it as 'pending' in SQLite.

    The task id is taken from the payload when present, otherwise generated.
    NOTE: the stream write and the SQLite write are not atomic — a crash in
    between leaves the two stores out of sync.
    """
    task_id = payload.get("task_id") or str(uuid.uuid4())
    created = now_ms()
    envelope = {
        "task_id": task_id,
        "queue": stream,
        "type": type_,
        "payload": payload,
        "created_at": created,
        "_attempts": 0,
    }
    await r.xadd(stream, {"data": json.dumps(envelope)})
    async with aiosqlite.connect(sqlite_path) as db:
        await db.execute(
            """INSERT OR REPLACE INTO tasks(task_id, queue, type, payload_json, created_at, status, attempts) VALUES(?,?,?,?,?,'pending',?)""",
            (task_id, stream, type_, json.dumps(payload), created, 0),
        )
        await db.commit()
async def push_db_mocks(r: aioredis.Redis, sqlite_path: str, n: int = 3):
    """Enqueue *n* mock 'db-sync' tasks onto the database tasks stream."""
    for _ in range(n):
        mock = {"id": uuid.uuid4().hex, "op": "sync", "source": "tester"}
        await enqueue(r, sqlite_path, REDIS_STREAM_DATABASE_TASKS, mock, "db-sync")
async def push_mail_mocks(r: aioredis.Redis, sqlite_path: str, n: int = 3):
    """Enqueue *n* mock 'send-mail' tasks onto the mail tasks stream."""
    for idx in range(n):
        mock = {"to": f"user{idx}@example.com", "subj": "Hello", "body": "Hi!", "source": "tester"}
        await enqueue(r, sqlite_path, REDIS_STREAM_MAIL_TASKS, mock, "send-mail")
async def push_queue_mocks(r: aioredis.Redis, sqlite_path: str, n: int = 3):
    """Enqueue *n* mock 'queue-maintenance' tasks onto the queue tasks stream."""
    for idx in range(n):
        mock = {"action": "cleanup", "target": f"old-tasks-{idx}", "source": "tester"}
        await enqueue(r, sqlite_path, REDIS_STREAM_QUEUE_TASKS, mock, "queue-maintenance")
async def main():
    """Seed each service stream with a few mock tasks for manual testing.

    Ensures the SQLite mirror schema exists, then pushes 3 mock tasks to the
    database, mail and queue streams. The Redis connection is now closed in
    a finally block — the original leaked it on every run.
    """
    db_n, mail_n, queue_n = 3, 3, 3
    cfg = RedisConfig()
    r = aioredis.Redis(host=cfg.host, port=cfg.port, db=cfg.db, username=cfg.username, password=cfg.password)
    try:
        await ensure_schema(SQLITE_PATH)
        await push_db_mocks(r, SQLITE_PATH, db_n)
        await push_mail_mocks(r, SQLITE_PATH, mail_n)
        await push_queue_mocks(r, SQLITE_PATH, queue_n)
    finally:
        # redis-py >= 5: aclose() releases the connection pool (close() is
        # deprecated) — TODO confirm the pinned redis version exposes aclose.
        await r.aclose()
if __name__ == "__main__":
    # Script entry point: run the one-shot mock seeder.
    asyncio.run(main())
35
ServicesTask/app/services/test/pyproject.toml
Normal file
35
ServicesTask/app/services/test/pyproject.toml
Normal file
@@ -0,0 +1,35 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=61.0"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "dual-queue-services"
|
||||
version = "0.1.0"
|
||||
description = "Async dual queue system with Redis Streams and SQLite persistence"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
authors = [
|
||||
{ name = "Berkay Karatay", email = "karatay.berkay@gmail.com" }
|
||||
]
|
||||
dependencies = [
|
||||
"redis>=5.0.0",
|
||||
"aiosqlite>=0.19.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = [
|
||||
"pytest>=7.4",
|
||||
"black>=23.0",
|
||||
"isort>=5.12"
|
||||
]
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ["py311"]
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["app"]
|
||||
include = ["app*"]
|
||||
67
ServicesTask/app/services/types/mail.py
Normal file
67
ServicesTask/app/services/types/mail.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Optional, Literal
|
||||
|
||||
|
||||
class FromToHeader(BaseModel):
    """One parsed address from a mail From/To header.

    All fields are required but nullable (Optional without a default is
    still required in pydantic).
    """

    display_name: Optional[str]  # human-readable name part, if present
    username: Optional[str]  # local part before the '@'
    domain: Optional[str]  # domain part after the '@'
    mail: Optional[str]  # full address — presumably username@domain; confirm against parser
class MailParser(BaseModel):
    """A single mail attachment as extracted by the parser."""

    filename: str  # attachment file name
    content_type: str  # MIME content type
    charset: str  # declared character set
    data: str  # attachment payload — presumably base64/text-encoded; confirm upstream
class PlainMailReader(BaseModel):
    """Headers-only view of a mail (no body, no attachments)."""

    id: str  # message identifier
    subject: str
    from_: FromToHeader  # trailing underscore avoids the `from` keyword
    to: List[FromToHeader]  # one entry per recipient
    date: str  # date header as a raw string
class MailReader(BaseModel):
    """Fully parsed mail: headers plus body text and attachments.

    Superset of PlainMailReader — the two should stay field-compatible.
    """

    id: str  # message identifier
    subject: str
    from_: FromToHeader  # trailing underscore avoids the `from` keyword
    to: List[FromToHeader]  # one entry per recipient
    date: str  # date header as a raw string
    body_text: str  # plain-text body
    attachments: List[MailParser]  # extracted attachments (may be empty)
class MailTaskObject(BaseModel):
    """Task envelope wrapping one parsed mail for the task pipeline."""

    task: str  # task name/identifier
    data: MailReader  # the parsed mail being processed
    # NOTE(review): both `completed` and `is_completed` exist — confirm they
    # are not redundant and which one downstream consumers read.
    completed: bool
    service: str  # producing/owning service name
    status: str  # free-form status label — allowed values not visible here
    created_at: str  # creation timestamp as a string; format not shown — confirm
    is_completed: bool
class ProcessMailObject(BaseModel):
    """One mail within a batch being processed by a service."""

    service: str  # processing service name
    uuid: str  # unique id for this processing unit
    id: int  # numeric id — presumably a DB row id; confirm
    data: MailReader  # the parsed mail
    count: int  # position within the batch — assumed; verify against producer
    total: int  # batch size — assumed; verify against producer
class MailParsedResult(BaseModel):
    """Outcome of parsing one mail, routed to a destination by `send_to`."""

    task_id: str  # id of the originating task
    mail_data: PlainMailReader  # headers-only snapshot of the mail
    send_to: Literal["Completed", "NoAttachments", "Archive"]  # routing decision
    data: Optional[List[dict]] = None  # extracted rows, when parsing produced any
18
ServicesTask/app/services/types/queue.py
Normal file
18
ServicesTask/app/services/types/queue.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from json import dumps
|
||||
from typing import Any, Optional, Dict
|
||||
from pydantic import BaseModel
|
||||
|
||||
from core.utils import now_ms
|
||||
|
||||
|
||||
class Enqueue(BaseModel):
    """Outgoing queue message: task identity, payload and optional routing info."""

    task_id: str
    payload: Dict[str, Any]
    action: Optional[str] = None
    routing_key: Optional[str] = None
    message_id: Optional[str] = None

    @property
    def body(self):
        """Wire representation: UTF-8 JSON bytes, stamped with the current time."""
        envelope = {
            "task_id": self.task_id,
            "action": self.action,
            "payload": self.payload,
            "created_at": now_ms(),
        }
        return dumps(envelope).encode()
31
ServicesTask/app/services/types/task.py
Normal file
31
ServicesTask/app/services/types/task.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from contextvars import ContextVar
|
||||
from typing import NamedTuple, Optional
|
||||
from pydantic import BaseModel
|
||||
from aio_pika.abc import AbstractIncomingMessage
|
||||
|
||||
|
||||
class _MsgCtx(NamedTuple):
    """Delivery context for the message currently being handled."""

    msg: AbstractIncomingMessage  # raw aio-pika message (needed for ack/nack)
    rk: str  # routing key the message arrived on
    attempts: int  # delivery-attempt counter


# Context variable holding the in-flight message context (None outside a
# handler). Presumably set by the consumer loop so helpers such as
# ack_current() can reach the current message without parameter threading —
# the setter is not visible in this file chunk.
_MSG_CTX: ContextVar[_MsgCtx | None] = ContextVar("_MSG_CTX", default=None)
||||
class Meta(BaseModel):
    """Broker-level metadata attached to a consumed job."""

    routing_key: str  # key the message was routed with
    attempts: int  # delivery-attempt counter
    exchange: str  # exchange the message came from
||||
class Job(BaseModel):
    """Envelope for one unit of work as decoded from a broker message."""

    task_id: str  # unique task identifier
    action: str  # what the consumer should do with the payload
    payload: dict  # task-specific data
    created_at: int  # creation timestamp — presumably epoch millis (now_ms); confirm
    # NOTE(review): pydantic treats a leading-underscore annotation as a
    # *private attribute*, not a model field — `Job(**{"_meta": ...})`
    # silently drops the value and `_meta` is never populated from input.
    # Confirm whether this should be `meta` or a field with alias "_meta".
    _meta: Meta
    message_id: Optional[str] = None  # broker message id, when available
51
ServicesTask/app/storage/sqlite_queue.py
Normal file
51
ServicesTask/app/storage/sqlite_queue.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import sqlite3
|
||||
import json
|
||||
from typing import Any, Dict, List
|
||||
from core.config import Env
|
||||
|
||||
class SqliteQueue:
    """SQLite-backed mirror of queued tasks, for inspection and replay.

    Each operation opens a short-lived WAL-mode connection and ALWAYS closes
    it. The original used ``with sqlite3.connect(...)`` as the only guard,
    but a sqlite3 connection context manager commits/rolls back the
    transaction and does NOT close the connection — leaking one file handle
    per call.
    """

    def __init__(self, db_path: str = Env.SQLITE_PATH):
        self.db_path = db_path
        # Create the schema eagerly so every later call can assume it exists.
        self._init_schema()

    def _conn(self) -> sqlite3.Connection:
        """Open a new WAL-mode connection; the caller must close it."""
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row  # rows convert cleanly to dicts
        conn.execute("PRAGMA journal_mode=WAL;")
        conn.execute("PRAGMA synchronous=NORMAL;")
        return conn

    def _run(self, fn):
        """Run *fn(con)* inside a transaction and always close the connection.

        ``with con:`` commits on success / rolls back on error; the
        ``finally`` guarantees the handle is released either way.
        """
        con = self._conn()
        try:
            with con:
                return fn(con)
        finally:
            con.close()

    def _init_schema(self):
        """Create the tasks table and its (queue, status) index if missing."""
        self._run(lambda con: con.executescript("""
            CREATE TABLE IF NOT EXISTS tasks(
                task_id TEXT PRIMARY KEY,
                queue TEXT NOT NULL,
                type TEXT NOT NULL,
                payload_json TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                status TEXT DEFAULT 'pending',
                attempts INTEGER DEFAULT 0,
                last_error TEXT
            );
            CREATE INDEX IF NOT EXISTS idx_tasks_queue_status ON tasks(queue, status);
        """))

    def add_task(self, task: Dict[str, Any]) -> None:
        """Insert or overwrite a task row, resetting its status to 'pending'."""
        self._run(lambda con: con.execute(
            """
            INSERT OR REPLACE INTO tasks(task_id, queue, type, payload_json, created_at, status, attempts)
            VALUES(?,?,?,?,?,'pending',?)
            """,
            (task["task_id"], task["queue"], task["type"],
             json.dumps(task["payload"]), task["created_at"],
             task.get("_attempts", 0)),
        ))

    def update_task(self, task_id: str, status: str, error: str | None = None, attempts: int | None = None) -> None:
        """Update status and last_error; attempts is preserved unless given."""
        self._run(lambda con: con.execute(
            """UPDATE tasks SET status=?, last_error=?, attempts=COALESCE(?, attempts) WHERE task_id=? """,
            (status, error, attempts, task_id),
        ))

    def pending_for_queue(self, queue: str) -> List[Dict[str, Any]]:
        """Return every 'pending' task for *queue* as a plain dict."""
        def _query(con):
            rows = con.execute(
                "SELECT * FROM tasks WHERE queue=? AND status='pending'",
                (queue,),
            ).fetchall()
            return [dict(r) for r in rows]
        return self._run(_query)
272
ServicesTask/docker-compose.yml
Normal file
272
ServicesTask/docker-compose.yml
Normal file
@@ -0,0 +1,272 @@
|
||||
|
||||
networks:
|
||||
servicesNetwork:
|
||||
driver: bridge
|
||||
|
||||
volumes:
|
||||
sqlite_data:
|
||||
prom_data:
|
||||
grafana_data:
|
||||
rabbitmq_data:
|
||||
|
||||
services:
|
||||
|
||||
rabbitmq:
|
||||
image: rabbitmq:3.13-management
|
||||
container_name: rabbitmq
|
||||
ports:
|
||||
- "127.0.0.1:5672:5672"
|
||||
- "127.0.0.1:15672:15672"
|
||||
- "127.0.0.1:15692:15692"
|
||||
environment:
|
||||
RABBITMQ_DEFAULT_USER: admin
|
||||
RABBITMQ_DEFAULT_PASS: admin
|
||||
command: >
|
||||
sh -lc "rabbitmq-plugins enable --offline rabbitmq_prometheus && exec docker-entrypoint.sh rabbitmq-server"
|
||||
healthcheck:
|
||||
test: ["CMD", "rabbitmq-diagnostics", "-q", "ping"]
|
||||
interval: 5s
|
||||
timeout: 3s
|
||||
retries: 20
|
||||
start_period: 10s
|
||||
volumes:
|
||||
- rabbitmq_data:/var/lib/rabbitmq
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
prometheus:
|
||||
image: prom/prometheus:latest
|
||||
depends_on: [rabbitmq]
|
||||
networks: [servicesNetwork]
|
||||
volumes:
|
||||
- ./monitor/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro
|
||||
- prom_data:/prometheus
|
||||
command:
|
||||
- "--config.file=/etc/prometheus/prometheus.yml"
|
||||
- "--storage.tsdb.path=/prometheus"
|
||||
- "--web.enable-lifecycle"
|
||||
ports:
|
||||
- "9090:9090"
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana:latest
|
||||
depends_on: [prometheus]
|
||||
networks: [servicesNetwork]
|
||||
environment:
|
||||
- GF_SECURITY_ADMIN_USER=admin
|
||||
- GF_SECURITY_ADMIN_PASSWORD=admin
|
||||
- GF_USERS_ALLOW_SIGN_UP=false
|
||||
volumes:
|
||||
- grafana_data:/var/lib/grafana
|
||||
- ./monitor/grafana/provisioning/datasources:/etc/grafana/provisioning/datasources:ro
|
||||
ports:
|
||||
- "3000:3000"
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
mongo-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/mongo/Dockerfile
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
env_file: [.env]
|
||||
environment:
|
||||
SERVICE_NAME: "mongo-service"
|
||||
PRODUCE_KEY: "mongo.service.publish"
|
||||
CONSUME_BINDINGS: "database.service.publish,mail.service.publish"
|
||||
RABBITMQ_URL: "amqp://admin:admin@rabbitmq:5672/"
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
PRODUCE_BURST: "10"
|
||||
PRODUCE_ONCE: "true"
|
||||
EVENT_TYPE: "mongo-event"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options: { max-size: "10m", max-file: "3" }
|
||||
|
||||
database-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/database/Dockerfile
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
networks: [servicesNetwork]
|
||||
env_file: [.env]
|
||||
environment:
|
||||
SERVICE_NAME: "database-service"
|
||||
PRODUCE_KEY: "database.service.publish"
|
||||
CONSUME_BINDINGS: "parser.comment.publish,mail.service.publish"
|
||||
RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
PRODUCE_ONCE: "true"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
volumes:
|
||||
- ./app/services/database/venv:/opt/venv
|
||||
- ./app/services/database/.prisma-cache:/root/.cache/prisma-python
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
mail-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/mail/Dockerfile
|
||||
env_file: [.env]
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
SERVICE_NAME: "mail-service"
|
||||
PRODUCE_KEY: "mail.service.publish"
|
||||
CONSUME_BINDINGS: "parser.comment.publish,database.service.publish,parser.excel.publish"
|
||||
RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
PRODUCE_ONCE: "true"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
parser-mail-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/parser/mail/Dockerfile
|
||||
env_file: [.env]
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
SERVICE_NAME: "parser-mail-service"
|
||||
RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
CONSUME_BINDINGS: "mail.service.publish"
|
||||
PRODUCE_KEY: "parser.mail.publish"
|
||||
PRODUCE_ONCE: "true"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
parser-excel-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/parser/excel/Dockerfile
|
||||
env_file: [.env]
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
SERVICE_NAME: "parser-excel-service"
|
||||
RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
CONSUME_BINDINGS: "parser.mail.publish"
|
||||
PRODUCE_KEY: "parser.excel.publish"
|
||||
PRODUCE_ONCE: "true"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
parser-comment-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/parser/comment/Dockerfile
|
||||
env_file: [.env]
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
SERVICE_NAME: "parser-comment-service"
|
||||
RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
CONSUME_BINDINGS: "parser.excel.publish"
|
||||
PRODUCE_KEY: "parser.comment.publish"
|
||||
PRODUCE_ONCE: "true"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
parser-payment-service:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: app/services/parser/payment/Dockerfile
|
||||
env_file: [.env]
|
||||
depends_on:
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
SERVICE_NAME: "parser-payment-service"
|
||||
RABBITMQ_URL: amqp://admin:admin@rabbitmq:5672/
|
||||
EXCHANGE_EVENTS: "app.events"
|
||||
CONSUME_BINDINGS: "parser.comment.publish"
|
||||
PRODUCE_KEY: "parser.payment.publish"
|
||||
PRODUCE_ONCE: "true"
|
||||
RETRY_DELAY_MS: "5000"
|
||||
MAX_RETRIES: "3"
|
||||
PREFETCH: "5"
|
||||
IGNORE_SELF_PRODUCED: "true"
|
||||
networks: [servicesNetwork]
|
||||
restart: unless-stopped
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
@@ -0,0 +1,9 @@
|
||||
apiVersion: 1
|
||||
datasources:
|
||||
- name: Prometheus
|
||||
type: prometheus
|
||||
access: proxy
|
||||
isDefault: true
|
||||
url: http://prometheus:9090
|
||||
jsonData:
|
||||
timeInterval: 5s
|
||||
16
ServicesTask/monitor/prometheus/prometheus.yml
Normal file
16
ServicesTask/monitor/prometheus/prometheus.yml
Normal file
@@ -0,0 +1,16 @@
|
||||
global:
|
||||
scrape_interval: 5s
|
||||
evaluation_interval: 5s
|
||||
|
||||
scrape_configs:
|
||||
- job_name: prometheus
|
||||
static_configs:
|
||||
- targets: ["prometheus:9090"]
|
||||
|
||||
- job_name: nats
|
||||
static_configs:
|
||||
- targets: ["nats-exporter:7777"]
|
||||
|
||||
- job_name: db_service
|
||||
static_configs:
|
||||
- targets: ["db-service:8000"]
|
||||
@@ -21,6 +21,26 @@ services:
|
||||
# timeout: 10s
|
||||
# retries: 3
|
||||
|
||||
# prisma_studio:
|
||||
# image: node:18
|
||||
# working_dir: /app
|
||||
# # volumes:
|
||||
# # - ./ServicesRunner/Depends:/app
|
||||
# ports:
|
||||
# - "5555:5555"
|
||||
# entrypoint: [ "/bin/sh", "-c" ]
|
||||
# command: |
|
||||
# "npx prisma studio --schema=/app/schema.prisma"
|
||||
# depends_on:
|
||||
# - prisma_service_test
|
||||
# networks:
|
||||
# - bank-services-network
|
||||
# logging:
|
||||
# driver: "json-file"
|
||||
# options:
|
||||
# max-size: "10m"
|
||||
# max-file: "3"
|
||||
|
||||
prisma_service_iban:
|
||||
container_name: prisma_service_iban
|
||||
build:
|
||||
@@ -42,39 +62,27 @@ services:
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
|
||||
# prisma_studio:
|
||||
# image: node:18
|
||||
# working_dir: /app
|
||||
# # volumes:
|
||||
# # - ./ServicesRunner/Depends:/app
|
||||
# ports:
|
||||
# - "5555:5555"
|
||||
# entrypoint: [ "/bin/sh", "-c" ]
|
||||
# command: |
|
||||
# "npx prisma studio --schema=/app/schema.prisma"
|
||||
# depends_on:
|
||||
# - prisma_service_test
|
||||
# networks:
|
||||
# - bank-services-network
|
||||
# logging:
|
||||
# driver: "json-file"
|
||||
# options:
|
||||
# max-size: "10m"
|
||||
# max-file: "3"
|
||||
# finder_payments:
|
||||
# container_name: finder_payments
|
||||
# env_file:
|
||||
# - api_env.env
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: ServicesBank/Finder/Payment/Dockerfile
|
||||
# networks:
|
||||
# - bank-services-network
|
||||
# logging:
|
||||
# driver: "json-file"
|
||||
# options:
|
||||
# max-size: "10m"
|
||||
# max-file: "3"
|
||||
prisma_service_process_comment:
|
||||
container_name: prisma_service_process_comment
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ServicesRunner/AccountRecordServices/Finder/Comment/Dockerfile
|
||||
networks:
|
||||
- bank-services-network
|
||||
volumes:
|
||||
- ./ServicesRunner/AccountRecordServices/Finder/Comment/venv:/opt/venv
|
||||
- ./ServicesRunner/AccountRecordServices/Finder/Comment/.prisma-cache:/root/.cache/prisma-python
|
||||
restart: on-failure
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
healthcheck:
|
||||
test: [ "CMD", "/opt/venv/bin/python", "-c", "import asyncio; from ServicesRunner.Depends.prisma_client import get_prisma_client; asyncio.run(get_prisma_client())" ]
|
||||
interval: 15s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
|
||||
isbank_email_reader:
|
||||
container_name: isbank_email_reader
|
||||
@@ -113,6 +121,36 @@ services:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
process_comment_parser:
|
||||
container_name: process_comment_parser
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ServicesRunner/AccountRecordServices/Finder/Parser/Comment/Dockerfile
|
||||
networks:
|
||||
- bank-services-network
|
||||
volumes:
|
||||
- ./ServicesRunner/AccountRecordServices/Finder/Parser/Comment/venv:/opt/venv
|
||||
- ./ServicesRunner/AccountRecordServices/Finder/Parser/Comment/.prisma-cache:/root/.cache/prisma-python
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
# finder_payments:
|
||||
# container_name: finder_payments
|
||||
# env_file:
|
||||
# - api_env.env
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: ServicesBank/Finder/Payment/Dockerfile
|
||||
# networks:
|
||||
# - bank-services-network
|
||||
# logging:
|
||||
# driver: "json-file"
|
||||
# options:
|
||||
# max-size: "10m"
|
||||
# max-file: "3"
|
||||
|
||||
networks:
|
||||
bank-services-network:
|
||||
driver: bridge
|
||||
|
||||
Reference in New Issue
Block a user