updated email services

berkay 2025-03-24 14:28:45 +03:00
parent 22876d250d
commit 3e1ba4cb67
4 changed files with 53 additions and 59 deletions

View File

@@ -19,16 +19,14 @@ def collect_excel_files_from_mongo_database(mongo_provider) -> list:
 def update_parsed_data_to_mongo_database(
-    mongo_provider, collected_data_dict: dict, filename: str
+    mongo_provider, collected_data_dict: list[dict], filename: str
 ) -> None:
     if collected_data_dict:
-        payload = collected_data_dict[filename]
-        if payload:
-            mongo_provider.update_one(
-                filter_query={"filename": filename},
-                update_data={"$set": {"parsed": payload, "stage": "parsed"}},
-            )
-            return
+        mongo_provider.update_one(
+            filter_query={"filename": filename},
+            update_data={"$set": {"parsed": collected_data_dict, "stage": "parsed"}},
+        )
+        return
     mongo_provider.update_one(
         filter_query={"filename": filename},
         update_data={"$set": {"parsed": None, "stage": "not found"}},
@@ -36,14 +34,14 @@ def update_parsed_data_to_mongo_database(
     return


-def parse_excel_file(excel_frame: DataFrame, excel_name: str) -> dict:
-    iban, data_dict = "", {}
+def parse_excel_file(excel_frame: DataFrame) -> list[dict]:
+    iban, data_list = "", []
     for row in excel_frame.itertuples():
         if "IBAN" in str(row[3]).upper():
             iban = str(row[5]).replace(" ", "")
         if not str(row[1]) == "nan" and not str(row[2]) == "nan":
             if len(str(row[1]).split("/")) > 2:
-                data_dict[excel_name] = dict(
+                data_list.append(dict(
                     iban=str(iban),
                     bank_date=arrow.get(
                         datetime.datetime.strptime(str(row[1]), "%d/%m/%Y-%H:%M:%S")
@@ -60,8 +58,8 @@ def parse_excel_file(excel_frame: DataFrame, excel_name: str) -> dict:
                     process_type=unidecode(str(row[8])),
                     process_comment=unidecode(str(row[9])),
                     bank_reference_code=str(row[15]),
-                )
-    return data_dict
+                ))
+    return data_list


 def app():
@@ -83,7 +81,7 @@ def app():
         excel_frame = DataFrame(read_excel(io.BytesIO(payload)))
         # Extract IBAN and root info from the xl file
-        collected_data_dict = parse_excel_file(excel_frame, filename)
+        collected_data_dict = parse_excel_file(excel_frame)
         update_parsed_data_to_mongo_database(
             mongo_provider=mongo_provider,
             collected_data_dict=collected_data_dict,
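
For context, a minimal runnable sketch of the new parser contract: parse_excel_file now returns a flat list[dict] (one entry per transaction row), and the whole list is written to the file's Mongo document in a single update. The FakeMongoProvider stub and the sample rows below are illustrative assumptions; only update_parsed_data_to_mongo_database is taken from the new side of the diff above.

# Illustrative sketch: FakeMongoProvider and the sample rows are assumptions,
# the update function is copied from the new version shown in the diff.
class FakeMongoProvider:
    """Stand-in for the project's mongo_provider (assumed interface)."""

    def __init__(self):
        self.calls = []

    def update_one(self, filter_query: dict, update_data: dict) -> None:
        # Record the call instead of hitting MongoDB.
        self.calls.append((filter_query, update_data))


def update_parsed_data_to_mongo_database(
    mongo_provider, collected_data_dict: list[dict], filename: str
) -> None:
    if collected_data_dict:
        mongo_provider.update_one(
            filter_query={"filename": filename},
            update_data={"$set": {"parsed": collected_data_dict, "stage": "parsed"}},
        )
        return
    mongo_provider.update_one(
        filter_query={"filename": filename},
        update_data={"$set": {"parsed": None, "stage": "not found"}},
    )


if __name__ == "__main__":
    provider = FakeMongoProvider()
    rows = [  # shape of parse_excel_file output; values are made up
        {"iban": "TR000000000000000000000000", "bank_reference_code": "REF-1"},
        {"iban": "TR000000000000000000000000", "bank_reference_code": "REF-2"},
    ]
    update_parsed_data_to_mongo_database(provider, rows, filename="statement.xlsx")
    assert provider.calls[0][1]["$set"]["stage"] == "parsed"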

View File

@@ -48,15 +48,11 @@ def set_account_records_to_send_email():
     account_records = (
         account_records.order_by(
             AccountRecords.bank_date.desc(), AccountRecords.bank_reference_code.desc()
-        )
-        .limit(3)
-        .all()
+        ).limit(3).all()
     )
     first_record, second_record, balance_error = (
-        account_records[0],
-        account_records[1],
-        False,
+        account_records[0], account_records[1], False
    )
     second_balance = first_record.bank_balance - first_record.currency_value
     if second_balance != second_record.bank_balance:
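
As a reading aid for the logic this hunk only reformats, here is a toy walk-through of the balance check with made-up values: the newest record's balance minus its transaction amount must equal the previous record's balance, otherwise balance_error is flagged. Record is a stand-in for an AccountRecords row.

# Toy illustration with assumed numbers; Record stands in for AccountRecords.
from dataclasses import dataclass


@dataclass
class Record:
    bank_balance: float
    currency_value: float


first_record = Record(bank_balance=1500.0, currency_value=250.0)   # newest row
second_record = Record(bank_balance=1250.0, currency_value=100.0)  # previous row

second_balance = first_record.bank_balance - first_record.currency_value
balance_error = second_balance != second_record.bank_balance
print(balance_error)  # False: 1500 - 250 == 1250, so the two records are consistent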

View File

@@ -66,14 +66,14 @@ if __name__ == "__main__":
     )
     results = collect_parsed_data_from_mongo_database(mongo_provider=provider)
     for result in results:
-        parsed_data = result.get("parsed")
-        file_name = result.get("filename")
-        if not parsed_data:
+        parsed_datas, file_name = result.get("parsed"), result.get("filename")
+        if not parsed_datas:
             continue
-        write_parsed_data_to_account_records(
-            data_dict=parsed_data,
-            collection_name=provider.collection.name,
-            mongo_provider=provider,
-            file=file_name,
-        )
+        for parsed_data in parsed_datas:
+            write_parsed_data_to_account_records(
+                data_dict=parsed_data,
+                collection_name=provider.collection.name,
+                mongo_provider=provider,
+                file=file_name,
+            )
     time.sleep(60)
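
A minimal sketch of the reworked writer loop, assuming the document shape shown below; write_parsed_data_to_account_records is stubbed out and the collection name is hypothetical. The point of the change is that "parsed" now holds a list, so every entry is written as its own account record instead of one dict per file.

# Sketch only: the stub and sample documents are assumptions, the loop mirrors the diff.
def write_parsed_data_to_account_records(data_dict, collection_name, mongo_provider, file):
    # Stub for the real writer; just show what would be persisted.
    print(f"[{collection_name}] {file}: {data_dict}")


results = [  # assumed shape of collect_parsed_data_from_mongo_database output
    {"filename": "statement_a.xlsx",
     "parsed": [{"bank_reference_code": "R1"}, {"bank_reference_code": "R2"}]},
    {"filename": "statement_b.xlsx", "parsed": None},  # skipped below
]

for result in results:
    parsed_datas, file_name = result.get("parsed"), result.get("filename")
    if not parsed_datas:
        continue
    for parsed_data in parsed_datas:
        write_parsed_data_to_account_records(
            data_dict=parsed_data,
            collection_name="bank_excels",  # hypothetical name
            mongo_provider=None,            # unused by the stub
            file=file_name,
        )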

View File

@@ -51,29 +51,29 @@ services:
 #  volumes:
 #    - wag_postgres_commercial_data:/bitnami/postgresql
 
-#  email_service:
-#    container_name: email_service
-#    build:
-#      context: .
-#      dockerfile: BankServices/EmailService/Dockerfile
-#    networks:
-#      - network_store_services
-#
-#  parser_service:
-#    container_name: parser_service
-#    build:
-#      context: .
-#      dockerfile: BankServices/ParserService/Dockerfile
-#    networks:
-#      - network_store_services
-#
-#  writer_service:
-#    container_name: writer_service
-#    build:
-#      context: .
-#      dockerfile: BankServices/WriterService/Dockerfile
-#    networks:
-#      - network_store_services
+  email_service:
+    container_name: email_service
+    build:
+      context: .
+      dockerfile: BankServices/EmailService/Dockerfile
+    networks:
+      - network_store_services
+
+  parser_service:
+    container_name: parser_service
+    build:
+      context: .
+      dockerfile: BankServices/ParserService/Dockerfile
+    networks:
+      - network_store_services
+
+  writer_service:
+    container_name: writer_service
+    build:
+      context: .
+      dockerfile: BankServices/WriterService/Dockerfile
+    networks:
+      - network_store_services
 
   routine_email_service:
     container_name: routine_email_service
@@ -83,13 +83,13 @@ services:
     networks:
       - network_store_services
 
-#  sender_service:
-#    container_name: sender_service
-#    build:
-#      context: .
-#      dockerfile: BankServices/SenderService/Dockerfile
-#    networks:
-#      - network_store_services
+  sender_service:
+    container_name: sender_service
+    build:
+      context: .
+      dockerfile: BankServices/SenderService/Dockerfile
+    networks:
+      - network_store_services
 
 #  test_server:
 #    container_name: test_server