service account records updated

This commit is contained in:
berkay 2024-11-20 20:06:36 +03:00
parent 1291ac87e2
commit 6ad1ba7d62
14 changed files with 581 additions and 162 deletions

View File

@ -54,13 +54,9 @@ def strip_date_to_valid(date_str):
def find_iban_in_comment(iban: str, comment: str):
iban_results, iban_count = BuildIbanDescription.filter_by(iban=iban)
iban_results = BuildIbanDescription.filter_by_one(system=True, iban=iban).data
sm_dict_extended, sm_dict_digit = {}, {}
# is_reference_build = any(
# letter in comment.lower() for letter in ["no", "daire", "nolu"]
# )
if iban_count:
for iban_result in iban_results:
for iban_result in iban_results or []:
candidate_parts = comment.split(" ")
extended_candidate_parts, digit_part = [], []
for part in candidate_parts:
@ -90,13 +86,15 @@ def find_iban_in_comment(iban: str, comment: str):
if not found:
similarity_ratio = 0.1
sm_dict_extended[f"{iban_result.id}"] = similarity_ratio
if sm_dict_extended:
result = sorted(
sm_dict_extended.items(), key=lambda item: item[1], reverse=True
)[0]
if float(result[1]) >= 0.5:
iban_result = BuildIbanDescription.find_one(id=int(result[0]))
iban_result = BuildIbanDescription.filter_one(
BuildIbanDescription.id==int(result[0]),
system=True
).data
return {
"decision_book_project_id": iban_result.decision_book_project_id,
"company_id": iban_result.company_id,

View File

@ -26,10 +26,10 @@ class BuildIbans(CrudCollection):
xcomment: Mapped[str] = mapped_column(String(64), server_default="????")
build_id: Mapped[int] = mapped_column(
ForeignKey("build.id"), nullable=False, comment="Building ID"
ForeignKey("build.id"), nullable=True, comment="Building ID"
)
build_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Building UUID", index=True
String, nullable=True, comment="Building UUID", index=True
)
# building: Mapped["Build"] = relationship(
# "Build", back_populates="build_ibans", foreign_keys=[build_id]
@ -67,23 +67,23 @@ class BuildIbanDescription(CrudCollection):
String, nullable=False, comment="Search Word", index=True
)
decision_book_project_id: Mapped[int] = mapped_column(
ForeignKey("build_decision_book_projects.id")
)
decision_book_project_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Decision Book Project UUID"
)
customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
# decision_book_project_id: Mapped[int] = mapped_column(
# ForeignKey("build_decision_book_projects.id")
# )
# decision_book_project_uu_id: Mapped[str] = mapped_column(
# String, nullable=False, comment="Decision Book Project UUID"
# )
customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
customer_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Customer UUID"
String, nullable=True, comment="Customer UUID"
)
company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
company_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Company UUID"
String, nullable=True, comment="Company UUID"
)
build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"))
build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"), nullable=True)
build_parts_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Build Parts UUID"
String, nullable=True, comment="Build Parts UUID"
)
# decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(

View File

@ -1,25 +1,25 @@
services:
commercial_mongo_service:
container_name: commercial_mongo_service
image: "bitnami/mongodb:latest"
# image: "bitnami/mongodb:4.4.1-debian-10-r3"
networks:
- network_store_services
environment:
- MONGODB_DISABLE_ENFORCE_AUTH=true
- MONGODB_ROOT_PASSWORD=root
- MONGODB_DATABASE=mongo_database
- MONGODB_USERNAME=mongo_user
- MONGODB_PASSWORD=mongo_password
- MONGO_INITDB_ROOT_USERNAME=mongo_user
- MONGO_INITDB_ROOT_PASSWORD=mongo_password
- MONGO_INITDB_DATABASE=mongo_database
volumes:
- wag_commercial_mongodb_data:/bitnami/mongodb
ports:
- "11110:27017"
# commercial_mongo_service:
# container_name: commercial_mongo_service
# image: "bitnami/mongodb:latest"
## image: "bitnami/mongodb:4.4.1-debian-10-r3"
# networks:
# - network_store_services
# environment:
# - MONGODB_DISABLE_ENFORCE_AUTH=true
# - MONGODB_ROOT_PASSWORD=root
# - MONGODB_DATABASE=mongo_database
# - MONGODB_USERNAME=mongo_user
# - MONGODB_PASSWORD=mongo_password
# - MONGO_INITDB_ROOT_USERNAME=mongo_user
# - MONGO_INITDB_ROOT_PASSWORD=mongo_password
# - MONGO_INITDB_DATABASE=mongo_database
# volumes:
# - wag_commercial_mongodb_data:/bitnami/mongodb
# ports:
# - "11110:27017"
#
commercial_memory_service:
container_name: commercial_memory_service
image: 'bitnami/redis:latest'
@ -44,94 +44,106 @@ services:
- POSTGRES_DB=wag_database
- POSTGRES_USER=berkay_wag_user
- POSTGRES_PASSWORD=berkay_wag_user_password
depends_on:
- commercial_mongo_service
# depends_on:
# - commercial_mongo_service
ports:
- "5434:5432"
volumes:
- wag_postgres_commercial_data:/bitnami/postgresql
wag_management_service:
container_name: wag_management_service
# wag_management_service:
# container_name: wag_management_service
# restart: on-failure
# build:
# context: .
# dockerfile: service_app/Dockerfile
# ports:
# - "41575:41575"
# networks:
# - network_store_services
# depends_on:
# - wag_management_init_service
# - grafana
#
# wag_management_service_second:
# container_name: wag_management_service_second
# restart: on-failure
# build:
# context: .
# dockerfile: service_app/Dockerfile
# ports:
# - "41576:41575"
# networks:
# - network_store_services
# depends_on:
# - wag_management_init_service
# - grafana
#
# wag_management_init_service:
# container_name: wag_management_init_service
# build:
# context: .
# dockerfile: service_app_init/Dockerfile
# networks:
# - network_store_services
# depends_on:
# - postgres_commercial
#
# wag_bank_services:
# container_name: wag_bank_services
# restart: on-failure
# build:
# context: .
# dockerfile: service_app_banks/mailService.Dockerfile
# networks:
# - network_store_services
# depends_on:
# - postgres_commercial
# environment:
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
wag_account_services:
container_name: wag_account_services
restart: on-failure
build:
context: .
dockerfile: service_app/Dockerfile
ports:
- "41575:41575"
networks:
- network_store_services
depends_on:
- wag_management_init_service
- grafana
wag_management_service_second:
container_name: wag_management_service_second
restart: on-failure
build:
context: .
dockerfile: service_app/Dockerfile
ports:
- "41576:41575"
networks:
- network_store_services
depends_on:
- wag_management_init_service
- grafana
wag_management_init_service:
container_name: wag_management_init_service
build:
context: .
dockerfile: service_app_init/Dockerfile
networks:
- network_store_services
depends_on:
- postgres_commercial
wag_bank_services:
container_name: wag_bank_services
restart: on-failure
build:
context: .
dockerfile: service_app_banks/mailService.Dockerfile
dockerfile: service_account_records/account.Dockerfile
networks:
- network_store_services
depends_on:
- postgres_commercial
environment:
- DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database
volumes:
- wag_cronjob-volume:/etc/cron.d/send_email_cron
prometheus:
image: prom/prometheus
container_name: prometheus
ports:
- "9090:9090"
volumes:
- ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml
command:
- '--config.file=/etc/prometheus/prometheus.yml'
networks:
- network_store_services
grafana:
image: grafana/grafana
container_name: grafana
ports:
- "3000:3000"
depends_on:
- prometheus
networks:
- network_store_services
environment:
- GF_SECURITY_ADMIN_USER=admin
- GF_SECURITY_ADMIN_PASSWORD=admin
- GF_USERS_ALLOW_SIGN_UP=false
- GF_USERS_ALLOW_ORG_CREATE=false
volumes:
- grafana_data:/var/lib/grafana
- PYTHONPATH=/
#
# prometheus:
# image: prom/prometheus
# container_name: prometheus
# ports:
# - "9090:9090"
# volumes:
# - ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml
# command:
# - '--config.file=/etc/prometheus/prometheus.yml'
# networks:
# - network_store_services
#
# grafana:
# image: grafana/grafana
# container_name: grafana
# ports:
# - "3000:3000"
# depends_on:
# - prometheus
# networks:
# - network_store_services
# environment:
# - GF_SECURITY_ADMIN_USER=admin
# - GF_SECURITY_ADMIN_PASSWORD=admin
# - GF_USERS_ALLOW_SIGN_UP=false
# - GF_USERS_ALLOW_ORG_CREATE=false
# volumes:
# - grafana_data:/var/lib/grafana
# wag_management_test_service:
# container_name: wag_management_test_service
@ -163,10 +175,9 @@ networks:
network_store_services:
volumes:
wag_cronjob-volume:
wag_postgres_commercial_data:
grafana_data:
wag_commercial_mongodb_data:
# grafana_data:
# wag_commercial_mongodb_data:
# environment:
# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database

View File

@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

162
service_account_records/.gitignore vendored Normal file
View File

@ -0,0 +1,162 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.idea/
.Python
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
service_app/.env
.venv
service_app/env/
venv/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

View File

View File

@ -0,0 +1,35 @@
# Image for the account-records cron service: installs deps, copies the
# service plus shared libraries, and runs everything via cron.
FROM python:3.10-slim
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
# NOTE(review): `COPY ../...` points above the build context — Docker forbids
# paths outside the context root. With compose `context: .` these should
# likely be `COPY service_account_records/... ...` — TODO confirm.
COPY ../service_account_records/account.requirements.txt .
RUN pip install --upgrade pip
RUN pip install --no-cache-dir --upgrade -r account.requirements.txt
COPY ../service_account_records ./service_account_records
COPY ../api_library ./service_account_records/api_library
COPY ../api_services ./service_account_records/api_services
COPY ../databases ./service_account_records/databases
COPY ../api_configs ./service_account_records/api_configs
COPY ../api_objects ./service_account_records/api_objects
COPY ../api_validations ./service_account_records/api_validations
#COPY ../service_account_records/crontab_list /service_account_records/crontab_to_write
RUN apt-get update && apt-get install -y cron
# 11:00 Istanbul Time (UTC+3) system time is 08:00 UTC
#RUN echo "0 8 * * * /usr/local/bin/python /service_account_records/app_mail_sender.py >> /var/log/cron.log 2>&1" > /tmp/crontab_list && crontab /tmp/crontab_list
#RUN echo "*/15 * * * * /usr/local/bin/python /service_account_records/isbank/main_single_thread.py >> /var/log/cron.log 2>&1" >> /tmp/crontab_list && crontab /tmp/crontab_list
# NOTE(review): this cron entry has no script path after `python` — it
# installs an incomplete crontab line; fill in the target module to run.
RUN echo "*/15 * * * * /usr/local/bin/python " >> /tmp/crontab_list && crontab /tmp/crontab_list
#RUN echo /service_account_records/crontab_to_write > /tmp/crontab_list
#RUN crontab /tmp/crontab_list
RUN touch /var/log/cron.log
# Foreground tail keeps the container alive while cron runs in the background.
CMD cron && tail -f /var/log/cron.log
#RUN chmod 0644 /etc/cron.d/app_mail_sender_cron
#RUN crontab /etc/cron.d/send_email_cron
#CMD ["python", "-m", "app_mail_sender"]

View File

@ -0,0 +1,18 @@
arrow
Deprecated
python-dotenv
pydantic
sqlalchemy-mixins
psycopg2-binary
redmail
redbox
pandas
xlrd
Unidecode
redis
cryptography
fastapi
requests
pymongo
textdistance

View File

@ -0,0 +1,94 @@
import json
from api_services.bank_actions.wag_account_record_parser import parse_comment_with_name
from databases import (
AccountRecords,
BuildIbans,
BuildDecisionBook,
)
from api_validations.validations_request import (
ListOptions
)
from api_library.date_time_actions.date_functions import (
system_arrow
)
from service_account_records.configs import AccountConfig
# Shared pagination/ordering options for AccountRecords queries:
# newest bank_date first, up to 1000 rows per page.
account_list = ListOptions(
page=1,
size=1000,
order_field="bank_date",
order_type="desc",
)
def account_records_find_decision_book():
    """Attach the matching decision book to account records that lack one.

    Walks account records with no ``build_decision_book_id``, makes sure a
    ``BuildIbans`` row exists for each record's IBAN, and — when the IBAN is
    already linked to a build — looks up the ``BuildDecisionBook`` whose
    validity window covers the record's bank date and stores its id/uuid on
    the record.
    """
    created_ibans, iban_build_dict = [], {}
    AccountRecords.filter_attr = account_list
    # Kept for the (currently disabled) bank_date window filter below.
    days_before_now = system_arrow.now().shift(days=(int(AccountConfig.BEFORE_DAY) * -1))
    account_records_list: list[AccountRecords] = AccountRecords.filter_all(
        # AccountRecords.bank_date >= str(days_before_now),
        AccountRecords.build_decision_book_id == None,
        system=True,
    ).data
    for account_record in account_records_list:
        if account_record.iban in created_ibans:
            continue
        found_iban: BuildIbans = BuildIbans.filter_one(
            BuildIbans.iban == account_record.iban,
            system=True,
        ).data
        if not found_iban:
            # First time this IBAN is seen: create a placeholder row and skip
            # matching — it has no build assigned yet.
            create_build_ibans = BuildIbans.find_or_create(
                iban=account_record.iban,
                start_date=str(system_arrow.now().shift(days=-1)),
            )
            create_build_ibans.save_and_confirm()
            created_ibans.append(account_record.iban)
            # Guard: the original fell through and crashed on
            # found_iban.build_id when found_iban was None.
            continue
        # IBAN row already exists in the database.
        if not found_iban.build_id:
            iban_build_dict["iban"] = account_record.iban
            iban_build_dict["build_id"] = None
        else:
            found_decision_book = BuildDecisionBook.filter_one(
                BuildDecisionBook.build_id == found_iban.build_id,
                BuildDecisionBook.expiry_starts <= account_record.bank_date,
                BuildDecisionBook.expiry_ends > account_record.bank_date,
            ).data
            if found_decision_book:
                print('found_decision_book', found_decision_book.get_dict())
                account_record.update(
                    build_decision_book_id=found_decision_book.id,
                    build_decision_book_uu_id=str(found_decision_book.uu_id),
                )
                account_record.save()
    print('iban_build_dict', iban_build_dict)
    print('created_ibans', created_ibans)
    return
def account_records_search():
    """Run the comment/IBAN similarity parser over already-assigned records."""
    AccountRecords.filter_attr = account_list
    records: list[AccountRecords] = AccountRecords.filter_all(
        AccountRecords.build_decision_book_id != None,
        system=True,
    ).data
    for record in records:
        print('account_record', record.get_dict())
        similarity_result = parse_comment_with_name(record.iban, record.process_comment)
        print('similarity_result', similarity_result)
        # build_iban = BuildIbans.find_one(iban=data["iban"])
    print('account_records_list', records)
    return
def account_records_service() -> None:
    """Service entry point: link decision books, then run similarity search."""
    account_records_find_decision_book()
    account_records_search()
    return


if __name__ == "__main__":
    account_records_service()

View File

@ -0,0 +1,4 @@
class AccountConfig:
    """Static configuration for the account records service."""

    # Look-back window in days; callers read it as int(AccountConfig.BEFORE_DAY).
    BEFORE_DAY = 30

View File

@ -0,0 +1,2 @@
0 8 * * * /usr/local/bin/python /service_app_banks/app_mail_sender.py >> /var/log/cron.log 2>&1
*/15 * * * * /usr/local/bin/python /service_app_banks/isbank/main_single_thread.py >> /var/log/cron.log 2>&1

View File

@ -129,4 +129,5 @@ def send_mail_to_users_that_have_received_email_from_banks():
print("Email is not sent")
if __name__ == "__main__":
send_mail_to_users_that_have_received_email_from_banks()

View File

@ -1,5 +1,5 @@
class Config:
IP_ADDRESS: str = "http://10.10.2.46:41575/internal/isbank/retreive"
# IP_ADDRESS: str = "http://10.10.2.46:41575/internal/isbank/retreive"
SERVICE_TIMING: int = 900 # 15 min
UNREAD_PATH: str = "/home/bank/isbank/unread/"

View File

@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y cron
# 11:00 Istanbul Time (UTC+3) system time is 08:00 UTC
RUN echo "0 8 * * * /usr/local/bin/python /service_app_banks/app_mail_sender.py >> /var/log/cron.log 2>&1" > /tmp/crontab_list && crontab /tmp/crontab_list
RUN echo "*/15 * * * * /usr/local/bin/python /service_app_banks/isbank/main_single_thread.py >> /var/log/cron.log 2>&1" >> /tmp/crontab_list && crontab /tmp/crontab_list
RUN echo "*/15 * * * * /usr/local/bin/python " >> /tmp/crontab_list && crontab /tmp/crontab_list
#RUN echo /service_app_banks/crontab_to_write > /tmp/crontab_list
#RUN crontab /tmp/crontab_list