services api

This commit is contained in:
2025-07-31 17:26:30 +03:00
parent 479104a04f
commit 1f8db23f75
56 changed files with 1976 additions and 120 deletions

View File

@@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@@ -0,0 +1,33 @@
FROM python:3.12-slim
WORKDIR /
# Make / importable and keep Python output unbuffered and bytecode-free.
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# System deps (gcc for building wheels, cron for the scheduler) plus Poetry.
# Installing cron here instead of a second apt layer avoids a duplicate
# `apt-get update` and keeps /var/lib/apt/lists cleaned in every layer.
RUN apt-get update \
    && apt-get install -y --no-install-recommends gcc cron \
    && rm -rf /var/lib/apt/lists/* \
    && pip install --no-cache-dir poetry
# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml
# Install only the main dependency group into the system interpreter
# (no virtualenv inside the container), then drop every package cache.
RUN poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root --only main \
    && pip cache purge \
    && rm -rf ~/.cache/pypoetry
# Copy application code
COPY /ServicesBank/Finder/DecisionBook /
COPY /ServicesApi/Schemas /Schemas
COPY /ServicesApi/Controllers /Controllers
# Pre-create the log file cron output is appended to (tailed by the entrypoint).
RUN touch /var/log/cron.log
# Make the entrypoint and cron job scripts executable
# (both arrive via the DecisionBook COPY above — confirm they exist there).
RUN chmod +x /entrypoint.sh /run_app.sh
# entrypoint.sh renders /env.sh from the container environment, installs the
# crontab, starts cron and tails the log in the foreground.
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
# Finder documentation
Finds people, living spaces, and companies from `AccountRecords`.

View File

@@ -0,0 +1,30 @@
#!/bin/bash
# Render /env.sh from the container environment so cron jobs (which start
# with an empty environment) can source it — see run_app.sh.
# The first write TRUNCATES (">") the file: with ">>" every container
# restart re-appended all entries and /env.sh grew without bound.
echo "POSTGRES_USER=\"$POSTGRES_USER\"" > /env.sh
echo "POSTGRES_PASSWORD=\"$POSTGRES_PASSWORD\"" >> /env.sh
echo "POSTGRES_DB=\"$POSTGRES_DB\"" >> /env.sh
echo "POSTGRES_HOST=\"$POSTGRES_HOST\"" >> /env.sh
echo "POSTGRES_PORT=$POSTGRES_PORT" >> /env.sh
echo "POSTGRES_ENGINE=\"$POSTGRES_ENGINE\"" >> /env.sh
echo "POSTGRES_POOL_PRE_PING=\"$POSTGRES_POOL_PRE_PING\"" >> /env.sh
echo "POSTGRES_POOL_SIZE=$POSTGRES_POOL_SIZE" >> /env.sh
echo "POSTGRES_MAX_OVERFLOW=$POSTGRES_MAX_OVERFLOW" >> /env.sh
echo "POSTGRES_POOL_RECYCLE=$POSTGRES_POOL_RECYCLE" >> /env.sh
echo "POSTGRES_POOL_TIMEOUT=$POSTGRES_POOL_TIMEOUT" >> /env.sh
echo "POSTGRES_ECHO=\"$POSTGRES_ECHO\"" >> /env.sh
# Add Python environment variables
echo "PYTHONPATH=/" >> /env.sh
echo "PYTHONUNBUFFERED=1" >> /env.sh
echo "PYTHONDONTWRITEBYTECODE=1" >> /env.sh
# Install the schedule: run the service every 15 minutes, log to cron.log.
echo "*/15 * * * * /run_app.sh >> /var/log/cron.log 2>&1" > /tmp/crontab_list
crontab /tmp/crontab_list
# Start the cron daemon (detaches into the background).
cron
# Keep the container alive and stream cron output; exec makes tail PID 1's
# child replacement so container stop signals reach it directly.
exec tail -f /var/log/cron.log

View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Cron job entry: load the settings written by entrypoint.sh and run the
# DecisionBook runner with them in its environment.
. /env.sh
# Cron strips the environment, so re-export every setting sourced above
# before spawning the Python process.
for var_name in \
    POSTGRES_USER POSTGRES_PASSWORD POSTGRES_DB POSTGRES_HOST \
    POSTGRES_PORT POSTGRES_ENGINE POSTGRES_POOL_PRE_PING \
    POSTGRES_POOL_SIZE POSTGRES_MAX_OVERFLOW POSTGRES_POOL_RECYCLE \
    POSTGRES_POOL_TIMEOUT POSTGRES_ECHO \
    PYTHONPATH PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE
do
    export "$var_name"
done
# env >> /var/log/cron.log
/usr/local/bin/python /runner.py

View File

@@ -0,0 +1,29 @@
from sqlalchemy import cast, Date
from Schemas import AccountRecords, BuildIbans, BuildDecisionBook
def account_records_find_decision_book(session):
    """Link unassigned account records to their build decision books.

    For every ``AccountRecords`` row that belongs to a build but has no
    decision book yet, resolve the build through its IBAN and pick the
    decision book whose expiry window (compared at date precision) covers
    the record's bank date, then persist the link on the record.

    Args:
        session: SQLAlchemy session bound to the active-record models.
    """
    # Bind the shared session to each active-record style model.
    AccountRecords.set_session(session)
    BuildIbans.set_session(session)
    BuildDecisionBook.set_session(session)
    # Records tied to a build but not yet linked to a decision book.
    # .is_()/.isnot() are SQLAlchemy's explicit NULL operators — identical
    # SQL to the original `== None` / `!= None`, but lint-clean (E711).
    pending_filter = (
        AccountRecords.build_id.isnot(None),
        AccountRecords.build_decision_book_id.is_(None),
    )
    account_records_list: list[AccountRecords] = (
        AccountRecords.query.filter(*pending_filter)
        .order_by(AccountRecords.bank_date.desc())
        .all()
    )
    for account_record in account_records_list:
        found_iban = BuildIbans.query.filter(
            BuildIbans.iban == account_record.iban
        ).first()
        if not found_iban:
            # No build known for this IBAN — nothing to link.
            continue
        # Decision book for the same build whose validity window contains
        # the record's bank date (casts drop the time component).
        found_decision_book = BuildDecisionBook.query.filter(
            BuildDecisionBook.build_id == found_iban.build_id,
            cast(BuildDecisionBook.expiry_starts, Date) <= cast(account_record.bank_date, Date),
            cast(BuildDecisionBook.expiry_ends, Date) >= cast(account_record.bank_date, Date),
        ).first()
        if found_decision_book:
            account_record.build_decision_book_id = found_decision_book.id
            account_record.build_decision_book_uu_id = str(found_decision_book.uu_id)
            account_record.save()
if __name__ == "__main__":
print("DecisionBook Service is running...")
with AccountRecords.new_session() as session:
account_records_find_decision_book(session)
print("DecisionBook Service is finished...")