updated Payment service

This commit is contained in:
Berkay 2025-06-30 21:34:16 +03:00
parent 5c640ddcee
commit 88afa6b329
55 changed files with 6860 additions and 130 deletions

View File

@ -12,12 +12,12 @@ COPY /pyproject.toml ./pyproject.toml
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry
# Copy application code # Copy application code
COPY /api_services/api_controllers /api_controllers COPY /ServicesApi/Controllers /Controllers
COPY /api_services/schemas /schemas COPY /ServicesApi/Schemas /Schemas
COPY /api_services/api_modules /api_modules COPY /ServicesApi/Extensions /Extensions
COPY /api_services/api_builds/initial-service /initial-service COPY /ServicesApi/Builds/Initial /initial-service
COPY /api_services/api_builds/initial-service / COPY /ServicesApi/Builds/Initial /
# Set Python path to include app directory # Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1 ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

View File

@ -1,6 +1,6 @@
import os import os
from api_controllers.postgres.engine import get_db from Controllers.Postgres.engine import get_db
from init_app_defaults import create_application_defaults from init_app_defaults import create_application_defaults
from init_enums import init_api_enums_build_types from init_enums import init_api_enums_build_types
from init_alembic import generate_alembic from init_alembic import generate_alembic

View File

@ -216,12 +216,12 @@ def init_api_enums_build_types(db_session):
{ {
"enum_class": "DebitTypes", "enum_class": "DebitTypes",
"type_code": "DT-D", "type_code": "DT-D",
"type_name": "Debit Sender", "type_name": "Incoming Fund",
}, },
{ {
"enum_class": "DebitTypes", "enum_class": "DebitTypes",
"type_code": "DT-R", "type_code": "DT-R",
"type_name": "Credit Receiver", "type_name": "Outgoing Fund",
}, },
{ {
"enum_class": "DebitTypes", "enum_class": "DebitTypes",

View File

@ -231,28 +231,18 @@ class CrudCollection(CrudMixin):
__repr__ = ReprMixin.__repr__ __repr__ = ReprMixin.__repr__
# Outer reference fields # Outer reference fields
ref_id: Mapped[str] = mapped_column( ref_int: Mapped[int] = mapped_column(Integer, nullable=True, index=True, comment="External reference ID")
String(100), nullable=True, index=True, comment="External reference ID" ref_id: Mapped[str] = mapped_column(String(100), nullable=True, index=True, comment="External reference UUID")
)
replication_id: Mapped[int] = mapped_column( replication_id: Mapped[int] = mapped_column(SmallInteger, server_default="0", comment="Replication identifier")
SmallInteger, server_default="0", comment="Replication identifier"
)
# Cryptographic and user tracking # Cryptographic and user tracking
cryp_uu_id: Mapped[str] = mapped_column( cryp_uu_id: Mapped[str] = mapped_column(String, nullable=True, index=True, comment="Cryptographic UUID")
String, nullable=True, index=True, comment="Cryptographic UUID"
)
# Token fields of modification # Token fields of modification
created_credentials_token: Mapped[str] = mapped_column( created_credentials_token: Mapped[str] = mapped_column(String, nullable=True, comment="Created Credentials token")
String, nullable=True, comment="Created Credentials token" updated_credentials_token: Mapped[str] = mapped_column(String, nullable=True, comment="Updated Credentials token")
) confirmed_credentials_token: Mapped[str] = mapped_column(String, nullable=True, comment="Confirmed Credentials token")
updated_credentials_token: Mapped[str] = mapped_column(
String, nullable=True, comment="Updated Credentials token"
)
confirmed_credentials_token: Mapped[str] = mapped_column(
String, nullable=True, comment="Confirmed Credentials token"
)
# Status flags # Status flags
is_confirmed: Mapped[bool] = mapped_column( is_confirmed: Mapped[bool] = mapped_column(

View File

@ -451,6 +451,12 @@ class AccountRecords(CrudCollection):
build_decision_book_uu_id: Mapped[str] = mapped_column( build_decision_book_uu_id: Mapped[str] = mapped_column(
String, nullable=True, comment="Build Decision Book UU ID" String, nullable=True, comment="Build Decision Book UU ID"
) )
# payment_result_type = Mapped[int] = mapped_column(
# ForeignKey("api_enum_dropdown.id"), nullable=True
# )
# payment_result_type_uu_id: Mapped[str] = mapped_column(
# String, nullable=True, comment="Payment Result Type UU ID"
# )
__table_args__ = ( __table_args__ = (
Index("_budget_records_ndx_00", is_receipt_mail_send, bank_date), Index("_budget_records_ndx_00", is_receipt_mail_send, bank_date),

View File

@ -549,12 +549,9 @@ class BuildDecisionBookPayments(CrudCollection):
) )
currency: Mapped[str] = mapped_column(String(8), server_default="TRY") currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
payment_types_id: Mapped[int] = mapped_column( account_is_debit: Mapped[bool] = mapped_column(Boolean, nullable=True, server_default="1", comment="Is Debit")
ForeignKey("api_enum_dropdown.id"), nullable=True payment_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
) payment_types_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Dues Type UUID")
payment_types_uu_id: Mapped[str] = mapped_column(
String, nullable=True, comment="Dues Type UUID"
)
period_time: Mapped[str] = mapped_column(String(12)) period_time: Mapped[str] = mapped_column(String(12))
process_date_y: Mapped[int] = mapped_column(SmallInteger) process_date_y: Mapped[int] = mapped_column(SmallInteger)
@ -568,32 +565,18 @@ class BuildDecisionBookPayments(CrudCollection):
build_decision_book_item_uu_id: Mapped[str] = mapped_column( build_decision_book_item_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Decision Book Item UUID" String, nullable=False, comment="Decision Book Item UUID"
) )
# build_decision_book_id: Mapped[int] = mapped_column( build_decision_book_id: Mapped[int] = mapped_column(ForeignKey("build_decision_book.id"), nullable=True)
# ForeignKey("build_decision_book.id"), nullable=True build_decision_book_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Decision Book UUID")
# ) build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"), nullable=False)
# build_decision_book_uu_id: Mapped[str] = mapped_column( build_parts_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Build Part UUID")
# String, nullable=True, comment="Decision Book UUID"
# )
build_parts_id: Mapped[int] = mapped_column(
ForeignKey("build_parts.id"), nullable=False
)
build_parts_uu_id: Mapped[str] = mapped_column(
String, nullable=False, comment="Build Part UUID"
)
decision_book_project_id: Mapped[int] = mapped_column( decision_book_project_id: Mapped[int] = mapped_column(
ForeignKey("build_decision_book_projects.id"), ForeignKey("build_decision_book_projects.id"),
nullable=True, nullable=True,
comment="Decision Book Project ID", comment="Decision Book Project ID",
) )
decision_book_project_uu_id: Mapped[str] = mapped_column( decision_book_project_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Decision Book Project UUID")
String, nullable=True, comment="Decision Book Project UUID" account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True)
) account_records_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Account Record UU ID")
account_records_id: Mapped[int] = mapped_column(
ForeignKey("account_records.id"), nullable=True
)
account_records_uu_id: Mapped[str] = mapped_column(
String, nullable=True, comment="Account Record UU ID"
)
# budget_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True) # budget_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True)
# budget_records_uu_id: Mapped[str] = mapped_column( # budget_records_uu_id: Mapped[str] = mapped_column(
@ -626,12 +609,15 @@ class BuildDecisionBookPayments(CrudCollection):
__table_args__ = ( __table_args__ = (
Index( Index(
"build_decision_book_payments_detail_ndx_00", "build_decision_book_payments_detail_ndx_00",
"uu_id",
"ref_id",
build_decision_book_item_id, build_decision_book_item_id,
build_parts_id, build_parts_id,
payment_plan_time_periods, payment_plan_time_periods,
process_date, process_date,
payment_types_id, payment_types_id,
account_records_id, account_records_id,
account_is_debit,
unique=True, unique=True,
), ),
Index("build_decision_book_payments_detail_ndx_01", account_records_id), Index("build_decision_book_payments_detail_ndx_01", account_records_id),

0
ServicesApi/__init__.py Normal file
View File

View File

@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@ -0,0 +1,33 @@
FROM python:3.12-slim
WORKDIR /
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry
# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml
# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry
# Install cron for scheduling tasks; --no-install-recommends and cleaning the
# apt lists keep the image small (the first RUN above already does the same)
RUN apt-get update && apt-get install -y --no-install-recommends cron && rm -rf /var/lib/apt/lists/*
# Copy application code
COPY /ServicesBank/Finder/BuildFromIban /
COPY /ServicesApi/Schemas /Schemas
COPY /ServicesApi/Controllers /Controllers
# Create log file to grab cron logs
RUN touch /var/log/cron.log
# Make entrypoint and runner scripts executable (single layer instead of two)
RUN chmod +x /entrypoint.sh /run_app.sh
# Use entrypoint script to update run_app.sh with environment variables and start cron
ENTRYPOINT ["/entrypoint.sh"]

View File

@ -0,0 +1,3 @@
# Finder service documentation
Finds people, living spaces, and companies from AccountRecords.

View File

@ -0,0 +1,30 @@
#!/bin/bash
# Snapshot the container environment into /env.sh so cron jobs (which start
# with an almost empty environment) can source the database settings.
# Truncate first: the original '>>'-only writes appended duplicate lines every
# time the container restarted.
: > /env.sh
echo "POSTGRES_USER=\"$POSTGRES_USER\"" >> /env.sh
echo "POSTGRES_PASSWORD=\"$POSTGRES_PASSWORD\"" >> /env.sh
echo "POSTGRES_DB=\"$POSTGRES_DB\"" >> /env.sh
echo "POSTGRES_HOST=\"$POSTGRES_HOST\"" >> /env.sh
echo "POSTGRES_PORT=$POSTGRES_PORT" >> /env.sh
echo "POSTGRES_ENGINE=\"$POSTGRES_ENGINE\"" >> /env.sh
echo "POSTGRES_POOL_PRE_PING=\"$POSTGRES_POOL_PRE_PING\"" >> /env.sh
echo "POSTGRES_POOL_SIZE=$POSTGRES_POOL_SIZE" >> /env.sh
echo "POSTGRES_MAX_OVERFLOW=$POSTGRES_MAX_OVERFLOW" >> /env.sh
echo "POSTGRES_POOL_RECYCLE=$POSTGRES_POOL_RECYCLE" >> /env.sh
echo "POSTGRES_POOL_TIMEOUT=$POSTGRES_POOL_TIMEOUT" >> /env.sh
echo "POSTGRES_ECHO=\"$POSTGRES_ECHO\"" >> /env.sh
# Python runtime settings for the cron-launched script
echo "PYTHONPATH=/" >> /env.sh
echo "PYTHONUNBUFFERED=1" >> /env.sh
echo "PYTHONDONTWRITEBYTECODE=1" >> /env.sh
# Install the schedule: run the job every 5 minutes, logging to the cron log
echo "*/5 * * * * /run_app.sh >> /var/log/cron.log 2>&1" > /tmp/crontab_list
crontab /tmp/crontab_list
# Start the cron daemon (it forks into the background)
cron
# Keep PID 1 alive and stream job output to the container log
tail -f /var/log/cron.log

View File

@ -0,0 +1,26 @@
#!/bin/bash
# Load the environment snapshot written by entrypoint.sh.
. /env.sh
# Re-export every variable so the Python process inherits them
# (cron runs jobs with a minimal, non-exported environment).
for var_name in \
    POSTGRES_USER POSTGRES_PASSWORD POSTGRES_DB POSTGRES_HOST POSTGRES_PORT \
    POSTGRES_ENGINE POSTGRES_POOL_PRE_PING POSTGRES_POOL_SIZE \
    POSTGRES_MAX_OVERFLOW POSTGRES_POOL_RECYCLE POSTGRES_POOL_TIMEOUT \
    POSTGRES_ECHO PYTHONPATH PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE; do
    export "$var_name"
done
# env >> /var/log/cron.log
/usr/local/bin/python /runner.py

View File

@ -0,0 +1,26 @@
import arrow
from Schemas import AccountRecords, BuildIbans


def account_find_build_from_iban(session):
    """Link unapproved account records to a building via their IBAN.

    For every distinct IBAN on records that have no building yet, either
    register the IBAN in ``BuildIbans`` (so a later run can resolve it) or
    copy the matched building id/uuid onto all records sharing that IBAN.

    :param session: active SQLAlchemy session shared by both models.
    """
    AccountRecords.set_session(session)
    BuildIbans.set_session(session)
    # `.is_()` instead of `== None` / `== False`: the SQLAlchemy-idiomatic way
    # to emit IS NULL / IS FALSE (and silences flake8 E711/E712).
    account_records_ibans = (
        AccountRecords.query.filter(
            AccountRecords.build_id.is_(None),
            AccountRecords.approved_record.is_(False),
        )
        .distinct(AccountRecords.iban)
        .all()
    )
    for account_records_iban in account_records_ibans:
        found_iban: BuildIbans = BuildIbans.query.filter(BuildIbans.iban == account_records_iban.iban).first()
        if not found_iban:
            # Unknown IBAN: register it; start_date is backdated one day so the
            # entry is immediately eligible on the next pass.
            create_build_ibans = BuildIbans.create(iban=account_records_iban.iban, start_date=str(arrow.now().shift(days=-1)))
            create_build_ibans.save()
        else:
            # Known IBAN: stamp the building onto every record carrying it.
            update_dict = {"build_id": found_iban.build_id, "build_uu_id": str(found_iban.build_uu_id)}
            session.query(AccountRecords).filter(AccountRecords.iban == account_records_iban.iban).update(update_dict, synchronize_session=False)
            session.commit()


if __name__ == "__main__":
    print("Account Records Service is running...")
    with AccountRecords.new_session() as session:
        account_find_build_from_iban(session=session)
    print("Account Records Service is finished...")

View File

@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@ -0,0 +1,33 @@
FROM python:3.12-slim
WORKDIR /
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry
# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml
# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry
# Install cron for scheduling tasks; --no-install-recommends and cleaning the
# apt lists keep the image small (the first RUN above already does the same)
RUN apt-get update && apt-get install -y --no-install-recommends cron && rm -rf /var/lib/apt/lists/*
# Copy application code
COPY /ServicesBank/Finder/BuildLivingSpace /
COPY /ServicesApi/Schemas /Schemas
COPY /ServicesApi/Controllers /Controllers
# Create log file to grab cron logs
RUN touch /var/log/cron.log
# Make entrypoint and runner scripts executable (single layer instead of two)
RUN chmod +x /entrypoint.sh /run_app.sh
# Use entrypoint script to update run_app.sh with environment variables and start cron
ENTRYPOINT ["/entrypoint.sh"]

View File

@ -0,0 +1,3 @@
# Finder service documentation
Finds people, living spaces, and companies from AccountRecords.

View File

@ -0,0 +1,8 @@
class AccountConfig:
    """Static configuration for the account-record comment parser."""

    # How many days back the finder looks when scanning records.
    BEFORE_DAY = 30
    # Category keyword lists: each list starts with the canonical spelling,
    # followed by common misspellings seen in bank-statement comments.
    CATEGORIES = dict(
        DAIRE=["daire", "dagire", "daare", "nolu daire", "no", "nolu dairenin"],
        APARTMAN=["apartman", "aparman", "aprmn"],
        VILLA=["villa", "vlla"],
        BINA=["bina", "binna"],
    )

View File

@ -0,0 +1,30 @@
#!/bin/bash
# Snapshot the container environment into /env.sh so cron jobs (which start
# with an almost empty environment) can source the database settings.
# Truncate first: the original '>>'-only writes appended duplicate lines every
# time the container restarted.
: > /env.sh
echo "POSTGRES_USER=\"$POSTGRES_USER\"" >> /env.sh
echo "POSTGRES_PASSWORD=\"$POSTGRES_PASSWORD\"" >> /env.sh
echo "POSTGRES_DB=\"$POSTGRES_DB\"" >> /env.sh
echo "POSTGRES_HOST=\"$POSTGRES_HOST\"" >> /env.sh
echo "POSTGRES_PORT=$POSTGRES_PORT" >> /env.sh
echo "POSTGRES_ENGINE=\"$POSTGRES_ENGINE\"" >> /env.sh
echo "POSTGRES_POOL_PRE_PING=\"$POSTGRES_POOL_PRE_PING\"" >> /env.sh
echo "POSTGRES_POOL_SIZE=$POSTGRES_POOL_SIZE" >> /env.sh
echo "POSTGRES_MAX_OVERFLOW=$POSTGRES_MAX_OVERFLOW" >> /env.sh
echo "POSTGRES_POOL_RECYCLE=$POSTGRES_POOL_RECYCLE" >> /env.sh
echo "POSTGRES_POOL_TIMEOUT=$POSTGRES_POOL_TIMEOUT" >> /env.sh
echo "POSTGRES_ECHO=\"$POSTGRES_ECHO\"" >> /env.sh
# Python runtime settings for the cron-launched script
echo "PYTHONPATH=/" >> /env.sh
echo "PYTHONUNBUFFERED=1" >> /env.sh
echo "PYTHONDONTWRITEBYTECODE=1" >> /env.sh
# Install the schedule: run the job every 15 minutes, logging to the cron log
echo "*/15 * * * * /run_app.sh >> /var/log/cron.log 2>&1" > /tmp/crontab_list
crontab /tmp/crontab_list
# Start the cron daemon (it forks into the background)
cron
# Keep PID 1 alive and stream job output to the container log
tail -f /var/log/cron.log

View File

@ -0,0 +1,319 @@
import re
import textdistance
from unidecode import unidecode
from gc import garbage
from Schemas import AccountRecords, People, Build, Companies, BuildIbanDescription
from regex_func import category_finder
from validations import Similarity
def parse_comment_to_split_with_star(account_record):
    """Split a bank-statement comment on '*' separators.

    Accepts either an ORM object or a plain dict and returns a tuple of
    ``(segment_count, segment, ...)``; a comment without '*' yields
    ``(1, comment)``.  Any failure degrades to ``(1, "")``.
    """
    try:
        # Support both plain dicts and ORM objects.
        if isinstance(account_record, dict):
            raw_comment = str(account_record.get('process_comment', ''))
        else:
            raw_comment = str(account_record.process_comment)
        if "*" not in raw_comment:
            return 1, raw_comment
        # Collapse doubled separators before splitting.
        segments = raw_comment.replace("**", "*").split("*")
        return (len(segments), *segments)
    except Exception:
        # Best-effort parser: never let a malformed record break the caller.
        return 1, ""
def remove_garbage_words(comment: str, garbage_word: str):
    """Strip every token of *garbage_word* out of *comment*, upper-cased.

    '*' separators are flattened to spaces first; removal runs token by
    token with `unidecode` transliteration applied between passes so
    accented variants still match.  Returns the cleaned, upper-cased text.
    """
    cleaned_comment = remove_spaces_from_string(comment.replace("*", " "))
    if garbage_word:
        garbage_word = remove_spaces_from_string(garbage_word.replace("*", " "))
        for letter in garbage_word.split(" "):
            # Re-normalise spaces on every pass so a removal that left a
            # double space does not block later substring matches.
            cleaned_comment = unidecode(remove_spaces_from_string(cleaned_comment))
            cleaned_comment = cleaned_comment.replace(remove_spaces_from_string(letter), "")
    return str(remove_spaces_from_string(cleaned_comment)).upper()
def remove_spaces_from_string(remove_string: str) -> str:
    """Collapse runs of spaces to single spaces and upper-case the result.

    Tokens produced by splitting on " " can never themselves contain a
    space, so the original per-character filter was dead code; keeping
    only the non-empty tokens is equivalent.
    """
    return " ".join(token for token in remove_string.split(" ") if token).upper()
def get_garbage_words(comment: str, search_word: str):
    """Return what is left of *comment* once *search_word* tokens are removed.

    Both inputs are transliterated (unidecode) and space-normalised before
    matching.  Returns the leftover "garbage" text upper-cased, or None
    when the comment is fully consumed by the search word.
    """
    garbage_words = unidecode(remove_spaces_from_string(comment))
    search_word = unidecode(remove_spaces_from_string(search_word))
    for word in search_word.split(" "):
        # Delete every occurrence of each normalised search token.
        garbage_words = garbage_words.replace(remove_spaces_from_string(unidecode(word)), "")
    if cleaned_from_spaces := remove_spaces_from_string(garbage_words):
        return str(unidecode(cleaned_from_spaces)).upper()
    return None
def parse_comment_with_name_iban_description(account_record):
    """Score an account record's comment against BuildIbanDescription search words.

    Plain values (iban, comment) are extracted up front so the similarity
    loop never touches a possibly-detached ORM instance.  Returns the best
    `Similarity` found, with the matched company and provenance attached,
    or a zero-similarity result when attribute access fails.
    """
    # Extract necessary data from account_record to avoid session detachment
    if isinstance(account_record, dict):
        iban = account_record.get('iban', '')
        process_comment = account_record.get('process_comment', '')
    else:
        try:
            iban = account_record.iban
            process_comment = account_record.process_comment
        except Exception as e:
            # print(f"Error accessing account_record attributes: {e}")
            return Similarity(similarity=0.0, garbage="", cleaned="")
    # Process the comment locally without depending on the account_record object
    if "*" in process_comment:
        process_comment_cleaned = str(process_comment.replace("**", "*"))
        process_comments = process_comment_cleaned.split("*")
        comments_list, comments_length = process_comments, len(process_comments)
    else:
        comments_list, comments_length = [process_comment], 1
    # print("comments_list", comments_list, "comments_length", comments_length)
    # NOTE(review): comments_length is computed but not used below — confirm
    # whether a segment-count branch was intended here.
    with BuildIbanDescription.new_session() as session:
        BuildIbanDescription.set_session(session)
        Companies.set_session(session)
        iban_results = BuildIbanDescription.query.filter(BuildIbanDescription.iban == iban).all()
        best_similarity = Similarity(similarity=0.0, garbage="", cleaned="")
        # Best match across every (comment segment, iban description) pair.
        for comment in comments_list:
            for iban_result in iban_results:
                search_word = unidecode(iban_result.search_word)
                garbage_words = get_garbage_words(comment, search_word)
                cleaned_comment = remove_garbage_words(comment, garbage_words)
                similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word)
                company = Companies.query.filter_by(id=iban_result.company_id).first()
                if float(similarity_ratio) > float(best_similarity.similarity):
                    best_similarity = Similarity(similarity=similarity_ratio, garbage=garbage_words, cleaned=cleaned_comment)
                    best_similarity.set_company(company)
                    best_similarity.set_found_from("Customer Public Name Description")
        return best_similarity
def parse_comment_for_build_parts(comment: str, max_build_part: int = 200, parse: str = "DAIRE"):
    """Pull candidate flat/part numbers for category *parse* out of *comment*.

    Only numbers not exceeding *max_build_part* are kept.  Returns a list
    of ints, or None when nothing usable was found.
    """
    matches = category_finder(comment)[parse] or []
    part_numbers = []
    for match in matches:
        digits = "".join(ch for ch in str(match) if ch.isdigit())
        if digits and int(digits) <= int(max_build_part):
            part_numbers.append(int(digits))
    return part_numbers or None
def parse_comment_with_name(account_record, living_space_dict: dict = None):
    """Match a comment to a person (incoming money) or a company (outgoing).

    A positive currency value means money received from a living-space
    person; otherwise the building paid a company or individual.  Returns
    the best `Similarity` found, with the sender id set where applicable.
    """
    # Extract necessary data from account_record to avoid session detachment
    if isinstance(account_record, dict):
        iban = account_record.get('iban', '')
        process_comment = account_record.get('process_comment', '')
        try:
            currency_value = int(account_record.get('currency_value', 0))
        except (ValueError, TypeError):
            currency_value = 0
    else:
        try:
            iban = account_record.iban
            process_comment = account_record.process_comment
            currency_value = int(account_record.currency_value)
        except Exception as e:
            # print(f"Error accessing account_record attributes: {e}")
            return Similarity(similarity=0.0, garbage="", cleaned="")
    # Process the comment locally without depending on the account_record object
    if "*" in process_comment:
        process_comment_cleaned = str(process_comment.replace("**", "*"))
        process_comments = process_comment_cleaned.split("*")
        comments_list, comments_length = process_comments, len(process_comments)
    else:
        comments_list, comments_length = [process_comment], 1
    # print("comments_list", comments_list, "comments_length", comments_length)
    best_similarity = Similarity(similarity=0.0, garbage="", cleaned="")
    if currency_value > 0:  # Build receive money from living space people
        living_space_matches = dict(living_space_dict=living_space_dict, iban=iban, whole_comment=process_comment)
        if comments_length == 1:
            best_similarity = parse_comment_for_living_space(iban=iban, comment=comments_list[0], living_space_dict=living_space_dict)
            best_similarity.set_send_person_id(best_similarity.customer_id)
            living_space_matches["best_similarity"] = best_similarity
            # if 0.5 < float(best_similarity['similarity']) < 0.8
            best_similarity = check_build_living_space_matches_with_build_parts(**living_space_matches)
            return best_similarity
        # Multiple '*'-separated segments: keep the best-scoring one.
        for comment in comments_list:
            similarity_result = parse_comment_for_living_space(iban=iban, comment=comment, living_space_dict=living_space_dict)
            if float(similarity_result.similarity) > float(best_similarity.similarity):
                best_similarity = similarity_result
        living_space_matches["best_similarity"] = best_similarity
        # if 0.5 < float(best_similarity['similarity']) < 0.8:
        best_similarity = check_build_living_space_matches_with_build_parts(**living_space_matches)
        # print("last best_similarity", best_similarity)
        return best_similarity
    else:  # Build pays money for service taken from company or individual
        if not comments_length > 1:
            best_similarity = parse_comment_for_company_or_individual(comment=comments_list[0])
            best_similarity.set_send_person_id(best_similarity.customer_id)
            return best_similarity
        for comment in comments_list:
            similarity_result = parse_comment_for_company_or_individual(comment=comment)
            if float(similarity_result.similarity) > float(best_similarity.similarity):
                best_similarity = similarity_result
        return best_similarity
def check_build_living_space_matches_with_build_parts(living_space_dict: dict, best_similarity: Similarity, iban: str, whole_comment: str):
    """Boost a borderline name match when the comment also names the right flat.

    Only fires for similarities strictly between 0.6 and 0.8; when the flat
    number parsed from the comment matches the living space's build part,
    the similarity is moved halfway toward 1.0.  Returns *best_similarity*
    (possibly mutated in place).
    """
    if 0.6 < float(best_similarity.similarity) < 0.8:
        build_parts_data = living_space_dict[iban]["build_parts"]
        # Check if we have living space ID in the similarity object
        living_space_id = getattr(best_similarity, 'living_space_id', None)
        if living_space_id:
            # Find the corresponding living space data
            living_space_data = None
            for ls in living_space_dict[iban]["living_space"]:
                if ls.get('id') == living_space_id:
                    living_space_data = ls
                    break
            if living_space_data:
                build_parts_id = living_space_data.get('build_parts_id')
                parser_dict = dict(comment=str(whole_comment), max_build_part=len(build_parts_data))
                # print("build parts similarity", best_similarity, "parser_dict", parser_dict)
                results_list = parse_comment_for_build_parts(**parser_dict)
                # print("results_list", results_list)
                if not results_list:
                    return best_similarity
                for build_part_data in build_parts_data:
                    # Get part_no directly if it exists in the dictionary
                    part_no = build_part_data.get('part_no')
                    # If part_no doesn't exist, try to extract it from other attributes
                    if part_no is None:
                        # Try to get it from a name attribute if it exists
                        name = build_part_data.get('name', '')
                        if name and isinstance(name, str) and 'part' in name.lower():
                            try:
                                part_no = int(name.lower().replace('part', '').strip())
                            except (ValueError, TypeError):
                                pass
                    # If we have a part_no, proceed with the comparison
                    if part_no is not None:
                        if build_part_data.get('id') == build_parts_id and part_no in results_list:
                            similarity = float(best_similarity.similarity)
                            # Move the score halfway toward 1.0 as a confidence boost.
                            best_similarity.set_similarity((1 - similarity) / 2 + similarity)
                            break
    return best_similarity
def parse_comment_for_company_or_individual(comment: str):
    """Find the non-"Commercial" company whose public name best matches *comment*.

    All company fields are copied out of the ORM session first so the
    scoring loop never touches detached instances.  Returns the best
    `Similarity`, carrying the matched company id.
    """
    # Extract all necessary data from Companies within the session
    companies_data = []
    with Companies.new_session() as session:
        Companies.set_session(session)
        companies_list = Companies.query.filter(Companies.commercial_type != "Commercial").all()
        # Extract all needed data from companies while session is active
        for company in companies_list:
            company_data = {
                'id': company.id,
                'public_name': unidecode(company.public_name)
            }
            # Add any other needed attributes
            if hasattr(company, 'commercial_type'):
                company_data['commercial_type'] = company.commercial_type
            companies_data.append(company_data)
    # Process the data outside the session
    comment = unidecode(comment)
    best_similarity = Similarity(similarity=0.0, garbage="", cleaned="")
    for company_data in companies_data:
        search_word = company_data['public_name']
        garbage_words = get_garbage_words(comment, search_word)
        cleaned_comment = remove_garbage_words(comment, garbage_words)
        similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word)
        if similarity_ratio > float(best_similarity.similarity):
            best_similarity = Similarity(similarity=similarity_ratio, garbage=garbage_words, cleaned=cleaned_comment)
            # Store company ID instead of the ORM object
            best_similarity.set_company_id(company_data['id'])
            best_similarity.set_found_from("Customer Public Name")
    return best_similarity
def parse_comment_for_living_space(iban: str, comment: str, living_space_dict: dict = None) -> Similarity:
    """Find the living-space person whose name best matches *comment* for *iban*.

    Tries "first last" and "last first" orderings for every person tied to
    the IBAN, strips non-name "garbage" words, and keeps the highest
    Jaro-Winkler score.  Returns a zero-similarity result when the IBAN is
    unknown or nothing matches.
    """
    comment = unidecode(comment)
    best_similarity = Similarity(similarity=0.0, garbage="", cleaned="")
    if not iban in living_space_dict:
        return best_similarity
    for person_data in living_space_dict[iban]["people"]:
        # Extract name components from dictionary
        first_name = unidecode(person_data.get('name', '')).upper()
        last_name = unidecode(person_data.get('surname', '')).upper()
        search_word_list = [
            remove_spaces_from_string("".join([f"{first_name} {last_name}"])),
            remove_spaces_from_string("".join([f"{last_name} {first_name}"])),
        ]
        # We don't have middle_name in our dictionary, so skip that part
        cleaned_comment = unidecode(comment).upper()
        for search_word in search_word_list:
            if garbage_words := get_garbage_words(comment, unidecode(search_word)):
                garbage_words = unidecode(garbage_words).upper()
                cleaned_comment = unidecode(remove_garbage_words(comment, garbage_words)).upper()
            similarity_ratio = textdistance.jaro_winkler(cleaned_comment, str(search_word).upper())
            # A candidate shorter than the bare name cannot be a real match.
            if len(cleaned_comment) < len(f"{first_name}{last_name}"):
                continue
            if cleaned_comment and 0.9 < similarity_ratio <= 1:
                pass
            # print("cleaned comment dict", dict(
            #     garbage=garbage_words, cleaned=cleaned_comment, similarity=similarity_ratio,
            #     search_word=search_word, comment=comment, last_similarity=float(best_similarity.similarity))
            # )
            if similarity_ratio > float(best_similarity.similarity):
                # Use person_id from the dictionary data
                person_id = person_data['id']
                for living_space_data in living_space_dict[iban]["living_space"]:
                    if living_space_data.get('person_id') == person_id:
                        # NOTE(review): living_space_info is built but never
                        # used afterwards — confirm whether it was meant to be
                        # attached to the Similarity object.
                        living_space_info = {
                            'id': living_space_data.get('id'),
                            'build_parts_id': living_space_data.get('build_parts_id'),
                            'name': living_space_data.get('name')
                        }
                        best_similarity.set_living_space_id(living_space_data.get('id'))
                        best_similarity.set_found_from("Person Name")
                        best_similarity.set_similarity(similarity_ratio)
                        best_similarity.set_garbage(garbage_words)
                        best_similarity.set_cleaned(cleaned_comment)
                        best_similarity.set_customer_id(person_data['id'])
                        # Find matching build part
                        build_parts_id = living_space_data.get('build_parts_id')
                        for build_part_data in living_space_dict[iban]["build_parts"]:
                            if build_part_data.get('id') == build_parts_id:
                                best_similarity.set_build_part_id(build_part_data.get('id'))
                                break
    return best_similarity

View File

@ -0,0 +1,23 @@
import re
from difflib import get_close_matches
from configs import AccountConfig
def word_straighten(word, ref_list, threshold=0.8):
    """Snap *word* to its closest entry in *ref_list*.

    Uses difflib's similarity matching; when no reference word scores at
    least *threshold*, the original word is returned unchanged.
    """
    candidates = get_close_matches(word, ref_list, n=1, cutoff=threshold)
    if not candidates:
        return word
    return candidates[0]
def category_finder(text, output_template="{kategori} {numara}"):
    """Find category keywords followed by a number inside *text*.

    Tokens are first "straightened" to the closest known keyword of each
    category (typo tolerance), then a regex extracts the number that follows
    a keyword (optionally separated by spaces, ':', '-', or '#').

    Args:
        text: Free-form text to scan (e.g. a bank transfer comment).
        output_template: Format string with ``kategori``/``numara`` fields.

    Returns:
        dict: Every configured category mapped to a list of formatted hits
        (empty list when nothing matched).
    """
    categories = AccountConfig.CATEGORIES
    result = {category: [] for category in categories}
    # Tokenization does not depend on the category — do it once instead of
    # re-splitting the same text on every loop iteration.
    words = re.split(r"\W+", text)
    for category, patterns in categories.items():
        straighten_words = [word_straighten(word, patterns) for word in words]
        straighten_text = " ".join(straighten_words)
        pattern = r"(?:\b|\s|^)(?:" + "|".join(map(re.escape, patterns)) + r")(?:\s*|:|\-|\#)*(\d+)(?:\b|$)"
        if founds_list := re.findall(pattern, straighten_text, re.IGNORECASE):
            list_of_output = [output_template.format(kategori=category, numara=num) for num in founds_list]
            # Keep only entries that are non-empty once whitespace is removed.
            result[category].extend([i for i in list_of_output if str(i).replace(" ", "")])
    return result

View File

@ -0,0 +1,26 @@
#!/bin/bash
# Load the environment snapshot written by the container entrypoint.
. /env.sh

# Re-export every variable so the Python process inherits them.
for var in POSTGRES_USER POSTGRES_PASSWORD POSTGRES_DB POSTGRES_HOST \
           POSTGRES_PORT POSTGRES_ENGINE POSTGRES_POOL_PRE_PING \
           POSTGRES_POOL_SIZE POSTGRES_MAX_OVERFLOW POSTGRES_POOL_RECYCLE \
           POSTGRES_POOL_TIMEOUT POSTGRES_ECHO \
           PYTHONPATH PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE; do
    export "$var"
done

# Run the Python script
/usr/local/bin/python /runner.py

View File

@ -0,0 +1,187 @@
from Schemas import AccountRecords, BuildIbans, BuildDecisionBook, Build, BuildLivingSpace, People, OccupantTypes, BuildParts, BuildDecisionBookPayments, ApiEnumDropdown
from Controllers.Postgres.engine import get_session_factory
from parser import parse_comment_with_name, parse_comment_with_name_iban_description
from validations import Similarity
import re
import time
from datetime import timedelta
def account_save_search_result(account_record_main_session: AccountRecords, similarity_result: Similarity):
    """Persist a parser match onto its AccountRecords row.

    Re-loads the record by id inside a fresh session (the caller's instance
    may belong to another session), then copies the matched living space,
    build part, build, company and sender-person ids/uuids onto it and saves.
    """
    with AccountRecords.new_session() as session:
        # Bind every model touched below to the same fresh session.
        AccountRecords.set_session(session)
        BuildParts.set_session(session)
        Build.set_session(session)
        BuildLivingSpace.set_session(session)
        People.set_session(session)
        account_record = AccountRecords.query.filter_by(id=account_record_main_session.id).first()
        if not account_record:
            # print(f"Could not find account record with ID {account_record_main_session.id}")
            return
        # A Similarity may carry any subset of these ids; default to None.
        company_id = getattr(similarity_result, 'company_id', None)
        living_space_id = getattr(similarity_result, 'living_space_id', None)
        build_part_id = getattr(similarity_result, 'build_part_id', None)
        customer_id = getattr(similarity_result, 'customer_id', None)
        part, build, found_customer = None, None, None
        if living_space_id:
            found_customer = BuildLivingSpace.query.get(living_space_id)
        if build_part_id:
            part = BuildParts.query.get(build_part_id)
        elif found_customer and hasattr(found_customer, 'build_parts_id'):
            # No explicit build part: fall back to the living space's own
            # (human-livable) build part.
            part = BuildParts.query.filter_by(id=found_customer.build_parts_id, human_livable=True).first()
        if part:
            build = Build.query.filter_by(id=part.build_id).first()
        account_record.similarity = similarity_result.similarity
        account_record.found_from = similarity_result.found_from
        account_record.company_id = company_id
        if company_id:
            company = People.query.get(company_id)
            account_record.company_uu_id = getattr(company, "uu_id", None) if company else None
        account_record.build_parts_id = getattr(part, "id", None)
        account_record.build_parts_uu_id = getattr(part, "uu_id", None) if part else None
        if not account_record.build_id and build:
            # Only backfill the build when the record does not already have one.
            account_record.build_id = getattr(build, "id", None)
            account_record.build_uu_id = getattr(build, "uu_id", None)
        account_record.living_space_id = living_space_id
        if found_customer:
            account_record.living_space_uu_id = getattr(found_customer, "uu_id", None)
        if customer_id:
            account_record.send_person_id = customer_id
            customer = People.query.get(customer_id)
            if customer:
                account_record.send_person_uu_id = getattr(customer, "uu_id", None)
        account_record.save()
if __name__ == "__main__":
    # Start timer
    start_time = time.time()
    print("Build Living Space Service is running...")
    # NOTE(review): despite the name, get_session_factory() is used here as a
    # session object (it is .close()d at the bottom) — confirm what it returns.
    new_session = get_session_factory()
    flat_id_list = []
    build_living_space_dict = {}
    found_list = []
    account_records_ibans = []
    # Resolve the occupant-type ids that count as "flat" occupants
    # (resident, owner, tenant, representative).
    with OccupantTypes.new_session() as occupant_types_session:
        OccupantTypes.set_session(occupant_types_session)
        flat_resident = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-RES").first()
        flat_owner = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-OWN").first()
        flat_tenant = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-TEN").first()
        flat_represent = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-REP").first()
        flat_id_list = [flat_resident.id, flat_owner.id, flat_tenant.id, flat_represent.id]
    AccountRecords.set_session(new_session)
    BuildLivingSpace.set_session(new_session)
    BuildParts.set_session(new_session)
    People.set_session(new_session)
    # Build an in-memory lookup per IBAN (people / living spaces / build
    # parts) so the per-record matching below never re-queries the same IBAN.
    account_records_ibans = AccountRecords.query.filter(AccountRecords.build_decision_book_id != None).distinct(AccountRecords.iban).all()
    for account_records_iban in account_records_ibans:
        if account_records_iban.iban not in build_living_space_dict:
            build_parts = BuildParts.query.filter_by(build_id=account_records_iban.build_id, human_livable=True).all()
            build_parts_data = []
            for bp in build_parts:
                bp_dict = {'id': bp.id, 'build_id': bp.build_id, 'human_livable': bp.human_livable}
                if hasattr(bp, 'part_no'):
                    bp_dict['part_no'] = bp.part_no
                build_parts_data.append(bp_dict)
            living_spaces = BuildLivingSpace.query.filter(
                BuildLivingSpace.build_parts_id.in_([bp.id for bp in build_parts]), BuildLivingSpace.occupant_type_id.in_(flat_id_list),
            ).all()
            living_spaces_data = []
            for ls in living_spaces:
                ls_dict = {'id': ls.id, 'build_parts_id': ls.build_parts_id, 'occupant_type_id': ls.occupant_type_id, 'person_id': ls.person_id}
                if hasattr(ls, 'name'):
                    ls_dict['name'] = ls.name
                living_spaces_data.append(ls_dict)
            living_spaces_people = [ls.person_id for ls in living_spaces if ls.person_id]
            people_list = People.query.filter(People.id.in_(living_spaces_people)).all()
            people_data = []
            for p in people_list:
                p_dict = {'id': p.id, 'name': p.firstname, 'surname': p.surname, 'middle_name': p.middle_name}
                p_dict['full_name'] = f"{p.firstname} {p.surname}".strip()
                people_data.append(p_dict)
            build_living_space_dict[str(account_records_iban.iban)] = {"people": people_data, "living_space": living_spaces_data, "build_parts": build_parts_data}
    # Collect the record ids up front; each record is then processed in its
    # own short-lived session.
    with AccountRecords.new_session() as query_session:
        AccountRecords.set_session(query_session)
        account_record_ids = [record.id for record in AccountRecords.query.filter(AccountRecords.build_decision_book_id != None).order_by(AccountRecords.bank_date.desc()).all()]
    for account_id in account_record_ids:
        with AccountRecords.new_session() as record_session:
            AccountRecords.set_session(record_session)
            account_record = AccountRecords.query.filter_by(id=account_id).first()
            if not account_record:
                continue
            # Snapshot plain values so the parsers never touch ORM state.
            account_iban = account_record.iban
            account_process_comment = account_record.process_comment
            account_currency_value = account_record.currency_value
            account_similarity_value = float(account_record.similarity or 0.0)
            account_build_id = account_record.build_id
            account_data = {"id": account_id, "iban": account_iban, "process_comment": account_process_comment, "currency_value": account_currency_value,
                            "similarity": account_similarity_value, "build_id": account_build_id}
            try:
                # First pass: match against known people/living spaces of the IBAN.
                similarity_result = parse_comment_with_name(account_record=account_data, living_space_dict=build_living_space_dict)
                fs = float(similarity_result.similarity)
                if fs >= 0.8 and fs >= account_similarity_value:
                    found_list.append(similarity_result)
                    with AccountRecords.new_session() as save_session:
                        AccountRecords.set_session(save_session)
                        fresh_account = AccountRecords.query.filter_by(id=account_id).first()
                        if fresh_account:
                            account_save_search_result(account_record_main_session=fresh_account, similarity_result=similarity_result)
                    print("POSITIVE SIMILARITY RESULT:", {
                        'similarity': similarity_result.similarity, 'found_from': similarity_result.found_from, 'garbage': similarity_result.garbage,
                        'cleaned': similarity_result.cleaned, 'company_id': getattr(similarity_result, 'company_id', None),
                        'living_space_id': getattr(similarity_result, 'living_space_id', None), 'build_part_id': getattr(similarity_result, 'build_part_id', None),
                        'customer_id': getattr(similarity_result, 'customer_id', None)
                    })
                else:
                    # Fallback pass: parse name / IBAN / description directly
                    # from the raw comment.
                    similarity_result = parse_comment_with_name_iban_description(account_record=account_data)
                    fs = float(similarity_result.similarity)
                    if fs >= 0.8 and fs > account_similarity_value:
                        found_list.append(similarity_result)
                        with AccountRecords.new_session() as save_session:
                            AccountRecords.set_session(save_session)
                            fresh_account = AccountRecords.query.filter_by(id=account_id).first()
                            if fresh_account:
                                account_save_search_result(account_record_main_session=fresh_account, similarity_result=similarity_result)
                        print("NEGATIVE SIMILARITY RESULT:", {
                            'similarity': similarity_result.similarity, 'found_from': similarity_result.found_from,
                            'garbage': similarity_result.garbage, 'cleaned': similarity_result.cleaned,
                            'company_id': getattr(similarity_result, 'company_id', None), 'living_space_id': getattr(similarity_result, 'living_space_id', None),
                            'build_part_id': getattr(similarity_result, 'build_part_id', None), 'customer_id': getattr(similarity_result, 'customer_id', None)
                        })
            except Exception as e:
                # print(f"Error processing account {account_id}: {e}")
                continue
    # Calculate elapsed time
    end_time = time.time()
    elapsed_time = end_time - start_time
    elapsed_formatted = str(timedelta(seconds=int(elapsed_time)))
    print("Account Records Search : ", len(found_list), "/", len(account_record_ids))
    print(f"Total runtime: {elapsed_formatted} (HH:MM:SS)")
    print(f"Total seconds: {elapsed_time:.2f}")
    new_session.close()
    print("Build Living Space Service is finished...")

View File

@ -0,0 +1,49 @@
from Schemas import BuildLivingSpace, People
class Similarity:
    """Accumulates the best match found while parsing a bank-transfer comment.

    Holds the similarity score plus the entities (living space, build part,
    company, people) the score was computed against. Instances are mutated
    through the ``set_*`` methods as better matches are found.
    """

    def __init__(self, similarity: float, garbage: str, cleaned: str):
        # Core match metrics.
        self.similarity = similarity  # similarity score (0..1 range expected)
        self.garbage = garbage        # words stripped from the comment
        self.cleaned = cleaned        # comment after garbage removal
        # Matched entities — filled in later via the setters below.
        self.living_space = None
        self.living_space_id = None
        self.build_part_id = None
        self.company = None
        self.company_id = None
        self.found_from = None        # human-readable source of the match
        self.send_person_id = None
        self.customer_id = None

    def __repr__(self) -> str:
        # Debug-friendly summary of the match state.
        return (f"{type(self).__name__}(similarity={self.similarity!r}, "
                f"found_from={self.found_from!r}, living_space_id={self.living_space_id!r})")

    def set_customer_id(self, customer_id: int):
        self.customer_id = customer_id

    def set_living_space(self, living_space: "BuildLivingSpace"):
        # Annotation is quoted: avoids evaluating the ORM class at definition time.
        self.living_space = living_space

    def set_company(self, company: "People"):
        self.company = company

    def set_found_from(self, found_from: str):
        self.found_from = found_from

    def set_send_person_id(self, send_person_id: int):
        self.send_person_id = send_person_id

    def set_similarity(self, similarity: float):
        self.similarity = similarity

    def set_garbage(self, garbage: str):
        self.garbage = garbage

    def set_cleaned(self, cleaned: str):
        self.cleaned = cleaned

    def set_living_space_id(self, living_space_id: int):
        self.living_space_id = living_space_id

    def set_build_part_id(self, build_part_id: int):
        self.build_part_id = build_part_id

    def set_company_id(self, company_id: int):
        self.company_id = company_id

View File

@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@ -0,0 +1,33 @@
FROM python:3.12-slim
WORKDIR /
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry
# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml
# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry
# Install cron for scheduling tasks (skip recommends and clean the apt lists,
# matching the gcc layer above, to keep the image small)
RUN apt-get update && apt-get install -y --no-install-recommends cron && rm -rf /var/lib/apt/lists/*
# Copy application code
COPY /ServicesBank/Finder/DecisionBook /
COPY /ServicesApi/Schemas /Schemas
COPY /ServicesApi/Controllers /Controllers
# Create log file to grab cron logs
RUN touch /var/log/cron.log
# Make entrypoint script executable
RUN chmod +x /entrypoint.sh
RUN chmod +x /run_app.sh
# Use entrypoint script to update run_app.sh with environment variables and start cron
ENTRYPOINT ["/entrypoint.sh"]

View File

@ -0,0 +1,3 @@
# Docs of Finder
Finds people, living spaces, and companies from AccountRecords.

View File

@ -0,0 +1,30 @@
#!/bin/bash
# Write the environment snapshot that cron jobs will source. A single grouped
# redirect with ">" truncates /env.sh first, so container restarts overwrite
# the file instead of appending duplicate lines (the old per-line ">>"
# version grew the file on every start).
{
    echo "POSTGRES_USER=\"$POSTGRES_USER\""
    echo "POSTGRES_PASSWORD=\"$POSTGRES_PASSWORD\""
    echo "POSTGRES_DB=\"$POSTGRES_DB\""
    echo "POSTGRES_HOST=\"$POSTGRES_HOST\""
    echo "POSTGRES_PORT=$POSTGRES_PORT"
    echo "POSTGRES_ENGINE=\"$POSTGRES_ENGINE\""
    echo "POSTGRES_POOL_PRE_PING=\"$POSTGRES_POOL_PRE_PING\""
    echo "POSTGRES_POOL_SIZE=$POSTGRES_POOL_SIZE"
    echo "POSTGRES_MAX_OVERFLOW=$POSTGRES_MAX_OVERFLOW"
    echo "POSTGRES_POOL_RECYCLE=$POSTGRES_POOL_RECYCLE"
    echo "POSTGRES_POOL_TIMEOUT=$POSTGRES_POOL_TIMEOUT"
    echo "POSTGRES_ECHO=\"$POSTGRES_ECHO\""
    # Add Python environment variables
    echo "PYTHONPATH=/"
    echo "PYTHONUNBUFFERED=1"
    echo "PYTHONDONTWRITEBYTECODE=1"
} > /env.sh
# Install the cron schedule: run the app every 15 minutes.
echo "*/15 * * * * /run_app.sh >> /var/log/cron.log 2>&1" > /tmp/crontab_list
crontab /tmp/crontab_list
# Start cron
cron
# Tail the log file to keep the container's main process alive.
tail -f /var/log/cron.log

View File

@ -0,0 +1,26 @@
#!/bin/bash
# Load the environment snapshot written by the container entrypoint.
. /env.sh

# Re-export every variable so the Python process inherits them.
for var in POSTGRES_USER POSTGRES_PASSWORD POSTGRES_DB POSTGRES_HOST \
           POSTGRES_PORT POSTGRES_ENGINE POSTGRES_POOL_PRE_PING \
           POSTGRES_POOL_SIZE POSTGRES_MAX_OVERFLOW POSTGRES_POOL_RECYCLE \
           POSTGRES_POOL_TIMEOUT POSTGRES_ECHO \
           PYTHONPATH PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE; do
    export "$var"
done

# Debug aid: dump the environment to the cron log when needed.
# env >> /var/log/cron.log
/usr/local/bin/python /runner.py

View File

@ -0,0 +1,29 @@
from sqlalchemy import cast, Date
from Schemas import AccountRecords, BuildIbans, BuildDecisionBook
def account_records_find_decision_book(session):
    """Attach the decision book active on each record's bank date.

    For every AccountRecords row that already has a build but no decision
    book, resolve the building via its IBAN and link the BuildDecisionBook
    whose expiry window covers the record's bank date.

    Args:
        session: SQLAlchemy session bound to all three models below.
    """
    AccountRecords.set_session(session)
    BuildIbans.set_session(session)
    BuildDecisionBook.set_session(session)
    # Only records attached to a build but not yet to a decision book.
    # .isnot(None)/.is_(None) is the SQLAlchemy idiom for IS (NOT) NULL
    # (avoids the E711 `!= None` comparison).
    record_filters = (
        AccountRecords.build_id.isnot(None),
        AccountRecords.build_decision_book_id.is_(None),
    )
    account_records_list: list[AccountRecords] = (
        AccountRecords.query.filter(*record_filters)
        .order_by(AccountRecords.bank_date.desc())
        .all()
    )
    for account_record in account_records_list:
        found_iban = BuildIbans.query.filter(BuildIbans.iban == account_record.iban).first()
        if not found_iban:
            continue
        # Decision book of the same building whose validity window contains
        # the bank date (date-only comparison on both sides).
        found_decision_book = BuildDecisionBook.query.filter(
            BuildDecisionBook.build_id == found_iban.build_id,
            cast(BuildDecisionBook.expiry_starts, Date) <= cast(account_record.bank_date, Date),
            cast(BuildDecisionBook.expiry_ends, Date) >= cast(account_record.bank_date, Date),
        ).first()
        if not found_decision_book:
            continue
        account_record.build_decision_book_id = found_decision_book.id
        account_record.build_decision_book_uu_id = str(found_decision_book.uu_id)
        account_record.save()
if __name__ == "__main__":
    # Entry point: run one linking pass inside a fresh session.
    print("DecisionBook Service is running...")
    with AccountRecords.new_session() as session:
        account_records_find_decision_book(session)
    print("DecisionBook Service is finished...")

View File

@ -0,0 +1,32 @@
FROM python:3.12-slim
WORKDIR /
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry
# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml
# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry
# Install cron for scheduling tasks (skip recommends and clean the apt lists,
# matching the gcc layer above, to keep the image small)
RUN apt-get update && apt-get install -y --no-install-recommends cron && rm -rf /var/lib/apt/lists/*
# Copy application code
COPY /ServicesBank/Finder /
COPY /ServicesApi/Schemas /Schemas
COPY /ServicesApi/Controllers /Controllers
# Create log file to grab cron logs
RUN touch /var/log/cron.log
# Make entrypoint script executable
RUN chmod +x /entrypoint.sh
# Use entrypoint script to update run_app.sh with environment variables and start cron
ENTRYPOINT ["/entrypoint.sh"]

View File

@ -0,0 +1,93 @@
# Git
.git
.gitignore
.gitattributes
# CI
.codeclimate.yml
.travis.yml
.taskcluster.yml
# Docker
docker-compose.yml
service_app/Dockerfile
.docker
.dockerignore
# Byte-compiled / optimized / DLL files
**/__pycache__/
**/*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
service_app/env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Virtual environment
service_app/.env
.venv/
venv/
# PyCharm
.idea
# Python mode for VIM
.ropeproject
**/.ropeproject
# Vim swap files
**/*.swp
# VS Code
.vscode/
test_application/

View File

@ -0,0 +1,33 @@
FROM python:3.12-slim
WORKDIR /
# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1
# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry
# Copy Poetry configuration
COPY /pyproject.toml ./pyproject.toml
# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry
# Install cron for scheduling tasks (skip recommends and clean the apt lists,
# matching the gcc layer above, to keep the image small)
RUN apt-get update && apt-get install -y --no-install-recommends cron && rm -rf /var/lib/apt/lists/*
# Copy application code
COPY /ServicesBank/Finder/Payment /
COPY /ServicesApi/Schemas /Schemas
COPY /ServicesApi/Controllers /Controllers
# Create log file to grab cron logs
RUN touch /var/log/cron.log
# Make entrypoint script executable
RUN chmod +x /entrypoint.sh
RUN chmod +x /run_app.sh
# Use entrypoint script to update run_app.sh with environment variables and start cron
ENTRYPOINT ["/entrypoint.sh"]

View File

@ -0,0 +1,44 @@
# Docs of Finder
Finds people, living spaces, companies from AccountRecords
start_time = perf_counter()
end_time = perf_counter()
elapsed = end_time - start_time
print(f'{elapsed:.3f} : seconds')
print('shallow_copy_list', len(shallow_copy_list))
"""
"""
1. Stage (Incoming Money)
# BuildDecisionBookPayments are reverse records of AccountRecords
AccountRecords.approved_record == True
AccountRecords.living_space_id is not None
# AccountRecords.receive_debit = Credit Receiver (Incoming money from client) / Debit Sender (Debt to be paid by system)
1.1
AccountRecords.currency_value > 0 Received Money Transaction +
AccountRecords.currency_value > AccountRecords.remainder_balance () You have extra money in system account
Money fully consumed => AccountRecords.currency_value != abs(AccountRecords.remainder_balance) for a singular IBAN
Some payment was made but not all of the money has been consumed yet => AccountRecords.currency_value + AccountRecords.remainder_balance != 0
AccountRecords.currency_value = AccountRecords.remainder_balance (There is no money that individual has in system)
AccountRecords.bank_date (Date money arrived)
AccountRecords.process_type (Type of bank transaction)
1.2
AccountRecords.currency_value < 0 Sent Money Transaction -
2. Stage (Payment Match Process)
Parse : BuildDecisionBookPayments.process_date (Year / Month / Day / Time)
BuildDecisionBookPayments.account_records_id == None ( Payment is not assigned to any account record)
BuildDecisionBookPayments.payment_types_id == debit_enum.id (Payment type is debit)
2.1 Check current month has any payment to due Payment Month == Money Arrived Month
2.2 Check previous months has any payment to due Payment Month < Money Arrived Month
3. Stage (Payment Assignment Process)
Do payment set left money to account record as AccountRecords.remainder_balance

View File

@ -0,0 +1,774 @@
import arrow
import time
from decimal import Decimal
from datetime import datetime, timedelta
from Schemas import BuildDecisionBookPayments, AccountRecords, ApiEnumDropdown
from time import perf_counter
from sqlalchemy import select, func, distinct, cast, Date, String, literal, desc, and_, or_
from Controllers.Postgres.engine import get_session_factory
#from ServicesApi.Schemas.account.account import AccountRecords
#from ServicesApi.Schemas.building.decision_book import BuildDecisionBookPayments
class BuildDuesTypes:
    """Container for the BuildDuesTypes enum rows loaded from the database.

    Each attribute is populated with an ApiEnumDropdownShallowCopy by
    get_enums_from_database(); all start as None.
    """

    def __init__(self):
        # Annotations are quoted: ApiEnumDropdownShallowCopy is defined later
        # in this module, and annotations on attribute targets are evaluated
        # at runtime — an unquoted name would raise NameError here.
        self.debit: "ApiEnumDropdownShallowCopy" = None
        self.add_debit: "ApiEnumDropdownShallowCopy" = None
        self.renovation: "ApiEnumDropdownShallowCopy" = None
        self.lawyer_expence: "ApiEnumDropdownShallowCopy" = None
        self.service_fee: "ApiEnumDropdownShallowCopy" = None
        self.information: "ApiEnumDropdownShallowCopy" = None
class ApiEnumDropdownShallowCopy:
    """Detached, plain-Python snapshot of an ApiEnumDropdown row.

    Lets enum data outlive the database session that loaded it.
    """

    id: int
    uuid: str
    enum_class: str
    key: str
    value: str

    def __init__(self, id: int, uuid: str, enum_class: str, key: str, value: str):
        self.id = id
        self.uuid = uuid
        self.enum_class = enum_class
        self.key = key
        self.value = value

    def __repr__(self) -> str:
        # enum_class + key uniquely identify the row; include them for debugging.
        return (f"{type(self).__name__}(id={self.id!r}, enum_class={self.enum_class!r}, "
                f"key={self.key!r}, value={self.value!r})")
def get_enums_from_database():
    """Load the BuildDuesTypes enum rows and return detached shallow copies.

    Returns:
        list: Copies ordered by payment priority — debit, lawyer expense,
        additional debit, renovation, service fee, information.
    """
    def _shallow_copy(key: str) -> ApiEnumDropdownShallowCopy:
        # Fetch one BuildDuesTypes row by key and detach it from the session.
        row = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key=key).first()
        return ApiEnumDropdownShallowCopy(row.id, str(row.uu_id), row.enum_class, row.key, row.value)

    build_dues_types = BuildDuesTypes()
    with ApiEnumDropdown.new_session() as session:
        ApiEnumDropdown.set_session(session)
        build_dues_types.debit = _shallow_copy("BDT-D")           # Debit
        build_dues_types.add_debit = _shallow_copy("BDT-A")       # Additional debit
        build_dues_types.renovation = _shallow_copy("BDT-R")      # Renovation
        build_dues_types.lawyer_expence = _shallow_copy("BDT-L")  # Lawyer expense
        build_dues_types.service_fee = _shallow_copy("BDT-S")     # Service fee
        build_dues_types.information = _shallow_copy("BDT-I")     # Information
    # Order matters downstream: debts are settled in this priority order.
    return [
        build_dues_types.debit,
        build_dues_types.lawyer_expence,
        build_dues_types.add_debit,
        build_dues_types.renovation,
        build_dues_types.service_fee,
        build_dues_types.information,
    ]
def generate_total_paid_amount_for_spesific_build_part_id(build_parts_id: int, session):
    """
    Calculate the total amount paid for a specific build part ID.

    Args:
        build_parts_id: The build part ID to calculate payments for
        session: Database session

    Returns:
        The summed payment amount, or 0 when no payments exist.
    """
    total = (
        session.query(func.sum(BuildDecisionBookPayments.payment_amount))
        .filter(
            BuildDecisionBookPayments.build_parts_id == build_parts_id,
            BuildDecisionBookPayments.account_is_debit == False,
            cast(BuildDecisionBookPayments.process_date, Date) >= '2022-01-01',
        )
        .scalar()
    )
    # SUM over zero rows yields None — normalize to 0.
    if total is None:
        return 0
    return total
def generate_total_debt_amount_for_spesific_build_part_id(build_parts_id: int, session):
    """Return the absolute total of all debit entries for *build_parts_id*.

    Includes every debt (processed or not) dated 2022-01-01 or later.

    Returns:
        The absolute summed debt amount, or 0 when there are no debts.
    """
    debt_sum = (
        session.query(func.sum(BuildDecisionBookPayments.payment_amount))
        .filter(
            BuildDecisionBookPayments.build_parts_id == build_parts_id,
            BuildDecisionBookPayments.account_is_debit == True,
            cast(BuildDecisionBookPayments.process_date, Date) >= '2022-01-01',
        )
        .scalar()
    )
    # SUM over zero rows yields None — normalize to 0, otherwise report magnitude.
    if debt_sum is None:
        return 0
    return abs(debt_sum)
def generate_total_amount_that_user_has_in_account(account_record: AccountRecords, session):
    """Return the absolute sum of positive inflows for the record's build part.

    Sums AccountRecords.currency_value over all records sharing the same
    build_parts_id with a positive value since 2022-01-01.

    Returns:
        The absolute summed amount, or 0 when no matching rows exist.
    """
    result = session.query(
        func.sum(AccountRecords.currency_value)
    ).filter(
        AccountRecords.build_parts_id == account_record.build_parts_id,
        AccountRecords.currency_value > 0,
        cast(AccountRecords.bank_date, Date) >= '2022-01-01'
    ).scalar()
    # SUM over zero rows yields None, and abs(None) raises TypeError — guard
    # like the sibling generate_total_* helpers do.
    return abs(result) if result is not None else 0
def get_unpaid_debts(build_parts_id: int, session, debit_type, date_query: tuple):
    """Find (ref_id, process_date) payment groups that are not fully settled.

    Groups BuildDecisionBookPayments rows for one build part and one debit
    type, keeping the groups whose absolute payment sum is non-zero — i.e.
    the debit amount has not been cancelled out by matching payments.

    Args:
        build_parts_id: The build part ID to check
        session: Database session
        debit_type: The specific debit type to check (object exposing ``.id``)
        date_query: Tuple of extra SQLAlchemy date filters to apply

    Returns:
        list: Row tuples of (ref_id, process_date, total_payments),
        ordered by process_date descending (newest first).

    Equivalent SQL:
        SELECT
            bpf.ref_id,
            bpf.process_date,
            ABS(COALESCE(SUM(bpf.payment_amount), 0)) AS total_payments
        FROM public.build_decision_book_payments AS bpf
        GROUP BY
            bpf.ref_id,
            bpf.process_date
        HAVING ABS(COALESCE(SUM(bpf.payment_amount), 0)) > 0
        ORDER BY bpf.process_date
    """
    payment_sums_query = select(
        BuildDecisionBookPayments.ref_id,
        BuildDecisionBookPayments.process_date,
        func.abs(func.coalesce(func.sum(BuildDecisionBookPayments.payment_amount), 0)).label("total_payments")
    ).filter(
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.payment_types_id == debit_type.id,
        *date_query
    ).group_by(
        BuildDecisionBookPayments.ref_id, BuildDecisionBookPayments.process_date
    ).having(
        func.abs(func.coalesce(func.sum(BuildDecisionBookPayments.payment_amount), 0)) > 0
    ).order_by(BuildDecisionBookPayments.process_date.desc())
    # Note: an earlier version also built a list of dicts from these rows
    # but returned the raw rows anyway; that dead loop has been removed.
    return session.execute(payment_sums_query).all()
def _print_debt_details(debt, session):
    """Helper function to print detailed information about an unpaid debt.

    Args:
        debt: The BuildDecisionBookPayments object representing the debt
        session: Database session

    NOTE(review): despite its name, this helper currently only computes the
    payment sum, remaining amount, percentage and a formatted date — no
    print statement remains in the body. Confirm whether the output code was
    removed intentionally or lost.
    """
    # Get the sum of payments for this debt
    payments_sum = session.query(
        func.sum(BuildDecisionBookPayments.payment_amount)
    ).filter(
        BuildDecisionBookPayments.ref_id == debt.ref_id,
        BuildDecisionBookPayments.account_is_debit == False
    ).scalar() or 0
    # Calculate remaining amount
    debit_amount = abs(debt.payment_amount)
    remaining = debit_amount - abs(payments_sum)
    payment_percentage = (abs(payments_sum) / debit_amount) * 100 if debit_amount > 0 else 0
    # Format the date for display
    date_str = debt.process_date.strftime('%Y-%m-%d') if debt.process_date else 'Unknown date'
def analyze_payment_function():
    """Print a debt/payment/balance analysis for every build part.

    For each distinct ``build_parts_id`` that has positive incoming funds
    since 2022-01-01, prints: total debt, amount already paid, account
    balance, coverage percentages, and an unpaid-debt breakdown per payment
    type (current month vs. previous months).
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    order_pay = get_enums_from_database()
    # Distinct build_parts_id values avoid redundant processing of the same part.
    distinct_build_parts = session.query(
        distinct(AccountRecords.build_parts_id)
    ).filter(
        AccountRecords.build_parts_id.isnot(None),
        AccountRecords.currency_value > 0,
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.build_parts_id.desc()).all()
    for build_part_id_tuple in distinct_build_parts:
        build_part_id = build_part_id_tuple[0]  # Extract the ID from the tuple
        process_date = datetime.now()
        first_date_of_process_date = datetime(process_date.year, process_date.month, 1)
        print(f"\n{'=' * 50}")
        print(f"ACCOUNT ANALYSIS FOR BUILD PART ID: {build_part_id}")
        print(f"{'=' * 50}")
        # Calculate total paid amount for this build_part_id
        total_amount_paid = generate_total_paid_amount_for_spesific_build_part_id(build_part_id, session)
        # Calculate total debt amount for this build_part_id
        total_debt_amount = generate_total_debt_amount_for_spesific_build_part_id(build_part_id, session)
        # Get total amount in account for this build_part_id; a transient
        # AccountRecords instance is used only as a parameter carrier.
        account_record = AccountRecords()
        account_record.build_parts_id = build_part_id
        total_amount_in_account = generate_total_amount_that_user_has_in_account(account_record, session)
        # Calculate remaining amount to be paid
        amount_need_to_paid = total_debt_amount - total_amount_paid
        total_amount_that_user_need_to_transfer = abs(amount_need_to_paid) - abs(total_amount_in_account)
        # Print summary with clear descriptions
        print(f"PAYMENT SUMMARY:")
        print(f" • Total debt amount: {total_debt_amount:,.2f} TL")
        print(f" • Amount already paid: {total_amount_paid:,.2f} TL")
        print(f" • Remaining debt to be collected: {amount_need_to_paid:,.2f} TL")
        print(f" • Current account balance: {total_amount_in_account:,.2f} TL")
        if total_amount_that_user_need_to_transfer > 0:
            print(f" • Additional funds needed: {total_amount_that_user_need_to_transfer:,.2f} TL")
        elif amount_need_to_paid <= 0:
            print(f" • Account is fully paid with no outstanding debt")
        else:
            print(f" • Sufficient funds available to close all debt")
        # Show debt coverage percentage
        if total_debt_amount > 0:
            # Calculate current coverage (already paid)
            current_coverage_percentage = (total_amount_paid / total_debt_amount) * 100
            # Calculate potential coverage (including available funds), capped at 100%
            potential_coverage = min(100, ((total_amount_paid + total_amount_in_account) / total_debt_amount) * 100)
            # Display both percentages
            print(f" • Current debt coverage: {current_coverage_percentage:.2f}%")
            print(f" • Potential debt coverage with available funds: {potential_coverage:.2f}%")
        # Analyze unpaid debts for each payment type
        print("\nUNPAID DEBTS ANALYSIS BY PAYMENT TYPE:")
        for payment_type in order_pay:
            # Current-month window vs. everything before this month.
            date_query_current = (
                BuildDecisionBookPayments.process_date >= first_date_of_process_date,
                BuildDecisionBookPayments.process_date <= process_date
            )
            date_query_previous = (
                BuildDecisionBookPayments.process_date < first_date_of_process_date,
            )
            current_unpaid_debts = get_unpaid_debts(build_parts_id=build_part_id, session=session, debit_type=payment_type, date_query=date_query_current)
            # Get unpaid debts from previous months
            previous_unpaid_debts = get_unpaid_debts(build_parts_id=build_part_id, session=session, debit_type=payment_type, date_query=date_query_previous)
            # Calculate totals; each debt tuple is (ref_id, process_date, total_payments)
            current_total = sum(abs(debt[2]) for debt in current_unpaid_debts)
            previous_total = sum(abs(debt[2]) for debt in previous_unpaid_debts)
            grand_total = current_total + previous_total
            # Print summary for this payment type
            if current_unpaid_debts or previous_unpaid_debts:
                print(f"{payment_type.key}: Total unpaid: {grand_total:,.2f} TL")
                # Current month details
                if current_unpaid_debts:
                    print(f" - Current month: {len(current_unpaid_debts)} debts, {current_total:,.2f} TL")
                # Previous months details
                if previous_unpaid_debts:
                    print(f" - Previous months: {len(previous_unpaid_debts)} debts, {previous_total:,.2f} TL")
            else:
                print(f"{payment_type.key}: All debts paid")
        print(f"{'=' * 50}\n")
def close_payment_book(payment_row_book, account_record, value, session):
    """Create a credit entry in BuildDecisionBookPayments to close a debt.

    Args:
        payment_row_book: The debit entry to be paid; most columns of the new
            credit row are copied from it.
        account_record: The account record containing the funds.
        value: The amount to pay (its sign is discarded; abs() is stored).
        session: Database session.

    Returns:
        The newly created payment record.
    """
    BuildDecisionBookPayments.set_session(session)
    # Create a new credit entry (payment). ref_id links the credit back to the
    # original debit row via that row's uu_id.
    new_row = BuildDecisionBookPayments.create(
        ref_id=str(payment_row_book.uu_id),
        payment_plan_time_periods=payment_row_book.payment_plan_time_periods,
        period_time=payment_row_book.period_time,
        currency=payment_row_book.currency,
        account_records_id=account_record.id,
        account_records_uu_id=str(account_record.uu_id),
        build_parts_id=payment_row_book.build_parts_id,
        build_parts_uu_id=str(payment_row_book.build_parts_uu_id),
        # NOTE(review): a POSITIVE amount is stored here despite the old
        # "Negative for credit entries" comment — the credit side is marked by
        # account_is_debit=False below. Confirm downstream sums expect abs().
        payment_amount=abs(value),
        payment_types_id=payment_row_book.payment_types_id,
        payment_types_uu_id=str(payment_row_book.payment_types_uu_id),
        process_date_m=payment_row_book.process_date.month,
        process_date_y=payment_row_book.process_date.year,
        process_date=payment_row_book.process_date,
        build_decision_book_item_id=payment_row_book.build_decision_book_item_id if payment_row_book.build_decision_book_item_id else None,
        build_decision_book_item_uu_id=str(payment_row_book.build_decision_book_item_uu_id) if payment_row_book.build_decision_book_item_uu_id else None,
        decision_book_project_id=payment_row_book.decision_book_project_id if payment_row_book.decision_book_project_id else None,
        decision_book_project_uu_id=str(payment_row_book.decision_book_project_uu_id) if payment_row_book.decision_book_project_uu_id else None,
        is_confirmed=True,
        account_is_debit=False,
    )
    # Save the new payment record
    saved_row = new_row.save()
    # The debit row itself is intentionally left untouched (see disabled code):
    # payment_row_book.account_records_id = account_record.id
    # payment_row_book.account_records_uu_id = str(account_record.uu_id)
    # payment_row_book.save()
    # session.flush()
    return saved_row
def update_account_remainder_if_spent(account_record, ref_id: str, session):
    """Recompute an account's ``remainder_balance`` from its credit rows.

    Args:
        account_record: The account record whose balance is recomputed.
        ref_id: Reference id of the debt that triggered the update. Kept for
            interface compatibility; the recomputation does not need it.
        session: Database session.

    Returns:
        bool: True if all of the account's money is spent, False otherwise.
    """
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    # Total of all payments (credit rows) made from this account. Guard with
    # "or 0": scalar() returns None when the account has no credit rows yet,
    # and abs(None) would raise TypeError.
    sum_of_paid = session.query(func.sum(func.abs(BuildDecisionBookPayments.payment_amount))).filter(
        BuildDecisionBookPayments.account_records_id == account_record.id,
        BuildDecisionBookPayments.account_is_debit == False
    ).scalar() or 0
    # Re-fetch the row in this session and persist the recomputed remainder.
    account_record_to_update = AccountRecords.query.filter_by(id=account_record.id).first()
    account_record_to_update.remainder_balance = sum_of_paid
    account_record_to_update.save()
    # Fully spent when the paid total equals the original deposit.
    return abs(sum_of_paid) == abs(account_record.currency_value)
def update_all_spent_accounts(session):
    """Refresh ``remainder_balance`` for every account that has payments.

    Finds each distinct account referenced by credit rows in
    BuildDecisionBookPayments and sets its remainder_balance to the absolute
    sum of those payments, regardless of whether all funds have been spent.

    Args:
        session: Database session. NOTE: it is immediately shadowed by a
            fresh session from AccountRecords.new_session(); the parameter is
            kept only for interface compatibility with existing callers.
    """
    with AccountRecords.new_session() as session:
        # Set sessions for models
        AccountRecords.set_session(session)
        BuildDecisionBookPayments.set_session(session)
        # Accounts that have at least one credit (payment) row attached.
        distinct_account_ids = session.query(BuildDecisionBookPayments.account_records_id).filter(
            BuildDecisionBookPayments.account_records_id.isnot(None),
            BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
        ).distinct().all()
        updated_count = 0
        for account_id_tuple in distinct_account_ids:
            account_id = account_id_tuple[0]
            # Get the account record; skip rows without a build part or funds.
            account = AccountRecords.query.filter_by(id=account_id).first()
            if not account or not account.build_parts_id or account.currency_value <= 0:
                continue
            # payment_amount is summed as absolute values so the stored
            # remainder is always positive; "or 0" guards a NULL sum.
            payment_sum = session.query(func.sum(func.abs(BuildDecisionBookPayments.payment_amount))).filter(
                BuildDecisionBookPayments.account_records_id == account_id,
                BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
            ).scalar() or 0
            # Store the positive value in remainder_balance for ALL accounts,
            # regardless of whether the funds are fully spent.
            account.remainder_balance = payment_sum
            account.save()
            updated_count += 1
        print(f"\nTotal accounts updated: {updated_count}")
# def find_amount_to_pay_by_ref_id(ref_id, session):
# """Calculate the remaining amount to pay for a specific debt reference ID.
# Args:
# ref_id: The reference ID of the debt (this is the uu_id of the debt record)
# session: Database session
# Returns:
# float: The remaining amount to pay
# """
# # Get the original debt amount - the debt is identified by its uu_id which is passed as ref_id
# debit = BuildDecisionBookPayments.query.filter(
# BuildDecisionBookPayments.uu_id == ref_id,
# BuildDecisionBookPayments.account_is_debit == True
# ).first()
# if not debit:
# return 0 # No debit found, nothing to pay
# debit_amount = abs(debit.payment_amount) # Ensure positive value for debit amount
# # Get the sum of payments already made for this debt
# # The ref_id in credit records points to the uu_id of the original debit
# # Note: payment_amount is negative for credit entries, so we use abs() to get positive values
# credit_amount = session.query(
# func.sum(func.abs(BuildDecisionBookPayments.payment_amount))
# ).filter(
# BuildDecisionBookPayments.ref_id == str(ref_id),
# BuildDecisionBookPayments.account_is_debit == False
# ).scalar() or 0
# # Calculate remaining amount to pay
# remaining = abs(debit_amount) - abs(credit_amount)
# # Ensure we don't return negative values
# if remaining < 0:
# return 0
# return remaining
def do_payments_of_this_month():
    """Process payments for the current month's unpaid debts.

    Retrieves account records with available funds and pays the current
    month's unpaid debts in payment-type priority order, committing all
    changes at the end and printing a summary.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    # Get payment types in priority order
    payment_type_list = get_enums_from_database()
    # Accounts with funds: never-touched accounts carry remainder_balance NULL.
    account_records = AccountRecords.query.filter(
        AccountRecords.build_parts_id.isnot(None),
        AccountRecords.currency_value > 0,
        or_(
            AccountRecords.remainder_balance.is_(None),
            AccountRecords.remainder_balance < AccountRecords.currency_value
        ),
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.build_parts_id.desc()).all()
    payments_made = 0
    total_amount_paid = 0
    process_date = datetime.now()
    first_date_of_process_date = datetime(process_date.year, process_date.month, 1)
    # Last day of the current month: first day of next month minus one day.
    last_date_of_process_date_ = datetime(process_date.year, process_date.month, 1) + timedelta(days=31)
    last_date_of_process_date = datetime(last_date_of_process_date_.year, last_date_of_process_date_.month, 1) - timedelta(days=1)
    # Current month date filter
    date_query_tuple = (
        BuildDecisionBookPayments.process_date >= first_date_of_process_date,
        BuildDecisionBookPayments.process_date <= last_date_of_process_date
    )
    # Exact-equality check works because each payment is capped by
    # min(debt, money_in_account) below, so money_paid never overshoots.
    fund_finished = lambda money_spend, money_in_account: money_spend == money_in_account
    # Refresh remainder balances before spending from the accounts.
    update_all_spent_accounts(session)
    for account_record in account_records:
        # "or 0" fixes a crash: the filter above explicitly admits rows whose
        # remainder_balance is NULL, and abs(None) raises TypeError.
        money_in_account = abs(account_record.currency_value) - abs(account_record.remainder_balance or 0)
        money_paid = 0
        build_parts_id = account_record.build_parts_id
        # Process each payment type in priority order
        for payment_type in payment_type_list:
            # Stop as soon as this account's funds are exhausted.
            if fund_finished(money_paid, money_in_account):
                break
            # Get unpaid debts for this payment type
            unpaid_debts = get_unpaid_debts(build_parts_id=build_parts_id, session=session, debit_type=payment_type, date_query=date_query_tuple)
            # Process each unpaid debt; each tuple is (ref_id, process_date, total_payments)
            for debt in unpaid_debts:
                if not money_in_account > 0:
                    update_account_remainder_if_spent(account_record, debt[0], session)
                    break
                # Remaining amount to pay for this debt
                debt_to_pay = debt[2]
                # Skip if nothing to pay
                if debt_to_pay <= 0:
                    continue
                # Determine amount to pay based on available funds
                payment_amount = min(debt_to_pay, money_in_account)
                # Make payment: copy the debit row and write the credit entry.
                debt_to_copy = BuildDecisionBookPayments.query.filter_by(ref_id=debt[0]).first()
                close_payment_book(debt_to_copy, account_record, payment_amount, session)
                # Update counters
                money_in_account -= payment_amount
                money_paid += payment_amount
                payments_made += 1
                total_amount_paid += payment_amount
                if not money_in_account > 0:
                    update_account_remainder_if_spent(account_record, debt[0], session)
                    break
    # Refresh remainder balances after spending.
    update_all_spent_accounts(session)
    # Commit all changes to the database
    session.commit()
    # Print summary
    print("\nCURRENT MONTH PAYMENT SUMMARY:")
    print(f"Total payments made: {payments_made}")
    print(f"Total amount paid: {total_amount_paid:,.2f} TL")
def do_payments_of_previos_months():
    """Process payments for previous months' unpaid debts.

    (The historical "previos" spelling is kept because callers reference it.)

    For each funded account (newest bank_date first) the debts of the
    account's own bank-date month are paid first; if that month has no unpaid
    debts for a payment type, the search widens to every month before the
    current one. Each payment is wrapped in try/except with rollback so a
    single failure does not abort the batch.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    # Get payment types in priority order
    payment_type_list = get_enums_from_database()
    # Accounts with funds left. Unlike the current-month variant there is no
    # IS NULL branch: SQL NULL comparison silently excludes never-paid rows.
    account_query = AccountRecords.query.filter(
        AccountRecords.build_parts_id.isnot(None),
        AccountRecords.currency_value > 0,
        AccountRecords.remainder_balance < AccountRecords.currency_value,
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.bank_date.desc())
    account_records = account_query.all()
    payments_made = 0
    total_amount_paid = 0
    # >= (not ==) guards against money_paid overshooting money_in_account.
    fund_finished = lambda money_spend, money_in_account: money_spend >= money_in_account
    # Refresh remainder balances before spending from the accounts.
    update_all_spent_accounts(session)
    for account_record in account_records:
        # Window = the calendar month the money arrived in (bank_date month):
        # first day .. last day (first of next month minus one day).
        process_date_begins = datetime(account_record.bank_date.year, account_record.bank_date.month, 1)
        process_date_ends_ = datetime(account_record.bank_date.year, account_record.bank_date.month, 1)+ timedelta(days=31)
        process_date_ends = datetime(process_date_ends_.year, process_date_ends_.month, 1)- timedelta(days=1)
        date_query_tuple = (BuildDecisionBookPayments.process_date >= process_date_begins, BuildDecisionBookPayments.process_date <= process_date_ends)
        money_in_account = abs(account_record.currency_value) - abs(account_record.remainder_balance)
        money_paid = 0
        build_parts_id = account_record.build_parts_id
        # Process each payment type in priority order
        for payment_type in payment_type_list:
            # Stop as soon as this account's funds are exhausted.
            if fund_finished(money_paid, money_in_account):
                break
            # Get unpaid debts for this payment type
            unpaid_debts = get_unpaid_debts(build_parts_id=build_parts_id, session=session, debit_type=payment_type, date_query=date_query_tuple)
            if not len(unpaid_debts) > 0:
                # Fallback: widen the window to every month before the current one.
                process_date = datetime.now()
                process_date_ends = datetime(process_date.year, process_date.month, 1)
                date_query_tuple = (BuildDecisionBookPayments.process_date < process_date_ends, )
                unpaid_debts = get_unpaid_debts(build_parts_id=build_parts_id, session=session, debit_type=payment_type, date_query=date_query_tuple)
            # Process each unpaid debt; each tuple is (ref_id, process_date, total_payments)
            for debt in unpaid_debts:
                if not money_in_account > 0:
                    update_account_remainder_if_spent(account_record, debt[0], session)
                    break
                # Check if all money is spent and update remainder balance if needed
                if fund_finished(money_paid, money_in_account):
                    break
                # Remaining amount to pay for this debt
                debt_to_pay = debt[2]
                # Skip if nothing to pay
                if debt_to_pay <= 0:
                    continue
                # Determine amount to pay based on available funds
                payment_amount = min(debt_to_pay, money_in_account)
                # Make payment
                try:
                    # Create payment record from a copy of the debit row
                    debt_to_copy = BuildDecisionBookPayments.query.filter_by(ref_id=debt[0]).first()
                    new_payment = close_payment_book(debt_to_copy, account_record, payment_amount, session)
                    # Verify the payment was created
                    if new_payment and new_payment.id:
                        # Update counters
                        money_in_account -= payment_amount
                        money_paid += payment_amount
                        payments_made += 1
                        total_amount_paid += payment_amount
                        # Flush changes to ensure they are visible in subsequent queries
                        session.flush()
                    else:
                        session.rollback()
                        continue
                except Exception as e:
                    # Best-effort batch: log and skip the failing debt.
                    print(f"Debt : {debt[0]} -> Exception: {e}")
                    session.rollback()
                    continue
                if not money_in_account > 0:
                    update_account_remainder_if_spent(account_record, debt[0], session)
                    break
    # Refresh remainder balances after spending.
    update_all_spent_accounts(session)
    # Commit all changes to the database
    session.commit()
    # Print summary
    print("\nPREVIOUS MONTHS PAYMENT SUMMARY:")
    print(f"Total payments made: {payments_made}")
    print(f"Total amount paid: {total_amount_paid:,.2f} TL")
# /draft/draft-first-work.py
if __name__ == "__main__":
    # Entry point: run current-month payments, retry previous-month payments
    # until debit/credit row counts match (max 4 passes), then analyze.
    start_time = perf_counter()
    print("\n===== PROCESSING PAYMENTS =====\n")
    print("Starting payment processing at:", datetime.now())
    # Process payments for current month first
    print("\n1. Processing current month payments...")
    do_payments_of_this_month()
    # Then process payments for previous months with remaining funds
    print("\n2. Processing previous months payments...")
    attempt = 4
    while True:
        # NOTE(review): these .query calls rely on the model session set
        # inside do_payments_of_this_month() above — confirm it stays bound.
        total_debit = BuildDecisionBookPayments.query.filter(BuildDecisionBookPayments.account_is_debit == True).count()
        total_credit = BuildDecisionBookPayments.query.filter(BuildDecisionBookPayments.account_is_debit == False).count()
        if total_debit != total_credit:
            do_payments_of_previos_months()
            attempt -= 1
            if attempt == 0:
                break
        else:
            break
    print("\n===== PAYMENT PROCESSING COMPLETE =====\n")
    print("Payment processing completed at:", datetime.now())
    # Analyze the payment situation after processing payments
    print("\n===== ANALYZING PAYMENT SITUATION AFTER PROCESSING =====\n")
    analyze_payment_function()
    end_time = perf_counter()
    print(f"\n{end_time - start_time:.3f} : seconds")
# # Create a subquery to get the sum of payments for each debit's uu_id
# # For credit entries, ref_id points to the original debit's uu_id
# payment_sums = session.query(
# BuildDecisionBookPayments.ref_id.label('original_debt_id'),
# func.sum(func.abs(BuildDecisionBookPayments.payment_amount)).label('payment_sum')
# ).filter(
# BuildDecisionBookPayments.account_is_debit == False # Credit entries only
# ).group_by(BuildDecisionBookPayments.ref_id).subquery()
# # Main query to find debits with their payment sums
# query = session.query(BuildDecisionBookPayments)
# # Join with payment sums - cast uu_id to string to match ref_id type
# query = query.outerjoin(
# payment_sums,
# func.cast(BuildDecisionBookPayments.uu_id, String) == payment_sums.c.original_debt_id
# )
# # Filter for debits of the specified build part and payment type
# query = query.filter(
# BuildDecisionBookPayments.build_parts_id == build_parts_id,
# BuildDecisionBookPayments.payment_types_id == debit_type.id,
# BuildDecisionBookPayments.account_is_debit == True, # Debit entries only
# )
# # Apply date filters if provided
# if date_query:
# for date_filter in date_query:
# query = query.filter(date_filter)
# # Filter for debits that are not fully paid
# # (payment_sum < debit_amount or payment_sum is NULL)
# query = query.filter(
# or_(
# payment_sums.c.payment_sum.is_(None),
# func.coalesce(payment_sums.c.payment_sum, 0) < func.abs(BuildDecisionBookPayments.payment_amount)
# )
# )
# # Execute the query and return the results
# results = query.order_by(BuildDecisionBookPayments.process_date).all()s

View File

@ -0,0 +1,188 @@
#!/usr/bin/env python3
# Debug script to test payment processing functions directly
import sys
import os
from datetime import datetime
from time import perf_counter
from sqlalchemy import func
# Import directly from the draft-first-work.py file in the same directory
sys.path.insert(0, '/draft')
# Import the necessary functions and classes directly from the file
from draft_first_work import * # Import everything for simplicity
def debug_find_unpaid_debts(build_parts_id, session):
    """Find unpaid debts directly with ORM queries, for debugging.

    A debit counts as unpaid when its account_records_id is NULL. For each
    payment type, prints the generated SQL, the hit count, and (for the first
    five hits) per-debt details plus the credit total already paid.

    Args:
        build_parts_id: Build part to inspect.
        session: Database session.
    """
    print(f"\nDEBUG: Finding unpaid debts for build part ID: {build_parts_id}")
    # Set session for the model
    BuildDecisionBookPayments.set_session(session)
    # Get payment types
    payment_types = get_enums_from_database()
    for payment_type in payment_types:
        print(f"\nChecking payment type: {payment_type.key}")
        # Find debits that don't have account_records_id set (unpaid)
        query = session.query(BuildDecisionBookPayments).filter(
            BuildDecisionBookPayments.build_parts_id == build_parts_id,
            BuildDecisionBookPayments.payment_types_id == payment_type.id,
            BuildDecisionBookPayments.account_is_debit == True,
            BuildDecisionBookPayments.account_records_id.is_(None)
        )
        # Print the SQL query
        print(f"SQL Query: {query}")
        # Execute the query
        unpaid_debts = query.all()
        print(f"Found {len(unpaid_debts)} unpaid debts")
        # Print details of each unpaid debt
        for i, debt in enumerate(unpaid_debts[:5]):  # Limit to first 5 for brevity
            print(f" Debt {i+1}:")
            print(f" ID: {debt.id}")
            print(f" UUID: {debt.uu_id}")
            print(f" Amount: {abs(debt.payment_amount):,.2f} TL")
            print(f" Process Date: {debt.process_date}")
            print(f" Account Records ID: {debt.account_records_id}")
            # Check if any payments have been made for this debt — credit rows
            # carry the debit's uu_id in their ref_id column.
            credit_query = session.query(
                func.sum(func.abs(BuildDecisionBookPayments.payment_amount))
            ).filter(
                BuildDecisionBookPayments.ref_id == str(debt.uu_id),
                BuildDecisionBookPayments.account_is_debit == False
            )
            credit_amount = credit_query.scalar() or 0
            print(f" Payments made: {credit_amount:,.2f} TL")
            print(f" Remaining to pay: {abs(debt.payment_amount) - credit_amount:,.2f} TL")
def debug_process_payment(build_parts_id, session):
    """Process a single payment for debugging purposes.

    Picks the newest funded account for the build part, the highest-priority
    payment type, and the first unpaid debit, then pays min(debt, funds) and
    commits — printing every intermediate state along the way.

    Args:
        build_parts_id: Build part to process.
        session: Database session.
    """
    print(f"\nDEBUG: Processing payment for build part ID: {build_parts_id}")
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    # Get payment types in priority order
    payment_type_list = get_enums_from_database()
    # Get account records with positive currency_value
    account_query = AccountRecords.query.filter(
        AccountRecords.build_parts_id == build_parts_id,
        AccountRecords.currency_value > 0,
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.id.desc()).limit(5)
    print(f"Account query: {account_query}")
    account_records = account_query.all()
    print(f"Found {len(account_records)} account records with funds")
    # Print account details
    for i, account in enumerate(account_records):
        # NOTE(review): remainder_balance may be NULL on never-paid accounts;
        # abs(None) would raise here — confirm these rows always carry a value.
        available_funds = abs(account.currency_value) - abs(account.remainder_balance)
        print(f" Account {i+1}: ID: {account.id}, Build Part ID: {account.build_parts_id}, Value: {account.currency_value:,.2f} TL, Available: {available_funds:,.2f} TL")
    if not account_records:
        print("No account records found with funds. Cannot process payments.")
        return
    # Use the first account with funds
    account_record = account_records[0]
    money_in_account = abs(account_record.currency_value) - abs(account_record.remainder_balance)
    print(f"\nUsing account ID: {account_record.id} with available funds: {money_in_account:,.2f} TL")
    # Get the first payment type
    payment_type = payment_type_list[0]
    print(f"Using payment type: {payment_type.key}")
    # Find unpaid debts for this payment type (debit with no account attached)
    query = session.query(BuildDecisionBookPayments).filter(
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.payment_types_id == payment_type.id,
        BuildDecisionBookPayments.account_is_debit == True,
        BuildDecisionBookPayments.account_records_id.is_(None)
    ).limit(1)
    print(f"Unpaid debt query: {query}")
    unpaid_debt = query.first()
    if not unpaid_debt:
        print(f"No unpaid debts found for payment type {payment_type.key}")
        return
    print(f"\nFound unpaid debt:")
    print(f" ID: {unpaid_debt.id}")
    print(f" UUID: {unpaid_debt.uu_id}")
    print(f" Amount: {abs(unpaid_debt.payment_amount):,.2f} TL")
    # Calculate amount to pay — capped by the available funds
    debt_amount = abs(unpaid_debt.payment_amount)
    payment_amount = min(debt_amount, money_in_account)
    print(f"\nProcessing payment:")
    print(f" Debt amount: {debt_amount:,.2f} TL")
    print(f" Available funds: {money_in_account:,.2f} TL")
    print(f" Will pay: {payment_amount:,.2f} TL")
    # Make payment
    try:
        print("Creating payment record...")
        # Snapshot the debit row before the payment for comparison below.
        before_state = session.query(BuildDecisionBookPayments).filter(
            BuildDecisionBookPayments.uu_id == unpaid_debt.uu_id
        ).first()
        print(f"Before payment - account_records_id: {before_state.account_records_id}")
        new_payment = close_payment_book(unpaid_debt, account_record, payment_amount, session)
        # Verify the payment was created by re-reading the debit row.
        after_state = session.query(BuildDecisionBookPayments).filter(
            BuildDecisionBookPayments.uu_id == unpaid_debt.uu_id
        ).first()
        print(f"After payment - account_records_id: {after_state.account_records_id}")
        # Check if the new payment record was created
        if new_payment:
            print(f"New payment record created with ID: {new_payment.id}")
            print(f"New payment amount: {abs(new_payment.payment_amount):,.2f} TL")
            print(f"New payment ref_id: {new_payment.ref_id}")
            print(f"New payment account_is_debit: {new_payment.account_is_debit}")
        else:
            print("Failed to create new payment record")
        # Commit the transaction
        session.commit()
        print("Transaction committed successfully")
    except Exception as e:
        session.rollback()
        print(f"Error making payment: {str(e)}")
        print("Transaction rolled back")
if __name__ == "__main__":
    # Debug entry point: inspect unpaid debts and process a single payment
    # for one hard-coded build part.
    start_time = perf_counter()
    print("\n===== PAYMENT PROCESSING DEBUG =====\n")
    # Create a session
    session_factory = get_session_factory()
    session = session_factory()
    # Set the build part ID to debug
    build_part_id = 14  # Change this to the build part ID you want to debug
    # Find unpaid debts
    debug_find_unpaid_debts(build_part_id, session)
    # Process a single payment
    debug_process_payment(build_part_id, session)
    end_time = perf_counter()
    print(f"\nDebug completed in {end_time - start_time:.3f} seconds")

View File

@ -0,0 +1,104 @@
from Schemas import AccountRecords, BuildDecisionBookPayments
from Controllers.Postgres.engine import get_session_factory
from sqlalchemy import func
from decimal import Decimal
def debug_remainder_balance():
    """Diagnose and partially repair AccountRecords.remainder_balance drift.

    Prints counts of funded accounts by remainder state, samples accounts
    that received payments yet show a zero remainder, then rewrites
    remainder_balance from the credit-row sum for the first five paying
    accounts.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set sessions for models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    print("\n" + "=" * 50)
    print("DEBUGGING REMAINDER BALANCE ISSUES")
    print("=" * 50)
    # Get counts of accounts, split by remainder_balance state
    total_accounts = session.query(AccountRecords).filter(
        AccountRecords.currency_value > 0
    ).count()
    zero_remainder = session.query(AccountRecords).filter(
        AccountRecords.currency_value > 0,
        AccountRecords.remainder_balance == 0
    ).count()
    nonzero_remainder = session.query(AccountRecords).filter(
        AccountRecords.currency_value > 0,
        AccountRecords.remainder_balance != 0
    ).count()
    print(f"Total accounts with positive currency_value: {total_accounts}")
    print(f"Accounts with zero remainder_balance: {zero_remainder}")
    print(f"Accounts with non-zero remainder_balance: {nonzero_remainder}")
    # Get distinct account IDs with payments (credit rows)
    distinct_account_ids = session.query(BuildDecisionBookPayments.account_records_id).filter(
        BuildDecisionBookPayments.account_records_id.isnot(None),
        BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
    ).distinct().all()
    print(f"\nDistinct account IDs with payments: {len(distinct_account_ids)}")
    # Sample some accounts with zero remainder_balance but have payments
    print("\nSampling accounts with zero remainder_balance:")
    sample_count = 0
    for account_id_tuple in distinct_account_ids[:10]:  # Check first 10 accounts with payments
        account_id = account_id_tuple[0]
        # Get the account record; only zero-remainder accounts are of interest
        account = AccountRecords.query.get(account_id)
        if not account or account.remainder_balance != 0:
            continue
        # Calculate the sum of payments made using this account
        payment_sum = session.query(
            func.sum(BuildDecisionBookPayments.payment_amount)
        ).filter(
            BuildDecisionBookPayments.account_records_id == account_id,
            BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
        ).scalar() or 0
        print(f" Account {account_id}: Currency Value={abs(account.currency_value):,.2f} TL, Payments={abs(payment_sum):,.2f} TL, Remainder={account.remainder_balance}")
        sample_count += 1
    if sample_count == 0:
        print(" No accounts found with zero remainder_balance that have payments")
    # Now let's fix a sample of accounts
    print("\nFixing sample accounts with zero remainder_balance:")
    fixed_count = 0
    for account_id_tuple in distinct_account_ids[:5]:  # Fix first 5 accounts with payments
        account_id = account_id_tuple[0]
        # Get the account record; skip rows without a build part or funds
        account = AccountRecords.query.get(account_id)
        if not account or not account.build_parts_id or account.currency_value <= 0:
            continue
        # Calculate the sum of payments made using this account
        payment_sum = session.query(
            func.sum(BuildDecisionBookPayments.payment_amount)
        ).filter(
            BuildDecisionBookPayments.account_records_id == account_id,
            BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
        ).scalar() or 0
        old_remainder = account.remainder_balance
        # Update remainder_balance for this account from the credit-row sum
        account.remainder_balance = payment_sum
        account.save()
        fixed_count += 1
        print(f" Fixed Account {account_id}: Old remainder={old_remainder}, New remainder={account.remainder_balance:,.2f} TL")
    print(f"\nTotal accounts fixed in this run: {fixed_count}")
    print("=" * 50)
# Entry point: run the remainder-balance diagnostic when executed directly.
if __name__ == "__main__":
    debug_remainder_balance()

View File

@ -0,0 +1,346 @@
import arrow
from decimal import Decimal
from datetime import datetime, timedelta
from Schemas import BuildDecisionBookPayments, AccountRecords, ApiEnumDropdown
from time import perf_counter
import time
from sqlalchemy import cast, Date, String
from Controllers.Postgres.engine import get_session_factory
# from ServicesApi.Schemas.account.account import AccountRecords
# from ServicesApi.Schemas.building.decision_book import BuildDecisionBookPayments
# Helper function to calculate available funds
def df_fund(account_income, total_paid):
    """Return the funds still available: |income received| minus |already paid|."""
    available = abs(account_income) - abs(total_paid)
    return available
class BuildDuesTypes:
    """Holder for the six dues-type enum entries loaded from ApiEnumDropdown.

    All slots start as None and are populated by get_enums_from_database();
    each then holds an ApiEnumDropdownShallowCopy.
    """

    def __init__(self):
        self.debit: ApiEnumDropdownShallowCopy = None           # enum key "BDT-D"
        self.add_debit: ApiEnumDropdownShallowCopy = None       # enum key "BDT-A"
        self.renovation: ApiEnumDropdownShallowCopy = None      # enum key "BDT-R"
        self.lawyer_expence: ApiEnumDropdownShallowCopy = None  # enum key "BDT-L" (spelling kept from schema)
        self.service_fee: ApiEnumDropdownShallowCopy = None     # enum key "BDT-S"
        self.information: ApiEnumDropdownShallowCopy = None     # enum key "BDT-I"
class ApiEnumDropdownShallowCopy:
    """Detached, plain-Python snapshot of an ApiEnumDropdown row.

    Holds only builtin values so it stays usable after the DB session closes.
    """

    id: int
    uuid: str
    enum_class: str
    key: str
    value: str

    def __init__(self, id: int, uuid: str, enum_class: str, key: str, value: str):
        # Bind each constructor argument to the attribute of the same name.
        for attr_name, supplied in (
            ("id", id),
            ("uuid", uuid),
            ("enum_class", enum_class),
            ("key", key),
            ("value", value),
        ):
            setattr(self, attr_name, supplied)
def find_master_payment_value(build_parts_id: int, process_date: datetime, session, debit_type):
    """Find the most recent unattached master debit for a part in the given month.

    A "master" debit row has account_records_id IS NULL and account_is_debit True.

    Returns:
        (abs(payment_amount), row, ref_id) for the newest matching debit, or
        (0, None, None) when no such debit exists.
    """
    BuildDecisionBookPayments.set_session(session)
    debit_filters = [
        BuildDecisionBookPayments.process_date_m == process_date.month,
        BuildDecisionBookPayments.process_date_y == process_date.year,
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_records_id.is_(None),
        BuildDecisionBookPayments.account_is_debit == True,
        BuildDecisionBookPayments.payment_types_id == debit_type.id,
    ]
    debit_row = (
        BuildDecisionBookPayments.query.filter(*debit_filters)
        .order_by(BuildDecisionBookPayments.process_date.desc())
        .first()
    )
    if debit_row is None:
        return 0, None, None
    return abs(debit_row.payment_amount), debit_row, str(debit_row.ref_id)
def calculate_paid_amount_for_master(ref_id: str, session, debit_amount):
    """Calculate how much remains to pay for a given payment reference.

    Args:
        ref_id: The reference ID to check payments for
        session: Database session
        debit_amount: Original debit amount

    Returns:
        float: Remaining amount to pay (abs(debit_amount) - total paid); the
        original debit_amount unchanged when no credit rows exist yet.
    """
    BuildDecisionBookPayments.set_session(session)
    credit_rows = (
        BuildDecisionBookPayments.query.filter(
            BuildDecisionBookPayments.ref_id == ref_id,
            BuildDecisionBookPayments.account_records_id.isnot(None),
            BuildDecisionBookPayments.account_is_debit == False,
        )
        .order_by(BuildDecisionBookPayments.process_date.desc())
        .all()
    )
    if not credit_rows:
        return debit_amount
    total_paid = sum(abs(credit_row.payment_amount) for credit_row in credit_rows)
    return abs(debit_amount) - abs(total_paid)
def find_master_payment_value_previous(build_parts_id: int, process_date: datetime, session, debit_type):
    """Return all unattached master debit rows dated before process_date's month.

    The cutoff is the last day of the month preceding process_date, so the
    current month's debits are excluded.  Rows come back newest-first.
    """
    BuildDecisionBookPayments.set_session(session)
    # Last day of the previous month = first day of this month minus one day.
    current_month_start = datetime(process_date.year, process_date.month, 1)
    cutoff_date = current_month_start - timedelta(days=1)
    previous_debit_filters = (
        BuildDecisionBookPayments.process_date < cutoff_date,
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_records_id.is_(None),
        BuildDecisionBookPayments.account_is_debit == True,
        BuildDecisionBookPayments.payment_types_id == debit_type.id,
    )
    return (
        BuildDecisionBookPayments.query.filter(*previous_debit_filters)
        .order_by(BuildDecisionBookPayments.process_date.desc())
        .all()
    )
def find_amount_to_pay(build_parts_id: int, process_date: datetime, session, debit_type):
    """Return (outstanding_amount, debit_row) for this month's master debit.

    The outstanding amount is the debit's absolute value minus any credit rows
    already attached to its ref_id.
    """
    # Negative value that still needs to be paid for the current month.
    debit_amount, debit_row, debit_ref_id = find_master_payment_value(
        build_parts_id=build_parts_id,
        process_date=process_date,
        session=session,
        debit_type=debit_type,
    )
    # Subtract whatever has already been paid against this ref_id.
    outstanding = calculate_paid_amount_for_master(
        ref_id=debit_ref_id, session=session, debit_amount=debit_amount
    )
    return outstanding, debit_row
def calculate_total_debt_for_account(build_parts_id: int, session):
    """Calculate the total debt and total paid amount for a part, ignoring dates.

    Returns:
        tuple: (total_debt, total_paid) as absolute sums over every master
        debit row and every credit row referencing one of those debits.
    """
    BuildDecisionBookPayments.set_session(session)
    master_debit_filters = (
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_records_id.is_(None),
        BuildDecisionBookPayments.account_is_debit == True,
    )
    debit_rows = BuildDecisionBookPayments.query.filter(*master_debit_filters).all()
    total_debt = sum(abs(debit_row.payment_amount) for debit_row in debit_rows)
    # Accumulate credits per debit reference (empty result contributes zero).
    total_paid = 0
    for debit_row in debit_rows:
        credit_rows = BuildDecisionBookPayments.query.filter(
            BuildDecisionBookPayments.ref_id == debit_row.ref_id,
            BuildDecisionBookPayments.account_is_debit == False,
        ).all()
        total_paid += sum(abs(credit_row.payment_amount) for credit_row in credit_rows)
    return total_debt, total_paid
def refresh_book_payment(account_record: AccountRecords):
    """Update the remainder_balance of an account record based on attached payments.

    Sums every credit row attached to the account record and stores that total
    in remainder_balance.  The remainder_balance represents funds that have
    been received and allocated so far.

    Fix: removed the unused local `old_balance` and the redundant emptiness
    check (sum over an empty sequence is already 0).

    Args:
        account_record: The account record to update.

    Returns:
        The total payment amount attached to this account record.
    """
    attached_payments = BuildDecisionBookPayments.query.filter(
        BuildDecisionBookPayments.account_records_id == account_record.id,
        BuildDecisionBookPayments.account_is_debit == False,
    ).all()
    total_payment = sum(abs(row.payment_amount) for row in attached_payments) if attached_payments else 0
    # Always update the remainder_balance, even if no payments are attached.
    # This ensures we track unallocated funds properly.
    account_record.update(remainder_balance=total_payment)
    account_record.save()
    return total_payment
def close_payment_book(payment_row_book, account_record, value, session):
    """Record a credit of `value` against the given master debit row.

    Creates a new BuildDecisionBookPayments row mirroring the master debit but
    attached to `account_record`, with ref_id pointing back at the master row.
    """
    BuildDecisionBookPayments.set_session(session)
    credit_fields = dict(
        ref_id=str(payment_row_book.uu_id),
        payment_plan_time_periods=payment_row_book.payment_plan_time_periods,
        period_time=payment_row_book.period_time,
        currency=payment_row_book.currency,
        account_records_id=account_record.id,
        account_records_uu_id=str(account_record.uu_id),
        build_parts_id=payment_row_book.build_parts_id,
        build_parts_uu_id=str(payment_row_book.build_parts_uu_id),
        payment_amount=value,
        payment_types_id=payment_row_book.payment_types_id,
        payment_types_uu_id=str(payment_row_book.payment_types_uu_id),
        process_date_m=payment_row_book.process_date.month,
        process_date_y=payment_row_book.process_date.year,
        process_date=payment_row_book.process_date,
        build_decision_book_item_id=payment_row_book.build_decision_book_item_id,
        build_decision_book_item_uu_id=str(payment_row_book.build_decision_book_item_uu_id),
        decision_book_project_id=payment_row_book.decision_book_project_id,
        decision_book_project_uu_id=str(payment_row_book.decision_book_project_uu_id),
        is_confirmed=True,
        account_is_debit=False,
    )
    return BuildDecisionBookPayments.create(**credit_fields).save()
def get_enums_from_database():
    """Load the six BuildDuesTypes enum rows and return them in payment order.

    Fix: the six copy-pasted query/copy stanzas are replaced with one
    data-driven loop; the lookups, the shallow-copy construction, and the
    returned priority order are unchanged.

    Returns:
        list[ApiEnumDropdownShallowCopy]: dues types in the order payments
        must be applied: debit, lawyer expence, add debit, renovation,
        service fee, information.
    """
    build_dues_types = BuildDuesTypes()
    # (attribute on BuildDuesTypes, enum key in ApiEnumDropdown)
    enum_lookups = (
        ("debit", "BDT-D"),           # Debit
        ("add_debit", "BDT-A"),       # Add Debit
        ("renovation", "BDT-R"),      # Renovation
        ("lawyer_expence", "BDT-L"),  # Lawyer expence
        ("service_fee", "BDT-S"),     # Service fee
        ("information", "BDT-I"),     # Information
    )
    with ApiEnumDropdown.new_session() as session:
        ApiEnumDropdown.set_session(session)
        for attr_name, enum_key in enum_lookups:
            enum_row = ApiEnumDropdown.query.filter_by(
                enum_class="BuildDuesTypes", key=enum_key
            ).first()
            setattr(
                build_dues_types,
                attr_name,
                ApiEnumDropdownShallowCopy(
                    enum_row.id, str(enum_row.uu_id), enum_row.enum_class, enum_row.key, enum_row.value
                ),
            )
    # Payment priority order (lawyer expence is paid right after plain debit).
    return [
        build_dues_types.debit,
        build_dues_types.lawyer_expence,
        build_dues_types.add_debit,
        build_dues_types.renovation,
        build_dues_types.service_fee,
        build_dues_types.information,
    ]
def payment_function():
    """Allocate incoming account funds to open dues debits.

    For each AccountRecords row with money received since 2022-01-01, the
    still-unallocated part of the funds is spent first on the current month's
    master debits (in `order_pay` priority order) and then on older unpaid
    debits; every allocation is persisted as a credit row by
    close_payment_book(), and remainder_balance is refreshed after each write.

    Side effects only: new BuildDecisionBookPayments rows and updated
    AccountRecords.remainder_balance values.  NOTE(review): the session is
    never explicitly committed or closed here — presumably the model save()
    helpers commit; confirm.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models used in this pass.
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    # Dues types in the exact order they must be paid.
    order_pay = get_enums_from_database()
    # Get account records with positive currency_value regardless of remainder_balance.
    # This ensures accounts with unallocated funds are processed.
    account_records = AccountRecords.query.filter(
        AccountRecords.build_parts_id.isnot(None),
        AccountRecords.currency_value > 0,
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.build_parts_id.desc()).all()
    start_time = time.time()  # NOTE(review): assigned but never read afterwards
    for account_record in account_records:
        incoming_total_money = abs(account_record.currency_value)
        # Re-derive how much of this record is already attached to payments.
        total_paid = refresh_book_payment(account_record)
        available_fund = df_fund(incoming_total_money, total_paid)
        # Calculate total debt and payment status for this account.
        total_debt, already_paid = calculate_total_debt_for_account(account_record.build_parts_id, session)
        remaining_debt = total_debt - already_paid
        # Skip accounts with no debt and zero remainder balance.
        if remaining_debt <= 0 and account_record.remainder_balance == 0:
            continue
        # Skip accounts with no available funds.
        if not available_fund > 0.0:
            continue
        process_date = datetime.now()
        # Try to pay current month first, one dues type at a time.
        for debit_type in order_pay:
            amount_to_pay, debit_row = find_amount_to_pay(
                build_parts_id=account_record.build_parts_id,
                process_date=process_date,
                session=session,
                debit_type=debit_type
            )
            if amount_to_pay > 0 and debit_row:
                if amount_to_pay >= available_fund:
                    # Outstanding debt is at least the remaining funds: spend everything.
                    close_payment_book(
                        payment_row_book=debit_row,
                        account_record=account_record,
                        value=available_fund,
                        session=session
                    )
                    total_paid = refresh_book_payment(account_record)
                    available_fund = df_fund(incoming_total_money, total_paid)
                else:
                    # Funds cover the whole outstanding amount: close it fully.
                    close_payment_book(
                        payment_row_book=debit_row,
                        account_record=account_record,
                        value=amount_to_pay,
                        session=session
                    )
                    total_paid = refresh_book_payment(account_record)
                    available_fund = df_fund(incoming_total_money, total_paid)
        # Funds exhausted on the current month: move to the next account.
        if not available_fund > 0.0:
            continue
        # Try to pay previous unpaid debts.
        should_continue = False
        for debit_type in order_pay:
            debit_rows = find_master_payment_value_previous(
                build_parts_id=account_record.build_parts_id,
                process_date=process_date,
                session=session,
                debit_type=debit_type
            )
            if not debit_rows:
                continue
            for debit_row in debit_rows:
                amount_to_pay = calculate_paid_amount_for_master(
                    ref_id=debit_row.ref_id,
                    session=session,
                    debit_amount=debit_row.payment_amount
                )
                # Skip if already fully paid.
                if not amount_to_pay > 0:
                    continue
                if amount_to_pay >= available_fund:
                    close_payment_book(
                        payment_row_book=debit_row,
                        account_record=account_record,
                        value=available_fund,
                        session=session
                    )
                    total_paid = refresh_book_payment(account_record)
                    available_fund = df_fund(incoming_total_money, total_paid)
                    # All money spent: stop scanning older debits.
                    should_continue = True
                    break
                else:
                    close_payment_book(
                        payment_row_book=debit_row,
                        account_record=account_record,
                        value=amount_to_pay,
                        session=session
                    )
                    total_paid = refresh_book_payment(account_record)
                    available_fund = df_fund(incoming_total_money, total_paid)
            if should_continue or not available_fund > 0.0:
                break
        if not available_fund > 0.0:
            continue  # Changed from break to continue to process next account record
if __name__ == "__main__":
    # Run the allocation pass and report its wall-clock duration.
    started = perf_counter()
    payment_function()
    elapsed = perf_counter() - started
    print(f'{elapsed:.3f} : seconds')

View File

@ -0,0 +1,493 @@
class AccountRecordsShallowCopy:
    """Detached, plain-attribute snapshot of an AccountRecords ORM row.

    Instances are populated field-by-field in row_iteration_account_records()
    so the data survives after the database session is closed.  All *_uuid
    fields are stored as str.
    """

    # Identity
    id: int
    uuid: str
    # Bank movement details
    iban: str
    currency_value: Decimal      # amount on the bank record (positive candidates are selected)
    remainder_balance: Decimal   # allocation tracker; sign convention set in close_account_records — confirm
    bank_date: datetime
    process_type: int
    receive_debit: int
    receive_debit_uuid: str
    # Ownership / location links
    living_space_id: int
    living_space_uuid: str
    build_id: int
    build_uuid: str
    build_parts_id: int
    build_parts_uuid: str
class BuildDecisionBookPaymentsShallowCopy:
    """Detached snapshot of a BuildDecisionBookPayments row.

    Built via convert_row_to_shallow_copy(); UUID-like columns are stored as
    str so the object is safe to use outside the ORM session.
    """

    def __init__(self):
        # Bare annotations only: attributes are assigned by the classmethod below.
        self.id: int
        self.uuid: str
        self.payment_plan_time_periods: str
        self.process_date: datetime
        self.payment_amount: float
        self.currency: str
        self.payment_types_id: int
        self.payment_types_uu_id: str
        self.period_time: str
        self.process_date_y: int
        self.process_date_m: int
        self.build_decision_book_item_id: int
        self.build_decision_book_item_uu_id: str
        self.build_parts_id: int
        self.build_parts_uu_id: str
        self.decision_book_project_id: int
        self.decision_book_project_uu_id: str
        self.account_records_id: int
        self.account_records_uu_id: str

    @classmethod
    def convert_row_to_shallow_copy(cls, row: BuildDecisionBookPayments):
        """Copy the relevant columns off an ORM row into a detached instance."""
        copy_obj = cls()
        # Columns copied verbatim.
        verbatim_fields = (
            "id", "payment_plan_time_periods", "process_date", "payment_amount",
            "currency", "payment_types_id", "period_time", "process_date_y",
            "process_date_m", "build_decision_book_item_id", "build_parts_id",
            "decision_book_project_id", "account_records_id",
        )
        for field_name in verbatim_fields:
            setattr(copy_obj, field_name, getattr(row, field_name))
        # UUID-ish columns stringified; note the uu_id -> uuid rename.
        copy_obj.uuid = str(row.uu_id)
        copy_obj.ref_id = str(row.ref_id)
        stringified_fields = (
            "payment_types_uu_id", "build_decision_book_item_uu_id",
            "build_parts_uu_id", "decision_book_project_uu_id",
            "account_records_uu_id",
        )
        for field_name in stringified_fields:
            setattr(copy_obj, field_name, str(getattr(row, field_name)))
        return copy_obj
class ApiEnumDropdownShallowCopy:
    """Detached, plain-Python snapshot of an ApiEnumDropdown row.

    Holds only builtin values so it stays usable after the DB session closes.
    """

    id: int
    uuid: str
    enum_class: str
    key: str
    value: str

    def __init__(self, id: int, uuid: str, enum_class: str, key: str, value: str):
        # Bind each constructor argument to the attribute of the same name.
        for attr_name, supplied in (
            ("id", id),
            ("uuid", uuid),
            ("enum_class", enum_class),
            ("key", key),
            ("value", value),
        ):
            setattr(self, attr_name, supplied)
class BuildDuesTypes:
    """Holder for the six dues-type enum entries loaded from ApiEnumDropdown.

    All slots start as None and are populated by row_iteration_account_records();
    each then holds an ApiEnumDropdownShallowCopy.
    """

    def __init__(self):
        self.debit: ApiEnumDropdownShallowCopy = None           # enum key "BDT-D"
        self.add_debit: ApiEnumDropdownShallowCopy = None       # enum key "BDT-A"
        self.renovation: ApiEnumDropdownShallowCopy = None      # enum key "BDT-R"
        self.lawyer_expence: ApiEnumDropdownShallowCopy = None  # enum key "BDT-L" (spelling kept from schema)
        self.service_fee: ApiEnumDropdownShallowCopy = None     # enum key "BDT-S"
        self.information: ApiEnumDropdownShallowCopy = None     # enum key "BDT-I"
class PaymentsRows:
    """Open master debit rows for one account/month, bucketed by dues type.

    Each list holds BuildDecisionBookPaymentsShallowCopy instances; buckets
    are filled by callers from BuildDecisionBookPayments queries, ordered by
    process_date descending.
    """

    def __init__(self):
        self.debit: list[BuildDecisionBookPaymentsShallowCopy] = []
        self.add_debit: list[BuildDecisionBookPaymentsShallowCopy] = []
        self.renovation: list[BuildDecisionBookPaymentsShallowCopy] = []
        self.lawyer_expence: list[BuildDecisionBookPaymentsShallowCopy] = []
        self.service_fee: list[BuildDecisionBookPaymentsShallowCopy] = []
        self.information: list[BuildDecisionBookPaymentsShallowCopy] = []
class PaidRow:
    """One allocation of money against a master debit row.

    The constructor validates the allocation against the database and raises
    ValueError when the amount exceeds what is still owed on the reference.
    """

    def __init__(self, uuid: str, amount: Decimal, closed: bool, left_payment: Decimal | None = None):
        # uuid: ref_id / uu_id of the master debit row being paid.
        self.uuid: str = uuid
        # amount: how much is being paid in this allocation.
        self.amount: Decimal = amount
        # closed: True when this allocation fully settles the debit.
        self.closed: bool = closed
        self.left_payment: Decimal = Decimal(0)
        if not self.closed:
            # NOTE(review): when closed is False, callers must pass left_payment;
            # a None here propagates into left_payment — confirm callers always do.
            self.left_payment = left_payment
        if not self.check_transaction_is_valid():
            raise ValueError(f"Record uuid: {self.uuid} tries to pay more than its debt in records.")

    def check_transaction_is_valid(self):
        """Return True when this amount does not exceed the debt left on uuid.

        Opens its own short-lived session; a missing master row also counts
        as invalid (returns False).
        """
        with BuildDecisionBookPayments.new_session() as session:
            BuildDecisionBookPayments.set_session(session)
            # Master debit row: ref_id equals its own uu_id, no account attached.
            payment_row = BuildDecisionBookPayments.query.filter(
                BuildDecisionBookPayments.ref_id == self.uuid,
                cast(BuildDecisionBookPayments.uu_id, String) == cast(BuildDecisionBookPayments.ref_id, String),
                BuildDecisionBookPayments.account_records_id.is_(None),
            ).first()
            if not payment_row:
                return False
            # Credit rows: same ref_id but distinct uu_id, attached to an account.
            already_paid = BuildDecisionBookPayments.query.filter(
                BuildDecisionBookPayments.ref_id == self.uuid,
                cast(BuildDecisionBookPayments.uu_id, String) != cast(BuildDecisionBookPayments.ref_id, String),
                BuildDecisionBookPayments.account_records_id.isnot(None),
            ).all()
            already_paid = sum([abs(row.payment_amount) for row in already_paid])
            left_amount = abs(payment_row.payment_amount) - abs(already_paid)
            if left_amount < self.amount:
                print(f"left_amount: {left_amount}, self.amount: {self.amount}. Record uuid: {self.uuid} tries to pay more than its debt in records.")
                return False
            return True

    def get_dict(self):
        """Return the allocation as a plain dict (used for logging)."""
        return {
            "uuid": self.uuid,
            "amount": self.amount,
            "closed": self.closed,
            "left_payment": self.left_payment,
        }

    def __str__(self):
        return f"{self.uuid} = Paid: {self.amount} Left: {self.left_payment}"
class PaymentActions:
    """Tracks how a fixed pool of incoming money is spent across payment rows.

    Invariant: consumed_money + remaining_money == initial_money, and each
    consume() appends one PaidRow describing the allocation.

    Fix: in the partial-consumption branch, `consumed_money` was OVERWRITTEN
    with `remaining_money` instead of accumulated.  After any earlier full
    consume the total went wrong, `is_money_consumed()` could never become
    True, and the left_payment was computed from the corrupted total.  The
    amount paid now is captured first, then added to the running total.
    """

    def __init__(self, initial_money: Decimal):
        self.initial_money: Decimal = initial_money
        self.consumed_money: Decimal = Decimal(0)
        self.remaining_money: Decimal = self.initial_money
        self.paid_list: list[PaidRow] = []

    def is_money_consumed(self):
        """True once the whole initial pool has been spent."""
        return self.consumed_money == self.initial_money

    def consume(self, payment_due: Decimal, payment_uuid: str):
        """Spend up to `payment_due` from the pool against row `payment_uuid`.

        Returns:
            The portion of payment_due left unpaid (0 when fully covered).
        """
        left_payment = Decimal(0)
        if self.remaining_money >= payment_due:
            # Pool covers the whole due amount: close this row.
            self.consumed_money += abs(payment_due)
            self.remaining_money = abs(self.remaining_money) - abs(payment_due)
            self.paid_list.append(PaidRow(payment_uuid, payment_due, True))
        else:
            # Partial payment: spend whatever is left in the pool.
            paid_now = self.remaining_money
            self.consumed_money += abs(paid_now)  # accumulate, do not overwrite
            self.remaining_money = Decimal(0)
            left_payment = abs(payment_due) - abs(paid_now)
            self.paid_list.append(PaidRow(payment_uuid, paid_now, False, left_payment))
        return left_payment
def row_iteration_account_records():
    """Snapshot candidate account records and the dues-type enum entries.

    Returns:
        tuple: (list[AccountRecordsShallowCopy], BuildDuesTypes) — detached
        copies that remain usable after both DB sessions are closed.
    """
    shallow_copy_list = []
    build_dues_types = BuildDuesTypes()
    with ApiEnumDropdown.new_session() as session:
        ApiEnumDropdown.set_session(session)
        # One lookup per fixed dues-type enum key.
        debit_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-D").first()  # Debit
        add_debit_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-A").first()  # Add Debit
        renovation_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-R").first()  # Renovation
        late_payment_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-L").first()  # Lawyer expence
        service_fee_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-S").first()  # Service fee
        information_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-I").first()  # Information
        # Copy each row into a detached shallow object while the session is open.
        build_dues_types.debit = ApiEnumDropdownShallowCopy(
            debit_enum_shallow.id, str(debit_enum_shallow.uu_id), debit_enum_shallow.enum_class, debit_enum_shallow.key, debit_enum_shallow.value
        )
        build_dues_types.add_debit = ApiEnumDropdownShallowCopy(
            add_debit_enum_shallow.id, str(add_debit_enum_shallow.uu_id), add_debit_enum_shallow.enum_class, add_debit_enum_shallow.key, add_debit_enum_shallow.value
        )
        build_dues_types.renovation = ApiEnumDropdownShallowCopy(
            renovation_enum_shallow.id, str(renovation_enum_shallow.uu_id), renovation_enum_shallow.enum_class, renovation_enum_shallow.key, renovation_enum_shallow.value
        )
        build_dues_types.lawyer_expence = ApiEnumDropdownShallowCopy(
            late_payment_enum_shallow.id, str(late_payment_enum_shallow.uu_id), late_payment_enum_shallow.enum_class, late_payment_enum_shallow.key, late_payment_enum_shallow.value
        )
        build_dues_types.service_fee = ApiEnumDropdownShallowCopy(
            service_fee_enum_shallow.id, str(service_fee_enum_shallow.uu_id), service_fee_enum_shallow.enum_class, service_fee_enum_shallow.key, service_fee_enum_shallow.value
        )
        build_dues_types.information = ApiEnumDropdownShallowCopy(
            information_enum_shallow.id, str(information_enum_shallow.uu_id), information_enum_shallow.enum_class, information_enum_shallow.key, information_enum_shallow.value
        )
    with AccountRecords.new_session() as session:
        AccountRecords.set_session(session)
        # Candidates: approved, tied to a living space, net-positive funds, recent.
        # NOTE(review): remainder_balance + currency_value > 0 implies remainder
        # may be stored negative after allocation (see close_account_records) — confirm.
        account_records: list[AccountRecords] = AccountRecords.query.filter(
            AccountRecords.approved_record == True, AccountRecords.living_space_id.isnot(None),
            (AccountRecords.remainder_balance + AccountRecords.currency_value) > 0,
            cast(AccountRecords.bank_date, Date) > cast("2022-01-01", Date),
            # AccountRecords.currency_value > 0,
        ).all()
        for account_record in account_records:
            # Detach every field needed downstream; UUIDs become str.
            shallow_copy = AccountRecordsShallowCopy()
            shallow_copy.id = account_record.id
            shallow_copy.uuid = str(account_record.uu_id)
            shallow_copy.iban = account_record.iban
            shallow_copy.currency_value = account_record.currency_value
            shallow_copy.remainder_balance = account_record.remainder_balance
            shallow_copy.bank_date = arrow.get(account_record.bank_date).datetime
            shallow_copy.process_type = account_record.process_type
            shallow_copy.receive_debit = account_record.receive_debit
            shallow_copy.receive_debit_uuid = str(account_record.receive_debit_uu_id)
            shallow_copy.living_space_id = account_record.living_space_id
            shallow_copy.living_space_uuid = str(account_record.living_space_uu_id)
            shallow_copy.build_id = account_record.build_id
            shallow_copy.build_uuid = str(account_record.build_uu_id)
            shallow_copy.build_parts_id = account_record.build_parts_id
            shallow_copy.build_parts_uuid = str(account_record.build_parts_uu_id)
            shallow_copy_list.append(shallow_copy)
    return shallow_copy_list, build_dues_types
def check_payment_stage_debit(account_shallow_copy: AccountRecordsShallowCopy, build_dues_types: BuildDuesTypes, payments_rows: PaymentsRows, payment_actions: PaymentActions):
    """Consume available funds against each open 'debit' row; stop when the pool is spent.

    Fix: removed the unused local `account_records_bank_date` and the dead
    duplicate trailing `return`.  Behavior is otherwise unchanged.
    """
    for payment_row in payments_rows.debit:
        payment_actions.consume(payment_row.payment_amount, payment_row.uuid)
        if payment_actions.is_money_consumed():
            return
def check_payment_stage_add_debit(account_shallow_copy: AccountRecordsShallowCopy, build_dues_types: BuildDuesTypes, payments_rows: PaymentsRows, payment_actions: PaymentActions):
    """Consume available funds against each open 'add debit' row; stop when the pool is spent.

    Fix: removed the unused local `account_records_bank_date` and the dead
    duplicate trailing `return`.  Behavior is otherwise unchanged.
    """
    for payment_row in payments_rows.add_debit:
        payment_actions.consume(payment_row.payment_amount, payment_row.uuid)
        if payment_actions.is_money_consumed():
            return
def check_payment_stage_renovation(account_shallow_copy: AccountRecordsShallowCopy, build_dues_types: BuildDuesTypes, payments_rows: PaymentsRows, payment_actions: PaymentActions):
    """Consume available funds against each open 'renovation' row; stop when the pool is spent.

    Fix: removed the unused local `account_records_bank_date` and the dead
    duplicate trailing `return`.  Behavior is otherwise unchanged.
    """
    for payment_row in payments_rows.renovation:
        payment_actions.consume(payment_row.payment_amount, payment_row.uuid)
        if payment_actions.is_money_consumed():
            return
def check_payment_stage_lawyer_expence(account_shallow_copy: AccountRecordsShallowCopy, build_dues_types: BuildDuesTypes, payments_rows: PaymentsRows, payment_actions: PaymentActions):
    """Consume available funds against each open 'lawyer expence' row; stop when the pool is spent.

    Fix: removed the unused local `account_records_bank_date` and the dead
    duplicate trailing `return`.  Behavior is otherwise unchanged.
    """
    for payment_row in payments_rows.lawyer_expence:
        payment_actions.consume(payment_row.payment_amount, payment_row.uuid)
        if payment_actions.is_money_consumed():
            return
def check_payment_stage_service_fee(account_shallow_copy: AccountRecordsShallowCopy, build_dues_types: BuildDuesTypes, payments_rows: PaymentsRows, payment_actions: PaymentActions):
    """Consume available funds against each open 'service fee' row; stop when the pool is spent.

    Fix: removed the unused local `account_records_bank_date` and the dead
    duplicate trailing `return`.  Behavior is otherwise unchanged.
    """
    for payment_row in payments_rows.service_fee:
        payment_actions.consume(payment_row.payment_amount, payment_row.uuid)
        if payment_actions.is_money_consumed():
            return
def check_payment_stage_information(account_shallow_copy: AccountRecordsShallowCopy, build_dues_types: BuildDuesTypes, payments_rows: PaymentsRows, payment_actions: PaymentActions):
    """Consume available funds against each open 'information' row; stop when the pool is spent.

    Fix: removed the unused local `account_records_bank_date` and the dead
    duplicate trailing `return`.  Behavior is otherwise unchanged.
    """
    for payment_row in payments_rows.information:
        payment_actions.consume(payment_row.payment_amount, payment_row.uuid)
        if payment_actions.is_money_consumed():
            return
def close_account_records(account_shallow_copy: AccountRecordsShallowCopy, payment_actions: PaymentActions, records_to_close: int):
    """Persist the allocations in `payment_actions` once the money pool is fully spent.

    Only acts when payment_actions.is_money_consumed() is True; otherwise
    nothing is written and the counter is returned unchanged.

    Args:
        account_shallow_copy: detached copy of the paying account record.
        payment_actions: consumption ledger built by the check_payment_stage_* calls.
        records_to_close: running counter, incremented when this account closes.

    Returns:
        tuple[int, bool]: (updated counter, whether the account was closed).

    Raises:
        ValueError: when a paid row's master debit cannot be found.
    """
    if payment_actions.is_money_consumed():
        print(f'payment_actions.is_money_consumed() : {payment_actions.is_money_consumed()}')
        for paid_row in payment_actions.paid_list:
            print(f'paid_row item : {paid_row.get_dict()}')
        print(f'payment_actions.consumed_money : {payment_actions.consumed_money}')
        print(f'payment_actions.initial_money : {payment_actions.initial_money}')
        print(f'payment_actions.remaining_money : {payment_actions.remaining_money}')
        with BuildDecisionBookPayments.new_session() as session:
            BuildDecisionBookPayments.set_session(session)
            for payment_row in payment_actions.paid_list:
                print(f'payment_row : {payment_row}')
                # Master debit row: ref_id equals its own uu_id, no account attached.
                payment_row_book = BuildDecisionBookPayments.query.filter(
                    BuildDecisionBookPayments.uu_id == payment_row.uuid,
                    cast(BuildDecisionBookPayments.ref_id, String) == cast(BuildDecisionBookPayments.uu_id, String),
                    BuildDecisionBookPayments.account_records_id.is_(None),
                ).first()
                if not payment_row_book:
                    raise ValueError(f"Payment row not found for uuid: {payment_row.uuid}")
                # Mirror the master row as a credit attached to this account.
                # NOTE(review): unlike close_payment_book(), account_is_debit is
                # not set here — presumably the model default applies; confirm.
                new_row = BuildDecisionBookPayments.create(
                    ref_id=str(payment_row_book.uu_id),
                    payment_plan_time_periods=payment_row_book.payment_plan_time_periods,
                    period_time=payment_row_book.period_time,
                    currency=payment_row_book.currency,
                    account_records_id=account_shallow_copy.id,
                    account_records_uu_id=str(account_shallow_copy.uuid),
                    build_parts_id=payment_row_book.build_parts_id,
                    build_parts_uu_id=str(payment_row_book.build_parts_uu_id),
                    payment_amount=abs(payment_row.amount),
                    payment_types_id=payment_row_book.payment_types_id,
                    payment_types_uu_id=str(payment_row_book.payment_types_uu_id),
                    process_date_m=payment_row_book.process_date.month,
                    process_date_y=payment_row_book.process_date.year,
                    process_date=payment_row_book.process_date,
                    build_decision_book_item_id=payment_row_book.build_decision_book_item_id,
                    build_decision_book_item_uu_id=str(payment_row_book.build_decision_book_item_uu_id),
                    decision_book_project_id=payment_row_book.decision_book_project_id,
                    decision_book_project_uu_id=str(payment_row_book.decision_book_project_uu_id),
                    is_confirmed=True,
                )
                new_row.save()
            account_record = AccountRecords.query.filter_by(id=account_shallow_copy.id).first()
            # NOTE(review): remainder_balance is forced NEGATIVE here
            # (-(|remainder| + |consumed|)); this sign convention must match the
            # (remainder_balance + currency_value) > 0 filter in
            # row_iteration_account_records — confirm.
            account_record.remainder_balance = - abs(account_record.remainder_balance) - abs(payment_actions.consumed_money)
            account_record.save()
            records_to_close += 1
        return records_to_close, True
    return records_to_close, False
def any_function(shallow_copy_list, build_dues_types):
    """Allocate unspent money on account records to open decision-book payments.

    For every account-record shallow copy, the unallocated amount
    (|currency_value| - |remainder_balance|) is spent on the open master
    payment rows of the record's build part, one dues type at a time, in the
    fixed order debit -> add_debit -> renovation -> lawyer_expence ->
    service_fee -> information.  Returns None; progress is only tracked in
    local counters.

    NOTE(review): both parameters are immediately overwritten by
    row_iteration_account_records(), so the arguments callers pass are
    ignored — confirm whether the parameters or the re-fetch should stay.
    """
    error_records, not_closed_records, records_to_close = 0, 0, 0
    # NOTE(review): this re-fetch shadows both parameters (see docstring).
    shallow_copy_list, build_dues_types = row_iteration_account_records()
    # Dues-type attribute names on both build_dues_types and payments_rows.
    dues_type_names = ("debit", "add_debit", "renovation", "lawyer_expence", "service_fee", "information")
    # Stage handlers, applied in settlement order until the money runs out.
    stage_checks = (
        check_payment_stage_debit,
        check_payment_stage_add_debit,
        check_payment_stage_renovation,
        check_payment_stage_lawyer_expence,
        check_payment_stage_service_fee,
        check_payment_stage_information,
    )
    for index, shallow_copy in enumerate(shallow_copy_list):
        # Money received on the account but not yet allocated to any payment row.
        initial_amount = abs(shallow_copy.currency_value) - abs(shallow_copy.remainder_balance)
        if initial_amount == 0:
            # Nothing left to spend on this record.
            not_closed_records += 1
            continue
        if initial_amount < 0:
            # More money allocated than received: stored balances are inconsistent.
            print(f'AC: {shallow_copy.uuid} initial_amount : {initial_amount} wrong calculation is saved on account records Remainder Balance : {shallow_copy.remainder_balance} Currency Value : {shallow_copy.currency_value}')
            error_records += 1
            continue
        payment_actions = PaymentActions(initial_amount)
        payments_rows = PaymentsRows()
        with BuildDecisionBookPayments.new_session() as session:
            BuildDecisionBookPayments.set_session(session)
            # Open (unpaid) master payment rows of this build part for the
            # bank-date month/year; a master row references itself via ref_id.
            book_payments_query = (
                BuildDecisionBookPayments.process_date_m == shallow_copy.bank_date.month,
                BuildDecisionBookPayments.process_date_y == shallow_copy.bank_date.year,
                BuildDecisionBookPayments.build_parts_id == shallow_copy.build_parts_id,
                BuildDecisionBookPayments.account_records_id.is_(None),
                cast(BuildDecisionBookPayments.ref_id, String) == cast(BuildDecisionBookPayments.uu_id, String),
            )
            # Load the open rows per dues type; each non-empty bucket counts
            # as one record that could be closed.
            for type_name in dues_type_names:
                dues_type = getattr(build_dues_types, type_name)
                query_db = BuildDecisionBookPayments.query.filter(
                    *book_payments_query,
                    BuildDecisionBookPayments.payment_types_id == dues_type.id,
                ).order_by(BuildDecisionBookPayments.process_date.desc()).all()
                setattr(payments_rows, type_name, [BuildDecisionBookPaymentsShallowCopy.convert_row_to_shallow_copy(row) for row in query_db])
                if getattr(payments_rows, type_name):
                    records_to_close += 1
            # Spend the money stage by stage until it is fully consumed.
            for stage_check in stage_checks:
                stage_check(account_shallow_copy=shallow_copy, build_dues_types=build_dues_types, payments_rows=payments_rows, payment_actions=payment_actions)
                records_to_close, is_money_consumed = close_account_records(account_shallow_copy=shallow_copy, payment_actions=payment_actions, records_to_close=records_to_close)
                if is_money_consumed:
                    break
"""
build_decision_book_item_id, type=null, pos=10
build_parts_id, type=null, pos=12
payment_plan_time_periods, type=null, pos=1
process_date, type=null, pos=2
payment_types_id, type=null, pos=5
account_records_id, type=null, pos=16
"""

View File

@ -0,0 +1,348 @@
import arrow
from decimal import Decimal
from datetime import datetime, timedelta
from Schemas import BuildDecisionBookPayments, AccountRecords, ApiEnumDropdown
from time import perf_counter
from sqlalchemy import cast, Date, String
from Controllers.Postgres.engine import get_session_factory
# from ServicesApi.Schemas.account.account import AccountRecords
# from ServicesApi.Schemas.building.decision_book import BuildDecisionBookPayments
class BuildDuesTypes:
    """Mutable holder for the six dues-type enum shallow copies.

    Each slot starts as None and is filled with an ApiEnumDropdownShallowCopy
    by get_enums_from_database().
    """

    # One slot per dues category, in no particular order.
    _SLOT_NAMES = ("debit", "add_debit", "renovation", "lawyer_expence", "service_fee", "information")

    def __init__(self):
        # Start every dues-type slot empty; a loader assigns the real copies.
        for slot_name in self._SLOT_NAMES:
            setattr(self, slot_name, None)
class ApiEnumDropdownShallowCopy:
    """Detached value copy of an ApiEnumDropdown row, usable outside a DB session."""

    id: int
    uuid: str
    enum_class: str
    key: str
    value: str

    def __init__(self, id: int, uuid: str, enum_class: str, key: str, value: str):
        # Plain attribute copies; no ORM session is needed afterwards.
        self.id, self.uuid = id, uuid
        self.enum_class = enum_class
        self.key, self.value = key, value
def find_master_payment_value(build_parts_id: int, process_date: datetime, session, debit_type):
    """Return (amount, row, ref_id) of the newest open master debit for the month.

    A master debit is an unpaid row (account_is_debit == True, not yet linked
    to an account record) of the given dues type for *build_parts_id* in the
    month/year of *process_date*.  Returns (0, None, None) when none exists.
    """
    BuildDecisionBookPayments.set_session(session)
    filters = [
        BuildDecisionBookPayments.process_date_m == process_date.month,
        BuildDecisionBookPayments.process_date_y == process_date.year,
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_records_id.is_(None),
        BuildDecisionBookPayments.account_is_debit == True,
        BuildDecisionBookPayments.payment_types_id == debit_type.id,
    ]
    master_row = (
        BuildDecisionBookPayments.query.filter(*filters)
        .order_by(BuildDecisionBookPayments.process_date.desc())
        .first()
    )
    if master_row is None:
        print(f'No record of master payment is found for :{process_date.strftime("%Y-%m-%d %H:%M:%S")} ')
        return 0, None, None
    return abs(master_row.payment_amount), master_row, str(master_row.ref_id)
def calculate_paid_amount_for_master(ref_id: str, session, debit_amount):
    """Return the still-unpaid part of a master debit.

    Sums every payment row (account_is_debit == False, attached to an account
    record) referencing *ref_id* and subtracts it from *debit_amount*.  When
    no payments exist yet, *debit_amount* is returned unchanged.

    Args:
        ref_id: The reference ID to check payments for.
        session: Database session.
        debit_amount: Original debit amount.

    Returns:
        Remaining amount to pay (debit_amount - total_paid).
    """
    BuildDecisionBookPayments.set_session(session)
    settled_rows = (
        BuildDecisionBookPayments.query.filter(
            BuildDecisionBookPayments.ref_id == ref_id,
            BuildDecisionBookPayments.account_records_id.isnot(None),
            BuildDecisionBookPayments.account_is_debit == False,
        )
        .order_by(BuildDecisionBookPayments.process_date.desc())
        .all()
    )
    if not settled_rows:
        return debit_amount
    settled_total = sum(abs(row.payment_amount) for row in settled_rows)
    return abs(debit_amount) - abs(settled_total)
def find_master_payment_value_previous(build_parts_id: int, process_date: datetime, session, debit_type):
    """Return every open master debit dated before the month of *process_date*."""
    BuildDecisionBookPayments.set_session(session)
    # Midnight of the last day of the month preceding process_date's month.
    cutoff = datetime(process_date.year, process_date.month, 1) - timedelta(days=1)
    filters = [
        BuildDecisionBookPayments.process_date < cutoff,
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_records_id.is_(None),
        BuildDecisionBookPayments.account_is_debit == True,
        BuildDecisionBookPayments.payment_types_id == debit_type.id,
    ]
    return (
        BuildDecisionBookPayments.query.filter(*filters)
        .order_by(BuildDecisionBookPayments.process_date.desc())
        .all()
    )
def find_amount_to_pay(build_parts_id: int, process_date: datetime, session, debit_type):
    """Return (remaining_amount, master_row) for this month's open debit."""
    # Amount originally owed this month (0 and row=None when nothing is open).
    owed, master_row, master_ref_id = find_master_payment_value(
        build_parts_id=build_parts_id, process_date=process_date, session=session, debit_type=debit_type
    )
    # Subtract whatever has already been paid against that master row.
    remaining = calculate_paid_amount_for_master(ref_id=master_ref_id, session=session, debit_amount=owed)
    return remaining, master_row
def calculate_total_debt_for_account(build_parts_id: int, session):
    """Return (total_debt, total_paid) over every debit of the build part,
    ignoring process dates entirely."""
    BuildDecisionBookPayments.set_session(session)
    # Every unassigned debit row of this build part.
    open_debits = BuildDecisionBookPayments.query.filter(
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_records_id.is_(None),
        BuildDecisionBookPayments.account_is_debit == True,
    ).all()
    total_debt = sum(abs(row.payment_amount) for row in open_debits)
    # Accumulate all payments recorded against each debit's reference.
    total_paid = 0
    for debit_row in open_debits:
        settled = BuildDecisionBookPayments.query.filter(
            BuildDecisionBookPayments.ref_id == debit_row.ref_id,
            BuildDecisionBookPayments.account_is_debit == False,
        ).all()
        for payment in settled:
            total_paid += abs(payment.payment_amount)
    return total_debt, total_paid
def refresh_book_payment(account_record: AccountRecords):
    """Recompute remainder_balance from the payments attached to *account_record*.

    remainder_balance is set to the sum of all non-debit payment rows linked
    to the record, i.e. money received and already allocated to debits.

    Args:
        account_record: The account record to update.

    Returns:
        The recomputed total payment amount.
    """
    attached_payments = BuildDecisionBookPayments.query.filter(
        BuildDecisionBookPayments.account_records_id == account_record.id,
        BuildDecisionBookPayments.account_is_debit == False,
    ).all()
    allocated_total = sum(abs(row.payment_amount) for row in attached_payments) if attached_payments else 0
    previous_balance = account_record.remainder_balance
    account_record.update(remainder_balance=allocated_total)
    account_record.save()
    # Log only actual balance changes to keep output readable.
    if previous_balance != allocated_total:
        print(f"Account {account_record.id}: Updated remainder_balance {previous_balance} → {allocated_total}")
    return allocated_total
def close_payment_book(payment_row_book, account_record, value, session):
    """Record a payment of *value* against master row *payment_row_book*.

    Clones the master row's bookkeeping fields into a new confirmed payment
    row (account_is_debit=False) linked to *account_record*, and persists it.
    """
    BuildDecisionBookPayments.set_session(session)
    payment_fields = dict(
        ref_id=str(payment_row_book.uu_id),
        payment_plan_time_periods=payment_row_book.payment_plan_time_periods,
        period_time=payment_row_book.period_time,
        currency=payment_row_book.currency,
        account_records_id=account_record.id,
        account_records_uu_id=str(account_record.uu_id),
        build_parts_id=payment_row_book.build_parts_id,
        build_parts_uu_id=str(payment_row_book.build_parts_uu_id),
        payment_amount=value,
        payment_types_id=payment_row_book.payment_types_id,
        payment_types_uu_id=str(payment_row_book.payment_types_uu_id),
        process_date_m=payment_row_book.process_date.month,
        process_date_y=payment_row_book.process_date.year,
        process_date=payment_row_book.process_date,
        build_decision_book_item_id=payment_row_book.build_decision_book_item_id,
        build_decision_book_item_uu_id=str(payment_row_book.build_decision_book_item_uu_id),
        decision_book_project_id=payment_row_book.decision_book_project_id,
        decision_book_project_uu_id=str(payment_row_book.decision_book_project_uu_id),
        is_confirmed=True,
        account_is_debit=False,  # this row records a payment, not a debit
    )
    payment_row = BuildDecisionBookPayments.create(**payment_fields)
    return payment_row.save()
def get_enums_from_database():
    """Load the six BuildDuesTypes enum rows and return them in payment order.

    Returns:
        list[ApiEnumDropdownShallowCopy]: [debit, lawyer_expence, add_debit,
        renovation, service_fee, information] — the order payments settle in.

    Raises:
        LookupError: if any expected enum row is missing from the database
            (instead of crashing later with an opaque AttributeError on None).
    """
    build_dues_types = BuildDuesTypes()
    # (attribute on BuildDuesTypes, enum key in the dropdown table)
    key_by_attr = (
        ("debit", "BDT-D"),
        ("add_debit", "BDT-A"),
        ("renovation", "BDT-R"),
        ("lawyer_expence", "BDT-L"),
        ("service_fee", "BDT-S"),
        ("information", "BDT-I"),
    )
    with ApiEnumDropdown.new_session() as session:
        ApiEnumDropdown.set_session(session)
        for attr, key in key_by_attr:
            row = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key=key).first()
            if row is None:
                # Fail loudly with the exact missing key.
                raise LookupError(f"ApiEnumDropdown row missing: enum_class='BuildDuesTypes' key='{key}'")
            setattr(
                build_dues_types,
                attr,
                ApiEnumDropdownShallowCopy(row.id, str(row.uu_id), row.enum_class, row.key, row.value),
            )
    # Settlement priority order (lawyer expence is paid right after debit).
    return [build_dues_types.debit, build_dues_types.lawyer_expence, build_dues_types.add_debit, build_dues_types.renovation, build_dues_types.service_fee, build_dues_types.information]
def payment_function():
    """Allocate incoming account funds to open decision-book debits.

    For every account record with a build part, a positive currency_value and
    a bank date from 2022 on, funds are first applied to the current month's
    debits (in 'order_pay' enum order) and then to older unpaid debits.
    refresh_book_payment() keeps remainder_balance in sync after each payment.

    NOTE(review): df_fund() is not defined in this module — presumably it
    returns the still-available fund (incoming minus paid); confirm where it
    is imported from.  (Removed the dead `start_time = time.time()` line: it
    was unused and raised NameError, since `time` is never imported here.)
    """
    session_factory = get_session_factory()
    session = session_factory()
    ApiEnumDropdown.set_session(session)
    # Payment settlement priority, smallest enum id first.
    order_pay = ApiEnumDropdown.query.filter(ApiEnumDropdown.enum_type == 'order_pay').order_by(ApiEnumDropdown.id.asc()).all()
    # Select on positive currency_value regardless of remainder_balance so
    # accounts with unallocated funds are always (re)processed.
    account_records = AccountRecords.query.filter(
        AccountRecords.build_parts_id.isnot(None),
        AccountRecords.currency_value > 0,
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.build_parts_id.desc()).all()
    for account_record in account_records:
        incoming_total_money = abs(account_record.currency_value)
        total_paid = refresh_book_payment(account_record)
        available_fund = df_fund(incoming_total_money, total_paid)
        # Overall debt position of this account's build part.
        total_debt, already_paid = calculate_total_debt_for_account(account_record.build_parts_id, session)
        remaining_debt = total_debt - already_paid
        if remaining_debt <= 0 and account_record.remainder_balance == 0:
            # Nothing owed and nothing held: skip.
            continue
        if not available_fund > 0.0:
            # No unallocated money on this record.
            continue
        process_date = datetime.now()
        # Stage 1: pay the current month's debits, one dues type at a time.
        for debit_type in order_pay:
            amount_to_pay, debit_row = find_amount_to_pay(
                build_parts_id=account_record.build_parts_id,
                process_date=process_date,
                session=session,
                debit_type=debit_type
            )
            if amount_to_pay > 0 and debit_row:
                # Never spend more than the remaining fund.
                close_payment_book(
                    payment_row_book=debit_row,
                    account_record=account_record,
                    value=min(amount_to_pay, available_fund),
                    session=session
                )
                total_paid = refresh_book_payment(account_record)
                available_fund = df_fund(incoming_total_money, total_paid)
            if not available_fund > 0.0:
                # Fund exhausted; the remaining dues types can only skip too.
                continue
        # Stage 2: pay previous months' unpaid debts with whatever is left.
        should_continue = False
        for debit_type in order_pay:
            debit_rows = find_master_payment_value_previous(
                build_parts_id=account_record.build_parts_id,
                process_date=process_date,
                session=session,
                debit_type=debit_type
            )
            if not debit_rows:
                continue
            for debit_row in debit_rows:
                amount_to_pay = calculate_paid_amount_for_master(
                    ref_id=debit_row.ref_id,
                    session=session,
                    debit_amount=debit_row.payment_amount
                )
                if not amount_to_pay > 0:
                    # Already fully paid.
                    continue
                if amount_to_pay >= available_fund:
                    # Spend everything that is left on this debt and stop.
                    close_payment_book(
                        payment_row_book=debit_row,
                        account_record=account_record,
                        value=available_fund,
                        session=session
                    )
                    total_paid = refresh_book_payment(account_record)
                    available_fund = df_fund(incoming_total_money, total_paid)
                    should_continue = True
                    break
                # Debt smaller than the fund: settle it fully and keep going.
                close_payment_book(
                    payment_row_book=debit_row,
                    account_record=account_record,
                    value=amount_to_pay,
                    session=session
                )
                total_paid = refresh_book_payment(account_record)
                available_fund = df_fund(incoming_total_money, total_paid)
                print(f"Account {account_record.id}: {available_fund} funds remaining after payment")
            if should_continue or not available_fund > 0.0:
                break
if __name__ == "__main__":
    # Time a full payment run end-to-end.
    run_started = perf_counter()
    payment_function()
    elapsed = perf_counter() - run_started
    print(f'{elapsed:.3f} : seconds')

View File

@ -0,0 +1,63 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""One-off maintenance script: seed remainder_balance on account records.

Sets remainder_balance = currency_value for every account record that has a
build part, a positive currency_value, a bank date from 2022 onwards and a
zero/NULL remainder_balance, then verifies the update.
"""
from sqlalchemy import create_engine, func, text
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
import os
from datetime import datetime
import time

# Get database connection details from environment variables
DB_HOST = os.environ.get('DB_HOST', 'postgres')
DB_PORT = os.environ.get('DB_PORT', '5432')
DB_NAME = os.environ.get('DB_NAME', 'evyos')
DB_USER = os.environ.get('DB_USER', 'evyos')
DB_PASS = os.environ.get('DB_PASS', 'evyos')

# Create SQLAlchemy engine and session
engine = create_engine(f'postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}')
Session = sessionmaker(bind=engine)
session = Session()

print("\n" + "=" * 50)
print("UPDATING ACCOUNTS WITH ZERO REMAINDER_BALANCE")
print("=" * 50)

# All three statements target the same rows; define the predicate once.
TARGET_PREDICATE = """build_parts_id IS NOT NULL
AND currency_value > 0
AND (remainder_balance = 0 OR remainder_balance IS NULL)
AND bank_date >= '2022-01-01'"""

try:
    # text() is required: SQLAlchemy 1.4+/2.0 rejects plain SQL strings
    # passed to Session.execute().
    # First, count the accounts that need updating.
    count_result = session.execute(
        text(f"SELECT COUNT(*) FROM account_records WHERE {TARGET_PREDICATE}")
    ).scalar()
    print(f"Found {count_result} accounts with zero remainder_balance")
    # Then update all those accounts.
    result = session.execute(
        text(f"UPDATE account_records SET remainder_balance = currency_value WHERE {TARGET_PREDICATE}")
    )
    session.commit()
    print(f"Updated {result.rowcount} accounts with zero remainder_balance")
    # Verify the update: the same predicate should now match nothing.
    verify_result = session.execute(
        text(f"SELECT COUNT(*) FROM account_records WHERE {TARGET_PREDICATE}")
    ).scalar()
    print(f"Remaining accounts with zero remainder_balance: {verify_result}")
except Exception as e:
    print(f"Error updating accounts: {str(e)}")
    session.rollback()
finally:
    session.close()
    print("=" * 50)

View File

@ -0,0 +1,30 @@
#!/bin/bash
# Container entrypoint: export runtime configuration for cron jobs, register
# the crontab, start cron, and keep the container alive by tailing its log.

# Write (not append) the environment file so repeated container restarts do
# not accumulate duplicate lines. /run_app.sh is expected to source /env.sh.
cat > /env.sh <<EOF
POSTGRES_USER="$POSTGRES_USER"
POSTGRES_PASSWORD="$POSTGRES_PASSWORD"
POSTGRES_DB="$POSTGRES_DB"
POSTGRES_HOST="$POSTGRES_HOST"
POSTGRES_PORT=$POSTGRES_PORT
POSTGRES_ENGINE="$POSTGRES_ENGINE"
POSTGRES_POOL_PRE_PING="$POSTGRES_POOL_PRE_PING"
POSTGRES_POOL_SIZE=$POSTGRES_POOL_SIZE
POSTGRES_MAX_OVERFLOW=$POSTGRES_MAX_OVERFLOW
POSTGRES_POOL_RECYCLE=$POSTGRES_POOL_RECYCLE
POSTGRES_POOL_TIMEOUT=$POSTGRES_POOL_TIMEOUT
POSTGRES_ECHO="$POSTGRES_ECHO"
PYTHONPATH=/
PYTHONUNBUFFERED=1
PYTHONDONTWRITEBYTECODE=1
EOF

# Run the app every 30 minutes; overwrite any previous crontab entry.
echo "*/30 * * * * /run_app.sh >> /var/log/cron.log 2>&1" > /tmp/crontab_list
crontab /tmp/crontab_list

# Start cron
cron

# Ensure the log file exists before tailing (tail -f fails on a missing
# file), then block here to keep the container's main process alive.
touch /var/log/cron.log
tail -f /var/log/cron.log

View File

@ -0,0 +1,326 @@
import sys
import arrow
from decimal import Decimal
from Schemas import BuildDecisionBookPayments, AccountRecords, ApiEnumDropdown
# Counters for tracking conditions
# Run-wide diagnostic tallies, mutated by _process_payment via `global`, so a
# batch run can report why records were skipped, updated or failed.
counter_current_currency_not_positive = 0  # Track when current_currency_value <= 0
counter_net_amount_not_positive = 0  # Track when net_amount <= 0
counter_account_records_updated = 0  # Track number of account records updated
counter_payments_found = 0  # Track how many payments were found
counter_found_payment_skips = 0  # Track how many times we skip due to found_payment
counter_payment_exceptions = 0  # Track exceptions during payment processing
counter_missing_build_parts_id = 0  # Track accounts with missing build_parts_id
counter_null_payment_types = 0  # Track payments with null payment_types_id
def pay_the_registration(account_record, receive_enum, debit_enum, is_old_record: bool = False, session=None):
    """Bind DB sessions on the ORM models and delegate to _process_payment.

    Reuses the caller's *session* when given; otherwise opens (and owns) a
    fresh one for the duration of the call.
    """
    if session is not None:
        # Caller manages the session's lifetime.
        AccountRecords.set_session(session)
        BuildDecisionBookPayments.set_session(session)
        return _process_payment(account_record, receive_enum, debit_enum, is_old_record, session)
    with AccountRecords.new_session() as owned_session:
        AccountRecords.set_session(owned_session)
        BuildDecisionBookPayments.set_session(owned_session)
        return _process_payment(account_record, receive_enum, debit_enum, is_old_record, owned_session)
def _process_payment(account_record, receive_enum, debit_enum, is_old_record, session):
    """Internal function to process payments with a given session.

    Matches this account record's unspent funds against open debit rows of
    its build part: for each debit not yet fully covered, a corresponding
    receive row is created and the account's remainder_balance is advanced.

    Args:
        account_record: AccountRecords row whose funds are being allocated.
        receive_enum: Enum row for the receive payment type.
        debit_enum: Enum row for the debit payment type.
        is_old_record: When True, the bank-date year/month filters are
            skipped so older debts can also be matched.
        session: Database session (models already bound by the caller; the
            parameter itself is not referenced directly in this function).

    Returns:
        float: The remaining unallocated currency value for this record.
    """
    # Funds still available: incoming value minus what remainder_balance
    # already accounts for.
    current_currency_value = float(Decimal(account_record.currency_value)) - float(Decimal(account_record.remainder_balance))
    if not current_currency_value > 0:
        global counter_current_currency_not_positive
        counter_current_currency_not_positive += 1
        return current_currency_value
    # Check if account_record has build_parts_id; without it no debit rows
    # can be matched.
    if account_record.build_parts_id is None:
        global counter_missing_build_parts_id
        counter_missing_build_parts_id += 1
        return current_currency_value
    process_date = arrow.get(account_record.bank_date)
    account_bank_date_year, account_bank_date_month = (process_date.date().year, process_date.date().month)
    # Base filters: debits of this build part not yet tied to any account record.
    payment_arguments_debit = [
        BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id,
        BuildDecisionBookPayments.account_records_id == None,
    ]
    # Add date filters if not processing old records
    if not is_old_record:
        payment_arguments_debit.extend([BuildDecisionBookPayments.process_date_y == int(account_bank_date_year), BuildDecisionBookPayments.process_date_m == int(account_bank_date_month)])
    # First try with debit_enum.id
    payments = BuildDecisionBookPayments.query.filter(*payment_arguments_debit, BuildDecisionBookPayments.payment_types_id == debit_enum.id).order_by(BuildDecisionBookPayments.process_date.asc()).all()
    # If no payments found, try with null payment_types_id
    if len(payments) == 0:
        payments = BuildDecisionBookPayments.query.filter(*payment_arguments_debit, BuildDecisionBookPayments.payment_types_id == None).order_by(BuildDecisionBookPayments.process_date.asc()).all()
        if len(payments) > 0:
            global counter_null_payment_types
            counter_null_payment_types += len(payments)
    global counter_payments_found
    counter_payments_found += len(payments)
    # Debug: Print info about the first few payments found (if any)
    if len(payments) > 0 and account_record.id % 100 == 0:  # Only print for every 100th record to avoid too much output
        print(f"DEBUG: Found {len(payments)} payments for account_record {account_record.id}")
        if len(payments) > 0:
            sample_payment = payments[0]
            print(f" Sample payment: ID={getattr(sample_payment, 'id', 'N/A')}, amount={getattr(sample_payment, 'payment_amount', 'N/A')}")
    if len(payments) == 0:
        # No payments found for this account record
        return current_currency_value
    for payment in payments:
        # Stop as soon as the account's funds are exhausted.
        if not current_currency_value > 0:
            return current_currency_value
        # Receive rows already created for this exact debit row.
        payment_arguments_receive = [
            BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id,
            BuildDecisionBookPayments.payment_plan_time_periods == payment.payment_plan_time_periods,
            BuildDecisionBookPayments.payment_types_id == receive_enum.id,
            BuildDecisionBookPayments.build_decision_book_item_id == payment.build_decision_book_item_id,
            BuildDecisionBookPayments.decision_book_project_id == payment.decision_book_project_id,
            BuildDecisionBookPayments.process_date == payment.process_date,
        ]
        if not is_old_record:
            payment_arguments_receive.extend([BuildDecisionBookPayments.process_date_y == int(account_bank_date_year), BuildDecisionBookPayments.process_date_m == int(account_bank_date_month)])
        payment_received = BuildDecisionBookPayments.query.filter(*payment_arguments_receive).all()
        # Note: the comprehension variable shadows the loop's `payment` but,
        # per Python 3 comprehension scoping, does not leak into it.
        sum_of_payment_received = sum([abs(payment.payment_amount) for payment in payment_received])
        # Uncovered portion of this debit row.
        net_amount = float(abs(Decimal(payment.payment_amount))) - float(abs(Decimal(sum_of_payment_received)))
        if not net_amount > 0:
            global counter_net_amount_not_positive
            counter_net_amount_not_positive += 1
            continue
        # Cap the allocation at the funds still available on the account.
        if float(abs(current_currency_value)) < float(abs(net_amount)):
            net_amount = float(current_currency_value)
        process_date = arrow.get(payment.process_date)
        try:
            # Guard against double-creating a receive row for the same debit.
            found_payment = BuildDecisionBookPayments.query.filter_by(
                build_parts_id=payment.build_parts_id, payment_plan_time_periods=payment.payment_plan_time_periods,
                payment_types_id=receive_enum.id, build_decision_book_item_id=payment.build_decision_book_item_id,
                decision_book_project_id=payment.decision_book_project_id, process_date=str(process_date),
            ).first()
            if found_payment:
                global counter_found_payment_skips
                counter_found_payment_skips += 1
                continue
            created_book_payment = BuildDecisionBookPayments.create(
                payment_plan_time_periods=payment.payment_plan_time_periods, payment_amount=float(abs(net_amount)),
                payment_types_id=receive_enum.id, payment_types_uu_id=str(receive_enum.uu_id),
                process_date=str(process_date), process_date_m=process_date.date().month, process_date_y=process_date.date().year,
                period_time=f"{process_date.year}-{str(process_date.month).zfill(2)}", build_parts_id=payment.build_parts_id,
                build_parts_uu_id=str(payment.build_parts_uu_id), account_records_id=account_record.id,
                account_records_uu_id=str(account_record.uu_id), build_decision_book_item_id=payment.build_decision_book_item_id,
                build_decision_book_item_uu_id=str(payment.build_decision_book_item_uu_id), decision_book_project_id=payment.decision_book_project_id,
                decision_book_project_uu_id=str(payment.decision_book_project_uu_id),
            )
            created_book_payment.save()
            created_payment_amount = float(Decimal(created_book_payment.payment_amount))
            # Move the allocated amount into the account's remainder_balance.
            remainder_balance = float(Decimal(account_record.remainder_balance)) + float(abs(created_payment_amount))
            account_record.update(remainder_balance=remainder_balance)
            account_record.save()
            global counter_account_records_updated
            counter_account_records_updated += 1
            if current_currency_value >= abs(net_amount):
                current_currency_value -= abs(net_amount)
        except Exception as e:
            # Best-effort: log and keep processing the remaining payments.
            print("Exception of decision payment:", e)
            global counter_payment_exceptions
            counter_payment_exceptions += 1
    return current_currency_value
def create_direct_payment(account_record, receive_enum, session):
    """
    Create a direct payment record for an account record without relying on matching BuildDecisionBookPayments.

    Fallback used when the matching path could not allocate the funds: one
    receive-type payment is created for the full unallocated amount and the
    account's remainder_balance is advanced accordingly.

    Args:
        account_record: AccountRecords row to settle.
        receive_enum: Enum row for the receive payment type.
        session: Database session (models are bound by the caller; the
            parameter itself is not referenced directly here).

    Returns:
        bool: True when a payment was created and the account updated;
        False when nothing was done (no funds, duplicate, or error).
    """
    try:
        # Calculate the amount to process (funds not yet allocated).
        payment_amount = float(Decimal(account_record.currency_value)) - float(Decimal(account_record.remainder_balance))
        if payment_amount <= 0:
            return False
        # Get process date information from the bank transaction date.
        process_date = arrow.get(account_record.bank_date)
        process_date_y = process_date.date().year
        process_date_m = process_date.date().month
        period_time = f"{process_date_y}-{str(process_date_m).zfill(2)}"
        # Check if a payment already exists for this account record
        existing_payment = BuildDecisionBookPayments.query.filter_by(account_records_id=account_record.id, payment_types_id=receive_enum.id).first()
        if existing_payment:  # Skip if payment already exists
            return False
        # Create a new payment record directly; book/project references are
        # copied from the account record only when it actually carries them.
        created_book_payment = BuildDecisionBookPayments.create(
            payment_plan_time_periods=1,  # Default value
            payment_types_id=receive_enum.id, payment_types_uu_id=str(receive_enum.uu_id), payment_amount=payment_amount, process_date=str(process_date), process_date_y=process_date_y,
            process_date_m=process_date_m, period_time=period_time, account_records_id=account_record.id, account_records_uu_id=str(account_record.uu_id),
            build_parts_id=account_record.build_parts_id, build_parts_uu_id=str(account_record.build_parts_uu_id) if hasattr(account_record, 'build_parts_uu_id') and account_record.build_parts_uu_id else None,
            decision_book_project_id=getattr(account_record, 'decision_book_project_id', None),
            decision_book_project_uu_id=str(account_record.decision_book_project_uu_id) if hasattr(account_record, 'decision_book_project_uu_id') and account_record.decision_book_project_uu_id else None,
        )
        created_book_payment.save()
        # Update the account record: record the spent amount.
        remainder_balance = float(Decimal(account_record.remainder_balance)) + float(abs(payment_amount))
        account_record.update(remainder_balance=remainder_balance)
        account_record.save()
        global counter_account_records_updated
        counter_account_records_updated += 1
        return True
    except Exception as e:
        # Best-effort: log and report failure to the caller.
        print(f"Exception in create_direct_payment for account {account_record.id}: {e}")
        global counter_payment_exceptions
        counter_payment_exceptions += 1
        return False
def send_accounts_to_decision_payment():
    """Run one full payment-matching pass over all eligible account records.

    Opens a session, loads the DT-R/DT-D enum rows, prints a batch of
    diagnostic statistics, then for each account record with unspent funds
    tries the matching allocator (pay_the_registration) and falls back to
    create_direct_payment. Records whose funds are fully spent get
    status_id=97.
    """
    with ApiEnumDropdown.new_session() as session:
        # Set the session for all models that will be used
        ApiEnumDropdown.set_session(session)
        AccountRecords.set_session(session)
        BuildDecisionBookPayments.set_session(session)
        try:
            # Get required enum values
            receive_enum = ApiEnumDropdown.query.filter_by(enum_class="DebitTypes", key="DT-R").first()
            debit_enum = ApiEnumDropdown.query.filter_by(enum_class="DebitTypes", key="DT-D").first()
            if not receive_enum or not debit_enum:
                print("Error: Could not find required enum values")
                return
            # --- Diagnostics: overall table statistics ---
            total_payments = BuildDecisionBookPayments.query.count()
            print(f"\n--- DEBUG: Database Statistics ---")
            print(f"Total BuildDecisionBookPayments records in database: {total_payments}")
            # Check how many have payment_types_id = debit_enum.id
            debit_payments = BuildDecisionBookPayments.query.filter_by(payment_types_id=debit_enum.id).count()
            print(f"BuildDecisionBookPayments with payment_types_id={debit_enum.id} (DT-D): {debit_payments}")
            # Check how many have account_records_id = None
            null_account_payments = BuildDecisionBookPayments.query.filter(BuildDecisionBookPayments.account_records_id == None).count()
            print(f"BuildDecisionBookPayments with account_records_id=None: {null_account_payments}")
            # Check a sample payment record
            sample_payment = BuildDecisionBookPayments.query.first()
            if sample_payment:
                print("\n--- Sample BuildDecisionBookPayment ---")
                print(f"ID: {getattr(sample_payment, 'id', 'N/A')}")
                print(f"payment_types_id: {getattr(sample_payment, 'payment_types_id', 'N/A')}")
                print(f"build_parts_id: {getattr(sample_payment, 'build_parts_id', 'N/A')}")
                print(f"account_records_id: {getattr(sample_payment, 'account_records_id', 'N/A')}")
                print(f"process_date_y: {getattr(sample_payment, 'process_date_y', 'N/A')}")
                print(f"process_date_m: {getattr(sample_payment, 'process_date_m', 'N/A')}")
            else:
                print("No BuildDecisionBookPayment records found in the database!")
            # Check a sample account record that still has unspent funds.
            sample_account = AccountRecords.query.filter(AccountRecords.remainder_balance < AccountRecords.currency_value, AccountRecords.receive_debit == receive_enum.id).first()
            if sample_account:
                print("\n--- Sample AccountRecord ---")
                print(f"ID: {getattr(sample_account, 'id', 'N/A')}")
                print(f"build_parts_id: {getattr(sample_account, 'build_parts_id', 'N/A')}")
                print(f"bank_date: {getattr(sample_account, 'bank_date', 'N/A')}")
                # Try to find payments for this specific account record
                if sample_account.bank_date:
                    process_date = arrow.get(sample_account.bank_date)
                    account_bank_date_year, account_bank_date_month = (process_date.date().year, process_date.date().month)
                    print("\n--- Checking for payments for sample account ---")
                    print(f"Looking for payments with build_parts_id={sample_account.build_parts_id}, payment_types_id={debit_enum.id}, account_records_id=None")
                    # Try without date filters first
                    basic_payments = BuildDecisionBookPayments.query.filter(
                        BuildDecisionBookPayments.build_parts_id == sample_account.build_parts_id, BuildDecisionBookPayments.payment_types_id == debit_enum.id,
                        BuildDecisionBookPayments.account_records_id == None
                    ).count()
                    print(f"Found {basic_payments} payments without date filters")
                    # Now try with date filters
                    dated_payments = BuildDecisionBookPayments.query.filter(
                        BuildDecisionBookPayments.build_parts_id == sample_account.build_parts_id, BuildDecisionBookPayments.payment_types_id == debit_enum.id,
                        BuildDecisionBookPayments.account_records_id == None, BuildDecisionBookPayments.process_date_y == int(account_bank_date_year),
                        BuildDecisionBookPayments.process_date_m == int(account_bank_date_month)
                    ).count()
                    print(f"Found {dated_payments} payments with date filters (year={account_bank_date_year}, month={account_bank_date_month})")
            else:
                print("No matching AccountRecord found for debugging!")
            # Query for account records that need payment processing
            # Note: We removed the approved_record condition as it was too restrictive
            account_records_list = AccountRecords.query.filter(AccountRecords.remainder_balance < AccountRecords.currency_value, AccountRecords.receive_debit == receive_enum.id
                                                               ).order_by(AccountRecords.bank_date.desc()).all()
            print(f"\nProcessing {len(account_records_list)} account records")
            # Track how many records were processed with each method
            traditional_method_count = 0
            direct_method_count = 0
            for account_record in account_records_list:
                # Try the traditional method first
                current_currency_value = pay_the_registration(account_record, receive_enum, debit_enum, False, session)
                if current_currency_value > 0:
                    # Funds remain after the first pass: retry including old
                    # records (date filters relaxed).
                    pay_the_registration(account_record, receive_enum, debit_enum, True, session)
                    traditional_method_count += 1
                else:
                    # If traditional method didn't work, try direct payment for all records
                    # This will handle records with missing build_parts_id
                    if create_direct_payment(account_record, receive_enum, session):
                        direct_method_count += 1
                        if direct_method_count % 10 == 0:  # Only print every 10th record to avoid too much output
                            print(f"Direct payment created for account_record {account_record.id}")
                # Refresh the account record to get updated values
                session.refresh(account_record)
                # Update status if the remainder balance equals the currency value
                if abs(float(Decimal(account_record.remainder_balance))) == abs(float(Decimal(account_record.currency_value))):
                    account_record.update(status_id=97)
                    account_record.save()
            print(f"\nProcessed with traditional method: {traditional_method_count} records")
            print(f"Processed with direct payment method: {direct_method_count} records")
            print("Payment processing completed successfully")
        except Exception as e:
            print(f"Error in send_accounts_to_decision_payment: {e}")
            import traceback
            traceback.print_exc()
            # Rollback the session in case of error
            session.rollback()
            return
if __name__ == "__main__":
    # Script entry point: run one full payment-matching pass, then report
    # the module-level diagnostic counters collected during processing.
    print("Payment Service is running...")
    try:
        send_accounts_to_decision_payment()
        # Print counter statistics.
        # NOTE: counter_missing_build_parts_id and counter_null_payment_types
        # are collected but not reported here.
        print("\n--- Processing Statistics ---")
        print(f"Records where current_currency_value <= 0: {counter_current_currency_not_positive}")
        print(f"Records where net_amount <= 0: {counter_net_amount_not_positive}")
        print(f"Account records updated: {counter_account_records_updated}")
        print(f"Total payments found: {counter_payments_found}")
        print(f"Skips due to found_payment: {counter_found_payment_skips}")
        print(f"Payment exceptions: {counter_payment_exceptions}")
    except Exception as e:
        print(f"Error: {e}")
    print("Payment Service is finished...")

View File

@ -0,0 +1,26 @@
#!/bin/bash
# Cron entry point: load the environment persisted at container start-up,
# re-export it for the child process, then run the payment runner.

# Source the environment file written by the container entrypoint.
. /env.sh

# Sourcing alone does not mark the variables for export, so re-export every
# name the Python runner reads.
for var_name in \
    POSTGRES_USER POSTGRES_PASSWORD POSTGRES_DB POSTGRES_HOST POSTGRES_PORT \
    POSTGRES_ENGINE POSTGRES_POOL_PRE_PING POSTGRES_POOL_SIZE \
    POSTGRES_MAX_OVERFLOW POSTGRES_POOL_RECYCLE POSTGRES_POOL_TIMEOUT \
    POSTGRES_ECHO PYTHONPATH PYTHONUNBUFFERED PYTHONDONTWRITEBYTECODE
do
    export "$var_name"
done

/usr/local/bin/python /runner.py

View File

@ -0,0 +1,854 @@
import arrow
import calendar
import time
from decimal import Decimal
from datetime import datetime, timedelta
from Schemas import BuildDecisionBookPayments, AccountRecords, ApiEnumDropdown, Build, BuildDecisionBook
from time import perf_counter
from sqlalchemy import select, func, distinct, cast, Date, String, literal, desc, and_, or_
from Controllers.Postgres.engine import get_session_factory
#from ServicesApi.Schemas.account.account import AccountRecords
#from ServicesApi.Schemas.building.decision_book import BuildDecisionBookPayments
def find_last_day_of_month(date_value):
    """Return 23:59:59 on the last calendar day of date_value's month.

    Args:
        date_value: Any object exposing .date() (datetime or arrow instance).

    Returns:
        datetime: The final second of that month.
    """
    d = date_value.date()
    # monthrange returns (weekday_of_first_day, number_of_days).
    final_day = calendar.monthrange(d.year, d.month)[1]
    return datetime(d.year, d.month, final_day, 23, 59, 59)
def find_first_day_of_month(date_value):
    """Return midnight on the first day of date_value's month.

    Args:
        date_value: Any object exposing .date() (datetime or arrow instance).

    Returns:
        datetime: 00:00:00 on day 1 of that month.
    """
    d = date_value.date()
    return datetime(d.year, d.month, 1)
class BuildDuesTypes:
    """Bundle of the dues-type enum snapshots used to order payment processing.

    All slots start as None and are populated by get_enums_from_database()
    with ApiEnumDropdownShallowCopy instances.
    """

    def __init__(self):
        self.debit = None            # BDT-D
        self.add_debit = None        # BDT-A
        self.renovation = None       # BDT-R
        self.lawyer_expence = None   # BDT-L
        self.service_fee = None      # BDT-S
        self.information = None      # BDT-I
class ApiEnumDropdownShallowCopy:
    """Detached, plain-Python snapshot of an ApiEnumDropdown row.

    Carries only the scalar fields needed after the originating DB session
    is closed, so values remain usable without a live ORM object.
    """

    id: int
    uuid: str
    enum_class: str
    key: str
    value: str

    def __init__(self, id: int, uuid: str, enum_class: str, key: str, value: str):
        # Copy the scalars straight onto the instance.
        self.id, self.uuid = id, uuid
        self.enum_class = enum_class
        self.key, self.value = key, value
def get_enums_from_database():
    """Load the BuildDuesTypes enum rows and return them in processing order.

    Each ORM row is copied into a detached ApiEnumDropdownShallowCopy so the
    values stay usable after the session closes.

    Returns:
        list[ApiEnumDropdownShallowCopy]: [debit, lawyer_expence, add_debit,
        renovation, service_fee, information] — the order the analysis
        iterates payment types in.
    """
    build_dues_types = BuildDuesTypes()
    with ApiEnumDropdown.new_session() as session:
        ApiEnumDropdown.set_session(session)
        debit_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-D").first()  # Debit
        add_debit_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-A").first()  # Add Debit
        renovation_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-R").first()  # Renovation
        # NOTE(review): the variable name says "late_payment" but the value is
        # stored as lawyer_expence — confirm BDT-L's intended meaning.
        late_payment_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-L").first()  # Lawyer expence
        service_fee_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-S").first()  # Service fee
        information_enum_shallow = ApiEnumDropdown.query.filter_by(enum_class="BuildDuesTypes", key="BDT-I").first()  # Information
        # Detach each row into a plain shallow copy while the session is open.
        build_dues_types.debit = ApiEnumDropdownShallowCopy(
            debit_enum_shallow.id, str(debit_enum_shallow.uu_id), debit_enum_shallow.enum_class, debit_enum_shallow.key, debit_enum_shallow.value
        )
        build_dues_types.add_debit = ApiEnumDropdownShallowCopy(
            add_debit_enum_shallow.id, str(add_debit_enum_shallow.uu_id), add_debit_enum_shallow.enum_class, add_debit_enum_shallow.key, add_debit_enum_shallow.value
        )
        build_dues_types.renovation = ApiEnumDropdownShallowCopy(
            renovation_enum_shallow.id, str(renovation_enum_shallow.uu_id), renovation_enum_shallow.enum_class, renovation_enum_shallow.key, renovation_enum_shallow.value
        )
        build_dues_types.lawyer_expence = ApiEnumDropdownShallowCopy(
            late_payment_enum_shallow.id, str(late_payment_enum_shallow.uu_id), late_payment_enum_shallow.enum_class, late_payment_enum_shallow.key, late_payment_enum_shallow.value
        )
        build_dues_types.service_fee = ApiEnumDropdownShallowCopy(
            service_fee_enum_shallow.id, str(service_fee_enum_shallow.uu_id), service_fee_enum_shallow.enum_class, service_fee_enum_shallow.key, service_fee_enum_shallow.value
        )
        build_dues_types.information = ApiEnumDropdownShallowCopy(
            information_enum_shallow.id, str(information_enum_shallow.uu_id), information_enum_shallow.enum_class, information_enum_shallow.key, information_enum_shallow.value
        )
    return [build_dues_types.debit, build_dues_types.lawyer_expence, build_dues_types.add_debit, build_dues_types.renovation, build_dues_types.service_fee, build_dues_types.information]
def generate_total_paid_amount_for_spesific_build_part_id(build_parts_id: int, session):
    """
    Calculate the total amount paid for a specific build part ID.

    Sums payment_amount over credit rows (account_is_debit == False) with a
    process_date of 2022-01-01 or later.

    Args:
        build_parts_id: The build part ID to calculate payments for
        session: Database session

    Returns:
        The raw sum of payment_amount, or 0 when no rows match.
        NOTE(review): unlike the sibling debt helper, no abs() is applied
        here; other code in this module stores credit amounts as negatives,
        so this sum may be negative — confirm callers expect the signed value.
    """
    payment_query = session.query(func.sum(BuildDecisionBookPayments.payment_amount)).filter(
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_is_debit == False,
        cast(BuildDecisionBookPayments.process_date, Date) >= '2022-01-01'
    ).scalar()
    return payment_query if payment_query is not None else 0
def generate_total_debt_amount_for_spesific_build_part_id(build_parts_id: int, session):
    """Total debt booked for one build part since 2022-01-01.

    Sums payment_amount over ALL debit rows (account_is_debit == True),
    processed or not.

    Args:
        build_parts_id: Build part whose debts are summed.
        session: Database session.

    Returns:
        The absolute value of the summed debits, or 0 when none exist.
    """
    debt_filters = (
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.account_is_debit == True,
        cast(BuildDecisionBookPayments.process_date, Date) >= '2022-01-01',
    )
    total_debt = session.query(func.sum(BuildDecisionBookPayments.payment_amount)).filter(*debt_filters).scalar()
    # A missing aggregate means no debit rows matched.
    return 0 if total_debt is None else abs(total_debt)
def generate_total_amount_that_user_has_in_account(account_record: AccountRecords, session):
    """Total positive funds recorded for the account's build part since 2022-01-01.

    Args:
        account_record: AccountRecords carrying the build_parts_id to sum for
            (only that attribute is read).
        session: Database session.

    Returns:
        The absolute value of the summed positive currency_value rows, or 0
        when no rows match.
    """
    # Get total amount that user has in account
    result = session.query(
        func.sum(AccountRecords.currency_value)
    ).filter(
        AccountRecords.build_parts_id == account_record.build_parts_id,
        AccountRecords.currency_value > 0,
        cast(AccountRecords.bank_date, Date) >= '2022-01-01'
    ).scalar()
    # BUGFIX: func.sum yields None when no rows match, and abs(None) raises
    # TypeError. Mirror the sibling aggregate helpers and fall back to 0.
    return abs(result) if result is not None else 0
def _print_debt_details(debt, session):
    """Helper function to print detailed information about an unpaid debt.

    Args:
        debt: The BuildDecisionBookPayments object representing the debt
        session: Database session

    NOTE(review): despite its name, this function currently produces no
    output and returns nothing — it computes the payment total, remaining
    amount, coverage percentage and a formatted date, then discards them.
    Its call sites in analyze_payment_function are commented out; confirm
    whether the print statements should be restored or the helper removed.
    """
    # Get the sum of payments (credit rows) recorded against this debt.
    payments_sum = session.query(
        func.sum(BuildDecisionBookPayments.payment_amount)
    ).filter(
        BuildDecisionBookPayments.ref_id == debt.ref_id,
        BuildDecisionBookPayments.account_is_debit == False
    ).scalar() or 0
    # Calculate remaining amount still owed on this debt.
    debit_amount = abs(debt.payment_amount)
    remaining = debit_amount - abs(payments_sum)
    payment_percentage = (abs(payments_sum) / debit_amount) * 100 if debit_amount > 0 else 0
    # Format the date for display
    date_str = debt.process_date.strftime('%Y-%m-%d') if debt.process_date else 'Unknown date'
def analyze_payment_function():
    """Print a per-build-part payment analysis report.

    For every distinct build part that has positive incoming funds since
    2022-01-01, prints: total debt, amount paid, remaining debt, account
    balance, coverage percentages, and an unpaid-debt breakdown per payment
    type (current month vs previous months). Purely diagnostic — writes
    nothing to the database.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    # Payment types in the order they should be analyzed.
    order_pay = get_enums_from_database()
    # Get distinct build_parts_id values from account records with positive currency_value
    # This avoids redundant processing of the same build_parts_id
    distinct_build_parts = session.query(
        distinct(AccountRecords.build_parts_id)
    ).filter(
        AccountRecords.build_parts_id.isnot(None),
        AccountRecords.currency_value > 0,
        AccountRecords.bank_date >= '2022-01-01'
    ).order_by(AccountRecords.build_parts_id.desc()).all()
    # NOTE(review): start_time (and last_date_of_process_date below) are
    # assigned but never used.
    start_time = time.time()
    for build_part_id_tuple in distinct_build_parts:
        build_part_id = build_part_id_tuple[0]  # Extract the ID from the tuple
        process_date = datetime.now()
        last_date_of_process_date = datetime(process_date.year, process_date.month, 1) - timedelta(days=1)
        first_date_of_process_date = datetime(process_date.year, process_date.month, 1)
        print(f"\n{'=' * 50}")
        print(f"ACCOUNT ANALYSIS FOR BUILD PART ID: {build_part_id}")
        print(f"{'=' * 50}")
        # Calculate total paid amount for this build_part_id
        total_amount_paid = generate_total_paid_amount_for_spesific_build_part_id(build_part_id, session)
        # Calculate total debt amount for this build_part_id
        total_debt_amount = generate_total_debt_amount_for_spesific_build_part_id(build_part_id, session)
        # Get total amount in account for this build_part_id; a transient
        # AccountRecords instance is used only to carry the build_parts_id.
        account_record = AccountRecords()
        account_record.build_parts_id = build_part_id
        total_amount_in_account = generate_total_amount_that_user_has_in_account(account_record, session)
        # Calculate remaining amount to be paid
        amount_need_to_paid = total_debt_amount - total_amount_paid
        total_amount_that_user_need_to_transfer = abs(amount_need_to_paid) - abs(total_amount_in_account)
        # Print summary with clear descriptions
        print(f"PAYMENT SUMMARY:")
        print(f" • Total debt amount: {total_debt_amount:,.2f} TL")
        print(f" • Amount already paid: {total_amount_paid:,.2f} TL")
        print(f" • Remaining debt to be collected: {amount_need_to_paid:,.2f} TL")
        print(f" • Current account balance: {total_amount_in_account:,.2f} TL")
        if total_amount_that_user_need_to_transfer > 0:
            print(f" • Additional funds needed: {total_amount_that_user_need_to_transfer:,.2f} TL")
        elif amount_need_to_paid <= 0:
            print(f" • Account is fully paid with no outstanding debt")
        else:
            print(f" • Sufficient funds available to close all debt")
        # Show debt coverage percentage
        if total_debt_amount > 0:
            # Calculate current coverage (already paid)
            current_coverage_percentage = (total_amount_paid / total_debt_amount) * 100
            # Calculate potential coverage (including available funds)
            potential_coverage = min(100, ((total_amount_paid + total_amount_in_account) / total_debt_amount) * 100)
            # Display both percentages
            print(f" • Current debt coverage: {current_coverage_percentage:.2f}%")
            print(f" • Potential debt coverage with available funds: {potential_coverage:.2f}%")
        # Analyze unpaid debts for each payment type
        print("\nUNPAID DEBTS ANALYSIS BY PAYMENT TYPE:")
        for payment_type in order_pay:
            # Date filter tuples: current month-to-date vs everything before.
            date_query_current = (
                BuildDecisionBookPayments.process_date >= first_date_of_process_date,
                BuildDecisionBookPayments.process_date <= process_date
            )
            date_query_previous = (
                BuildDecisionBookPayments.process_date < first_date_of_process_date,
            )
            # NOTE(review): get_unpaid_debts is declared further down without a
            # build_parts_id parameter — confirm this keyword call matches the
            # intended signature.
            current_unpaid_debts = get_unpaid_debts(build_parts_id=build_part_id, session=session, debit_type=payment_type, date_query=date_query_current)
            # Get unpaid debts from previous months
            previous_unpaid_debts = get_unpaid_debts(build_parts_id=build_part_id, session=session, debit_type=payment_type, date_query=date_query_previous)
            # Calculate totals (rows are tuples; index 2 is the summed amount).
            current_total = sum(abs(debt[2]) for debt in current_unpaid_debts)
            previous_total = sum(abs(debt[2]) for debt in previous_unpaid_debts)
            grand_total = current_total + previous_total
            # Print summary for this payment type
            if current_unpaid_debts or previous_unpaid_debts:
                print(f"{payment_type.key}: Total unpaid: {grand_total:,.2f} TL")
                # Current month details
                if current_unpaid_debts:
                    print(f" - Current month: {len(current_unpaid_debts)} debts, {current_total:,.2f} TL")
                    # Show details of each unpaid debt if there aren't too many
                    # if len(current_unpaid_debts) <= 3:
                    #     for debt in current_unpaid_debts:
                    #         _print_debt_details(debt, session)
                # Previous months details
                if previous_unpaid_debts:
                    print(f" - Previous months: {len(previous_unpaid_debts)} debts, {previous_total:,.2f} TL")
                    # Show details of each unpaid debt if there aren't too many
                    # if len(previous_unpaid_debts) <= 3:
                    #     for debt in previous_unpaid_debts:
                    #         _print_debt_details(debt, session)
            else:
                print(f"{payment_type.key}: All debts paid")
        print(f"{'=' * 50}\n")
def close_payment_book(payment_row_book, account_record, value, session):
    """Create a credit entry in BuildDecisionBookPayments to close a debt.

    Copies the identifying fields of the debit row into a new credit row
    (account_is_debit=False) linked to the paying account record, with
    ref_id pointing back at the debit's uu_id.

    Args:
        payment_row_book: The debit entry to be paid
        account_record: The account record containing the funds
        value: The amount to pay
        session: Database session

    Returns:
        The newly created payment record
    """
    BuildDecisionBookPayments.set_session(session)
    # Create a new credit entry (payment)
    new_row = BuildDecisionBookPayments.create(
        ref_id=str(payment_row_book.uu_id),
        payment_plan_time_periods=payment_row_book.payment_plan_time_periods,
        period_time=payment_row_book.period_time,
        currency=payment_row_book.currency,
        account_records_id=account_record.id,
        account_records_uu_id=str(account_record.uu_id),
        build_parts_id=payment_row_book.build_parts_id,
        build_parts_uu_id=str(payment_row_book.build_parts_uu_id),
        payment_amount=abs(value),  # stored as a positive amount; the credit side is marked via account_is_debit=False below
        payment_types_id=payment_row_book.payment_types_id,
        payment_types_uu_id=str(payment_row_book.payment_types_uu_id),
        process_date_m=payment_row_book.process_date.month,
        process_date_y=payment_row_book.process_date.year,
        process_date=payment_row_book.process_date,
        build_decision_book_item_id=payment_row_book.build_decision_book_item_id if payment_row_book.build_decision_book_item_id else None,
        build_decision_book_item_uu_id=str(payment_row_book.build_decision_book_item_uu_id) if payment_row_book.build_decision_book_item_uu_id else None,
        decision_book_project_id=payment_row_book.decision_book_project_id if payment_row_book.decision_book_project_id else None,
        decision_book_project_uu_id=str(payment_row_book.decision_book_project_uu_id) if payment_row_book.decision_book_project_uu_id else None,
        build_decision_book_id=payment_row_book.build_decision_book_id if payment_row_book.build_decision_book_id else None,
        build_decision_book_uu_id=str(payment_row_book.build_decision_book_uu_id) if payment_row_book.build_decision_book_uu_id else None,
        is_confirmed=True,
        account_is_debit=False,
    )
    # Save, commit, and hand back a refreshed instance.
    saved_row = new_row.save()
    session.commit()
    session.refresh(saved_row)
    session.flush()
    return saved_row
def update_account_remainder_if_spent(account_record, ref_id: str, session):
    """Recompute an account's remainder_balance from its credited payments.

    Sums all credit rows (account_is_debit == False) drawn against this
    account and stores the negated total in remainder_balance.

    Args:
        account_record: The account record to update.
        ref_id: Debt reference id. Kept for interface compatibility; it is
            not used in the balance computation.
        session: Database session

    Returns:
        bool: True if all money is spent, False otherwise
    """
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    session.commit()
    # Total of every credit entry (payment) made from this account.
    sum_of_paid = session.query(func.sum(func.abs(BuildDecisionBookPayments.payment_amount))).filter(
        BuildDecisionBookPayments.account_records_id == account_record.id,
        BuildDecisionBookPayments.account_is_debit == False
    ).scalar()
    if not sum_of_paid:
        # No payments recorded yet; nothing to update.
        return False
    # FIX: removed an unused fetch of the debt row by ref_id — it was a dead
    # database read whose result was never referenced.
    account_record_to_update = AccountRecords.query.filter_by(id=account_record.id).first()
    # remainder_balance stores the spent total as a negative number.
    account_record_to_update.remainder_balance = (-1 * abs(sum_of_paid)) or 0
    account_record_to_update.save()
    session.commit()
    session.refresh(account_record_to_update)
    # Fully spent when the payments exactly match the account's incoming value.
    if abs(sum_of_paid) == abs(account_record_to_update.currency_value):
        return True
    return False
def update_all_spent_accounts(session):
    """Update remainder_balance for all accounts with payments.

    This function finds account records referenced by credit entries in
    BuildDecisionBookPayments and updates their remainder_balance based on
    the sum of payments made, regardless of whether all funds have been
    spent or not.

    Args:
        session: Database session.
            NOTE(review): this parameter is immediately shadowed by the
            `with ... as session` below and therefore never used — confirm
            whether the caller's session was meant to be reused.
    """
    with AccountRecords.new_session() as session:
        # Set sessions for models
        AccountRecords.set_session(session)
        BuildDecisionBookPayments.set_session(session)
        # Get distinct account_records_id values from BuildDecisionBookPayments
        distinct_account_ids = session.query(BuildDecisionBookPayments.account_records_id).filter(
            BuildDecisionBookPayments.account_records_id.isnot(None),
            BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
        ).distinct().all()
        updated_count = 0
        for account_id_tuple in distinct_account_ids:
            account_id = account_id_tuple[0]
            # Get the account record; skip ones that cannot carry a balance.
            account = AccountRecords.query.filter_by(id=account_id).first()
            if not account or not account.build_parts_id or account.currency_value <= 0:
                continue
            # Calculate the sum of payments made using this account
            # Note: payment_amount is negative for credit entries, so we need to use abs() to get the positive amount
            payment_query = session.query(func.sum(func.abs(BuildDecisionBookPayments.payment_amount))).filter(
                BuildDecisionBookPayments.account_records_id == account_id,
                BuildDecisionBookPayments.account_is_debit == False  # Credit entries (payments)
            )
            payment_sum = payment_query.scalar() or 0
            # Update remainder_balance for ALL accounts, regardless of payment_sum value.
            # NOTE(review): fully_spent/status are computed but never used or
            # printed — presumably left over from removed logging.
            threshold = Decimal('0.01')
            fully_spent = abs(payment_sum) >= abs(account.currency_value) - threshold
            status = "All funds spent" if fully_spent else "Partial payment"
            # Store the spent total as a negative value in remainder_balance.
            account.remainder_balance = -1 * abs(payment_sum)
            account.save()
            updated_count += 1
        session.commit()
        print(f"\nTotal accounts updated: {updated_count}")
# def find_amount_to_pay_by_ref_id(ref_id, session):
# """Calculate the remaining amount to pay for a specific debt reference ID.
# Args:
# ref_id: The reference ID of the debt (this is the uu_id of the debt record)
# session: Database session
# Returns:
# float: The remaining amount to pay
# """
# # Get the original debt amount - the debt is identified by its uu_id which is passed as ref_id
# debit = BuildDecisionBookPayments.query.filter(
# BuildDecisionBookPayments.uu_id == ref_id,
# BuildDecisionBookPayments.account_is_debit == True
# ).first()
# if not debit:
# return 0 # No debit found, nothing to pay
# debit_amount = abs(debit.payment_amount) # Ensure positive value for debit amount
# # Get the sum of payments already made for this debt
# # The ref_id in credit records points to the uu_id of the original debit
# # Note: payment_amount is negative for credit entries, so we use abs() to get positive values
# credit_amount = session.query(
# func.sum(func.abs(BuildDecisionBookPayments.payment_amount))
# ).filter(
# BuildDecisionBookPayments.ref_id == str(ref_id),
# BuildDecisionBookPayments.account_is_debit == False
# ).scalar() or 0
# # Calculate remaining amount to pay
# remaining = abs(debit_amount) - abs(credit_amount)
# # Ensure we don't return negative values
# if remaining < 0:
# return 0
# return remaining
def get_unpaid_debts_via_build_parts(build_parts_id: int, session, debit_type, date_query: tuple):
    """Find unpaid-debt payment groups for one build part and one debit type.

    Groups BuildDecisionBookPayments rows by (ref_id, process_date) and keeps
    groups whose absolute payment sum is non-zero, i.e. debts that still carry
    an outstanding balance.

    Args:
        build_parts_id: The build part ID to check.
        session: Database session.
        debit_type: Payment-type row whose ``id`` filters the rows.
        date_query: Tuple of SQLAlchemy date filters to apply.

    Returns:
        list[dict]: one dict per unpaid group with keys
            ``ref_id``, ``process_date``, ``total_payments``
            (newest ``process_date`` first).
    """
    payment_sums_subquery = select(
        BuildDecisionBookPayments.ref_id,
        BuildDecisionBookPayments.process_date,
        func.abs(func.coalesce(func.sum(BuildDecisionBookPayments.payment_amount), 0)).label("total_payments")
    ).filter(
        BuildDecisionBookPayments.build_parts_id == build_parts_id,
        BuildDecisionBookPayments.payment_types_id == debit_type.id,
        *date_query
    ).group_by(
        BuildDecisionBookPayments.ref_id, BuildDecisionBookPayments.process_date
    ).having(
        func.abs(func.coalesce(func.sum(BuildDecisionBookPayments.payment_amount), 0)) > 0
    ).order_by(BuildDecisionBookPayments.process_date.desc())
    payment_sums = session.execute(payment_sums_subquery).all()
    # Bug fix: the dict list was built but the raw Row tuples were returned.
    # Consumers index results by key (unpaid_debt["total_payments"], see the
    # sibling get_unpaid_debts and the do_payments_* loops), so return dicts.
    return [
        {"ref_id": row[0], "process_date": row[1], "total_payments": row[2]}
        for row in payment_sums
    ]
def get_unpaid_debts(session, debit_type, date_query: tuple):
    """Return unpaid-debt groups for one debit type across all build parts.

    Groups BuildDecisionBookPayments rows by (ref_id, process_date), keeps
    groups whose absolute payment sum is non-zero, and returns them oldest
    ``process_date`` first.

    Args:
        session: Database session.
        debit_type: Payment-type row whose ``id`` filters the rows.
        date_query: Tuple of SQLAlchemy date filters to apply.

    Returns:
        list[dict]: items with keys ``ref_id``, ``process_date``,
            ``total_payments``.
    """
    # |SUM(payment_amount)| with NULL treated as 0 — used for both the
    # selected column and the HAVING predicate.
    total_expr = func.abs(func.coalesce(func.sum(BuildDecisionBookPayments.payment_amount), 0))
    stmt = (
        select(
            BuildDecisionBookPayments.ref_id,
            BuildDecisionBookPayments.process_date,
            total_expr.label("total_payments"),
        )
        .filter(BuildDecisionBookPayments.payment_types_id == debit_type.id, *date_query)
        .group_by(BuildDecisionBookPayments.ref_id, BuildDecisionBookPayments.process_date)
        .having(total_expr > 0)
        .order_by(BuildDecisionBookPayments.process_date.asc())
    )
    rows = session.execute(stmt).all()
    return [
        {"ref_id": ref, "process_date": when, "total_payments": total}
        for ref, when, total in rows
    ]
def do_payments_of_this_month(build_id: int = 1):
    """Process payments for the current month's unpaid debts.

    Retrieves account records with available funds and settles the current
    month's unpaid debts in payment-type priority order, consuming funds
    oldest bank date first (FIFO) per build part.

    Args:
        build_id: Target build whose decision-book debts are settled.

    Raises:
        ValueError: when the build or its active "RBM" decision book is missing.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    Build.set_session(session)
    BuildDecisionBook.set_session(session)
    # Get payment types in priority order
    payment_type_list = get_enums_from_database()
    fund_finished = lambda money_spend, money_in_account: money_spend == money_in_account  # NOTE(review): never used
    payments_made, total_amount_paid, paid_count = 0, 0, 0
    target_build = Build.query.filter(Build.id == build_id).first()
    if not target_build:
        raise ValueError(f"Build with id {build_id} not found")
    now = datetime.now()
    # The active "RBM" decision book defines the valid payment period.
    decision_book = BuildDecisionBook.query.filter(
        BuildDecisionBook.build_id == build_id, cast(BuildDecisionBook.expiry_starts, Date) <= now.date(),
        cast(BuildDecisionBook.expiry_ends, Date) >= now.date(), BuildDecisionBook.decision_type == "RBM"
    ).first()
    if not decision_book:
        raise ValueError(f"Decision book not found for build with id {build_id}")
    period_date_start = decision_book.expiry_starts  # NOTE(review): unused below
    period_date_end = decision_book.expiry_ends  # NOTE(review): unused below
    period_id = decision_book.id  # NOTE(review): unused below
    first_date_of_process_date = find_first_day_of_month(now)
    last_date_of_process_date = find_last_day_of_month(now)
    # Current month date filter
    date_query_tuple = (
        cast(BuildDecisionBookPayments.process_date, Date) >= first_date_of_process_date.date(), cast(BuildDecisionBookPayments.process_date, Date) <= last_date_of_process_date.date()
    )
    update_all_spent_accounts(session)
    for payment_type in payment_type_list:
        unpaid_debts = get_unpaid_debts(session, payment_type, date_query_tuple)
        for unpaid_debt in unpaid_debts:
            amount_to_pay = unpaid_debt["total_payments"]
            ref_id = unpaid_debt["ref_id"]
            process_date = unpaid_debt["process_date"]  # NOTE(review): unused in this loop
            # The group's ref_id is the uu_id of the original debit row.
            debit_row = BuildDecisionBookPayments.query.filter(BuildDecisionBookPayments.uu_id == ref_id).first()
            build_parts_id = debit_row.build_parts_id
            # Fund rows of this build part with unspent money this month,
            # oldest bank date first (FIFO spending).
            money_to_pay_rows = AccountRecords.query.filter(
                AccountRecords.build_parts_id == build_parts_id, AccountRecords.currency_value > 0, AccountRecords.currency_value > func.abs(AccountRecords.remainder_balance),
                cast(AccountRecords.bank_date, Date) >= first_date_of_process_date.date(), cast(AccountRecords.bank_date, Date) <= last_date_of_process_date.date(),
            ).order_by(AccountRecords.bank_date.asc()).all()
            for money_to_pay_row in money_to_pay_rows:
                # Refresh the remainder before reading it, then re-read the row.
                update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                money_to_pay_row = AccountRecords.query.filter(AccountRecords.id == money_to_pay_row.id).first()
                available_money = abs(money_to_pay_row.currency_value) - abs(money_to_pay_row.remainder_balance)
                if available_money > amount_to_pay:
                    # Fund covers the whole debt: pay it and stop scanning funds.
                    print('NOT All money is spent amount_to_pay:', amount_to_pay, 'debit_row payment_amount:', debit_row.payment_amount)
                    close_payment_book(debit_row, money_to_pay_row, amount_to_pay, session)
                    total_amount_paid += amount_to_pay
                    paid_count += 1
                    payments_made += 1
                    update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                    break
                elif available_money <= amount_to_pay:
                    # Fund exhausted by a partial payment: spend it all, try the
                    # next fund row. NOTE(review): amount_to_pay is not reduced
                    # here — confirm close_payment_book tracks the open balance.
                    print('All money is spent amount_to_pay:', amount_to_pay, 'debit_row payment_amount:', debit_row.payment_amount)
                    close_payment_book(debit_row, money_to_pay_row, available_money, session)
                    total_amount_paid += available_money
                    paid_count += 1
                    payments_made += 1
                    update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                    continue
                else:
                    # Unreachable: the two branches above cover all values.
                    print(f"Something else happened available_money: {available_money}, amount_to_pay: {amount_to_pay}")
    update_all_spent_accounts(session)
    print('payments_made', payments_made)
    print('total_amount_paid', total_amount_paid)
    print('paid_count', paid_count)
def do_payments_of_previos_months(build_id: int = 1):
    """Process payments for previous months' unpaid debts.

    Two passes over unpaid debts, each in payment-type priority order:
    1) debts from the start of the current decision-book period up to today;
    2) debts within the previous year's decision-book period.
    Funds are consumed oldest bank date first per build part.

    NOTE(review): the name keeps the historical typo ("previos") because the
    function is called elsewhere; rename only in a coordinated change.

    Args:
        build_id: Target build whose decision-book debts are settled.

    Raises:
        ValueError: when the build or its active "RBM" decision book is missing.
    """
    session_factory = get_session_factory()
    session = session_factory()
    # Set session for all models
    AccountRecords.set_session(session)
    BuildDecisionBookPayments.set_session(session)
    Build.set_session(session)
    BuildDecisionBook.set_session(session)
    # Get payment types in priority order
    payment_type_list = get_enums_from_database()
    fund_finished = lambda money_spend, money_in_account: money_spend == money_in_account  # NOTE(review): never used
    payments_made, total_amount_paid, paid_count = 0, 0, 0
    target_build = Build.query.filter(Build.id == build_id).first()
    if not target_build:
        raise ValueError(f"Build with id {build_id} not found")
    now = datetime.now()
    decision_book = BuildDecisionBook.query.filter(
        BuildDecisionBook.build_id == build_id,
        cast(BuildDecisionBook.expiry_starts, Date) <= now.date(),
        cast(BuildDecisionBook.expiry_ends, Date) >= now.date(),
        BuildDecisionBook.decision_type == "RBM"
    ).first()
    if not decision_book:
        raise ValueError(f"Decision book not found for build with id {build_id}")
    # Previous year's decision book (same calendar day, one year back).
    early_date = datetime(now.year - 1, now.month, now.day)
    early_decision_book = BuildDecisionBook.query.filter(
        BuildDecisionBook.build_id == build_id,
        cast(BuildDecisionBook.expiry_starts, Date) <= early_date.date(),
        cast(BuildDecisionBook.expiry_ends, Date) >= early_date.date(),
        BuildDecisionBook.decision_type == "RBM"
    ).first()
    # NOTE(review): early_decision_book is not guarded against None — the
    # attribute accesses below would raise AttributeError; confirm the data
    # always contains last year's book.
    period_date_start = decision_book.expiry_starts
    period_date_end = decision_book.expiry_ends  # NOTE(review): unused
    period_id = decision_book.id  # NOTE(review): unused
    early_period_date_start = early_decision_book.expiry_starts
    early_period_date_end = early_decision_book.expiry_ends
    early_period_id = early_decision_book.id  # NOTE(review): unused
    first_date_of_process_date = arrow.get(period_date_start).datetime
    last_date_of_process_date = now
    # Pass 1 filter: current period start .. today
    date_query_tuple = (
        cast(BuildDecisionBookPayments.process_date, Date) >= first_date_of_process_date.date(),
        cast(BuildDecisionBookPayments.process_date, Date) <= last_date_of_process_date.date()
    )
    update_all_spent_accounts(session)
    for payment_type in payment_type_list:
        unpaid_debts = get_unpaid_debts(session, payment_type, date_query_tuple)
        print('length unpaid debts: ', len(unpaid_debts))
        for unpaid_debt in unpaid_debts:
            amount_to_pay = unpaid_debt["total_payments"]
            ref_id = unpaid_debt["ref_id"]
            process_date = unpaid_debt["process_date"]
            debit_row = BuildDecisionBookPayments.query.filter(BuildDecisionBookPayments.uu_id == ref_id).first()
            build_parts_id = debit_row.build_parts_id
            # Prefer funds banked in the debt's own month...
            money_to_pay_rows = AccountRecords.query.filter(
                AccountRecords.build_parts_id == build_parts_id, AccountRecords.currency_value > 0, AccountRecords.currency_value > func.abs(AccountRecords.remainder_balance),
                cast(AccountRecords.bank_date, Date) >= find_first_day_of_month(process_date).date(), cast(AccountRecords.bank_date, Date) <= find_last_day_of_month(process_date).date(),
            ).order_by(AccountRecords.bank_date.asc()).all()
            if not money_to_pay_rows:
                # ...falling back to any funds within the whole period window.
                money_to_pay_rows = AccountRecords.query.filter(
                    AccountRecords.build_parts_id == build_parts_id, AccountRecords.currency_value > 0, AccountRecords.currency_value > func.abs(AccountRecords.remainder_balance),
                    cast(AccountRecords.bank_date, Date) >= first_date_of_process_date.date(), cast(AccountRecords.bank_date, Date) <= last_date_of_process_date.date(),
                ).order_by(AccountRecords.bank_date.asc()).all()
            for money_to_pay_row in money_to_pay_rows:
                # Refresh the remainder before reading it, then re-read the row.
                update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                money_to_pay_row = AccountRecords.query.filter(AccountRecords.id == money_to_pay_row.id).first()
                available_money = abs(money_to_pay_row.currency_value) - abs(money_to_pay_row.remainder_balance)
                if available_money > amount_to_pay:
                    # Fund covers the whole debt: pay and stop scanning funds.
                    print('NOT All money is spent amount_to_pay:', amount_to_pay, 'debit_row payment_amount:', debit_row.payment_amount)
                    close_payment_book(debit_row, money_to_pay_row, amount_to_pay, session)
                    total_amount_paid += amount_to_pay
                    paid_count += 1
                    payments_made += 1
                    update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                    break
                elif available_money <= amount_to_pay:
                    # Fund exhausted by a partial payment: spend it all, continue.
                    print('All money is spent amount_to_pay:', available_money, 'debit_row payment_amount:', debit_row.payment_amount)
                    close_payment_book(debit_row, money_to_pay_row, available_money, session)
                    total_amount_paid += available_money
                    paid_count += 1
                    payments_made += 1
                    update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                    continue
                else:
                    # Unreachable: the two branches above cover all values.
                    print(f"Something else happened available_money: {available_money}, amount_to_pay: {amount_to_pay}")
    print('This years decision book payments')
    print('payments_made', payments_made)
    print('total_amount_paid', total_amount_paid)
    print('paid_count', paid_count)
    # ---- Pass 2: previous year's decision-book period ----
    payments_made, total_amount_paid, paid_count = 0, 0, 0
    update_all_spent_accounts(session)
    first_date_of_process_date = arrow.get(early_period_date_start).datetime
    last_date_of_process_date = arrow.get(early_period_date_end).datetime
    # Early period date filter
    date_query_tuple = (
        cast(BuildDecisionBookPayments.process_date, Date) >= first_date_of_process_date.date(), cast(BuildDecisionBookPayments.process_date, Date) <= last_date_of_process_date.date()
    )
    for payment_type in payment_type_list:
        unpaid_debts = get_unpaid_debts(session, payment_type, date_query_tuple)
        print('length unpaid debts: ', len(unpaid_debts))
        for unpaid_debt in unpaid_debts:
            amount_to_pay = unpaid_debt["total_payments"]
            ref_id = unpaid_debt["ref_id"]
            process_date = unpaid_debt["process_date"]
            debit_row = BuildDecisionBookPayments.query.filter(BuildDecisionBookPayments.uu_id == ref_id).first()
            build_parts_id = debit_row.build_parts_id
            # Narrow the fund window to the debt's own month first.
            first_date_of_process_date = find_first_day_of_month(process_date)
            last_date_of_process_date = find_last_day_of_month(process_date)
            money_to_pay_rows = AccountRecords.query.filter(
                AccountRecords.build_parts_id == build_parts_id, AccountRecords.currency_value > 0, AccountRecords.currency_value > func.abs(AccountRecords.remainder_balance),
                cast(AccountRecords.bank_date, Date) >= first_date_of_process_date.date(), cast(AccountRecords.bank_date, Date) <= last_date_of_process_date.date(),
            ).order_by(AccountRecords.bank_date.asc()).all()
            if not money_to_pay_rows:
                # NOTE(review): first/last dates were just overwritten above, so
                # this fallback repeats the identical month window (unlike the
                # period-wide fallback in pass 1) — confirm intent.
                money_to_pay_rows = AccountRecords.query.filter(
                    AccountRecords.build_parts_id == build_parts_id, AccountRecords.currency_value > 0, AccountRecords.currency_value > func.abs(AccountRecords.remainder_balance),
                    cast(AccountRecords.bank_date, Date) >= first_date_of_process_date.date(), cast(AccountRecords.bank_date, Date) <= last_date_of_process_date.date(),
                ).order_by(AccountRecords.bank_date.asc()).all()
            for money_to_pay_row in money_to_pay_rows:
                update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                money_to_pay_row = AccountRecords.query.filter(AccountRecords.id == money_to_pay_row.id).first()
                available_money = abs(money_to_pay_row.currency_value) - abs(money_to_pay_row.remainder_balance)
                if available_money > amount_to_pay:
                    print('NOT All money is spent amount_to_pay:', amount_to_pay, 'debit_row payment_amount:', debit_row.payment_amount)
                    close_payment_book(debit_row, money_to_pay_row, amount_to_pay, session)
                    total_amount_paid += amount_to_pay
                    paid_count += 1
                    payments_made += 1
                    update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                    break
                elif available_money <= amount_to_pay:
                    print('All money is spent amount_to_pay:', available_money, 'debit_row payment_amount:', debit_row.payment_amount)
                    close_payment_book(debit_row, money_to_pay_row, available_money, session)
                    total_amount_paid += available_money
                    paid_count += 1
                    payments_made += 1
                    update_account_remainder_if_spent(account_record=money_to_pay_row, ref_id=debit_row.ref_id, session=session)
                    continue
                else:
                    print(f"Something else happened available_money: {available_money}, amount_to_pay: {amount_to_pay}")
    update_all_spent_accounts(session)
    print('Early years decision book payments')
    print('payments_made', payments_made)
    print('total_amount_paid', total_amount_paid)
    print('paid_count', paid_count)
if __name__ == "__main__":
    # Script entry point: settle current month first, then older periods.
    started_at = perf_counter()
    print("\n===== PROCESSING PAYMENTS =====\n")
    print("Starting payment processing at:", datetime.now())
    # Process payments for current month first
    print("\n1. Processing current month payments...")
    do_payments_of_this_month()
    # Process payments for previous months
    print("\n2. Processing previous months payments...")
    do_payments_of_previos_months()
    print("\n===== PAYMENT PROCESSING COMPLETE =====\n")
    print("Payment processing completed at:", datetime.now())
    # Analyze the payment situation after processing payments
    print("\n===== ANALYZING PAYMENT SITUATION AFTER PROCESSING =====\n")
    # analyze_payment_function()
    print(f"\n{perf_counter() - started_at:.3f} : seconds")
# # Create a subquery to get the sum of payments for each debit's uu_id
# # For credit entries, ref_id points to the original debit's uu_id
# payment_sums = session.query(
# BuildDecisionBookPayments.ref_id.label('original_debt_id'),
# func.sum(func.abs(BuildDecisionBookPayments.payment_amount)).label('payment_sum')
# ).filter(
# BuildDecisionBookPayments.account_is_debit == False # Credit entries only
# ).group_by(BuildDecisionBookPayments.ref_id).subquery()
# # Main query to find debits with their payment sums
# query = session.query(BuildDecisionBookPayments)
# # Join with payment sums - cast uu_id to string to match ref_id type
# query = query.outerjoin(
# payment_sums,
# func.cast(BuildDecisionBookPayments.uu_id, String) == payment_sums.c.original_debt_id
# )
# # Filter for debits of the specified build part and payment type
# query = query.filter(
# BuildDecisionBookPayments.build_parts_id == build_parts_id,
# BuildDecisionBookPayments.payment_types_id == debit_type.id,
# BuildDecisionBookPayments.account_is_debit == True, # Debit entries only
# )
# # Apply date filters if provided
# if date_query:
# for date_filter in date_query:
# query = query.filter(date_filter)
# # Filter for debits that are not fully paid
# # (payment_sum < debit_amount or payment_sum is NULL)
# query = query.filter(
# or_(
# payment_sums.c.payment_sum.is_(None),
# func.coalesce(payment_sums.c.payment_sum, 0) < func.abs(BuildDecisionBookPayments.payment_amount)
# )
# )
# # Execute the query and return the results
# results = query.order_by(BuildDecisionBookPayments.process_date).all()

View File

@ -0,0 +1,3 @@
# Docs of Finder
Finds people, living spaces, and companies referenced by `AccountRecords` entries.

View File

@ -0,0 +1,452 @@
import re
import textdistance
from unidecode import unidecode
from datetime import datetime
from Schemas import BuildIbanDescription, BuildIbans, BuildDecisionBook, BuildLivingSpace, AccountRecords, Companies, People
from gc import garbage
from typing import Optional
from pydantic import BaseModel
from regex_func import category_finder
class InsertBudgetRecord(BaseModel):
    """Pydantic payload for inserting a single budget/account record.

    NOTE(review): the ``bank_date`` default is evaluated once at import time
    (``datetime.now().__str__()``), so every instance created in a
    long-running process shares that stale timestamp — confirm whether a
    per-instance default (pydantic ``Field(default_factory=...)``) was
    intended.
    """
    iban: str  # required — the only field without a default
    bank_date: str = datetime.now().__str__()
    receive_debit: str = "debit"  # direction flag; presumably "debit"/"receive" — confirm with callers
    budget_type: str = "B"
    currency_value: float = 0
    balance: float = 0
    bank_reference_code: str = ""
    currency: str = "TL"
    channel_branch: str = ""
    process_name: str = ""
    process_type: str = ""
    process_comment: str = ""
    add_xcomment: Optional[str] = None
    project_no: Optional[str] = None
    company_id: Optional[str] = None
    customer_id: Optional[str] = None
    send_person_id: Optional[int] = None
    send_company_id: Optional[int] = None
    build_id: Optional[int] = None
    build_decision_book_id: Optional[int] = None
    build_parts_id: Optional[int] = None
    build_db_item_id: Optional[int] = None
    dues_type: Optional[str] = "D"
    period_time: Optional[str] = ""
    approving_accounting_record: Optional[bool] = False
    approving_accounting_person: Optional[str] = None
    accounting_receipt_date: Optional[str] = "1900-01-01 00:00:00"  # sentinel "no receipt yet"
    accounting_receipt_number: Optional[int] = 0
def strip_time_date(date_str):
    """Parse a ``YYYY-MM-DD`` string into a datetime at midnight."""
    date_only_format = "%Y-%m-%d"
    return datetime.strptime(date_str, date_only_format)
def strip_date_to_valid(date_str):
    """Parse a ``YYYY-MM-DD HH:MM:SS`` string into a datetime."""
    full_format = "%Y-%m-%d %H:%M:%S"
    return datetime.strptime(date_str, full_format)
def find_iban_in_comment(iban: str, comment: str, living_space_dict: dict = None):
    """Match *comment* against the stored search words for *iban*.

    Scores each BuildIbanDescription row for the IBAN with Jaro-Winkler
    similarity and returns the best match's company/customer ids when its
    score is >= 0.5; otherwise a dict of Nones.

    Args:
        iban: IBAN whose description rows are searched.
        comment: Bank statement comment to match.
        living_space_dict: Accepted but not used by this function.

    Returns:
        dict: keys ``company_id``, ``customer_id``, ``found_from``,
            ``similarity``.
    """
    with BuildIbanDescription.new_session() as session:
        BuildIbanDescription.set_session(session)
        iban_results = BuildIbanDescription.query.filter(BuildIbanDescription.iban == iban).all()
        sm_dict_extended, sm_dict_digit = {}, {}  # sm_dict_digit is never populated
        for iban_result in iban_results or []:
            extended_candidate_parts, digit_part, candidate_parts = [], [], comment.split(" ")
            for part in candidate_parts:
                # Drop Turkish flat/number filler words before matching.
                if part.lower() not in ["no", "daire", "nolu"]:
                    extended_candidate_parts.append(part)
                # if part.isdigit():
                #     digit_part.append(part)
            if extended_candidate_parts:
                # NOTE(review): these parts are taken from the comment itself,
                # so this all(...) check looks always-True — confirm intent.
                if all(candidate_part.lower() in comment.lower() for candidate_part in extended_candidate_parts):
                    similarity_ratio = textdistance.jaro_winkler(unidecode(str(iban_result.search_word)), comment)
                    found = False
                    name_list = (unidecode(str(iban_result.search_word)).replace(".", " ").split(" "))
                    for name in name_list:
                        # Require at least one substantial (>3 chars) name token
                        # of the search word to appear in the comment.
                        if len(name) > 3 and name.lower() in comment.lower():
                            found = True
                            break
                    if not found:
                        similarity_ratio = 0.1  # penalise matches with no shared name token
                    sm_dict_extended[f"{iban_result.id}"] = similarity_ratio
        if sm_dict_extended:
            # Best candidate by similarity, accepted only at/above 0.5.
            result = sorted(sm_dict_extended.items(), key=lambda item: item[1], reverse=True)[0]
            if float(result[1]) >= 0.5:
                iban_result = BuildIbanDescription.query.filter(BuildIbanDescription.id == int(result[0]), system=True).first()
                return {"company_id": iban_result.company_id, "customer_id": iban_result.customer_id, "found_from": "Name", "similarity": result[1]}
    return {"company_id": None, "customer_id": None, "found_from": None, "similarity": 0.0}
def remove_spaces_from_string(remove_string: str):
    """Collapse runs of space characters and upper-case the result.

    Only the literal space character is collapsed (tabs/newlines are kept),
    matching the original split-on-" " behaviour.

    Args:
        remove_string: Arbitrary text.

    Returns:
        str: upper-cased text with single spaces between non-empty segments.
    """
    # Segments produced by split(" ") can never contain a space, so the old
    # per-character space filter was dead code; dropping empty segments is all
    # that's needed to collapse consecutive/leading/trailing spaces.
    parts = [part for part in remove_string.split(" ") if part]
    return " ".join(parts).upper()
def get_garbage_words(comment: str, search_word: str):
    """Return the upper-cased leftover of *comment* after removing every word
    of *search_word*, or None when nothing remains."""
    leftover = unidecode(remove_spaces_from_string(comment))
    normalized_search = unidecode(remove_spaces_from_string(search_word))
    # Strip each search token from the normalised comment.
    for token in normalized_search.split(" "):
        leftover = leftover.replace(remove_spaces_from_string(unidecode(token)), "")
    remainder = remove_spaces_from_string(leftover)
    if remainder:
        return str(unidecode(remainder)).upper()
    return None
def remove_garbage_words(comment: str, garbage_word: str):
    """Strip every garbage token from *comment*; return the upper-cased,
    space-collapsed, ASCII-folded remainder."""
    cleaned = remove_spaces_from_string(comment.replace("*", " "))
    if garbage_word:
        normalized_garbage = remove_spaces_from_string(garbage_word.replace("*", " "))
        for token in normalized_garbage.split(" "):
            # Re-normalise before each removal, exactly as the tokens were.
            cleaned = unidecode(remove_spaces_from_string(cleaned))
            cleaned = cleaned.replace(remove_spaces_from_string(token), "")
    return str(remove_spaces_from_string(cleaned)).upper()
def check_is_comment_is_build(comment: str):
    """Return True when *comment* appears to contain building-related words.

    NOTE(review): the containment direction (candidate part *inside* the
    keyword) is kept from the original even though it looks reversed —
    "apartman" can never match "apt" this way. Confirm intent before flipping.
    """
    has_build_words = False
    candidate_parts = remove_spaces_from_string(comment.replace("*", " ")).split(" ")
    for candidate_part in candidate_parts:
        candidate_part = remove_spaces_from_string(candidate_part).replace(":", "")
        if not candidate_part:
            # Bug fix: "" is a substring of everything, so a part that was only
            # ":" used to force a false positive for every keyword.
            continue
        for build_word in ["no", "daire", "apt", "apartman"]:
            if unidecode(candidate_part).upper() in unidecode(build_word).upper():
                has_build_words = True
                break
    return has_build_words
def get_list_of_build_words(comment: str):
    """Scan *comment* for building keywords and pull nearby digits.

    For each keyword ("no", "nolu", "daire", "apt", "apartman") present in the
    normalised comment, a window from 5 characters before its first occurrence
    to 5 characters after its end is inspected; any digits found in the window
    become the candidate part number.

    Returns:
        list[dict]: items shaped {"garbage": <window text>, "number": int}.
    """
    build_words = []
    candidate_parts = remove_spaces_from_string(comment.replace("*", " "))
    for build_word in ["no", "nolu", "daire", "apt", "apartman"]:
        if unidecode(build_word).upper() in unidecode(candidate_parts).upper():
            # Window boundaries around the keyword's first occurrence.
            st = unidecode(candidate_parts).upper().index(unidecode(build_word).upper())
            et = st + len(build_word)
            st = st - 5 if st > 5 else 0
            et = et + 5 if et + 5 <= len(candidate_parts) else len(candidate_parts)
            number_digit = "".join(letter for letter in str(candidate_parts[st:et]) if letter.isdigit())
            if number_digit:
                rt_dict = {"garbage": candidate_parts[st:et], "number": int(number_digit) if number_digit else None}
                build_words.append(rt_dict)
    return build_words
def generate_pattern(word):
    """Build a regex pattern around *word* for number-suffix matching.

    NOTE(review): the assembled pattern looks garbled — ``adda_string`` is
    missing the backslash of ``\\d``, ``$`` anchors appear mid-pattern, and
    ``{1, N}`` contains a space so ``re`` treats it literally rather than as a
    quantifier. Confirm the intended grammar before relying on this; it is
    only exercised by the ``test_pattern`` debug helper.

    Raises:
        ValueError: when *word* is empty.
    """
    if len(word) < 1:
        raise ValueError("The word must have at least 1 character.")
    add_string, add_match = "\d{1,3}$\s?$", f"{{1, {len(word)}}}"
    adda_string = "d{1,3}$\s?\^["
    return adda_string + f"{word}]" + add_match + rf"{word}(?:e|é|ı|i|ğr)?" + add_string
def test_pattern(word, test_cases):
    """Debug helper: print which *test_cases* match the pattern for *word*."""
    # Generate the pattern once, then try every case against it.
    pattern = generate_pattern(word)
    for test in test_cases:
        matched = re.match(pattern, test, re.IGNORECASE)
        if matched:
            print(f"'{test}' matches the pattern.", "*" * 60)
        else:
            print(f"'{test}' does NOT match the pattern.")
def parse_comment_for_living_space(iban: str, comment: str, living_space_dict: dict = None):
    """Find the living space whose resident's name best matches *comment*.

    Builds name permutations ("FIRST LAST", "LAST FIRST", with middle name
    when present) for every person attached to the IBAN, strips garbage words
    from the comment and keeps the highest Jaro-Winkler score above 0.9.

    Returns:
        dict: keys ``company``, ``living_space``, ``found_from``,
            ``similarity``, ``garbage``, ``cleaned``.
    """
    comment = unidecode(comment)
    best_similarity = dict(company=None, living_space=None, found_from=None, similarity=0.0, garbage="", cleaned="",)
    if not iban in living_space_dict:
        return best_similarity
    for person in living_space_dict[iban]["people"]:
        firstname = person.get("firstname")
        surname = person.get("surname")
        middle_name = person.get("middle_name")
        first_name = unidecode(firstname).upper()
        last_name = unidecode(surname).upper()
        # Try both name orderings; add middle-name variants when present.
        search_word_list = [
            remove_spaces_from_string("".join([f"{first_name} {last_name}"])),
            remove_spaces_from_string("".join([f"{last_name} {first_name}"])),
        ]
        if middle_name := unidecode(middle_name).upper():
            search_word_list.append(remove_spaces_from_string(f"{first_name} {middle_name} {last_name}"))
            search_word_list.append(remove_spaces_from_string(f"{last_name} {middle_name} {first_name}"))
        cleaned_comment = unidecode(comment).upper()
        for search_word in search_word_list:
            garbage_words = get_garbage_words(comment, unidecode(search_word))
            if garbage_words:
                garbage_words = unidecode(garbage_words).upper()
                cleaned_comment = unidecode(remove_garbage_words(comment, garbage_words)).upper()
            similarity_ratio = textdistance.jaro_winkler(cleaned_comment, str(search_word).upper())
            # Skip when the residue is shorter than the bare name (over-stripped).
            if len(cleaned_comment) < len(f"{first_name}{last_name}"):
                continue
            if cleaned_comment and 0.9 < similarity_ratio <= 1:
                print("cleaned comment dict", dict(garbage=garbage_words, cleaned=cleaned_comment, similarity=similarity_ratio, search_word=search_word, comment=comment, last_similarity=float(best_similarity["similarity"])))
                if similarity_ratio > float(best_similarity["similarity"]):
                    # Map the matched person to their living-space record.
                    for living_space in living_space_dict[iban]["living_space"]:
                        if living_space.get("person_id") == person.get("id"):
                            best_similarity = {"company": None, "living_space": living_space, "found_from": "Person Name", "similarity": similarity_ratio, "garbage": garbage_words, "cleaned": cleaned_comment,}
    return best_similarity
def parse_comment_for_build_parts(comment: str, max_build_part: int = 200, parse: str = "DAIRE"):
    """Extract build-part numbers (bounded by *max_build_part*) from *comment*
    using the category finder; return them as ints, or None when none found."""
    results = category_finder(comment)
    print("results[parse]", results[parse])
    found_numbers = []
    for candidate in results[parse] or []:
        digits = "".join(ch for ch in str(candidate) if ch.isdigit())
        if digits:
            print("digits", digits)
            # Discard numbers larger than the building can have.
            if int(digits) <= int(max_build_part):
                found_numbers.append(int(digits))
    return found_numbers or None
def parse_comment_for_company_or_individual(comment: str):
    """Find the non-commercial company whose public name best matches *comment*.

    Scores every company (commercial_type != "Commercial") with Jaro-Winkler
    similarity against the garbage-stripped comment and keeps the best.

    Returns:
        dict: keys ``company``, ``living_space``, ``found_from``,
            ``similarity``, ``garbage``, ``cleaned``.
    """
    with Companies.new_session() as session:
        Companies.set_session(session)
        companies_list = Companies.query.filter(Companies.commercial_type != "Commercial").all()
        comment = unidecode(comment)
        best_similarity = dict(company=None, living_space=None, found_from=None, similarity=0.0, garbage="", cleaned="",)
        for company in companies_list:
            search_word = unidecode(company.public_name)
            garbage_words = get_garbage_words(comment, search_word)
            cleaned_comment = remove_garbage_words(comment, garbage_words)
            similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word)
            # Keep only strictly better matches.
            if similarity_ratio > float(best_similarity["similarity"]):
                best_similarity = {"company": company, "living_space": None, "found_from": "Customer Public Name", "similarity": similarity_ratio, "garbage": garbage_words, "cleaned": cleaned_comment,}
            # print(
            #     'cleaned_comment', cleaned_comment, '\n'
            #     'search_word', search_word, '\n'
            #     'best_similarity', best_similarity, '\n'
            #     'company name', company.public_name, '\n'
            #     'similarity_ratio', similarity_ratio, '\n'
            #     'garbage_words', garbage_words
            # )
        return best_similarity
def parse_comment_to_split_with_star(account_record: dict):
    """Split the record's process_comment on '*'.

    Double stars are collapsed to one before splitting. Returns a flat tuple
    ``(segment_count, *segments)``; a comment without '*' comes back as a
    single segment with count 1.
    """
    raw_comment = account_record.get("process_comment", "")
    if "*" not in raw_comment:
        return 1, raw_comment
    segments = str(raw_comment.replace("**", "*")).split("*")
    return (len(segments), *segments)
def check_build_living_space_matches_with_build_parts(living_space_dict: dict, best_similarity: dict, iban: str, whole_comment: str):
    """Boost a mid-confidence name match when the comment also names the
    matched flat's part number.

    Only similarities strictly between 0.6 and 0.8 are adjusted; on a part
    match the score is moved halfway toward 1.0. The input dict is mutated
    in place and also returned.
    """
    if 0.6 < float(best_similarity["similarity"]) < 0.8:
        build_parts = living_space_dict[iban]["build_parts"]
        if best_similarity["living_space"]:
            build_parts_id = best_similarity["living_space"].build_parts_id
            parser_dict = dict(comment=str(whole_comment), max_build_part=len(build_parts))
            print("build parts similarity", best_similarity, "parser_dict", parser_dict)
            results_list = parse_comment_for_build_parts(**parser_dict)
            print("results_list", results_list)
            if not results_list:
                return best_similarity
            for build_part in build_parts:
                print("part_no", int(build_part.part_no), " | ", results_list)
                print("build_part", int(build_part.id), int(build_parts_id))
                print("cond", int(build_part.id) == int(build_parts_id))
                print("cond2", int(build_part.part_no) in results_list)
                # The matched flat must be the same row AND its part number
                # must appear among the numbers parsed from the comment.
                if (int(build_part.id) == int(build_parts_id) and int(build_part.part_no) in results_list):
                    similarity = float(best_similarity["similarity"])
                    # Halve the distance to a perfect score as the boost.
                    best_similarity["similarity"] = (1 - similarity) / 2 + similarity
                    print("similarity", best_similarity["similarity"])
                    break
    return best_similarity
def parse_comment_with_name(account_record: dict, living_space_dict: dict = None):
    """Route a bank record's comment to the right name matcher.

    Incoming money (currency_value > 0) is matched against living-space
    residents (with a build-part boost); outgoing money is matched against
    companies/individuals. Multi-segment comments (split on '*') keep the
    best-scoring segment.
    """
    comments = parse_comment_to_split_with_star(account_record=account_record)
    best_similarity = {"similarity": 0.0}
    comments_list, comments_length = comments[1:], int(comments[0])
    if int(account_record.get("currency_value", 0)) > 0:  # Build receive money from living space people
        living_space_matches = dict(living_space_dict=living_space_dict, iban=account_record.get("iban", None), whole_comment=account_record.get("process_comment", None))
        if comments_length == 1:
            best_similarity = parse_comment_for_living_space(iban=account_record.get("iban", None), comment=comments_list[0], living_space_dict=living_space_dict)
            # NOTE(review): the result dict never carries a "customer_id" key,
            # so send_person_id is always None here — confirm intended source.
            best_similarity["send_person_id"] = best_similarity.get("customer_id", None)
            living_space_matches["best_similarity"] = best_similarity
            # if 0.5 < float(best_similarity['similarity']) < 0.8
            best_similarity = check_build_living_space_matches_with_build_parts(**living_space_matches)
            return best_similarity
        for comment in comments_list:
            similarity_result = parse_comment_for_living_space(iban=account_record.get("iban", None), comment=comment, living_space_dict=living_space_dict)
            if float(similarity_result["similarity"]) > float(best_similarity["similarity"]):
                best_similarity = similarity_result
        living_space_matches["best_similarity"] = best_similarity
        # if 0.5 < float(best_similarity['similarity']) < 0.8:
        best_similarity = check_build_living_space_matches_with_build_parts(**living_space_matches)
        print("last best_similarity", best_similarity)
        return best_similarity
    else:  # Build pays money for service taken from company or individual
        if not comments_length > 1:
            best_similarity = parse_comment_for_company_or_individual(comment=comments_list[0])
            # NOTE(review): same missing-"customer_id" concern as above.
            best_similarity["send_person_id"] = best_similarity.get("customer_id", None)
            return best_similarity
        for comment in comments_list:
            similarity_result = parse_comment_for_company_or_individual(comment=comment)
            if float(similarity_result["similarity"]) > float(best_similarity["similarity"]):
                best_similarity = similarity_result
        return best_similarity
def parse_comment_with_name_iban_description(account_record: dict):
    """Match an account record's comment against IBAN search-word descriptions.

    Splits the record's process comment into candidate fragments, compares each
    fragment (after garbage-word removal) with every ``BuildIbanDescription``
    search word registered for the record's IBAN, and keeps the best match.

    Args:
        account_record: dict form of an AccountRecords row; ``iban`` and the
            comment fields consumed by ``parse_comment_to_split_with_star``
            are read.

    Returns:
        dict with keys ``company`` (Companies row or None), ``living_space``
        (always None here), ``found_from``, ``similarity``, ``garbage`` and
        ``cleaned``.
    """
    with AccountRecords.new_session() as session:
        BuildIbanDescription.set_session(session)
        Companies.set_session(session)
        comments = parse_comment_to_split_with_star(account_record=account_record)
        # comments[0] is the fragment count; only the fragments are needed here.
        comments_list = comments[1:]
        iban_results = BuildIbanDescription.query.filter(BuildIbanDescription.iban == account_record.get("iban", "")).all()
        best_similarity = dict(company=None, living_space=None, found_from=None, similarity=0.0, garbage="", cleaned="")
        for comment in comments_list:
            for iban_result in iban_results:
                search_word = unidecode(iban_result.search_word)
                garbage_words = get_garbage_words(comment, search_word)
                cleaned_comment = remove_garbage_words(comment, garbage_words)
                similarity_ratio = textdistance.jaro_winkler(cleaned_comment, search_word)
                if float(similarity_ratio) > float(best_similarity["similarity"]):
                    # Query the company only when this candidate actually improves
                    # the best score; the original fetched it on every iteration
                    # and discarded most results (one DB round-trip per pair).
                    company = Companies.query.filter_by(id=iban_result.company_id).first()
                    best_similarity = {
                        "company": company,
                        "living_space": None,
                        "found_from": "Customer Public Name Description",
                        "similarity": similarity_ratio,
                        "garbage": garbage_words,
                        "cleaned": cleaned_comment,
                    }
        return best_similarity
# "decision_book_project_id": None,
# "build_parts_id": None,
# "decision_book_project_id": iban_result.decision_book_project_id,
# "build_parts_id": iban_result.build_parts_id,
# print('account_record.process_comment', account_record.process_comment)
# test_pattern(
# word=unidecode("no"),
# test_cases=[account_record.process_comment]
# )
# test_pattern(word="daire", test_cases=comments_list)
# sm_dict_extended, sm_dict_digit = {}, {}
# iban_results = BuildIbanDescription.filter_all(
# BuildIbanDescription.iban == iban, system=True
# ).data
# for iban_result in iban_results or []:
# candidate_parts = comment.split(" ")
# extended_candidate_parts, digit_part = [], []
# for part in candidate_parts:
# if part.lower() not in ["no", "daire", "nolu"]:
# extended_candidate_parts.append(part)
# if extended_candidate_parts:
# if all(
# candidate_part.lower() in comment.lower()
# for candidate_part in extended_candidate_parts
# ):
# similarity_ratio = textdistance.jaro_winkler(
# unidecode(str(iban_result.search_word)), comment
# )
# found = False
# name_list = (
# unidecode(str(iban_result.search_word)).replace(".", " ").split(" ")
# )
# for name in name_list:
# if len(name) > 3 and name.lower() in comment.lower():
# found = True
# break
#
# if not found:
# similarity_ratio = 0.1
# sm_dict_extended[f"{iban_result.id}"] = similarity_ratio
# if sm_dict_extended:
# result = sorted(
# sm_dict_extended.items(), key=lambda item: item[1], reverse=True
# )[0]
# if float(result[1]) >= 0.5:
# iban_result = BuildIbanDescription.filter_one(
# BuildIbanDescription.id == int(result[0]), system=True
# ).data
# return {
# "company_id": iban_result.company_id,
# "customer_id": iban_result.customer_id,
# "found_from": "Name",
# "similarity": result[1],
# }
# return {
# "company_id": None,
# "customer_id": None,
# "found_from": None,
# "similarity": 0.0,
# }
#
# def wag_insert_budget_record(data):
# similarity_result = parse_comment_with_name(data["iban"], data["process_comment"])
# build_iban = BuildIbans.find_one(iban=data["iban"])
#
# if payload := InsertBudgetRecord(**data):
# payload_dict = payload.model_dump(exclude_unset=True, exclude_none=True)
# decision_books = BuildDecisionBook.select_only(
# BuildDecisionBook.period_start_date
# < strip_date_to_valid(payload_dict["bank_date"]),
# BuildDecisionBook.period_stop_date
# > strip_date_to_valid(payload_dict["bank_date"]),
# select_args=[BuildDecisionBook.id],
# order_by=[BuildDecisionBook.expiry_ends.desc()],
# )
# payload_dict["build_id"] = getattr(
# BuildIbans.find_one(iban=data["iban"]), "build_id", None
# )
# living_space, count = BuildLivingSpace.find_living_from_customer_id(
# similarity_result.get("customer_id", None),
# strip_date_to_valid(payload_dict["bank_date"]),
# )
# # living_space, count = BuildLivingSpace.filter(
# # or_(
# # BuildLivingSpace.owner_person_id
# # == similarity_result.get("customer_id", None),
# # BuildLivingSpace.life_person_id
# # == similarity_result.get("customer_id", None),
# # ),
# # BuildLivingSpace.start_date
# # < strip_date_to_valid(payload_dict["bank_date"]) - timedelta(days=30),
# # BuildLivingSpace.stop_date
# # > strip_date_to_valid(payload_dict["bank_date"]) + timedelta(days=30),
# # BuildLivingSpace.active == True,
# # BuildLivingSpace.deleted == False,
# # )
# payload_dict["build_decision_book_id"] = (
# decision_books[0][0].id if decision_books else None
# )
# payload_dict["company_id"] = similarity_result.get("company_id", None)
# payload_dict["customer_id"] = similarity_result.get("customer_id", None)
# payload_dict["send_person_id"] = similarity_result.get("send_person_id", None)
#
# payload_dict["build_parts_id"] = (
# living_space[0].build_parts_id if living_space else None
# )
#
# payload_dict["bank_date_y"] = strip_date_to_valid(
# payload_dict["bank_date"]
# ).year
# payload_dict["bank_date_m"] = strip_date_to_valid(
# payload_dict["bank_date"]
# ).month
# payload_dict["bank_date_d"] = strip_date_to_valid(payload_dict["bank_date"]).day
# payload_dict["bank_date_w"] = strip_date_to_valid(
# payload_dict["bank_date"]
# ).isocalendar()[2]
# payload_dict["build_id"] = build_iban.build_id if build_iban else None
# payload_dict["replication_id"] = 55
# payload_dict["receive_debit"] = (
# "R" if payload_dict["currency_value"] < 0 else "D"
# )
# data, found = AccountRecords.find_or_create(
# **payload_dict,
# found_from=similarity_result.get("found_from", None),
# similarity=similarity_result.get("similarity", 0.0),
# )
# data.payment_budget_record_close()
# return data, found

View File

@ -0,0 +1,266 @@
import sys
import arrow
if "/service_account_records" not in list(sys.path):
sys.path.append("/service_account_records")
from decimal import Decimal
from pydantic import BaseModel
from typing import Optional, Union
from sqlalchemy import func, cast, Date
from Schemas import AccountRecords, BuildIbans, BuildDecisionBook, Build, BuildLivingSpace, People, OccupantTypes, BuildParts, BuildDecisionBookPayments, ApiEnumDropdown
from account_record_parser import parse_comment_with_name, parse_comment_with_name_iban_description
# from ServicesApi.Schemas.account.account import AccountRecords
# from ServicesApi.Schemas.building.build import BuildIbans, BuildDecisionBook, BuildParts, BuildLivingSpace, Build
# from ServicesApi.Schemas.identity.identity import People, OccupantTypes
# from ServicesApi.Schemas.others.enums import ApiEnumDropdown
# from ServicesApi.Schemas.building.decision_book import BuildDecisionBookPayments
# AccountRecords.approved_record = False
def account_find_build_from_iban(session):
    """Link account records to buildings via their IBAN.

    For every distinct IBAN among unapproved records that have no build yet:
    create a placeholder ``BuildIbans`` row when the IBAN is unknown, otherwise
    copy the known build id/uuid onto all account records with that IBAN.

    Args:
        session: An open SQLAlchemy session shared with the model classes.

    Returns:
        List of IBANs for which a placeholder row was created (previously
        collected into a dead local and discarded).
    """
    created_ibans = []
    AccountRecords.set_session(session)
    BuildIbans.set_session(session)
    account_records_ibans = AccountRecords.query.filter(AccountRecords.build_id == None, AccountRecords.approved_record == False).distinct(AccountRecords.iban).all()
    for account_records_iban in account_records_ibans:
        found_iban: BuildIbans = BuildIbans.query.filter(BuildIbans.iban == account_records_iban.iban).first()
        if not found_iban:
            # Unknown IBAN: create a stub row (start_date slightly in the past)
            # so an operator can later attach it to the right building.
            create_build_ibans = BuildIbans.create(iban=account_records_iban.iban, start_date=str(arrow.now().shift(days=-1)))
            create_build_ibans.save()
            created_ibans.append(account_records_iban.iban)
        else:
            # Known IBAN: bulk-update every record carrying it with the build ids.
            update_dict = {"build_id": found_iban.build_id, "build_uu_id": str(found_iban.build_uu_id)}
            session.query(AccountRecords).filter(AccountRecords.iban == account_records_iban.iban).update(update_dict, synchronize_session=False)
            session.commit()
    # Surfacing the list lets callers log which IBANs still need manual assignment.
    return created_ibans
def account_records_find_decision_book(session):
    """Attach the matching decision book to records that already have a build.

    A decision book matches when its expiry window (``expiry_starts`` ..
    ``expiry_ends``, compared as dates) contains the record's bank date for the
    building behind the record's IBAN.

    Args:
        session: An open SQLAlchemy session shared with the model classes.
    """
    AccountRecords.set_session(session)
    BuildIbans.set_session(session)
    BuildDecisionBook.set_session(session)
    # (The original also initialized ``created_ibans`` / ``iban_build_dict``
    # locals that were never used; they are removed.)
    account_records_list: list[AccountRecords] = AccountRecords.query.filter(AccountRecords.build_id != None, AccountRecords.build_decision_book_id == None).order_by(AccountRecords.bank_date.desc()).all()
    for account_record in account_records_list:
        if found_iban := BuildIbans.query.filter(BuildIbans.iban == account_record.iban).first():
            if found_decision_book := BuildDecisionBook.query.filter(
                BuildDecisionBook.build_id == found_iban.build_id,
                cast(BuildDecisionBook.expiry_starts, Date) <= cast(account_record.bank_date, Date),
                cast(BuildDecisionBook.expiry_ends, Date) >= cast(account_record.bank_date, Date),
            ).first():
                account_record.build_decision_book_id = found_decision_book.id
                account_record.build_decision_book_uu_id = str(found_decision_book.uu_id)
                account_record.save()
def account_get_people_and_living_space_info_via_iban(session=None) -> dict:
    """Collect, per IBAN, the people / living spaces / livable build parts.

    Only account records already linked to a decision book are considered.

    Args:
        session: Optional open session to reuse. The original body referenced an
            undefined ``session`` (NameError on first call); when None, a fresh
            session is opened here so the no-argument call sites keep working.

    Returns:
        ``{iban: {"people": [...], "living_space": [...], "build_parts": [...]}}``
        where each entry is a plain dict of ids/uuids suitable for the
        similarity matchers.
    """
    if session is None:
        with AccountRecords.new_session() as owned_session:
            return account_get_people_and_living_space_info_via_iban(session=owned_session)
    build_living_space_dict = {}
    AccountRecords.set_session(session)
    OccupantTypes.set_session(session)
    BuildParts.set_session(session)
    BuildLivingSpace.set_session(session)
    People.set_session(session)
    account_records_ibans = AccountRecords.query.filter(AccountRecords.build_decision_book_id != None).distinct(AccountRecords.iban).all()
    # Occupant roles that count as "living" in a flat (resident, owner,
    # tenant, representative).
    flat_resident = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-RES").first()
    flat_owner = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-OWN").first()
    flat_tenant = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-TEN").first()
    flat_represent = OccupantTypes.query.filter_by(occupant_category_type="FL", occupant_code="FL-REP").first()
    for account_records_iban in account_records_ibans:
        if account_records_iban.iban in build_living_space_dict:
            continue
        build_parts_list = BuildParts.query.filter_by(build_id=account_records_iban.build_id, human_livable=True).all()
        living_spaces = BuildLivingSpace.query.filter(
            BuildLivingSpace.build_parts_id.in_([bp.id for bp in build_parts_list]),
            BuildLivingSpace.occupant_type_id.in_([flat_resident.id, flat_owner.id, flat_tenant.id, flat_represent.id]),
        ).all()
        person_ids = [ls.person_id for ls in living_spaces if ls.person_id]
        people_list = People.query.filter(People.id.in_(person_ids)).all()
        people_list_dict = [
            {"id": p.id, "uu_id": str(p.uu_id), "firstname": p.firstname, "surname": p.surname, "middle_name": p.middle_name}
            for p in people_list
        ]
        living_spaces_dict = [
            {"id": ls.id, "uu_id": str(ls.uu_id), "person_id": ls.person_id, "person_uu_id": str(ls.person_uu_id)}
            for ls in living_spaces
        ]
        # The original iterated ``for build_parts in build_parts``, shadowing the
        # query-result list after the loop; a distinct name avoids that trap.
        build_parts_dict = [{"id": bp.id, "uu_id": str(bp.uu_id)} for bp in build_parts_list]
        build_living_space_dict[str(account_records_iban.iban)] = {
            "people": people_list_dict,
            "living_space": living_spaces_dict,
            "build_parts": build_parts_dict,
        }
    return build_living_space_dict
def account_records_search(session=None):
    """Run comment-similarity matching over decision-book-linked records.

    First tries person/living-space matching; when that result is below the 0.8
    confidence threshold (or not better than the stored score), falls back to
    IBAN-description matching. Accepted matches are persisted via
    ``account_save_search_result``.

    Args:
        session: Accepted for compatibility with callers that pass one (the
            service entry point calls ``account_records_search(session=...)``,
            which raised TypeError against the original zero-arg signature).
            The function manages its own session for its queries either way.
    """
    build_living_space_dict = account_get_people_and_living_space_info_via_iban()
    found_list = []
    with AccountRecords.new_session() as db_session:
        AccountRecords.set_session(db_session)
        account_records_list: list[AccountRecords] = AccountRecords.query.filter(AccountRecords.build_decision_book_id != None).all()
        for account_record in account_records_list:
            account_record_dict = account_record.get_dict()
            similarity_result = parse_comment_with_name(account_record=account_record_dict, living_space_dict=build_living_space_dict)
            found_score = float(similarity_result.get("similarity", 0))
            current_score = float(account_record_dict.get("similarity", 0))
            if found_score >= 0.8 and found_score >= current_score:
                print("similarity_result positive", similarity_result)
                found_list.append(similarity_result)
                account_save_search_result(account_record=account_record_dict, similarity_result=similarity_result)
            else:
                # Fallback: match against the IBAN's registered descriptions.
                # Note the stricter ``>`` (not ``>=``) against the stored score.
                similarity_result = parse_comment_with_name_iban_description(account_record=account_record_dict)
                found_score = float(similarity_result.get("similarity", 0))
                current_score = float(account_record_dict.get("similarity", 0))
                if found_score >= 0.8 and found_score > current_score:
                    print("similarity_result negative", similarity_result)
                    found_list.append(similarity_result)
                    account_save_search_result(account_record=account_record_dict, similarity_result=similarity_result)
        print("Account Records Search : ", len(found_list), "/", len(account_records_list))
    return
def account_save_search_result(account_record, similarity_result, session=None):
    """Persist a similarity match onto its account record.

    Resolves the matched living space to a build part and building, then
    updates the record identified by ``account_record['uu_id']`` with the match
    metadata.

    Args:
        account_record: dict form of the AccountRecords row being updated.
        similarity_result: dict produced by one of the parsers; ``company`` is
            an ORM row (or None) and ``living_space`` is a plain dict (or None).
        session: Optional open session to reuse. The original referenced an
            undefined ``session`` (NameError); when None one is opened here.
    """
    if session is None:
        with AccountRecords.new_session() as owned_session:
            return account_save_search_result(account_record, similarity_result, session=owned_session)
    AccountRecords.set_session(session)  # original relied on a caller having set this
    BuildParts.set_session(session)
    Build.set_session(session)
    BuildLivingSpace.set_session(session)
    found_company = similarity_result.get("company", None)
    found_customer = similarity_result.get("living_space", None)
    part, build = None, None
    if found_customer:
        found_living_space = BuildLivingSpace.query.filter_by(id=int(found_customer.get("id"))).first()
        part = BuildParts.query.filter_by(id=found_living_space.build_parts_id, human_livable=True).first()
        if part:
            build = Build.query.filter_by(id=part.build_id).first()
    account_record_dict = {
        "similarity": similarity_result.get("similarity", 0.00),
        "found_from": similarity_result.get("found_from", None),
        "company_id": getattr(found_company, "id", None),
        "company_uu_id": str(found_company.uu_id) if getattr(found_company, "uu_id", None) else None,
        "build_parts_id": getattr(part, "id", None),
        "build_parts_uu_id": str(part.uu_id) if getattr(part, "uu_id", None) else None,
        # BUG FIX: ``found_customer`` is a dict (see ``.get("id")`` above), so
        # the original ``getattr(found_customer, ...)`` always produced None
        # and the living-space link was never saved.
        "living_space_id": found_customer.get("id") if found_customer else None,
        "living_space_uu_id": str(found_customer.get("uu_id")) if found_customer and found_customer.get("uu_id") else None,
    }
    if not account_record.get("build_id") and build:
        account_record_dict.update({"build_id": build.id, "build_uu_id": str(build.uu_id) if getattr(build, "uu_id", None) else None})
    AccountRecords.query.filter_by(uu_id=str(account_record.get("uu_id"))).update(account_record_dict)
    session.commit()
def pay_the_registration(account_record, receive_enum, debit_enum, is_old_record: bool = False):
    """Consume an account record's unapplied balance against open debit payments.

    Walks the building part's unclaimed debit rows (oldest first) and, for each
    one that is not yet fully covered by receive rows, creates a matching
    "receive" payment for the uncovered amount and advances the record's
    ``remainder_balance``.

    Args:
        account_record: AccountRecords row whose money is being applied.
        receive_enum: ApiEnumDropdown row for the receive payment type (DT-R).
        debit_enum: ApiEnumDropdown row for the debit payment type (DT-D).
        is_old_record: When True, drop the bank month/year filter so older
            open debits can be settled too (second pass).

    Returns:
        The remaining (still unapplied) currency value as a float.
    """
    with AccountRecords.new_session() as session:
        AccountRecords.set_session(session)
        BuildDecisionBookPayments.set_session(session)
        # Unapplied money = full amount minus what was already distributed.
        current_currency_value = float(Decimal(account_record.currency_value)) - float(Decimal(account_record.remainder_balance))
        if not current_currency_value > 0:
            return current_currency_value
        process_date = arrow.get(account_record.bank_date)
        account_bank_date_year, account_bank_date_month = (process_date.date().year, process_date.date().month)
        # Open debit rows for this part that no account record has claimed yet.
        payment_arguments_debit = [
            BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id,
            BuildDecisionBookPayments.payment_types_id == debit_enum.id,
            BuildDecisionBookPayments.account_records_id == None,
        ]
        if not is_old_record:
            # First pass: only debits from the record's own bank month/year.
            payment_arguments_debit.extend([BuildDecisionBookPayments.process_date_y == int(account_bank_date_year), BuildDecisionBookPayments.process_date_m == int(account_bank_date_month)])
        payments = BuildDecisionBookPayments.query.filter(*payment_arguments_debit).order_by(BuildDecisionBookPayments.process_date.asc()).all()
        for payment in payments:
            if not current_currency_value > 0:
                return current_currency_value
            # Receive rows already booked against this same debit line.
            payment_arguments_receive = [
                BuildDecisionBookPayments.build_parts_id == account_record.build_parts_id,
                BuildDecisionBookPayments.payment_plan_time_periods == payment.payment_plan_time_periods,
                BuildDecisionBookPayments.payment_types_id == receive_enum.id,
                BuildDecisionBookPayments.build_decision_book_item_id == payment.build_decision_book_item_id,
                BuildDecisionBookPayments.decision_book_project_id == payment.decision_book_project_id,
                BuildDecisionBookPayments.process_date == payment.process_date,
            ]
            if not is_old_record:
                payment_arguments_receive.extend([BuildDecisionBookPayments.process_date_y == int(account_bank_date_year), BuildDecisionBookPayments.process_date_m == int(account_bank_date_month)])
            payment_received = BuildDecisionBookPayments.query.filter(*payment_arguments_receive).all()
            sum_of_payment_received = sum([abs(payment.payment_amount) for payment in payment_received])
            # Still-uncovered part of this debit; skip if nothing remains.
            net_amount = float(abs(Decimal(payment.payment_amount))) - float(abs(Decimal(sum_of_payment_received)))
            if not net_amount > 0:
                continue
            if float(abs(current_currency_value)) < float(abs(net_amount)):
                # Cap at what is left on the account record.
                net_amount = float(current_currency_value)
            # Reuse of ``process_date``: from here on it is the DEBIT row's date,
            # not the account record's bank date.
            process_date = arrow.get(payment.process_date)
            try:
                # Idempotence guard: skip if an identical receive row exists.
                found_payment = BuildDecisionBookPayments.query.filter_by(
                    build_parts_id=payment.build_parts_id,
                    payment_plan_time_periods=payment.payment_plan_time_periods,
                    payment_types_id=receive_enum.id,
                    build_decision_book_item_id=payment.build_decision_book_item_id,
                    decision_book_project_id=payment.decision_book_project_id,
                    process_date=str(process_date),
                ).first()
                if found_payment:
                    continue
                created_book_payment = BuildDecisionBookPayments.create(
                    payment_plan_time_periods=payment.payment_plan_time_periods,
                    payment_amount=float(abs(net_amount)),
                    payment_types_id=receive_enum.id,
                    payment_types_uu_id=str(receive_enum.uu_id),
                    process_date=str(process_date),
                    process_date_m=process_date.date().month,
                    process_date_y=process_date.date().year,
                    period_time=f"{process_date.year}-{str(process_date.month).zfill(2)}",
                    build_parts_id=payment.build_parts_id,
                    build_parts_uu_id=str(payment.build_parts_uu_id),
                    account_records_id=account_record.id,
                    account_records_uu_id=str(account_record.uu_id),
                    build_decision_book_item_id=payment.build_decision_book_item_id,
                    build_decision_book_item_uu_id=str(payment.build_decision_book_item_uu_id),
                    decision_book_project_id=payment.decision_book_project_id,
                    decision_book_project_uu_id=str(payment.decision_book_project_uu_id),
                )
                created_book_payment.save_and_confirm()
                # Advance the record's applied total by what was just booked.
                created_payment_amount = float(Decimal(created_book_payment.payment_amount))
                remainder_balance = float(Decimal(account_record.remainder_balance)) + float(abs(created_payment_amount))
                account_record.update(remainder_balance=remainder_balance)
                account_record.save()
                if current_currency_value >= abs(net_amount):
                    current_currency_value -= abs(net_amount)
            except Exception as e:
                # NOTE(review): broad catch keeps the loop going on one bad row;
                # failures are only printed, never logged or re-raised.
                print("Exception of decision payment ln:300", e)
        return current_currency_value
def send_accounts_to_decision_payment(session=None):
    """Apply received account balances to outstanding decision-book payments.

    Processes up to 1000 most-recent approved records whose applied remainder
    is still below the record's currency value: first within the record's own
    bank month, then (if value remains) against older periods. Fully-consumed
    records are marked with status 97.

    Args:
        session: Optional open session to reuse. The service entry point calls
            this with ``session=...``, which raised TypeError against the
            original zero-arg signature; when None one is opened here.
    """
    if session is not None:
        return _send_accounts_to_decision_payment_impl(session)
    with ApiEnumDropdown.new_session() as owned_session:
        return _send_accounts_to_decision_payment_impl(owned_session)


def _send_accounts_to_decision_payment_impl(session):
    # Does the actual matching work under an already-open session.
    ApiEnumDropdown.set_session(session)
    AccountRecords.set_session(session)
    receive_enum = ApiEnumDropdown.query.filter_by(enum_class="DebitTypes", key="DT-R").first()
    debit_enum = ApiEnumDropdown.query.filter_by(enum_class="DebitTypes", key="DT-D").first()
    # NOTE(review): ``receive_debit`` is compared to the enum row id here, but
    # legacy code in this file stored "R"/"D" codes in that column — confirm
    # which representation is live before relying on this filter.
    account_records_list: list[AccountRecords] = AccountRecords.query.filter(
        AccountRecords.remainder_balance < AccountRecords.currency_value,
        AccountRecords.approved_record == True,
        AccountRecords.receive_debit == receive_enum.id,
    ).order_by(AccountRecords.bank_date.desc()).limit(1000).offset(0).all()
    for account_record in account_records_list:
        current_currency_value = pay_the_registration(account_record, receive_enum, debit_enum)
        if current_currency_value > 0:
            # Second pass ignores the month/year restriction (is_old_record=True).
            pay_the_registration(account_record, receive_enum, debit_enum, True)
        if abs(float(Decimal(account_record.remainder_balance))) == abs(float(Decimal(account_record.currency_value))):
            account_record.update(status_id=97)  # fully settled
            account_record.save()
    # TODO: if the payment exceeds the open amount, create a new account record
    # with the remaining value (carried over from the original note).
    return
def account_records_service() -> None:
    """Run the full account-records pipeline once.

    Order matters: records must be linked to a building before a decision book
    can be found, and both before similarity search and payment distribution.

    The original body passed an undefined ``session`` to every step (NameError,
    and TypeError for the steps that take no session); a session is now opened
    here for the steps that require one.
    """
    print("Account Records Service is running...")
    with AccountRecords.new_session() as session:
        account_find_build_from_iban(session=session)
        account_records_find_decision_book(session=session)
    # These two manage their own sessions internally.
    account_records_search()
    send_accounts_to_decision_payment()
    print("Account Records Service is finished...")


if __name__ == "__main__":
    account_records_service()

View File

@ -0,0 +1,8 @@
class AccountConfig:
    """Static configuration for the account-record comment parser."""

    # Look-back window in days used when matching records.
    BEFORE_DAY = 30
    # Canonical category name -> spelling variants seen in bank comments
    # (Turkish: DAIRE = flat, APARTMAN = apartment building, BINA = building;
    # "no"/"nolu" are the "number" markers that precede the flat number).
    CATEGORIES = {
        "DAIRE": ["daire", "dagire", "daare", "nolu daire", "no", "nolu dairenin"],
        "APARTMAN": ["apartman", "aparman", "aprmn"],
        "VILLA": ["villa", "vlla"],
        "BINA": ["bina", "binna"],
    }

View File

@ -0,0 +1,29 @@
#!/bin/bash
# Container entrypoint: capture runtime configuration into /env.sh, install a
# daily cron job that sources it, and keep the container alive tailing the log.
# Create environment file that will be available to cron jobs
echo "# Environment variables for cron jobs" > /env.sh
echo "EMAIL_HOST=\"$EMAIL_HOST\"" >> /env.sh
echo "EMAIL_USERNAME=\"$EMAIL_USERNAME\"" >> /env.sh
echo "EMAIL_PASSWORD=\"$EMAIL_PASSWORD\"" >> /env.sh
echo "EMAIL_PORT=$EMAIL_PORT" >> /env.sh
echo "EMAIL_SEND=$EMAIL_SEND" >> /env.sh
echo "DB_HOST=\"$DB_HOST\"" >> /env.sh
echo "DB_USER=\"$DB_USER\"" >> /env.sh
echo "DB_PASSWORD=\"$DB_PASSWORD\"" >> /env.sh
echo "DB_PORT=$DB_PORT" >> /env.sh
echo "DB_NAME=\"$DB_NAME\"" >> /env.sh
# Add Python environment variables
echo "PYTHONPATH=/" >> /env.sh
echo "PYTHONUNBUFFERED=1" >> /env.sh
echo "PYTHONDONTWRITEBYTECODE=1" >> /env.sh
# Schedule the daily run at 08:00. Cron jobs start with an empty environment,
# so the job must source /env.sh itself — the original crontab line never
# loaded the file it just created.
echo "0 8 * * * . /env.sh && /run_app.sh >> /var/log/cron.log 2>&1" > /tmp/crontab_list
crontab /tmp/crontab_list
# Ensure the log file exists before tailing; tail -f fails on a missing file.
touch /var/log/cron.log
# Start cron
cron
# Tail the log file to keep the container's foreground process alive
tail -f /var/log/cron.log

View File

@ -0,0 +1,28 @@
import sys
if "/service_account_records" not in list(sys.path):
sys.path.append("/service_account_records")
import re
from difflib import get_close_matches
from configs import AccountConfig
def word_straighten(word, ref_list, threshold=0.8):
    """Return the closest reference word to *word*, or *word* unchanged.

    Uses difflib fuzzy matching; *threshold* is the minimum similarity ratio
    (0..1) required to accept a reference word as a correction.
    """
    matches = get_close_matches(word, ref_list, n=1, cutoff=threshold)
    return matches[0] if matches else word


def category_finder(text, output_template="{kategori} {numara}", categories=None):
    """Find category/number mentions (e.g. "daire 5") in free-form comment text.

    Each word of *text* is first "straightened" to the nearest known pattern so
    misspellings like "dagire" still match, then a regex extracts the number
    following a pattern word.

    Args:
        text: The comment text to scan.
        output_template: Format string receiving ``{kategori}`` and ``{numara}``.
        categories: Optional ``{category: [pattern, ...]}`` mapping; defaults to
            ``AccountConfig.CATEGORIES``. Added (backward compatibly) so callers
            and tests can supply their own vocabularies.

    Returns:
        Dict mapping every category to the list of formatted matches (lists may
        be empty).
    """
    if categories is None:
        categories = AccountConfig.CATEGORIES
    result = {category: [] for category in categories}
    for category, patterns in categories.items():
        words = re.split(r"\W+", text)
        straighten_words = [word_straighten(word, patterns) for word in words]
        straighten_text = " ".join(straighten_words)
        # A pattern word, optional separators (spaces, ':', '-', '#'), then digits.
        pattern = (r"(?:\b|\s|^)(?:" + "|".join(map(re.escape, patterns)) + r")(?:\s*|:|\-|\#)*(\d+)(?:\b|$)")
        if founds_list := re.findall(pattern, straighten_text, re.IGNORECASE):
            list_of_output = [output_template.format(kategori=category, numara=num) for num in founds_list]
            # Drop entries that are only whitespace after formatting.
            result[category].extend([item for item in list_of_output if str(item).replace(" ", "")])
    return result

View File

@ -31,27 +31,27 @@ services:
# cpus: 1 # cpus: 1
# mem_limit: 2048m # mem_limit: 2048m
tester_service: # tester_service:
container_name: tester_service # container_name: tester_service
build: # build:
context: . # context: .
dockerfile: ServicesApi/Builds/TestApi/Dockerfile # dockerfile: ServicesApi/Builds/TestApi/Dockerfile
env_file: # env_file:
- api_env.env # - api_env.env
networks: # networks:
- wag-services # - wag-services
environment: # environment:
- API_PATH=app:app # - API_PATH=app:app
- API_HOST=0.0.0.0 # - API_HOST=0.0.0.0
- API_PORT=8005 # - API_PORT=8005
- API_LOG_LEVEL=info # - API_LOG_LEVEL=info
- API_RELOAD=1 # - API_RELOAD=1
- API_APP_NAME=evyos-tester-api-gateway # - API_APP_NAME=evyos-tester-api-gateway
- API_TITLE=WAG API Tester Api Gateway # - API_TITLE=WAG API Tester Api Gateway
- API_DESCRIPTION=This api is serves as web tester api gateway only to evyos web services. # - API_DESCRIPTION=This api is serves as web tester api gateway only to evyos web services.
- API_APP_URL=https://tester_service # - API_APP_URL=https://tester_service
ports: # ports:
- "8005:8005" # - "8005:8005"
# account_service: # account_service:
# container_name: account_service # container_name: account_service
@ -187,7 +187,69 @@ services:
- API_APP_URL=https://management_service - API_APP_URL=https://management_service
ports: ports:
- "8003:8003" - "8003:8003"
# restart: unless-stopped # restart: unless-stopped python3 app_accounts.py
# finder_build_from_iban_service:
# container_name: finder_build_from_iban_service
# env_file:
# - api_env.env
# build:
# context: .
# dockerfile: ServicesBank/Finder/BuildFromIban/Dockerfile
# networks:
# - wag-services
# logging:
# driver: "json-file"
# options:
# max-size: "10m"
# max-file: "3"
# finder_build_living_space_service:
# container_name: finder_build_living_space_service
# env_file:
# - api_env.env
# build:
# context: .
# dockerfile: ServicesBank/Finder/BuildLivingSpace/Dockerfile
# networks:
# - wag-services
# logging:
# driver: "json-file"
# options:
# max-size: "10m"
# max-file: "3"
# finder_decision_book_service:
# container_name: finder_decision_book_service
# env_file:
# - api_env.env
# build:
# context: .
# dockerfile: ServicesBank/Finder/DecisionBook/Dockerfile
# networks:
# - wag-services
# logging:
# driver: "json-file"
# options:
# max-size: "10m"
# max-file: "3"
finder_payment_service:
container_name: finder_payment_service
env_file:
- api_env.env
build:
context: .
dockerfile: ServicesBank/Finder/Payment/Dockerfile
networks:
- wag-services
logging:
driver: "json-file"
options:
max-size: "10m"
max-file: "3"
# cpus: 0.25
# mem_limit: 512m
# address_service: # address_service:
# container_name: address_service # container_name: address_service
@ -281,9 +343,9 @@ services:
# container_name: initializer_service # container_name: initializer_service
# build: # build:
# context: . # context: .
# dockerfile: ServicesApi/Builds/initial_service/Dockerfile # dockerfile: ServicesApi/Builds/Initial/Dockerfile
# environment: # environment:
# - SET_ALEMBIC=0 # - SET_ALEMBIC=1
# networks: # networks:
# - wag-services # - wag-services
# env_file: # env_file:

48
package-lock.json generated
View File

@ -1,48 +0,0 @@
{
"name": "production-evyos-systems-and-services-3",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"dependencies": {
"@tanstack/react-query": "^5.80.7"
}
},
"node_modules/@tanstack/query-core": {
"version": "5.80.7",
"resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.80.7.tgz",
"integrity": "sha512-s09l5zeUKC8q7DCCCIkVSns8zZrK4ZDT6ryEjxNBFi68G4z2EBobBS7rdOY3r6W1WbUDpc1fe5oY+YO/+2UVUg==",
"license": "MIT",
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
}
},
"node_modules/@tanstack/react-query": {
"version": "5.80.7",
"resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.80.7.tgz",
"integrity": "sha512-u2F0VK6+anItoEvB3+rfvTO9GEh2vb00Je05OwlUe/A0lkJBgW1HckiY3f9YZa+jx6IOe4dHPh10dyp9aY3iRQ==",
"license": "MIT",
"dependencies": {
"@tanstack/query-core": "5.80.7"
},
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
},
"peerDependencies": {
"react": "^18 || ^19"
}
},
"node_modules/react": {
"version": "19.1.0",
"resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz",
"integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
}
}
}

View File

@ -1,5 +0,0 @@
{
"dependencies": {
"@tanstack/react-query": "^5.80.7"
}
}

910
uv.lock Normal file
View File

@ -0,0 +1,910 @@
version = 1
revision = 2
requires-python = ">=3.12"
[[package]]
name = "alembic"
version = "1.16.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mako" },
{ name = "sqlalchemy" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9c/35/116797ff14635e496bbda0c168987f5326a6555b09312e9b817e360d1f56/alembic-1.16.2.tar.gz", hash = "sha256:e53c38ff88dadb92eb22f8b150708367db731d58ad7e9d417c9168ab516cbed8", size = 1963563, upload-time = "2025-06-16T18:05:08.566Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dd/e2/88e425adac5ad887a087c38d04fe2030010572a3e0e627f8a6e8c33eeda8/alembic-1.16.2-py3-none-any.whl", hash = "sha256:5f42e9bd0afdbd1d5e3ad856c01754530367debdebf21ed6894e34af52b3bb03", size = 242717, upload-time = "2025-06-16T18:05:10.27Z" },
]
[[package]]
name = "annotated-types"
version = "0.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
]
[[package]]
name = "anyio"
version = "4.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
]
[[package]]
name = "arrow"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "python-dateutil" },
{ name = "types-python-dateutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960, upload-time = "2023-09-30T22:11:18.25Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419, upload-time = "2023-09-30T22:11:16.072Z" },
]
[[package]]
name = "certifi"
version = "2025.6.15"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" },
]
[[package]]
name = "cffi"
version = "1.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pycparser" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" },
{ url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" },
{ url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" },
{ url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" },
{ url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" },
{ url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" },
{ url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" },
{ url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" },
{ url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" },
{ url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" },
{ url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" },
{ url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" },
{ url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" },
{ url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" },
{ url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" },
{ url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" },
{ url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" },
{ url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" },
{ url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" },
{ url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" },
{ url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" },
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
]
[[package]]
name = "charset-normalizer"
version = "3.4.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" },
{ url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" },
{ url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" },
{ url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" },
{ url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" },
{ url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" },
{ url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" },
{ url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" },
{ url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" },
{ url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" },
{ url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" },
{ url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" },
{ url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" },
{ url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" },
{ url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" },
{ url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" },
{ url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" },
{ url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" },
{ url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" },
{ url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" },
{ url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" },
{ url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" },
{ url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" },
{ url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" },
{ url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" },
{ url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" },
]
[[package]]
name = "click"
version = "8.2.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
]
[[package]]
name = "colorama"
version = "0.4.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]
[[package]]
name = "cryptography"
version = "45.0.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fe/c8/a2a376a8711c1e11708b9c9972e0c3223f5fc682552c82d8db844393d6ce/cryptography-45.0.4.tar.gz", hash = "sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57", size = 744890, upload-time = "2025-06-10T00:03:51.297Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/1c/92637793de053832523b410dbe016d3f5c11b41d0cf6eef8787aabb51d41/cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069", size = 7055712, upload-time = "2025-06-10T00:02:38.826Z" },
{ url = "https://files.pythonhosted.org/packages/ba/14/93b69f2af9ba832ad6618a03f8a034a5851dc9a3314336a3d71c252467e1/cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d", size = 4205335, upload-time = "2025-06-10T00:02:41.64Z" },
{ url = "https://files.pythonhosted.org/packages/67/30/fae1000228634bf0b647fca80403db5ca9e3933b91dd060570689f0bd0f7/cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036", size = 4431487, upload-time = "2025-06-10T00:02:43.696Z" },
{ url = "https://files.pythonhosted.org/packages/6d/5a/7dffcf8cdf0cb3c2430de7404b327e3db64735747d641fc492539978caeb/cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e", size = 4208922, upload-time = "2025-06-10T00:02:45.334Z" },
{ url = "https://files.pythonhosted.org/packages/c6/f3/528729726eb6c3060fa3637253430547fbaaea95ab0535ea41baa4a6fbd8/cryptography-45.0.4-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2", size = 3900433, upload-time = "2025-06-10T00:02:47.359Z" },
{ url = "https://files.pythonhosted.org/packages/d9/4a/67ba2e40f619e04d83c32f7e1d484c1538c0800a17c56a22ff07d092ccc1/cryptography-45.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b", size = 4464163, upload-time = "2025-06-10T00:02:49.412Z" },
{ url = "https://files.pythonhosted.org/packages/7e/9a/b4d5aa83661483ac372464809c4b49b5022dbfe36b12fe9e323ca8512420/cryptography-45.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1", size = 4208687, upload-time = "2025-06-10T00:02:50.976Z" },
{ url = "https://files.pythonhosted.org/packages/db/b7/a84bdcd19d9c02ec5807f2ec2d1456fd8451592c5ee353816c09250e3561/cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999", size = 4463623, upload-time = "2025-06-10T00:02:52.542Z" },
{ url = "https://files.pythonhosted.org/packages/d8/84/69707d502d4d905021cac3fb59a316344e9f078b1da7fb43ecde5e10840a/cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750", size = 4332447, upload-time = "2025-06-10T00:02:54.63Z" },
{ url = "https://files.pythonhosted.org/packages/f3/ee/d4f2ab688e057e90ded24384e34838086a9b09963389a5ba6854b5876598/cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2", size = 4572830, upload-time = "2025-06-10T00:02:56.689Z" },
{ url = "https://files.pythonhosted.org/packages/70/d4/994773a261d7ff98034f72c0e8251fe2755eac45e2265db4c866c1c6829c/cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257", size = 2932769, upload-time = "2025-06-10T00:02:58.467Z" },
{ url = "https://files.pythonhosted.org/packages/5a/42/c80bd0b67e9b769b364963b5252b17778a397cefdd36fa9aa4a5f34c599a/cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8", size = 3410441, upload-time = "2025-06-10T00:03:00.14Z" },
{ url = "https://files.pythonhosted.org/packages/ce/0b/2488c89f3a30bc821c9d96eeacfcab6ff3accc08a9601ba03339c0fd05e5/cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723", size = 7031836, upload-time = "2025-06-10T00:03:01.726Z" },
{ url = "https://files.pythonhosted.org/packages/fe/51/8c584ed426093aac257462ae62d26ad61ef1cbf5b58d8b67e6e13c39960e/cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637", size = 4195746, upload-time = "2025-06-10T00:03:03.94Z" },
{ url = "https://files.pythonhosted.org/packages/5c/7d/4b0ca4d7af95a704eef2f8f80a8199ed236aaf185d55385ae1d1610c03c2/cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d", size = 4424456, upload-time = "2025-06-10T00:03:05.589Z" },
{ url = "https://files.pythonhosted.org/packages/1d/45/5fabacbc6e76ff056f84d9f60eeac18819badf0cefc1b6612ee03d4ab678/cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee", size = 4198495, upload-time = "2025-06-10T00:03:09.172Z" },
{ url = "https://files.pythonhosted.org/packages/55/b7/ffc9945b290eb0a5d4dab9b7636706e3b5b92f14ee5d9d4449409d010d54/cryptography-45.0.4-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff", size = 3885540, upload-time = "2025-06-10T00:03:10.835Z" },
{ url = "https://files.pythonhosted.org/packages/7f/e3/57b010282346980475e77d414080acdcb3dab9a0be63071efc2041a2c6bd/cryptography-45.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6", size = 4452052, upload-time = "2025-06-10T00:03:12.448Z" },
{ url = "https://files.pythonhosted.org/packages/37/e6/ddc4ac2558bf2ef517a358df26f45bc774a99bf4653e7ee34b5e749c03e3/cryptography-45.0.4-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad", size = 4198024, upload-time = "2025-06-10T00:03:13.976Z" },
{ url = "https://files.pythonhosted.org/packages/3a/c0/85fa358ddb063ec588aed4a6ea1df57dc3e3bc1712d87c8fa162d02a65fc/cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6", size = 4451442, upload-time = "2025-06-10T00:03:16.248Z" },
{ url = "https://files.pythonhosted.org/packages/33/67/362d6ec1492596e73da24e669a7fbbaeb1c428d6bf49a29f7a12acffd5dc/cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872", size = 4325038, upload-time = "2025-06-10T00:03:18.4Z" },
{ url = "https://files.pythonhosted.org/packages/53/75/82a14bf047a96a1b13ebb47fb9811c4f73096cfa2e2b17c86879687f9027/cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4", size = 4560964, upload-time = "2025-06-10T00:03:20.06Z" },
{ url = "https://files.pythonhosted.org/packages/cd/37/1a3cba4c5a468ebf9b95523a5ef5651244693dc712001e276682c278fc00/cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97", size = 2924557, upload-time = "2025-06-10T00:03:22.563Z" },
{ url = "https://files.pythonhosted.org/packages/2a/4b/3256759723b7e66380397d958ca07c59cfc3fb5c794fb5516758afd05d41/cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22", size = 3395508, upload-time = "2025-06-10T00:03:24.586Z" },
]
[[package]]
name = "dnspython"
version = "2.7.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" },
]
[[package]]
name = "faker"
version = "37.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tzdata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/65/f9/66af4019ee952fc84b8fe5b523fceb7f9e631ed8484417b6f1e3092f8290/faker-37.4.0.tar.gz", hash = "sha256:7f69d579588c23d5ce671f3fa872654ede0e67047820255f43a4aa1925b89780", size = 1901976, upload-time = "2025-06-11T17:59:30.818Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/5e/c8c3c5ea0896ab747db2e2889bf5a6f618ed291606de6513df56ad8670a8/faker-37.4.0-py3-none-any.whl", hash = "sha256:cb81c09ebe06c32a10971d1bbdb264bb0e22b59af59548f011ac4809556ce533", size = 1942992, upload-time = "2025-06-11T17:59:28.698Z" },
]
[[package]]
name = "fastapi"
version = "0.115.13"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "starlette" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/20/64/ec0788201b5554e2a87c49af26b77a4d132f807a0fa9675257ac92c6aa0e/fastapi-0.115.13.tar.gz", hash = "sha256:55d1d25c2e1e0a0a50aceb1c8705cd932def273c102bff0b1c1da88b3c6eb307", size = 295680, upload-time = "2025-06-17T11:49:45.575Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/59/4a/e17764385382062b0edbb35a26b7cf76d71e27e456546277a42ba6545c6e/fastapi-0.115.13-py3-none-any.whl", hash = "sha256:0a0cab59afa7bab22f5eb347f8c9864b681558c278395e94035a741fc10cd865", size = 95315, upload-time = "2025-06-17T11:49:44.106Z" },
]
[[package]]
name = "greenlet"
version = "3.2.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" },
{ url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" },
{ url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" },
{ url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" },
{ url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" },
{ url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" },
{ url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" },
{ url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" },
{ url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" },
{ url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" },
{ url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" },
{ url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" },
{ url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" },
{ url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" },
{ url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" },
{ url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" },
{ url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" },
{ url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" },
{ url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" },
{ url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" },
{ url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" },
{ url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" },
{ url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" },
{ url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" },
{ url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" },
]
[[package]]
name = "h11"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
]
[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]
[[package]]
name = "iniconfig"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]
[[package]]
name = "jinja2"
version = "3.1.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
]
[[package]]
name = "mako"
version = "1.3.10"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" },
]
[[package]]
name = "markupsafe"
version = "3.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" },
{ url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" },
{ url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" },
{ url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" },
{ url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" },
{ url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" },
{ url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" },
{ url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" },
{ url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" },
{ url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" },
{ url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" },
{ url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" },
{ url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" },
{ url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" },
{ url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" },
{ url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" },
{ url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" },
{ url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" },
{ url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" },
{ url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" },
{ url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" },
{ url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" },
{ url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" },
{ url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" },
{ url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" },
{ url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" },
{ url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" },
{ url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" },
{ url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" },
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
]
[[package]]
name = "numpy"
version = "2.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2e/19/d7c972dfe90a353dbd3efbbe1d14a5951de80c99c9dc1b93cd998d51dc0f/numpy-2.3.1.tar.gz", hash = "sha256:1ec9ae20a4226da374362cca3c62cd753faf2f951440b0e3b98e93c235441d2b", size = 20390372, upload-time = "2025-06-21T12:28:33.469Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/56/71ad5022e2f63cfe0ca93559403d0edef14aea70a841d640bd13cdba578e/numpy-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2959d8f268f3d8ee402b04a9ec4bb7604555aeacf78b360dc4ec27f1d508177d", size = 20896664, upload-time = "2025-06-21T12:15:30.845Z" },
{ url = "https://files.pythonhosted.org/packages/25/65/2db52ba049813670f7f987cc5db6dac9be7cd95e923cc6832b3d32d87cef/numpy-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:762e0c0c6b56bdedfef9a8e1d4538556438288c4276901ea008ae44091954e29", size = 14131078, upload-time = "2025-06-21T12:15:52.23Z" },
{ url = "https://files.pythonhosted.org/packages/57/dd/28fa3c17b0e751047ac928c1e1b6990238faad76e9b147e585b573d9d1bd/numpy-2.3.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:867ef172a0976aaa1f1d1b63cf2090de8b636a7674607d514505fb7276ab08fc", size = 5112554, upload-time = "2025-06-21T12:16:01.434Z" },
{ url = "https://files.pythonhosted.org/packages/c9/fc/84ea0cba8e760c4644b708b6819d91784c290288c27aca916115e3311d17/numpy-2.3.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:4e602e1b8682c2b833af89ba641ad4176053aaa50f5cacda1a27004352dde943", size = 6646560, upload-time = "2025-06-21T12:16:11.895Z" },
{ url = "https://files.pythonhosted.org/packages/61/b2/512b0c2ddec985ad1e496b0bd853eeb572315c0f07cd6997473ced8f15e2/numpy-2.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8e333040d069eba1652fb08962ec5b76af7f2c7bce1df7e1418c8055cf776f25", size = 14260638, upload-time = "2025-06-21T12:16:32.611Z" },
{ url = "https://files.pythonhosted.org/packages/6e/45/c51cb248e679a6c6ab14b7a8e3ead3f4a3fe7425fc7a6f98b3f147bec532/numpy-2.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e7cbf5a5eafd8d230a3ce356d892512185230e4781a361229bd902ff403bc660", size = 16632729, upload-time = "2025-06-21T12:16:57.439Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ff/feb4be2e5c09a3da161b412019caf47183099cbea1132fd98061808c2df2/numpy-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1b8f26d1086835f442286c1d9b64bb3974b0b1e41bb105358fd07d20872952", size = 15565330, upload-time = "2025-06-21T12:17:20.638Z" },
{ url = "https://files.pythonhosted.org/packages/bc/6d/ceafe87587101e9ab0d370e4f6e5f3f3a85b9a697f2318738e5e7e176ce3/numpy-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee8340cb48c9b7a5899d1149eece41ca535513a9698098edbade2a8e7a84da77", size = 18361734, upload-time = "2025-06-21T12:17:47.938Z" },
{ url = "https://files.pythonhosted.org/packages/2b/19/0fb49a3ea088be691f040c9bf1817e4669a339d6e98579f91859b902c636/numpy-2.3.1-cp312-cp312-win32.whl", hash = "sha256:e772dda20a6002ef7061713dc1e2585bc1b534e7909b2030b5a46dae8ff077ab", size = 6320411, upload-time = "2025-06-21T12:17:58.475Z" },
{ url = "https://files.pythonhosted.org/packages/b1/3e/e28f4c1dd9e042eb57a3eb652f200225e311b608632bc727ae378623d4f8/numpy-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:cfecc7822543abdea6de08758091da655ea2210b8ffa1faf116b940693d3df76", size = 12734973, upload-time = "2025-06-21T12:18:17.601Z" },
{ url = "https://files.pythonhosted.org/packages/04/a8/8a5e9079dc722acf53522b8f8842e79541ea81835e9b5483388701421073/numpy-2.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:7be91b2239af2658653c5bb6f1b8bccafaf08226a258caf78ce44710a0160d30", size = 10191491, upload-time = "2025-06-21T12:18:33.585Z" },
{ url = "https://files.pythonhosted.org/packages/d4/bd/35ad97006d8abff8631293f8ea6adf07b0108ce6fec68da3c3fcca1197f2/numpy-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25a1992b0a3fdcdaec9f552ef10d8103186f5397ab45e2d25f8ac51b1a6b97e8", size = 20889381, upload-time = "2025-06-21T12:19:04.103Z" },
{ url = "https://files.pythonhosted.org/packages/f1/4f/df5923874d8095b6062495b39729178eef4a922119cee32a12ee1bd4664c/numpy-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dea630156d39b02a63c18f508f85010230409db5b2927ba59c8ba4ab3e8272e", size = 14152726, upload-time = "2025-06-21T12:19:25.599Z" },
{ url = "https://files.pythonhosted.org/packages/8c/0f/a1f269b125806212a876f7efb049b06c6f8772cf0121139f97774cd95626/numpy-2.3.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:bada6058dd886061f10ea15f230ccf7dfff40572e99fef440a4a857c8728c9c0", size = 5105145, upload-time = "2025-06-21T12:19:34.782Z" },
{ url = "https://files.pythonhosted.org/packages/6d/63/a7f7fd5f375b0361682f6ffbf686787e82b7bbd561268e4f30afad2bb3c0/numpy-2.3.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:a894f3816eb17b29e4783e5873f92faf55b710c2519e5c351767c51f79d8526d", size = 6639409, upload-time = "2025-06-21T12:19:45.228Z" },
{ url = "https://files.pythonhosted.org/packages/bf/0d/1854a4121af895aab383f4aa233748f1df4671ef331d898e32426756a8a6/numpy-2.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:18703df6c4a4fee55fd3d6e5a253d01c5d33a295409b03fda0c86b3ca2ff41a1", size = 14257630, upload-time = "2025-06-21T12:20:06.544Z" },
{ url = "https://files.pythonhosted.org/packages/50/30/af1b277b443f2fb08acf1c55ce9d68ee540043f158630d62cef012750f9f/numpy-2.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5902660491bd7a48b2ec16c23ccb9124b8abfd9583c5fdfa123fe6b421e03de1", size = 16627546, upload-time = "2025-06-21T12:20:31.002Z" },
{ url = "https://files.pythonhosted.org/packages/6e/ec/3b68220c277e463095342d254c61be8144c31208db18d3fd8ef02712bcd6/numpy-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:36890eb9e9d2081137bd78d29050ba63b8dab95dff7912eadf1185e80074b2a0", size = 15562538, upload-time = "2025-06-21T12:20:54.322Z" },
{ url = "https://files.pythonhosted.org/packages/77/2b/4014f2bcc4404484021c74d4c5ee8eb3de7e3f7ac75f06672f8dcf85140a/numpy-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a780033466159c2270531e2b8ac063704592a0bc62ec4a1b991c7c40705eb0e8", size = 18360327, upload-time = "2025-06-21T12:21:21.053Z" },
{ url = "https://files.pythonhosted.org/packages/40/8d/2ddd6c9b30fcf920837b8672f6c65590c7d92e43084c25fc65edc22e93ca/numpy-2.3.1-cp313-cp313-win32.whl", hash = "sha256:39bff12c076812595c3a306f22bfe49919c5513aa1e0e70fac756a0be7c2a2b8", size = 6312330, upload-time = "2025-06-21T12:25:07.447Z" },
{ url = "https://files.pythonhosted.org/packages/dd/c8/beaba449925988d415efccb45bf977ff8327a02f655090627318f6398c7b/numpy-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d5ee6eec45f08ce507a6570e06f2f879b374a552087a4179ea7838edbcbfa42", size = 12731565, upload-time = "2025-06-21T12:25:26.444Z" },
{ url = "https://files.pythonhosted.org/packages/0b/c3/5c0c575d7ec78c1126998071f58facfc124006635da75b090805e642c62e/numpy-2.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:0c4d9e0a8368db90f93bd192bfa771ace63137c3488d198ee21dfb8e7771916e", size = 10190262, upload-time = "2025-06-21T12:25:42.196Z" },
{ url = "https://files.pythonhosted.org/packages/ea/19/a029cd335cf72f79d2644dcfc22d90f09caa86265cbbde3b5702ccef6890/numpy-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b0b5397374f32ec0649dd98c652a1798192042e715df918c20672c62fb52d4b8", size = 20987593, upload-time = "2025-06-21T12:21:51.664Z" },
{ url = "https://files.pythonhosted.org/packages/25/91/8ea8894406209107d9ce19b66314194675d31761fe2cb3c84fe2eeae2f37/numpy-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c5bdf2015ccfcee8253fb8be695516ac4457c743473a43290fd36eba6a1777eb", size = 14300523, upload-time = "2025-06-21T12:22:13.583Z" },
{ url = "https://files.pythonhosted.org/packages/a6/7f/06187b0066eefc9e7ce77d5f2ddb4e314a55220ad62dd0bfc9f2c44bac14/numpy-2.3.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d70f20df7f08b90a2062c1f07737dd340adccf2068d0f1b9b3d56e2038979fee", size = 5227993, upload-time = "2025-06-21T12:22:22.53Z" },
{ url = "https://files.pythonhosted.org/packages/e8/ec/a926c293c605fa75e9cfb09f1e4840098ed46d2edaa6e2152ee35dc01ed3/numpy-2.3.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:2fb86b7e58f9ac50e1e9dd1290154107e47d1eef23a0ae9145ded06ea606f992", size = 6736652, upload-time = "2025-06-21T12:22:33.629Z" },
{ url = "https://files.pythonhosted.org/packages/e3/62/d68e52fb6fde5586650d4c0ce0b05ff3a48ad4df4ffd1b8866479d1d671d/numpy-2.3.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:23ab05b2d241f76cb883ce8b9a93a680752fbfcbd51c50eff0b88b979e471d8c", size = 14331561, upload-time = "2025-06-21T12:22:55.056Z" },
{ url = "https://files.pythonhosted.org/packages/fc/ec/b74d3f2430960044bdad6900d9f5edc2dc0fb8bf5a0be0f65287bf2cbe27/numpy-2.3.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ce2ce9e5de4703a673e705183f64fd5da5bf36e7beddcb63a25ee2286e71ca48", size = 16693349, upload-time = "2025-06-21T12:23:20.53Z" },
{ url = "https://files.pythonhosted.org/packages/0d/15/def96774b9d7eb198ddadfcbd20281b20ebb510580419197e225f5c55c3e/numpy-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c4913079974eeb5c16ccfd2b1f09354b8fed7e0d6f2cab933104a09a6419b1ee", size = 15642053, upload-time = "2025-06-21T12:23:43.697Z" },
{ url = "https://files.pythonhosted.org/packages/2b/57/c3203974762a759540c6ae71d0ea2341c1fa41d84e4971a8e76d7141678a/numpy-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:010ce9b4f00d5c036053ca684c77441f2f2c934fd23bee058b4d6f196efd8280", size = 18434184, upload-time = "2025-06-21T12:24:10.708Z" },
{ url = "https://files.pythonhosted.org/packages/22/8a/ccdf201457ed8ac6245187850aff4ca56a79edbea4829f4e9f14d46fa9a5/numpy-2.3.1-cp313-cp313t-win32.whl", hash = "sha256:6269b9edfe32912584ec496d91b00b6d34282ca1d07eb10e82dfc780907d6c2e", size = 6440678, upload-time = "2025-06-21T12:24:21.596Z" },
{ url = "https://files.pythonhosted.org/packages/f1/7e/7f431d8bd8eb7e03d79294aed238b1b0b174b3148570d03a8a8a8f6a0da9/numpy-2.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2a809637460e88a113e186e87f228d74ae2852a2e0c44de275263376f17b5bdc", size = 12870697, upload-time = "2025-06-21T12:24:40.644Z" },
{ url = "https://files.pythonhosted.org/packages/d4/ca/af82bf0fad4c3e573c6930ed743b5308492ff19917c7caaf2f9b6f9e2e98/numpy-2.3.1-cp313-cp313t-win_arm64.whl", hash = "sha256:eccb9a159db9aed60800187bc47a6d3451553f0e1b08b068d8b277ddfbb9b244", size = 10260376, upload-time = "2025-06-21T12:24:56.884Z" },
]
[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]
[[package]]
name = "pandas"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "python-dateutil" },
{ name = "pytz" },
{ name = "tzdata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/72/51/48f713c4c728d7c55ef7444ba5ea027c26998d96d1a40953b346438602fc/pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133", size = 4484490, upload-time = "2025-06-05T03:27:54.133Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/46/24192607058dd607dbfacdd060a2370f6afb19c2ccb617406469b9aeb8e7/pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf", size = 11573865, upload-time = "2025-06-05T03:26:46.774Z" },
{ url = "https://files.pythonhosted.org/packages/9f/cc/ae8ea3b800757a70c9fdccc68b67dc0280a6e814efcf74e4211fd5dea1ca/pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027", size = 10702154, upload-time = "2025-06-05T16:50:14.439Z" },
{ url = "https://files.pythonhosted.org/packages/d8/ba/a7883d7aab3d24c6540a2768f679e7414582cc389876d469b40ec749d78b/pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09", size = 11262180, upload-time = "2025-06-05T16:50:17.453Z" },
{ url = "https://files.pythonhosted.org/packages/01/a5/931fc3ad333d9d87b10107d948d757d67ebcfc33b1988d5faccc39c6845c/pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d", size = 11991493, upload-time = "2025-06-05T03:26:51.813Z" },
{ url = "https://files.pythonhosted.org/packages/d7/bf/0213986830a92d44d55153c1d69b509431a972eb73f204242988c4e66e86/pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20", size = 12470733, upload-time = "2025-06-06T00:00:18.651Z" },
{ url = "https://files.pythonhosted.org/packages/a4/0e/21eb48a3a34a7d4bac982afc2c4eb5ab09f2d988bdf29d92ba9ae8e90a79/pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b", size = 13212406, upload-time = "2025-06-05T03:26:55.992Z" },
{ url = "https://files.pythonhosted.org/packages/1f/d9/74017c4eec7a28892d8d6e31ae9de3baef71f5a5286e74e6b7aad7f8c837/pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be", size = 10976199, upload-time = "2025-06-05T03:26:59.594Z" },
{ url = "https://files.pythonhosted.org/packages/d3/57/5cb75a56a4842bbd0511c3d1c79186d8315b82dac802118322b2de1194fe/pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983", size = 11518913, upload-time = "2025-06-05T03:27:02.757Z" },
{ url = "https://files.pythonhosted.org/packages/05/01/0c8785610e465e4948a01a059562176e4c8088aa257e2e074db868f86d4e/pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd", size = 10655249, upload-time = "2025-06-05T16:50:20.17Z" },
{ url = "https://files.pythonhosted.org/packages/e8/6a/47fd7517cd8abe72a58706aab2b99e9438360d36dcdb052cf917b7bf3bdc/pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f", size = 11328359, upload-time = "2025-06-05T03:27:06.431Z" },
{ url = "https://files.pythonhosted.org/packages/2a/b3/463bfe819ed60fb7e7ddffb4ae2ee04b887b3444feee6c19437b8f834837/pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3", size = 12024789, upload-time = "2025-06-05T03:27:09.875Z" },
{ url = "https://files.pythonhosted.org/packages/04/0c/e0704ccdb0ac40aeb3434d1c641c43d05f75c92e67525df39575ace35468/pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8", size = 12480734, upload-time = "2025-06-06T00:00:22.246Z" },
{ url = "https://files.pythonhosted.org/packages/e9/df/815d6583967001153bb27f5cf075653d69d51ad887ebbf4cfe1173a1ac58/pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9", size = 13223381, upload-time = "2025-06-05T03:27:15.641Z" },
{ url = "https://files.pythonhosted.org/packages/79/88/ca5973ed07b7f484c493e941dbff990861ca55291ff7ac67c815ce347395/pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390", size = 10970135, upload-time = "2025-06-05T03:27:24.131Z" },
{ url = "https://files.pythonhosted.org/packages/24/fb/0994c14d1f7909ce83f0b1fb27958135513c4f3f2528bde216180aa73bfc/pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575", size = 12141356, upload-time = "2025-06-05T03:27:34.547Z" },
{ url = "https://files.pythonhosted.org/packages/9d/a2/9b903e5962134497ac4f8a96f862ee3081cb2506f69f8e4778ce3d9c9d82/pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042", size = 11474674, upload-time = "2025-06-05T03:27:39.448Z" },
{ url = "https://files.pythonhosted.org/packages/81/3a/3806d041bce032f8de44380f866059437fb79e36d6b22c82c187e65f765b/pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c", size = 11439876, upload-time = "2025-06-05T03:27:43.652Z" },
{ url = "https://files.pythonhosted.org/packages/15/aa/3fc3181d12b95da71f5c2537c3e3b3af6ab3a8c392ab41ebb766e0929bc6/pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67", size = 11966182, upload-time = "2025-06-05T03:27:47.652Z" },
{ url = "https://files.pythonhosted.org/packages/37/e7/e12f2d9b0a2c4a2cc86e2aabff7ccfd24f03e597d770abfa2acd313ee46b/pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f", size = 12547686, upload-time = "2025-06-06T00:00:26.142Z" },
{ url = "https://files.pythonhosted.org/packages/39/c2/646d2e93e0af70f4e5359d870a63584dacbc324b54d73e6b3267920ff117/pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249", size = 13231847, upload-time = "2025-06-05T03:27:51.465Z" },
]
[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
[[package]]
name = "prod-wag-backend-automate-services"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "alembic" },
{ name = "arrow" },
{ name = "cryptography" },
{ name = "faker" },
{ name = "fastapi" },
{ name = "pandas" },
{ name = "prometheus-fastapi-instrumentator" },
{ name = "psycopg2-binary" },
{ name = "pydantic-settings" },
{ name = "pymongo" },
{ name = "pytest" },
{ name = "redbox" },
{ name = "redis" },
{ name = "redmail" },
{ name = "requests" },
{ name = "sqlalchemy-mixins" },
{ name = "textdistance" },
{ name = "unidecode" },
{ name = "uvicorn" },
]
[package.metadata]
requires-dist = [
{ name = "alembic", specifier = ">=1.15.2" },
{ name = "arrow", specifier = ">=1.3.0" },
{ name = "cryptography", specifier = ">=44.0.2" },
{ name = "faker", specifier = ">=37.1.0" },
{ name = "fastapi", specifier = ">=0.115.12" },
{ name = "pandas", specifier = ">=2.2.3" },
{ name = "prometheus-fastapi-instrumentator", specifier = ">=7.1.0" },
{ name = "psycopg2-binary", specifier = ">=2.9.10" },
{ name = "pydantic-settings", specifier = ">=2.8.1" },
{ name = "pymongo", specifier = ">=4.11.3" },
{ name = "pytest", specifier = ">=8.3.5" },
{ name = "redbox", specifier = ">=0.2.1" },
{ name = "redis", specifier = ">=5.2.1" },
{ name = "redmail", specifier = ">=0.6.0" },
{ name = "requests", specifier = ">=2.32.3" },
{ name = "sqlalchemy-mixins", specifier = ">=2.0.5" },
{ name = "textdistance", specifier = ">=4.6.3" },
{ name = "unidecode", specifier = ">=1.3.8" },
{ name = "uvicorn", specifier = ">=0.34.0" },
]
[[package]]
name = "prometheus-client"
version = "0.22.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5e/cf/40dde0a2be27cc1eb41e333d1a674a74ce8b8b0457269cc640fd42b07cf7/prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", size = 69746, upload-time = "2025-06-02T14:29:01.152Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/ae/ec06af4fe3ee72d16973474f122541746196aaa16cea6f66d18b963c6177/prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094", size = 58694, upload-time = "2025-06-02T14:29:00.068Z" },
]
[[package]]
name = "prometheus-fastapi-instrumentator"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "prometheus-client" },
{ name = "starlette" },
]
sdist = { url = "https://files.pythonhosted.org/packages/69/6d/24d53033cf93826aa7857699a4450c1c67e5b9c710e925b1ed2b320c04df/prometheus_fastapi_instrumentator-7.1.0.tar.gz", hash = "sha256:be7cd61eeea4e5912aeccb4261c6631b3f227d8924542d79eaf5af3f439cbe5e", size = 20220, upload-time = "2025-03-19T19:35:05.351Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/27/72/0824c18f3bc75810f55dacc2dd933f6ec829771180245ae3cc976195dec0/prometheus_fastapi_instrumentator-7.1.0-py3-none-any.whl", hash = "sha256:978130f3c0bb7b8ebcc90d35516a6fe13e02d2eb358c8f83887cdef7020c31e9", size = 19296, upload-time = "2025-03-19T19:35:04.323Z" },
]
[[package]]
name = "psycopg2-binary"
version = "2.9.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" },
{ url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" },
{ url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" },
{ url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" },
{ url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" },
{ url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" },
{ url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" },
{ url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" },
{ url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" },
{ url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" },
{ url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" },
{ url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" },
{ url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699, upload-time = "2024-10-16T11:21:42.841Z" },
{ url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245, upload-time = "2024-10-16T11:21:51.989Z" },
{ url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631, upload-time = "2024-10-16T11:21:57.584Z" },
{ url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140, upload-time = "2024-10-16T11:22:02.005Z" },
{ url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762, upload-time = "2024-10-16T11:22:06.412Z" },
{ url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967, upload-time = "2024-10-16T11:22:11.583Z" },
{ url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326, upload-time = "2024-10-16T11:22:16.406Z" },
{ url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712, upload-time = "2024-10-16T11:22:21.366Z" },
{ url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155, upload-time = "2024-10-16T11:22:25.684Z" },
{ url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356, upload-time = "2024-10-16T11:22:30.562Z" },
{ url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" },
]
[[package]]
name = "pycparser"
version = "2.22"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" },
]
[[package]]
name = "pydantic"
version = "2.11.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
{ name = "pydantic-core" },
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
]
[[package]]
name = "pydantic-core"
version = "2.33.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
]
[[package]]
name = "pydantic-settings"
version = "2.10.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]
[[package]]
name = "pymongo"
version = "4.13.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "dnspython" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4b/5a/d664298bf54762f0c89b8aa2c276868070e06afb853b4a8837de5741e5f9/pymongo-4.13.2.tar.gz", hash = "sha256:0f64c6469c2362962e6ce97258ae1391abba1566a953a492562d2924b44815c2", size = 2167844, upload-time = "2025-06-16T18:16:30.685Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/e0/0e187750e23eed4227282fcf568fdb61f2b53bbcf8cbe3a71dde2a860d12/pymongo-4.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ec89516622dfc8b0fdff499612c0bd235aa45eeb176c9e311bcc0af44bf952b6", size = 912004, upload-time = "2025-06-16T18:15:14.299Z" },
{ url = "https://files.pythonhosted.org/packages/57/c2/9b79795382daaf41e5f7379bffdef1880d68160adea352b796d6948cb5be/pymongo-4.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f30eab4d4326df54fee54f31f93e532dc2918962f733ee8e115b33e6fe151d92", size = 911698, upload-time = "2025-06-16T18:15:16.334Z" },
{ url = "https://files.pythonhosted.org/packages/6f/e4/f04dc9ed5d1d9dbc539dc2d8758dd359c5373b0e06fcf25418b2c366737c/pymongo-4.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cce9428d12ba396ea245fc4c51f20228cead01119fcc959e1c80791ea45f820", size = 1690357, upload-time = "2025-06-16T18:15:18.358Z" },
{ url = "https://files.pythonhosted.org/packages/bb/de/41478a7d527d38f1b98b084f4a78bbb805439a6ebd8689fbbee0a3dfacba/pymongo-4.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac9241b727a69c39117c12ac1e52d817ea472260dadc66262c3fdca0bab0709b", size = 1754593, upload-time = "2025-06-16T18:15:20.096Z" },
{ url = "https://files.pythonhosted.org/packages/df/d9/8fa2eb110291e154f4312779b1a5b815090b8b05a59ecb4f4a32427db1df/pymongo-4.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3efc4c515b371a9fa1d198b6e03340985bfe1a55ae2d2b599a714934e7bc61ab", size = 1723637, upload-time = "2025-06-16T18:15:22.048Z" },
{ url = "https://files.pythonhosted.org/packages/27/7b/9863fa60a4a51ea09f5e3cd6ceb231af804e723671230f2daf3bd1b59c2b/pymongo-4.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57a664aa74610eb7a52fa93f2cf794a1491f4f76098343485dd7da5b3bcff06", size = 1693613, upload-time = "2025-06-16T18:15:24.866Z" },
{ url = "https://files.pythonhosted.org/packages/9b/89/a42efa07820a59089836f409a63c96e7a74e33313e50dc39c554db99ac42/pymongo-4.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dcb0b8cdd499636017a53f63ef64cf9b6bd3fd9355796c5a1d228e4be4a4c94", size = 1652745, upload-time = "2025-06-16T18:15:27.078Z" },
{ url = "https://files.pythonhosted.org/packages/6a/cf/2c77d1acda61d281edd3e3f00d5017d3fac0c29042c769efd3b8018cb469/pymongo-4.13.2-cp312-cp312-win32.whl", hash = "sha256:bf43ae07804d7762b509f68e5ec73450bb8824e960b03b861143ce588b41f467", size = 883232, upload-time = "2025-06-16T18:15:29.169Z" },
{ url = "https://files.pythonhosted.org/packages/d2/4f/727f59156e3798850c3c2901f106804053cb0e057ed1bd9883f5fa5aa8fa/pymongo-4.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:812a473d584bcb02ab819d379cd5e752995026a2bb0d7713e78462b6650d3f3a", size = 903304, upload-time = "2025-06-16T18:15:31.346Z" },
{ url = "https://files.pythonhosted.org/packages/e0/95/b44b8e24b161afe7b244f6d43c09a7a1f93308cad04198de1c14c67b24ce/pymongo-4.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d6044ca0eb74d97f7d3415264de86a50a401b7b0b136d30705f022f9163c3124", size = 966232, upload-time = "2025-06-16T18:15:33.057Z" },
{ url = "https://files.pythonhosted.org/packages/6d/fc/d4d59799a52033acb187f7bd1f09bc75bebb9fd12cef4ba2964d235ad3f9/pymongo-4.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd326bcb92d28d28a3e7ef0121602bad78691b6d4d1f44b018a4616122f1ba8b", size = 965935, upload-time = "2025-06-16T18:15:34.826Z" },
{ url = "https://files.pythonhosted.org/packages/07/a8/67502899d89b317ea9952e4769bc193ca15efee561b24b38a86c59edde6f/pymongo-4.13.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb0c21bdd58e58625c9cd8de13e859630c29c9537944ec0a14574fdf88c2ac4", size = 1954070, upload-time = "2025-06-16T18:15:36.576Z" },
{ url = "https://files.pythonhosted.org/packages/da/3b/0dac5d81d1af1b96b3200da7ccc52fc261a35efb7d2ac493252eb40a2b11/pymongo-4.13.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9c7d345d57f17b1361008aea78a37e8c139631a46aeb185dd2749850883c7ba", size = 2031424, upload-time = "2025-06-16T18:15:38.723Z" },
{ url = "https://files.pythonhosted.org/packages/31/ed/7a5af49a153224ca7e31e9915703e612ad9c45808cc39540e9dd1a2a7537/pymongo-4.13.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8860445a8da1b1545406fab189dc20319aff5ce28e65442b2b4a8f4228a88478", size = 1995339, upload-time = "2025-06-16T18:15:40.474Z" },
{ url = "https://files.pythonhosted.org/packages/f1/e9/9c72eceae8439c4f1bdebc4e6b290bf035e3f050a80eeb74abb5e12ef8e2/pymongo-4.13.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c184b612f67d5a4c8f864ae7c40b6cc33c0e9bb05e39d08666f8831d120504", size = 1956066, upload-time = "2025-06-16T18:15:42.272Z" },
{ url = "https://files.pythonhosted.org/packages/ac/79/9b019c47923395d5fced03856996465fb9340854b0f5a2ddf16d47e2437c/pymongo-4.13.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ea8c62d5f3c6529407c12471385d9a05f9fb890ce68d64976340c85cd661b", size = 1905642, upload-time = "2025-06-16T18:15:43.978Z" },
{ url = "https://files.pythonhosted.org/packages/93/2f/ebf56c7fa9298fa2f9716e7b66cf62b29e7fc6e11774f3b87f55d214d466/pymongo-4.13.2-cp313-cp313-win32.whl", hash = "sha256:d13556e91c4a8cb07393b8c8be81e66a11ebc8335a40fa4af02f4d8d3b40c8a1", size = 930184, upload-time = "2025-06-16T18:15:46.899Z" },
{ url = "https://files.pythonhosted.org/packages/76/2f/49c35464cbd5d116d950ff5d24b4b20491aaae115d35d40b945c33b29250/pymongo-4.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfc69d7bc4d4d5872fd1e6de25e6a16e2372c7d5556b75c3b8e2204dce73e3fb", size = 955111, upload-time = "2025-06-16T18:15:48.85Z" },
{ url = "https://files.pythonhosted.org/packages/57/56/b17c8b5329b1842b7847cf0fa224ef0a272bf2e5126360f4da8065c855a1/pymongo-4.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a457d2ac34c05e9e8a6bb724115b093300bf270f0655fb897df8d8604b2e3700", size = 1022735, upload-time = "2025-06-16T18:15:50.672Z" },
{ url = "https://files.pythonhosted.org/packages/83/e6/66fec65a7919bf5f35be02e131b4dc4bf3152b5e8d78cd04b6d266a44514/pymongo-4.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:02f131a6e61559613b1171b53fbe21fed64e71b0cb4858c47fc9bc7c8e0e501c", size = 1022740, upload-time = "2025-06-16T18:15:53.218Z" },
{ url = "https://files.pythonhosted.org/packages/17/92/cda7383df0d5e71dc007f172c1ecae6313d64ea05d82bbba06df7f6b3e49/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c942d1c6334e894271489080404b1a2e3b8bd5de399f2a0c14a77d966be5bc9", size = 2282430, upload-time = "2025-06-16T18:15:55.356Z" },
{ url = "https://files.pythonhosted.org/packages/84/da/285e05eb1d617b30dc7a7a98ebeb264353a8903e0e816a4eec6487c81f18/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:850168d115680ab66a0931a6aa9dd98ed6aa5e9c3b9a6c12128049b9a5721bc5", size = 2369470, upload-time = "2025-06-16T18:15:57.5Z" },
{ url = "https://files.pythonhosted.org/packages/89/c0/c0d5eae236de9ca293497dc58fc1e4872382223c28ec223f76afc701392c/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af7dfff90647ee77c53410f7fe8ca4fe343f8b768f40d2d0f71a5602f7b5a541", size = 2328857, upload-time = "2025-06-16T18:15:59.59Z" },
{ url = "https://files.pythonhosted.org/packages/2b/5a/d8639fba60def128ce9848b99c56c54c8a4d0cd60342054cd576f0bfdf26/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8057f9bc9c94a8fd54ee4f5e5106e445a8f406aff2df74746f21c8791ee2403", size = 2280053, upload-time = "2025-06-16T18:16:02.166Z" },
{ url = "https://files.pythonhosted.org/packages/a1/69/d56f0897cc4932a336820c5d2470ffed50be04c624b07d1ad6ea75aaa975/pymongo-4.13.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51040e1ba78d6671f8c65b29e2864483451e789ce93b1536de9cc4456ede87fa", size = 2219378, upload-time = "2025-06-16T18:16:04.108Z" },
{ url = "https://files.pythonhosted.org/packages/04/1e/427e7f99801ee318b6331062d682d3816d7e1d6b6013077636bd75d49c87/pymongo-4.13.2-cp313-cp313t-win32.whl", hash = "sha256:7ab86b98a18c8689514a9f8d0ec7d9ad23a949369b31c9a06ce4a45dcbffcc5e", size = 979460, upload-time = "2025-06-16T18:16:06.128Z" },
{ url = "https://files.pythonhosted.org/packages/b5/9c/00301a6df26f0f8d5c5955192892241e803742e7c3da8c2c222efabc0df6/pymongo-4.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c38168263ed94a250fc5cf9c6d33adea8ab11c9178994da1c3481c2a49d235f8", size = 1011057, upload-time = "2025-06-16T18:16:07.917Z" },
]
[[package]]
name = "pytest"
version = "8.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]
[[package]]
name = "python-dotenv"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
]
[[package]]
name = "pytz"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
]
[[package]]
name = "redbox"
version = "0.2.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5b/33/42dbfd394d8099079d31dc0f98afca62bc6cc9635ea1ccab1029fefdc6ff/redbox-0.2.1.tar.gz", hash = "sha256:17005f8cfe8acba992b649e5682b2dd4bff937d67df3fd8496e187cae4f19d60", size = 219953, upload-time = "2022-10-01T18:35:17.755Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/24/24/9f8330b5ce5a64cd97ae2d3a00d1d5cb9096c54ac2e56a05d7a5812709b8/redbox-0.2.1-py3-none-any.whl", hash = "sha256:14906668345c7e76db367d6d40347c2dcb5de2a5167f96d08f06f95c0a908f71", size = 16507, upload-time = "2022-10-01T18:35:15.546Z" },
]
[[package]]
name = "redis"
version = "6.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/9a/0551e01ba52b944f97480721656578c8a7c46b51b99d66814f85fe3a4f3e/redis-6.2.0.tar.gz", hash = "sha256:e821f129b75dde6cb99dd35e5c76e8c49512a5a0d8dfdc560b2fbd44b85ca977", size = 4639129, upload-time = "2025-05-28T05:01:18.91Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/67/e60968d3b0e077495a8fee89cf3f2373db98e528288a48f1ee44967f6e8c/redis-6.2.0-py3-none-any.whl", hash = "sha256:c8ddf316ee0aab65f04a11229e94a64b2618451dab7a67cb2f77eb799d872d5e", size = 278659, upload-time = "2025-05-28T05:01:16.955Z" },
]
[[package]]
name = "redmail"
version = "0.6.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jinja2" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e9/96/36c740474cadc1b8a6e735334a0c67c02ea7169d29ffde48eb6c74f3abaa/redmail-0.6.0.tar.gz", hash = "sha256:0447cbd76deb2788b2d831c12e22b513587e99f725071d9951a01b0f2b8d0a72", size = 448832, upload-time = "2023-02-25T10:20:52.31Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/83/67/3e0005b255a9d02448c5529af450b6807403e9af7b82636123273906ea37/redmail-0.6.0-py3-none-any.whl", hash = "sha256:8e64a680ffc8aaf8054312bf8b216da8fed20669181b77b1f1ccbdf4ee064427", size = 46948, upload-time = "2023-02-25T10:20:50.438Z" },
]
[[package]]
name = "requests"
version = "2.32.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" },
]
[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "sqlalchemy"
version = "2.0.41"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" },
{ url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" },
{ url = "https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" },
{ url = "https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364, upload-time = "2025-05-14T17:51:49.829Z" },
{ url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072, upload-time = "2025-05-14T17:50:39.774Z" },
{ url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074, upload-time = "2025-05-14T17:51:51.736Z" },
{ url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514, upload-time = "2025-05-14T17:55:49.915Z" },
{ url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557, upload-time = "2025-05-14T17:55:51.349Z" },
{ url = "https://files.pythonhosted.org/packages/d3/ad/2e1c6d4f235a97eeef52d0200d8ddda16f6c4dd70ae5ad88c46963440480/sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", size = 2115491, upload-time = "2025-05-14T17:55:31.177Z" },
{ url = "https://files.pythonhosted.org/packages/cf/8d/be490e5db8400dacc89056f78a52d44b04fbf75e8439569d5b879623a53b/sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", size = 2102827, upload-time = "2025-05-14T17:55:34.921Z" },
{ url = "https://files.pythonhosted.org/packages/a0/72/c97ad430f0b0e78efaf2791342e13ffeafcbb3c06242f01a3bb8fe44f65d/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", size = 3225224, upload-time = "2025-05-14T17:50:41.418Z" },
{ url = "https://files.pythonhosted.org/packages/5e/51/5ba9ea3246ea068630acf35a6ba0d181e99f1af1afd17e159eac7e8bc2b8/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", size = 3230045, upload-time = "2025-05-14T17:51:54.722Z" },
{ url = "https://files.pythonhosted.org/packages/78/2f/8c14443b2acea700c62f9b4a8bad9e49fc1b65cfb260edead71fd38e9f19/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", size = 3159357, upload-time = "2025-05-14T17:50:43.483Z" },
{ url = "https://files.pythonhosted.org/packages/fc/b2/43eacbf6ccc5276d76cea18cb7c3d73e294d6fb21f9ff8b4eef9b42bbfd5/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", size = 3197511, upload-time = "2025-05-14T17:51:57.308Z" },
{ url = "https://files.pythonhosted.org/packages/fa/2e/677c17c5d6a004c3c45334ab1dbe7b7deb834430b282b8a0f75ae220c8eb/sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", size = 2082420, upload-time = "2025-05-14T17:55:52.69Z" },
{ url = "https://files.pythonhosted.org/packages/e9/61/e8c1b9b6307c57157d328dd8b8348ddc4c47ffdf1279365a13b2b98b8049/sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", size = 2108329, upload-time = "2025-05-14T17:55:54.495Z" },
{ url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" },
]
[[package]]
name = "sqlalchemy-mixins"
version = "2.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
{ name = "sqlalchemy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8a/90/a920aa06a038677dde522dd8d7bc168eedd5fd3331ba1c759bf91ccd28d3/sqlalchemy_mixins-2.0.5.tar.gz", hash = "sha256:85197fc3682c4bf9c35671fb3d10282a0973b19cd2ff2b6791d601cbfb0fb89e", size = 20186, upload-time = "2023-08-29T13:37:22.123Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/4d/8d97e3ec646e8732ea8d33fb33068cab97d0bc5a0e3f46c93174e2d3d3eb/sqlalchemy_mixins-2.0.5-py3-none-any.whl", hash = "sha256:9067b630744741b472aa91d92494cc5612ed2d29c66729a5a4a1d3fbbeccd448", size = 17578, upload-time = "2023-08-29T13:37:20.475Z" },
]
[[package]]
name = "starlette"
version = "0.46.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846, upload-time = "2025-04-13T13:56:17.942Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" },
]
[[package]]
name = "textdistance"
version = "4.6.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/68/97ac72dd781301d6a52140066c68400c96f1a91f69737959e414844749b0/textdistance-4.6.3.tar.gz", hash = "sha256:d6dabc50b4ea832cdcf0e1e6021bd0c7fcd9ade155888d79bb6a3c31fce2dc6f", size = 32710, upload-time = "2024-07-16T09:34:54.09Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/c2/c62601c858010b0513a6434b9be19bd740533a6e861eddfd30b7258d92a0/textdistance-4.6.3-py3-none-any.whl", hash = "sha256:0cb1b2cc8e3339ddc3e0f8c870e49fb49de6ecc42a718917308b3c971f34aa56", size = 31263, upload-time = "2024-07-16T09:34:51.082Z" },
]
[[package]]
name = "types-python-dateutil"
version = "2.9.0.20250516"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ef/88/d65ed807393285204ab6e2801e5d11fbbea811adcaa979a2ed3b67a5ef41/types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5", size = 13943, upload-time = "2025-05-16T03:06:58.385Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/3f/b0e8db149896005adc938a1e7f371d6d7e9eca4053a29b108978ed15e0c2/types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93", size = 14356, upload-time = "2025-05-16T03:06:57.249Z" },
]
[[package]]
name = "typing-extensions"
version = "4.14.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" },
]
[[package]]
name = "typing-inspection"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
]
[[package]]
name = "tzdata"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
]
[[package]]
name = "unidecode"
version = "1.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/7d/a8a765761bbc0c836e397a2e48d498305a865b70a8600fd7a942e85dcf63/Unidecode-1.4.0.tar.gz", hash = "sha256:ce35985008338b676573023acc382d62c264f307c8f7963733405add37ea2b23", size = 200149, upload-time = "2025-04-24T08:45:03.798Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/b7/559f59d57d18b44c6d1250d2eeaa676e028b9c527431f5d0736478a73ba1/Unidecode-1.4.0-py3-none-any.whl", hash = "sha256:c3c7606c27503ad8d501270406e345ddb480a7b5f38827eafe4fa82a137f0021", size = 235837, upload-time = "2025-04-24T08:45:01.609Z" },
]
[[package]]
name = "urllib3"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]
[[package]]
name = "uvicorn"
version = "0.34.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/de/ad/713be230bcda622eaa35c28f0d328c3675c371238470abdea52417f17a8e/uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a", size = 76631, upload-time = "2025-06-01T07:48:17.531Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6d/0d/8adfeaa62945f90d19ddc461c55f4a50c258af7662d34b6a3d5d1f8646f6/uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885", size = 62431, upload-time = "2025-06-01T07:48:15.664Z" },
]