From adfa5868a0ed27d7d63905223a573caf9279818e Mon Sep 17 00:00:00 2001 From: berkay Date: Sat, 22 Mar 2025 00:30:51 +0300 Subject: [PATCH] updated Empty Runner --- .gitignore | 1 - .python-version | 1 + Commons/json_functions.py | 13 + Commons/logger_functions.py | 13 + Commons/path_provider_function.py | 10 + Commons/select_functions.py | 78 + Configs/api.py | 116 ++ Configs/host_config.py | 34 + Configs/mongo.py | 10 + Configs/postgres.py | 17 + Configs/redis.py | 121 ++ EmptyRunner/Dockerfile | 28 + EmptyRunner/app.py | 5 + EmptyRunner/mongo_app.py | 67 + EmptyRunner/postgres_app.py | 45 + Schemas/__init__.py | 169 ++ Schemas/account/account.py | 625 ++++++ Schemas/account/iban.py | 103 + Schemas/api/encrypter.py | 124 ++ Schemas/building/budget.py | 156 ++ Schemas/building/build.py | 785 ++++++++ Schemas/building/decision_book.py | 1769 +++++++++++++++++ Schemas/company/company.py | 575 ++++++ Schemas/company/department.py | 232 +++ Schemas/company/employee.py | 143 ++ Schemas/event/event.py | 429 ++++ Schemas/identity/identity.py | 1034 ++++++++++ Schemas/others/enums.py | 103 + Schemas/rules/rules.py | 29 + Services/MongoService/handlers.py | 25 + Services/MongoService/provider.py | 178 ++ .../controllers/core_controllers.py | 108 + .../controllers/crud_controllers.py | 249 +++ .../controllers/filter_controllers.py | 202 ++ .../controllers/mixin_controllers.py | 135 ++ .../controllers/pagination_controllers.py | 249 +++ .../controllers/response_controllers.py | 109 + Services/PostgresService/database.py | 62 + alembic.ini | 118 ++ alembic/README | 1 + alembic/env.py | 78 + alembic/script.py.mako | 28 + docker-compose.yml | 198 ++ pyproject.toml | 23 + uv.lock | 773 +++++++ 45 files changed, 9370 insertions(+), 1 deletion(-) create mode 100644 .python-version create mode 100644 Commons/json_functions.py create mode 100644 Commons/logger_functions.py create mode 100644 Commons/path_provider_function.py create mode 100644 Commons/select_functions.py create mode 
100644 Configs/api.py create mode 100644 Configs/host_config.py create mode 100644 Configs/mongo.py create mode 100644 Configs/postgres.py create mode 100644 Configs/redis.py create mode 100644 EmptyRunner/Dockerfile create mode 100644 EmptyRunner/app.py create mode 100644 EmptyRunner/mongo_app.py create mode 100644 EmptyRunner/postgres_app.py create mode 100644 Schemas/__init__.py create mode 100644 Schemas/account/account.py create mode 100644 Schemas/account/iban.py create mode 100644 Schemas/api/encrypter.py create mode 100644 Schemas/building/budget.py create mode 100644 Schemas/building/build.py create mode 100644 Schemas/building/decision_book.py create mode 100644 Schemas/company/company.py create mode 100644 Schemas/company/department.py create mode 100644 Schemas/company/employee.py create mode 100644 Schemas/event/event.py create mode 100644 Schemas/identity/identity.py create mode 100644 Schemas/others/enums.py create mode 100644 Schemas/rules/rules.py create mode 100644 Services/MongoService/handlers.py create mode 100644 Services/MongoService/provider.py create mode 100644 Services/PostgresService/controllers/core_controllers.py create mode 100644 Services/PostgresService/controllers/crud_controllers.py create mode 100644 Services/PostgresService/controllers/filter_controllers.py create mode 100644 Services/PostgresService/controllers/mixin_controllers.py create mode 100644 Services/PostgresService/controllers/pagination_controllers.py create mode 100644 Services/PostgresService/controllers/response_controllers.py create mode 100644 Services/PostgresService/database.py create mode 100644 alembic.ini create mode 100644 alembic/README create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 docker-compose.yml create mode 100644 pyproject.toml create mode 100644 uv.lock diff --git a/.gitignore b/.gitignore index f852d0e..9ab4277 100644 --- a/.gitignore +++ b/.gitignore @@ -125,7 +125,6 @@ service_app/.env .venv 
import os
import json


def read_json_file(json_file_path):
    """Load and return the JSON content of *json_file_path*.

    Returns:
        The parsed JSON object, or an empty dict when the file does not exist.
    """
    if os.path.exists(json_file_path):
        with open(json_file_path, "r") as json_file:
            return json.load(json_file)
    return {}


def write_json_file(json_file_path, data):
    """Serialize *data* as JSON into *json_file_path* with a 2-space indent.

    Bug fix: the original passed the path string itself as ``fp`` to
    ``json.dump``, which raises ``AttributeError`` — a writable file object
    must be opened first.
    """
    with open(json_file_path, "w") as json_file:
        json.dump(obj=data, fp=json_file, indent=2)
import sys


def set_python_path(path_to_provide: str):
    """Ensure *path_to_provide* (as an absolute-style path) is on ``sys.path``.

    Bug fix: the original prepended ``"/"`` when the path ALREADY started with
    a slash (producing ``//...``) and indexed ``[0]`` which crashes on an
    empty string; the intent is to add the slash only when it is missing.
    """
    path_to_provide = str(path_to_provide)
    # Prepend a slash only when the path does not already start with one.
    if not path_to_provide.startswith("/"):
        path_to_provide = "/" + path_to_provide

    if path_to_provide not in sys.path:
        sys.path.append(path_to_provide)
class SelectAction:
    """Mixin that selects rows joined through ``cls.__many__table__`` for a duty set."""

    @classmethod
    def select_action(cls, duty_id_list: list, filter_expr: list = None):
        """Return a query over ``cls`` restricted to members whose association
        row matches one of *duty_id_list* and passes the confirmed/active
        filters from ``SelectorsBase``.

        Args:
            duty_id_list: duty ids to match against ``__many__table__.duties_id``.
            filter_expr: optional extra SQLAlchemy filter expressions.

        The original duplicated the entire query for the with/without
        ``filter_expr`` cases; both branches are merged here with identical
        filter ordering (extra expressions appended last).
        """
        conditions = [
            cls.__many__table__.duties_id.in_(duty_id_list),
            *SelectorsBase.add_confirmed_filter(
                first_table=cls, second_table=cls.__many__table__
            ),
        ]
        if filter_expr is not None:
            conditions.extend(filter_expr)

        matching_ids = (
            cls.session.query(cls.id)
            .select_from(cls)
            .join(cls.__many__table__, cls.__many__table__.member_id == cls.id)
            .filter(*conditions)
        )
        return cls.query.filter(cls.id.in_([row[0] for row in matching_ids.all()]))
"https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg" + FORGOT_LINK = "https://www.evyos.com.tr/password/create?tokenUrl=" + BLACKLIST_LINK = "https://www.evyos.com.tr/support/unknown-login-notice/" + APP_DIR = "/home/berkay/git-evyos/api-managment-backend/" + + @classmethod + def forgot_link(cls, forgot_key): + return cls.FORGOT_LINK + forgot_key + + @classmethod + def blacklist_login(cls, record_id): + return cls.BLACKLIST_LINK + record_id + + +class MainConfig: + + # Date and Time Configuration + DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss Z" + DATETIME_FORMAT_JS = "YYYY-MM-DD HH:mm:ss +0" + + # Timezone Configuration + DEFAULT_TIMEZONE = "GMT+3" # Default timezone for the application + SYSTEM_TIMEZONE = "GMT+0" # System timezone (used for internal operations) + SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"] # List of supported timezones + + +class LanguageConfig: + + SUPPORTED_LANGUAGES = ["en", "tr"] + DEFAULT_LANGUAGE = "tr" + + +class ValidationsConfig: + + SUPPORTED_VALIDATIONS = ["header", "validation", "all"] + DEFAULT_VALIDATION = "all" + + +class ApiConfig(DefaultApiConfig): + # Application Information + APP_NAME = "evyos-auth-api-gateway" + TITLE = "WAG API Auth Api Gateway" + DESCRIPTION = ( + "This api is serves as web auth api gateway only to evyos web services." 
class Auth:
    """Authentication constants: token tags, signing secrets, and expiry windows.

    NOTE(review): the signing secrets below are hardcoded in source; they
    should presumably come from the environment or a secrets manager — confirm
    before production use.
    """

    # Company / token identification.
    ACCESS_EMAIL_EXT = "evyos.com.tr"
    ACCESS_TOKEN_TAG = "evyos-session-key"
    REFRESHER_TOKEN_TAG = "eys-session-refresher"

    # Signing secrets, named by their character length.
    SECRET_KEY_72 = (
        "t3sUAmjTGeTgDc6dAUrB41u2SNg0ZHzj4HTjem95y3fRH1nZXOHIBj163kib6iLybT0gLaxq"
    )
    SECRET_KEY_96 = "7ct8VpiwaP1hR2bVSet4dEEAgepuTZUOnO1QxOgKyDqBR2PkqNhcubSrbUUigQKoQA1PBoeeQn5ZCo24pESmVtKs76nA4EKq"
    SECRET_KEY_144 = (
        "R2p5Rq6KCr6PCfjFYUeH1keF2VWHFEuqINVjBGGnvRA2m10pYUKqfOtIGBcaj2v5wZmElDndzSHGOS7roQsoTelPSok0"
        + "qqMucurMWE0FGexGpFuJkfPEm9tH2OjMOqegvEetpSVywH0W4Kh4"
    )

    # JWT signing algorithm and generated-token lengths.
    ALGORITHM = "HS256"
    ACCESS_TOKEN_LENGTH: int = 90
    REFRESHER_TOKEN_LENGTH: int = 144

    # Expiry windows used when issuing credentials.
    PASSWORD_EXPIRE_DAY = datetime.timedelta(days=30)
    TOKEN_EXPIRE_MINUTES_1 = datetime.timedelta(minutes=1)
    TOKEN_EXPIRE_MINUTES_15 = datetime.timedelta(minutes=15)
    TOKEN_EXPIRE_MINUTES_30 = datetime.timedelta(minutes=30)
    TOKEN_EXPIRE_DAY_1 = datetime.timedelta(days=1)
    TOKEN_EXPIRE_DAY_5 = datetime.timedelta(days=5)
    TOKEN_EXPIRE_DAY_15 = datetime.timedelta(days=15)
    TOKEN_EXPIRE_DAY_30 = datetime.timedelta(days=30)
class MongoConfig:
    """Connection settings for the application's MongoDB instance.

    NOTE(review): credentials are hardcoded in source; they should presumably
    come from the environment or a secrets manager — confirm before deploying.
    """

    PASSWORD: str = "mongo_password"
    USER_NAME: str = "mongo_user"
    DATABASE_NAME: str = "mongo_database"
    HOST: str = HostConfig.MAIN_HOST
    # Annotation fix: the port value is an int (the original declared ``str``).
    # The rendered URL is unchanged since the f-string stringifies it anyway.
    PORT: int = 11777
    URL: str = (
        f"mongodb://{USER_NAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}"
        "?retryWrites=true&w=majority"
    )
class WagRedis:
    """Redis connection settings for the WAG services.

    NOTE(review): the password is hardcoded in source — confirm it should not
    come from the environment instead.
    """

    REDIS_HOST = HostConfig.MAIN_HOST
    REDIS_PASSWORD: str = "commercial_redis_password"
    REDIS_PORT: int = 11222
    REDIS_DB: int = 0

    @classmethod
    def as_dict(cls):
        """Return the connection settings as keyword arguments for a Redis client.

        Bug fix: the original referenced ``WagRedis.*`` directly inside the
        classmethod, which silently ignores overrides on subclasses; ``cls``
        respects them.
        """
        return dict(
            host=cls.REDIS_HOST,
            password=cls.REDIS_PASSWORD,
            port=cls.REDIS_PORT,
            db=cls.REDIS_DB,
        )
/dashboard?site=BuildingCluster, PAGE_NAME: BuildingCluster, PAGE_INFO: {LANGUAGE_MODELS: "", ICON: "", URL: ""}} + value : {RedisCategoryPageInfoKeys.PAGE_INFO} + MENU_MAPPER: {PAGE_MENU_INDEX: 1, PAGE_MENU_COMPONENT: {..., lang: {"tr"}: {...}, lang: {"en"}: {...}}} + value : {RedisCategoryPageInfoKeys.PAGE_INFO} + """ + + page_index: str = ( + f"{RedisCategoryPageInfoKeys.PAGE_MENU_INDEX}:{RedisCategoryPageInfoKeys.PAGE_URL}" + ) + page_mapper_key_language: str = ( + f"{RedisCategoryPageInfoKeys.PAGE_MENU_INDEX}:{RedisCategoryPageInfoKeys.PAGE_LANGUAGE}" + ) + menu_mapper_key: str = ( + f"{RedisCategoryPageInfoKeys.PAGE_URL}:{RedisCategoryPageInfoKeys.PAGE_MENU_INDEX}" + ) + menu_mapper_key_component: str = ( + f"{RedisCategoryPageInfoKeys.PAGE_URL}:{RedisCategoryPageInfoKeys.PAGE_MENU_COMPONENT}" + ) + + +class RedisValidationKeysAction: + # LANGUAGE_MODELS:DYNAMIC:VALIDATIONS: + dynamic_validation_key: str = ( + f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.DYNAMIC}:{RedisValidationKeys.VALIDATIONS}" + ) + # LANGUAGE_MODELS:DYNAMIC:HEADERS:REQUEST + dynamic_header_request_key: str = ( + f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.DYNAMIC}:{RedisValidationKeys.HEADERS}:{RedisValidationKeys.REQUESTS}" + ) + # LANGUAGE_MODELS:DYNAMIC:HEADERS:RESPONSE + dynamic_header_response_key: str = ( + f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.DYNAMIC}:{RedisValidationKeys.HEADERS}:{RedisValidationKeys.RESPONSES}" + ) + # LANGUAGE_MODELS:STATIC:ERRORCODES: + static_error_code_key: str = ( + f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.STATIC}:{RedisValidationKeys.ERRORCODES}" + ) + # LANGUAGE_MODELS:STATIC:RESPONSES: + static_response_key: str = ( + f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.STATIC}:{RedisValidationKeys.RESPONSES}" + ) + # LANGUAGE_MODELS:STATIC:REQUESTS: + static_request_key: str = ( + 
f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.STATIC}:{RedisValidationKeys.REQUESTS}" + ) diff --git a/EmptyRunner/Dockerfile b/EmptyRunner/Dockerfile new file mode 100644 index 0000000..be81e22 --- /dev/null +++ b/EmptyRunner/Dockerfile @@ -0,0 +1,28 @@ +FROM python:3.12-slim + +WORKDIR / + +# Install system dependencies and Poetry +RUN apt-get update && apt-get install -y --no-install-recommends gcc \ + && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry + +# Copy Poetry configuration +COPY /pyproject.toml ./pyproject.toml + +# Configure Poetry and install dependencies with optimizations +RUN poetry config virtualenvs.create false \ + && poetry install --no-interaction --no-ansi --no-root --only main \ + && pip cache purge \ + && rm -rf ~/.cache/pypoetry + +# Copy application code +COPY . . +COPY /EmptyRunner . + +# Set Python path to include app directory +ENV PYTHONPATH=/ \ + PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 + +# Run the application using the configured uvicorn server +CMD ["poetry", "run", "python", "app.py"] diff --git a/EmptyRunner/app.py b/EmptyRunner/app.py new file mode 100644 index 0000000..d5c5ad7 --- /dev/null +++ b/EmptyRunner/app.py @@ -0,0 +1,5 @@ +# Try Redis + + +if __name__ == "__main__": + pass diff --git a/EmptyRunner/mongo_app.py b/EmptyRunner/mongo_app.py new file mode 100644 index 0000000..d26cf29 --- /dev/null +++ b/EmptyRunner/mongo_app.py @@ -0,0 +1,67 @@ +import arrow +from uuid import uuid4 + +from Services.MongoService.provider import MongoProvider +from Configs.mongo import MongoConfig + +""" +URL mongodb://mongo_user:mongo_password@10.10.2.36:11777/mongo_database?retryWrites=true&w=majority +mongo_provider +uu_id_key 5f2ab2b3-87ff-4ca6-9186-ec8b8a25fd7e + +insert_one_result InsertOneResult(ObjectId('67dd92d5f23084946d88de9e'), acknowledged=True) +update_one_result UpdateResult({'n': 1, 'nModified': 1, 'ok': 1.0, 'updatedExisting': True}, acknowledged=True) +find_one_result : +{ +'_id': 
ObjectId('67dd95cec132281e8e593683'), 'date': '2025-03-21', 'uuid': 'f27b7a50-1b00-4369-95f8-d1afdec55605', +'data': 'Test data', 'added': 'Added data' +} +find_many_result : [ +{'_id': ObjectId('67dd95f687d6e0bfde791431'), 'date': '2025-03-21', 'uuid': 'f5d281ba-498f-4e2b-8c75-8c4db6312e85', +'data': 'Updated Test data', 'added': 'Added data'}, +{'_id': ObjectId('67dd96ff0ace2f49b9e56c7a'), 'date': '2025-03-21', 'uuid': 'dd750ad2-a3b3-4879-997b-65fa74b3171b', +'data': 'Updated Test data', 'added': 'Added data'}, +{'_id': ObjectId('67dd973966ee138cddcab371'), 'date': '2025-03-21', 'uuid': '0b0c5d4e-500f-410f-8919-2038ecc4bc8e', +'data': 'Updated Test data', 'added': 'Added data'} +] +""" +if __name__ == "__main__": + # Create a new instance of the EmptyRunner class + print('URL', MongoConfig.URL) + with MongoProvider.mongo_client() as client: + current_date = str(arrow.now().date()) + mongo_provider = MongoProvider( + client=client, + database=MongoConfig.DATABASE_NAME, + storage_reason=["Collected", "Data", str(current_date)], + ) + uu_id_key = str(uuid4()) + print('mongo_provider', mongo_provider) + insert_one_result = mongo_provider.insert_one( + document={ + "date": current_date, + "uuid": uu_id_key, + "data": "Test data", + } + ) + # str(insert_one_result.inserted_id) + # insert_one_result.acknowledged + print('uu_id_key', uu_id_key) + print('insert_one_result', insert_one_result) + update_one_result = mongo_provider.update_one( + filter_query={"uuid": uu_id_key}, + update_data={"$set": {"added": "Added data", "data": "Updated Test data"}}, + ) + print('update_one_result', update_one_result) + + find_many_result = mongo_provider.find_many(filter_query={"data": "Updated Test data"}) + print('find_many_result', find_many_result) + + find_one_result = mongo_provider.find_one(filter_query={"added": "Added data"}) + print('find_one_result', find_one_result) + + # Delete the document + delete_result = mongo_provider.delete_one(filter_query={"uuid": uu_id_key}) + + # 
Delete multiple documents + delete_many_result = mongo_provider.delete_many(filter_query={"added": "Added data"}) diff --git a/EmptyRunner/postgres_app.py b/EmptyRunner/postgres_app.py new file mode 100644 index 0000000..7b778ee --- /dev/null +++ b/EmptyRunner/postgres_app.py @@ -0,0 +1,45 @@ +from Schemas import AddressStreet, Users, Duty +from Services.PostgresService.controllers.pagination_controllers import Pagination, PaginationConfig + + +if __name__ == "__main__": + db = AddressStreet.new_session() + AddressStreet.pre_query = AddressStreet.filter_all( + AddressStreet.street_name.ilike( + "A%", + ), + db=db, + ).core_query + account_list = AddressStreet.filter_all( + AddressStreet.expiry_starts >= "2000-01-01", + AddressStreet.expiry_ends >= "2050-12-31", + AddressStreet.street_name.ilike("%B%"), + db=db, + ) + pagination_config = PaginationConfig( + page=200, + size=50, + order_field=["uu_id"], + order_type=["asc"], + ) + pagination = Pagination(data=account_list) + pagination.change(config=pagination_config) + + user_find_one = Users.filter_one( + Users.uu_id == "45554ebb-422d-4da7-b89a-1fcfb7e41414", + db=db, + ) + user_wrong_find_one = Users.filter_one( + Users.uu_id == "95554ebb-422d-4da7-b89a-1fcfb7e41414", + db=db, + ) + + created_duty = Duty.find_or_create( + duty_name="Test", + duty_code="T50", + duty_description=1, + db=db, + ) + print('created_duty wrong', created_duty) + created_duty.save(db=db) + print('created_duty', created_duty) diff --git a/Schemas/__init__.py b/Schemas/__init__.py new file mode 100644 index 0000000..c6bcd2a --- /dev/null +++ b/Schemas/__init__.py @@ -0,0 +1,169 @@ +# SQL Models +from .account.account import ( + AccountBooks, + AccountCodeParser, + AccountRecords, + AccountCodes, + AccountDetail, + AccountMaster, + AccountRecordExchanges, +) +from .building.budget import ( + DecisionBookBudgetBooks, + DecisionBookBudgetCodes, + DecisionBookBudgetMaster, + DecisionBookBudgets, +) +from .account.iban import ( + BuildIbans, + 
BuildIbanDescription, +) +from .api.encrypter import CrypterEngine +from .building.build import ( + Build, + BuildTypes, + BuildParts, + BuildArea, + BuildSites, + BuildLivingSpace, + BuildPersonProviding, + BuildCompaniesProviding, + RelationshipEmployee2Build, +) +from .building.decision_book import ( + BuildDecisionBook, + BuildDecisionBookItems, + BuildDecisionBookPerson, + BuildDecisionBookLegal, + BuildDecisionBookItemsUnapproved, + BuildDecisionBookInvitations, + BuildDecisionBookPayments, + BuildDecisionBookProjects, + BuildDecisionBookProjectPerson, + BuildDecisionBookPersonOccupants, + BuildDecisionBookProjectItems, +) +from .company.company import ( + Companies, + RelationshipDutyCompany, +) +from .company.employee import ( + Employees, + EmployeesSalaries, + EmployeeHistory, + Staff, +) +from .company.department import ( + Duty, + Duties, + Departments, +) +from .event.event import ( + Modules, + Services, + Service2Events, + Events, + Event2Occupant, + Event2Employee, + Event2OccupantExtra, + Event2EmployeeExtra, +) +from .identity.identity import ( + Addresses, + AddressCity, + AddressStreet, + AddressLocality, + AddressDistrict, + AddressNeighborhood, + AddressState, + AddressCountry, + AddressPostcode, + AddressGeographicLocations, + UsersTokens, + OccupantTypes, + People, + Users, + RelationshipDutyPeople, + RelationshipEmployee2PostCode, + Contracts, +) +from .others.enums import ( + ApiEnumDropdown, +) +from .rules.rules import ( + EndpointRestriction, +) + + +__all__ = [ + "AccountBooks", + "AccountCodeParser", + "AccountRecords", + "AccountCodes", + "AccountDetail", + "AccountMaster", + "AccountRecordExchanges", + "BuildIbans", + "BuildIbanDescription", + "CrypterEngine", + "Build", + "BuildTypes", + "BuildParts", + "BuildArea", + "BuildSites", + "BuildLivingSpace", + "BuildPersonProviding", + "BuildCompaniesProviding", + "BuildDecisionBook", + "BuildDecisionBookItems", + "BuildDecisionBookPerson", + "BuildDecisionBookLegal", + 
"BuildDecisionBookItemsUnapproved", + "BuildDecisionBookInvitations", + "BuildDecisionBookPayments", + "BuildDecisionBookProjects", + "BuildDecisionBookProjectPerson", + "BuildDecisionBookPersonOccupants", + "BuildDecisionBookProjectItems", + "DecisionBookBudgetBooks", + "DecisionBookBudgetCodes", + "DecisionBookBudgetMaster", + "DecisionBookBudgets", + "Companies", + "RelationshipDutyCompany", + "Employees", + "EmployeesSalaries", + "EmployeeHistory", + "Staff", + "Duty", + "Duties", + "Departments", + "Modules", + "Services", + "Service2Events", + "Events", + "Event2Occupant", + "Event2Employee", + "Event2OccupantExtra", + "Event2EmployeeExtra", + "Addresses", + "AddressCity", + "AddressStreet", + "AddressLocality", + "AddressDistrict", + "AddressNeighborhood", + "AddressState", + "AddressCountry", + "AddressPostcode", + "AddressGeographicLocations", + "UsersTokens", + "OccupantTypes", + "People", + "Users", + "RelationshipDutyPeople", + "RelationshipEmployee2PostCode", + "Contracts", + "ApiEnumDropdown", + "EndpointRestriction", + "RelationshipEmployee2Build", +] diff --git a/Schemas/account/account.py b/Schemas/account/account.py new file mode 100644 index 0000000..dbb8517 --- /dev/null +++ b/Schemas/account/account.py @@ -0,0 +1,625 @@ +from sqlalchemy.orm import mapped_column, Mapped +from sqlalchemy import ( + String, + Integer, + Boolean, + ForeignKey, + Index, + TIMESTAMP, + Numeric, + SmallInteger, +) + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + +class AccountBooks(CrudCollection): + + __tablename__ = "account_books" + __exclude__fields__ = [] + + country: Mapped[str] = mapped_column(String, nullable=False) + branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0") + + company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False) + company_uu_id: Mapped[str] = mapped_column(String, nullable=False) + branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id")) + 
branch_uu_id: Mapped[str] = mapped_column(String, comment="Branch UU ID") + + __table_args__ = ( + Index("account_companies_book_ndx_00", company_id, "expiry_starts"), + {"comment": "Account Book Information"}, + ) + + +class AccountCodes(CrudCollection): + + __tablename__ = "account_codes" + __exclude__fields__ = [] + + account_code: Mapped[str] = mapped_column( + String(48), nullable=False, comment="Account Code" + ) + comment_line: Mapped[str] = mapped_column( + String(128), nullable=False, comment="Comment Line" + ) + + is_receive_or_debit: Mapped[bool] = mapped_column(Boolean) + product_id: Mapped[int] = mapped_column(Integer, server_default="0") + nvi_id: Mapped[str] = mapped_column(String(48), server_default="") + status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0") + account_code_seperator: Mapped[str] = mapped_column(String(1), server_default=".") + + system_id: Mapped[int] = mapped_column(SmallInteger, server_default="0") + locked: Mapped[int] = mapped_column(SmallInteger, server_default="0") + + company_id: Mapped[int] = mapped_column(ForeignKey("companies.id")) + company_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Company UU ID" + ) + customer_id: Mapped[int] = mapped_column(ForeignKey("companies.id")) + customer_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Customer UU ID" + ) + person_id: Mapped[int] = mapped_column(ForeignKey("people.id")) + person_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Person UU ID" + ) + + __table_args__ = ({"comment": "Account Code Information"},) + + +class AccountCodeParser(CrudCollection): + + __tablename__ = "account_code_parser" + __exclude__fields__ = [] + + account_code_1: Mapped[str] = mapped_column(String, nullable=False, comment="Order") + account_code_2: Mapped[str] = mapped_column(String, nullable=False, comment="Order") + account_code_3: Mapped[str] = mapped_column(String, nullable=False, comment="Order") + 
account_code_4: Mapped[str] = mapped_column(String, server_default="") + account_code_5: Mapped[str] = mapped_column(String, server_default="") + account_code_6: Mapped[str] = mapped_column(String, server_default="") + + account_code_id: Mapped[int] = mapped_column( + ForeignKey("account_codes.id"), nullable=False + ) + account_code_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Account Code UU ID" + ) + + __table_args__ = ( + Index("_account_code_parser_ndx_00", account_code_id), + {"comment": "Account Code Parser Information"}, + ) + + @property + def get_account_code(self): + return f"{self.account_codes.account_code_seperator}".join( + [ + getattr(self, f"account_code_{i}") + for i in range(1, 7) + if getattr(self, f"account_code_{i}") + ] + ) + + +class AccountMaster(CrudCollection): + """ + AccountCodes class based on declarative_base and CrudCollection via session + """ + + __tablename__ = "account_master" + __exclude__fields__ = [] + + doc_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), nullable=False, comment="Document Date" + ) + plug_type: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type") + plug_number: Mapped[int] = mapped_column( + Integer, nullable=False, comment="Plug Number" + ) + + special_code: Mapped[str] = mapped_column(String(12), server_default="") + authorization_code: Mapped[str] = mapped_column(String(12), server_default="") + + doc_code: Mapped[str] = mapped_column(String(12), server_default="") + doc_type: Mapped[int] = mapped_column(SmallInteger, server_default="0") + + comment_line1: Mapped[str] = mapped_column(String, server_default="") + comment_line2: Mapped[str] = mapped_column(String, server_default="") + comment_line3: Mapped[str] = mapped_column(String, server_default="") + comment_line4: Mapped[str] = mapped_column(String, server_default="") + comment_line5: Mapped[str] = mapped_column(String, server_default="") + comment_line6: Mapped[str] = 
mapped_column(String, server_default="") + project_code: Mapped[str] = mapped_column(String(12), server_default="") + module_no: Mapped[str] = mapped_column(String, server_default="") + journal_no: Mapped[int] = mapped_column(Integer, server_default="0") + + status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0") + canceled: Mapped[bool] = mapped_column(Boolean, server_default="0") + print_count: Mapped[int] = mapped_column(SmallInteger, server_default="0") + total_active: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_passive: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_active_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_passive_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_active_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_passive_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_active_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_passive_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_active_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + total_passive_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + cross_ref: Mapped[int] = mapped_column(Integer, server_default="0") + data_center_id: Mapped[str] = mapped_column(String, server_default="") + data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0") + + account_header_id: Mapped[int] = mapped_column( + ForeignKey("account_books.id"), nullable=False + ) + account_header_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Account Header UU ID" + ) + project_item_id: Mapped[int] = mapped_column( + ForeignKey("build_decision_book_projects.id") + ) + project_item_uu_id: Mapped[str] = mapped_column( + String, comment="Project Item UU ID" + ) + department_id: Mapped[int] = 
class AccountDetail(CrudCollection):
    """
    Accounting voucher line (detail) row: one record per debit/credit
    line of an ``account_master`` voucher.
    """

    __tablename__ = "account_detail"
    __exclude__fields__ = []
    # (column, enum class, default key) — resolved against ApiEnumDropdown.
    __enum_list__ = [("plug_type", "AccountingReceiptTypes", "M")]

    doc_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
    )
    line_no: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Line Number"
    )
    receive_debit: Mapped[str] = mapped_column(
        String(1), nullable=False, comment="Receive Debit"
    )
    debit: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Debit"
    )

    department: Mapped[str] = mapped_column(String(24), server_default="")
    special_code: Mapped[str] = mapped_column(String(12), server_default="")
    account_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    account_fiche_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    center_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    general_code: Mapped[str] = mapped_column(String(32), server_default="")
    credit: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    currency_type: Mapped[str] = mapped_column(String(4), server_default="TL")
    exchange_rate: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    debit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    credit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    discount_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    amount: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    # Annotation corrected: the column is String(32), not a numeric.
    cross_account_code: Mapped[str] = mapped_column(String(32), server_default="")
    inf_index: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    not_inflated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    not_calculated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    comment_line1: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line2: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line3: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line4: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line5: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line6: Mapped[str] = mapped_column(String(64), server_default="")
    owner_acc_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    from_where: Mapped[int] = mapped_column(Integer, server_default="0")
    orj_eid: Mapped[int] = mapped_column(Integer, server_default="0")
    canceled: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    data_center_id: Mapped[str] = mapped_column(String, server_default="")
    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # --- foreign keys (id + uu_id string pairs, per project convention) ----
    plug_type_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    # Annotation added (was a bare mapped_column assignment).
    plug_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Plug Type UU ID"
    )
    account_header_id: Mapped[int] = mapped_column(
        ForeignKey("account_books.id"), nullable=False
    )
    account_header_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Header UU ID"
    )
    account_code_id: Mapped[int] = mapped_column(
        ForeignKey("account_codes.id"), nullable=False
    )
    account_code_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Code UU ID"
    )
    account_master_id: Mapped[int] = mapped_column(
        ForeignKey("account_master.id"), nullable=False
    )
    account_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Master UU ID"
    )
    project_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_projects.id")
    )
    project_uu_id: Mapped[str] = mapped_column(String, comment="Project UU ID")

    __table_args__ = (
        # One line number per voucher/book pair — enforces line uniqueness.
        Index(
            "_account_detail_ndx_00",
            account_master_id,
            doc_date,
            line_no,
            account_header_id,
            unique=True,
        ),
        {"comment": "Account Detail Information"},
    )
class AccountRecords(CrudCollection):
    """
    Imported bank statement records tied to buildings and their finances.

    Reference semantics (translated from the original Turkish docstring):
    ``build_decision_book_id`` — the decision book responsible for the record;
    ``send_company_id`` — the company that sent the record,
    ``send_person_id`` — the sending person;
    ``customer_id`` — the responsible user, ``company_id`` — the responsible company.
    """

    __tablename__ = "account_records"
    __exclude__fields__ = []
    # (column, enum class, default key) — resolved against ApiEnumDropdown.
    __enum_list__ = [
        ("receive_debit", "DebitTypes", "D"),
        ("budget_type", "BudgetType", "B"),
    ]

    # --- raw bank statement fields -----------------------------------------
    iban: Mapped[str] = mapped_column(
        String(64), nullable=False, comment="IBAN Number of Bank"
    )
    bank_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Date"
    )

    currency_value: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Currency Value"
    )
    bank_balance: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Bank Balance"
    )
    currency: Mapped[str] = mapped_column(
        String(5), nullable=False, comment="Unit of Currency"
    )
    additional_balance: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Additional Balance"
    )
    channel_branch: Mapped[str] = mapped_column(
        String(120), nullable=False, comment="Branch Bank"
    )
    process_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Process Type Name"
    )
    process_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Process Type"
    )
    process_comment: Mapped[str] = mapped_column(
        String, nullable=False, comment="Transaction Record Comment"
    )
    process_garbage: Mapped[str] = mapped_column(
        String, nullable=True, comment="Transaction Record Garbage"
    )
    bank_reference_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Reference Code"
    )

    # --- matching / reconciliation state -----------------------------------
    add_comment_note: Mapped[str] = mapped_column(String, server_default="")
    is_receipt_mail_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
    # Annotation added (was a bare mapped_column assignment).
    found_from: Mapped[str] = mapped_column(String, server_default="")
    similarity: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    remainder_balance: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")

    # Denormalized year/month/week/day parts of bank_date for fast grouping.
    bank_date_y: Mapped[int] = mapped_column(Integer)
    bank_date_m: Mapped[int] = mapped_column(SmallInteger)
    bank_date_w: Mapped[int] = mapped_column(SmallInteger)
    bank_date_d: Mapped[int] = mapped_column(SmallInteger)

    approving_accounting_record: Mapped[bool] = mapped_column(
        Boolean, server_default="0"
    )
    # "1900-01-01" acts as the not-yet-receipted sentinel date.
    accounting_receipt_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
    )
    accounting_receipt_number: Mapped[int] = mapped_column(Integer, server_default="0")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    approved_record: Mapped[bool] = mapped_column(Boolean, server_default="0")
    import_file_name: Mapped[str] = mapped_column(
        String, nullable=True, comment="XLS Key"
    )

    # --- foreign keys (id + uu_id string pairs, per project convention) ----
    receive_debit: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    receive_debit_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Debit UU ID"
    )
    budget_type: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    budget_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Type UU ID"
    )
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )
    send_company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=True
    )
    # Annotation added (was a bare mapped_column assignment).
    send_company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Send Company UU ID"
    )

    send_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    send_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Send Person UU ID"
    )
    approving_accounting_person: Mapped[int] = mapped_column(
        ForeignKey("people.id"), nullable=True
    )
    approving_accounting_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Approving Accounting Person UU ID"
    )

    living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"), nullable=True
    )
    living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Living Space UU ID"
    )
    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    # Annotation added (was a bare mapped_column assignment).
    customer_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Customer UU ID"
    )

    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True)
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build UU ID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    # NOTE(review): index names say "_budget_records_" although the table is
    # "account_records" — likely a leftover from a rename; renaming indexes
    # requires a migration, so only flagged here.
    __table_args__ = (
        Index("_budget_records_ndx_00", is_receipt_mail_send, bank_date),
        # Natural key of an imported statement line — prevents duplicates.
        Index(
            "_budget_records_ndx_01",
            iban,
            bank_date,
            bank_reference_code,
            bank_balance,
            unique=True,
        ),
        Index("_budget_records_ndx_02", status_id, bank_date),
        {
            "comment": "Bank Records that are related to building and financial transactions"
        },
    )
+ # **payment_dict + # ) + # print("dues_type", None, paid_value) + # if paid_value > 0: + # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum( + # enum_class="BuildDuesTypes", key="R" + # ) + # paid_value = ( + # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part( + # **payment_dict + # ) + # ) + # print("dues_type", payment_dict["dues_type"], paid_value) + # payment_dict["is_all"] = True + # if paid_value > 0: + # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum( + # enum_class="BuildDuesTypes", key="L" + # ) + # paid_value = ( + # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part( + # **payment_dict + # ) + # ) + # print("is all dues_type", payment_dict["dues_type"], paid_value) + # if paid_value > 0: + # payment_dict.pop("dues_type", None) + # paid_value = BuildDecisionBookPaymentsMaster.pay_dues_of_build_part( + # **payment_dict + # ) + # print("is all dues_type", None, paid_value) + # if paid_value > 0: + # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum( + # enum_class="BuildDuesTypes", key="R" + # ) + # paid_value = ( + # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part( + # **payment_dict + # ) + # ) + # print("is all dues_type", payment_dict["dues_type"], paid_value) + + +# class AccountRecordDecisionPaymentClosed(CrudCollection): +# +# __tablename__ = "account_record_decision_payment_closed" +# __exclude__fields__ = [] +# +# arc_currency: Mapped[str] = mapped_column( +# String(5), nullable=False, comment="Unit of Currency" +# ) +# arc_processing_time: Mapped[TIMESTAMP] = mapped_column( +# TIMESTAMP(timezone=True), nullable=False, comment="Processing Time" +# ) +# arc_currency_value: Mapped[float] = mapped_column( +# Numeric(20, 6), nullable=False, comment="Currency Value" +# ) +# +# decision_book_budgets_id: Mapped[int] = mapped_column( +# ForeignKey("decision_book_budgets.id"), nullable=True +# ) +# decision_book_budgets_uu_id: Mapped[str] = mapped_column( +# String, nullable=True, 
comment="Budget UUID" +# ) +# +# build_decision_book_payment_id: Mapped[int] = mapped_column( +# ForeignKey("build_decision_book_payments.id") +# ) +# build_decision_book_payment_uu_id: Mapped[str] = mapped_column( +# String, nullable=True, comment="Build Decision Book Payment UU ID" +# ) +# account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id")) +# account_records_uu_id: Mapped[str] = mapped_column( +# String, nullable=True, comment="Account Record UU ID" +# ) +# +# __table_args__ = ( +# Index( +# "_account_record_decision_payment_closed_ndx_00", +# account_records_id, +# build_decision_book_payment_id, +# arc_processing_time, +# ), +# Index( +# "_account_record_decision_payment_closed_ndx_01", +# build_decision_book_payment_id, +# account_records_id, +# arc_processing_time, +# ), +# {"comment": "Account Record Decision Payment Closed Information"}, +# ) + + +class AccountRecordExchanges(CrudCollection): + + __tablename__ = "account_record_exchanges" + __exclude__fields__ = [] + + are_currency: Mapped[str] = mapped_column( + String(5), nullable=False, comment="Unit of Currency" + ) + are_exchange_rate: Mapped[float] = mapped_column( + Numeric(18, 6), nullable=False, server_default="1" + ) + usd_exchange_rate_value: Mapped[float] = mapped_column( + Numeric(18, 6), + nullable=True, + server_default="0", + comment="It will be written by multiplying the usd exchange rate with the current value result.", + ) + eur_exchange_rate_value: Mapped[float] = mapped_column( + Numeric(18, 6), + nullable=True, + server_default="0", + comment="It will be written by multiplying the eur exchange rate with the current value result.", + ) + gbp_exchange_rate_value: Mapped[float] = mapped_column( + Numeric(18, 6), + nullable=True, + server_default="0", + comment="It will be written by multiplying the gpd exchange rate with the current value result.", + ) + cny_exchange_rate_value: Mapped[float] = mapped_column( + Numeric(18, 6), + nullable=True, + 
server_default="0", + comment="It will be written by multiplying the cny exchange rate with the current value result.", + ) + + account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id")) + account_records_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Account Record UU ID" + ) + + __table_args__ = ( + Index("_account_record_exchanges_ndx_00", account_records_id), + {"comment": "Account Record Exchanges Information"}, + ) diff --git a/Schemas/account/iban.py b/Schemas/account/iban.py new file mode 100644 index 0000000..2501a40 --- /dev/null +++ b/Schemas/account/iban.py @@ -0,0 +1,103 @@ +from sqlalchemy import String, ForeignKey, Index, TIMESTAMP, SmallInteger, Identity +from sqlalchemy.orm import mapped_column, Mapped + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + +class BuildIbans(CrudCollection): + """ + BuildParts class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "build_ibans" + __exclude__fields__ = [] + + iban: Mapped[str] = mapped_column( + String(40), server_default="", nullable=False, comment="IBAN number" + ) + start_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Start Date" + ) + + stop_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), server_default="2900-01-01 00:00:00" + ) + bank_code: Mapped[str] = mapped_column(String(24), server_default="TR0000000000000") + xcomment: Mapped[str] = mapped_column(String(64), server_default="????") + + build_id: Mapped[int] = mapped_column( + ForeignKey("build.id"), nullable=True, comment="Building ID" + ) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Building UUID", index=True + ) + # building: Mapped["Build"] = relationship( + # "Build", back_populates="build_ibans", foreign_keys=[build_id] + # ) + + __table_args__ = ( + Index("_build_ibans_ndx_01", iban, start_date, unique=True), + 
{"comment": "IBANs related to money transactions due to building objects"}, + ) + + +class BuildIbanDescription(CrudCollection): + """ + SearchComments class based on declarative_base and CrudCollection via session + """ + + __tablename__ = "build_iban_description" + __exclude__fields__ = [] + + iban: Mapped[str] = mapped_column(String, nullable=False, comment="IBAN Number") + group_id: Mapped[int] = mapped_column( + SmallInteger, nullable=False, comment="Group ID" + ) + search_word: Mapped[str] = mapped_column( + String, nullable=False, comment="Search Word", index=True + ) + + # decision_book_project_id: Mapped[int] = mapped_column( + # ForeignKey("build_decision_book_projects.id") + # ) + # decision_book_project_uu_id: Mapped[str] = mapped_column( + # String, nullable=False, comment="Decision Book Project UUID" + # ) + customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True) + customer_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Customer UUID" + ) + company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True) + company_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Company UUID" + ) + build_parts_id: Mapped[int] = mapped_column( + ForeignKey("build_parts.id"), nullable=True + ) + build_parts_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Build Parts UUID" + ) + + # decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship( + # "BuildDecisionBookProjects", + # back_populates="search_iban_description", + # foreign_keys=[decision_book_project_id], + # ) + # customer: Mapped["People"] = relationship( + # "People", back_populates="search_iban_description", foreign_keys=[customer_id] + # ) + # company: Mapped["Companies"] = relationship( + # "Company", back_populates="search_iban_description", foreign_keys=[company_id] + # ) + # parts: Mapped["BuildParts"] = relationship( + # "BuildParts", + # back_populates="search_iban_description", + 
class CrypterEngine(CrudCollection):
    """Rotating Fernet key store used to encrypt/decrypt ORM row fields.

    Each row persists a pair of Fernet keys (``key_first``/``key_second``,
    stored in their decoded ``str`` form) that are combined into a
    ``MultiFernet``.  Only key pairs younger than 29 days are handed out for
    encryption; decryption resolves the key pair through the target row's
    ``cryp_uu_id``.
    """

    __tablename__ = "crypter_engine"
    __table_args__ = ()
    # Class-level accumulators kept for backward compatibility with callers
    # that read them after a batch call; reset at the start of each batch.
    encrypt_list = []
    decrypt_list = []
    keys_error = "Unable to retrieve encrypt keys"
    alchemy_error = "Alchemy object is empty"

    key_first: Mapped[str] = mapped_column(String, nullable=False)
    key_second: Mapped[str] = mapped_column(String, nullable=False)

    @classmethod
    def get_valid_keys(cls, row=None):
        """Return ``(key_first, key_second, cryp_uu_id)``.

        With ``row`` carrying a ``cryp_uu_id``, the key pair originally used
        for that row is returned (decrypt path); otherwise a random fresh
        (< 29 days old) pair is returned (encrypt path).  Returns
        ``(None, None, None)`` when no key can be provided.
        """
        fresh = cls.created_at > datetime.now() - timedelta(days=29)
        # Ensure at least one fresh key pair exists before choosing.
        if not cls.filter_all(fresh).get(1):
            cls.create_encrypt_keys(count=100)
        if decrypt_identifier := getattr(row, "cryp_uu_id", None):
            if decrypt_row := cls.find_one(uu_id=str(decrypt_identifier)):
                # Keys are persisted as str — return them unchanged.
                # (Bug fix: the original called .decode() on str values.)
                return (
                    decrypt_row.key_first,
                    decrypt_row.key_second,
                    decrypt_row.uu_id,
                )
        if encrypt_rows := cls.filter_all(fresh).data:
            encrypt_row = random.choice(encrypt_rows)
            # Bug fix: uu_id must come from the chosen row, not the list
            # (the original read ``encrypt_rows.uu_id``).
            return encrypt_row.key_first, encrypt_row.key_second, encrypt_row.uu_id
        return None, None, None

    @classmethod
    def create_encrypt_keys(cls, count: int):
        """Generate *count* fresh Fernet key pairs and persist them as text."""
        for _ in range(count):
            cls.find_or_create(
                key_first=Fernet.generate_key().decode(),
                key_second=Fernet.generate_key().decode(),
            )

    @classmethod
    def raise_exception(cls, message=None):
        """Raise a generic error, defaulting to the key-retrieval message."""
        raise Exception(message if message else cls.keys_error)

    @classmethod
    def _fernet(cls, key_first, key_second):
        """Validate the key pair, then build the combined MultiFernet.

        Bug fix: the original constructed ``Fernet(None)`` before running
        the None-check, so the intended error message was never raised.
        """
        if not key_first or not key_second:
            cls.raise_exception()
        return MultiFernet([Fernet(key_first), Fernet(key_second)])

    @staticmethod
    def _as_bytes(value):
        """Fernet operates on bytes; encode str payloads transparently."""
        return value.encode() if isinstance(value, str) else value

    @classmethod
    def _encrypt_one(cls, alchemy_object):
        """Encrypt the ``__encrypt_list__`` fields of one ORM object in place."""
        key_first, key_second, cryp_uu_id = cls.get_valid_keys()
        fernet_keys = cls._fernet(key_first, key_second)
        alchemy_dict = alchemy_object.get_dict() if alchemy_object else None
        if not alchemy_dict:
            cls.raise_exception(cls.alchemy_error)
        for key, plain_row in alchemy_dict.items():
            if key in alchemy_object.__encrypt_list__:
                alchemy_dict[key] = fernet_keys.encrypt(
                    cls._as_bytes(plain_row)
                ).decode()
        # Remember which key pair was used so decryption can find it again.
        alchemy_dict["cryp_uu_id"] = cryp_uu_id
        return alchemy_object.update(**alchemy_dict)

    @classmethod
    def encrypt_given_alchemy_list(cls, alchemy_object_list: list):
        """Encrypt every object in the list; returns the updated objects.

        Bug fix: the original reset ``cls.encrypt_list`` inside
        ``get_valid_keys`` — called once per item — so the accumulator was
        clobbered every iteration and only the last result survived.
        """
        cls.encrypt_list = [cls._encrypt_one(obj) for obj in alchemy_object_list]
        return cls.encrypt_list

    @classmethod
    def encrypt_given_alchemy_object(cls, alchemy_object_object):
        """Encrypt a single ORM object; returns the updated object."""
        return cls._encrypt_one(alchemy_object_object)

    @classmethod
    def _decrypt_one(cls, alchemy_object):
        """Decrypt the ``__encrypt_list__`` fields of one ORM object to a dict."""
        key_first, key_second, _ = cls.get_valid_keys(row=alchemy_object)
        fernet_keys = cls._fernet(key_first, key_second)
        alchemy_dict = alchemy_object.get_dict() if alchemy_object else None
        if not alchemy_dict:
            cls.raise_exception(cls.alchemy_error)
        for key, token in alchemy_dict.items():
            if key in alchemy_object.__encrypt_list__:
                alchemy_dict[key] = fernet_keys.decrypt(cls._as_bytes(token)).decode()
        return alchemy_dict

    @classmethod
    def decrypt_given_alchemy(cls, alchemy_object_list: list):
        """Decrypt every object in the list; returns a list of plain dicts."""
        cls.decrypt_list = [cls._decrypt_one(obj) for obj in alchemy_object_list]
        return cls.decrypt_list

    @classmethod
    def decrypt_given_alchemy_object(cls, alchemy_object):
        """Decrypt a single ORM object; returns a plain dict."""
        return cls._decrypt_one(alchemy_object)
class DecisionBookBudgetBooks(CrudCollection):
    """Budget book: ties a decision book's budget to a company/branch."""

    __tablename__ = "decision_book_budget_books"
    __exclude__fields__ = []

    country: Mapped[str] = mapped_column(String, nullable=False)
    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # Owning company plus an optional branch (both point at companies).
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    branch_uu_id: Mapped[str] = mapped_column(
        String, comment="Branch UU ID", nullable=True
    )
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    __table_args__ = (
        Index(
            "_decision_book_budget_companies_book_ndx_00",
            company_id,
            "created_at",
        ),
        {"comment": "budget Book Information"},
    )


class DecisionBookBudgetCodes(CrudCollection):
    """Budget code dictionary: labelled codes budgets are booked under."""

    __tablename__ = "decision_book_budget_codes"
    __exclude__fields__ = []

    budget_code: Mapped[str] = mapped_column(
        String(48), nullable=False, comment="budget Code"
    )
    comment_line: Mapped[str] = mapped_column(
        Text, nullable=False, comment="Comment Line"
    )

    # Optional scoping of a code to a decision book / build part / company.
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )

    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )

    __table_args__ = (
        Index("_decision_book_budget_codes_ndx_00", budget_code, "created_at"),
        Index("_decision_book_budget_codes_ndx_01", company_id, "created_at"),
        {"comment": "budget Book Information"},
    )


class DecisionBookBudgetMaster(CrudCollection):
    """Budget master: total budget of a budget book per type/department."""

    __tablename__ = "decision_book_budget_master"
    __exclude__fields__ = []

    budget_type: Mapped[str] = mapped_column(
        String(50), nullable=False
    )  # Budget type (e.g. operational, investment)
    currency: Mapped[str] = mapped_column(
        String(8), server_default="TRY"
    )  # Budget currency
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )  # Total budget amount

    tracking_period_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    tracking_period_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Part Direction UUID"
    )
    budget_books_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_books.id"), nullable=False
    )
    budget_books_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Books UU ID"
    )
    department_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("departments.id"), nullable=False
    )  # Owning department
    department_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Department UU ID"
    )

    __table_args__ = ({"comment": "budget Book Information"},)


class DecisionBookBudgets(CrudCollection):
    """Budget movement line: total/used/remaining per code and period."""

    __tablename__ = "decision_book_budgets"
    __exclude__fields__ = []

    process_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False
    )  # Period start date
    budget_codes_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_codes.id"), nullable=False
    )
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )  # Total budget amount
    used_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )  # Consumed budget
    remaining_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )  # Remaining budget

    decision_book_budget_master_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_master.id"), nullable=False
    )
    decision_book_budget_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Budget Master UU ID"
    )

    __table_args__ = (
        Index(
            "_decision_book_budgets_ndx_00",
            decision_book_budget_master_uu_id,
            process_date,
        ),
        {"comment": "budget Book Information"},
    )
@@ -0,0 +1,785 @@ +from datetime import timedelta +from typing import List, Union + +from fastapi import HTTPException, status +from sqlalchemy.orm import mapped_column, relationship, Mapped +from sqlalchemy import ( + String, + Integer, + Boolean, + ForeignKey, + Index, + TIMESTAMP, + Text, + Numeric, + or_, +) + +from Commons.select_functions import SelectActionWithEmployee +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + +# from ApiLayers.ApiValidations.Request import ( +# InsertBuild, +# InsertBuildParts, +# InsertBuildLivingSpace, +# UpdateBuild, +# ) +# +# from ApiLayers.ApiValidations.Custom.token_objects import ( +# EmployeeTokenObject, +# OccupantTokenObject, +# ) + + +class BuildTypes(CrudCollection): + """ + BuildTypes class based on declarative_base and BaseMixin via session + + """ + + __tablename__ = "build_types" + __exclude__fields__ = [] + __include__fields__ = [] + + function_code: Mapped[str] = mapped_column( + String(12), server_default="", nullable=False, comment="Function Code" + ) + type_code: Mapped[str] = mapped_column( + String(12), server_default="", nullable=False, comment="Structure Type Code" + ) + lang: Mapped[str] = mapped_column( + String(4), server_default="TR", nullable=False, comment="Language" + ) + type_name: Mapped[str] = mapped_column( + String(48), server_default="", nullable=False, comment="Type Name" + ) + + __table_args__ = ( + Index("_build_types_ndx_00", type_code, function_code, lang, unique=True), + {"comment": "Function group of building types with their language information"}, + ) + + +class Part2Employee(CrudCollection): + """ + Employee2Parts class based on declarative_base and BaseMixin via session + In between start and end date, a part can be assigned to only one employee + """ + + __tablename__ = "part2employee" + __exclude__fields__ = [] + __include__fields__ = [] + + build_id: Mapped[int] = mapped_column(Integer, comment="Building ID") + part_id: Mapped[int] = 
mapped_column( + ForeignKey("build_parts.id"), nullable=False, comment="Part ID" + ) + employee_id: Mapped[int] = mapped_column( + ForeignKey("employees.id"), nullable=False, comment="Employee ID" + ) + + __table_args__ = ( + Index("_part2employee_ndx_00", employee_id, part_id, unique=True), + {"comment": "Employee2Parts Information"}, + ) + + +class RelationshipEmployee2Build(CrudCollection): + """ + CompanyRelationship class based on declarative_base and CrudCollection via session + Company -> Sub Company -> Sub-Sub Company + + """ + + __tablename__ = "relationship_employee2build" + __exclude__fields__ = [] + + company_id: Mapped[int] = mapped_column( + ForeignKey("companies.id"), nullable=False + ) # 1, 2, 3 + employee_id: Mapped[int] = mapped_column( + ForeignKey("employees.id"), nullable=False + ) # employee -> (n)person Evyos LTD + member_id: Mapped[int] = mapped_column( + ForeignKey("build.id"), nullable=False + ) # 2, 3, 4 + + relationship_type: Mapped[str] = mapped_column( + String, nullable=True, server_default="Employee" + ) # Commercial + show_only: Mapped[bool] = mapped_column(Boolean, server_default="False") + + __table_args__ = ( + Index( + "relationship_build_employee_ndx_00", + company_id, + employee_id, + member_id, + relationship_type, + unique=True, + ), + {"comment": "Build & Employee Relationship Information"}, + ) + + +class Build(CrudCollection, SelectActionWithEmployee): + """ + Builds class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "build" + __exclude__fields__ = [] + __include__fields__ = [] + __access_by__ = [] + __many__table__ = RelationshipEmployee2Build + # __explain__ = AbstractBuild() + + gov_address_code: Mapped[str] = mapped_column( + String, server_default="", unique=True + ) + build_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Building Name" + ) + build_no: Mapped[str] = mapped_column( + String(8), nullable=False, comment="Building Number" + ) + + max_floor: 
Mapped[int] = mapped_column( + Integer, server_default="1", nullable=False, comment="Max Floor" + ) + underground_floor: Mapped[int] = mapped_column( + Integer, server_default="0", nullable=False, comment="Underground Floor" + ) + build_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), server_default="1900-01-01" + ) + decision_period_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), + server_default="1900-01-01", + comment="Building annual ordinary meeting period", + ) + tax_no: Mapped[str] = mapped_column(String(24), server_default="") + lift_count: Mapped[int] = mapped_column(Integer, server_default="0") + heating_system: Mapped[bool] = mapped_column(Boolean, server_default="True") + cooling_system: Mapped[bool] = mapped_column(Boolean, server_default="False") + hot_water_system: Mapped[bool] = mapped_column(Boolean, server_default="False") + block_service_man_count: Mapped[int] = mapped_column(Integer, server_default="0") + security_service_man_count: Mapped[int] = mapped_column(Integer, server_default="0") + garage_count: Mapped[int] = mapped_column( + Integer, server_default="0", comment="Garage Count" + ) + management_room_id: Mapped[int] = mapped_column( + Integer, nullable=True, comment="Management Room ID" + ) + + site_id: Mapped[int] = mapped_column(ForeignKey("build_sites.id"), nullable=True) + site_uu_id: Mapped[str] = mapped_column(String, comment="Site UUID", nullable=True) + address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"), nullable=False) + address_uu_id: Mapped[str] = mapped_column( + String, comment="Address UUID", nullable=False + ) + build_types_id: Mapped[int] = mapped_column( + ForeignKey("build_types.id"), nullable=False, comment="Building Type" + ) + build_types_uu_id: Mapped[str] = mapped_column(String, comment="Building Type UUID") + + parts: Mapped[List["BuildParts"]] = relationship( + "BuildParts", back_populates="buildings", foreign_keys="BuildParts.build_id" + ) + decision_books: 
Mapped[List["BuildDecisionBook"]] = relationship( + "BuildDecisionBook", + back_populates="buildings", + foreign_keys="BuildDecisionBook.build_id", + ) + + # build_ibans: Mapped["BuildIbans"] = relationship( + # "BuildIbans", back_populates="building", foreign_keys="BuildIbans.build_id" + # ) + # areas: Mapped["BuildArea"] = relationship( + # "BuildArea", back_populates="buildings", foreign_keys="BuildArea.build_id" + # ) + # response_companies: Mapped["Companies"] = relationship( + # "Companies", + # back_populates="response_buildings", + # foreign_keys=[response_company_id], + # ) + # addresses: Mapped[List["Address"]] = relationship( + # "Address", back_populates="buildings", foreign_keys=[address_id] + # ) + # peoples: Mapped["People"] = relationship( + # "People", back_populates="buildings", foreign_keys=[people_id] + # ) + # sites: Mapped["BuildSites"] = relationship( + # "BuildSites", back_populates="buildings", foreign_keys=[site_id] + # ) + + __table_args__ = ( + Index("_builds_ndx_00", gov_address_code), + Index("_builds_ndx_01", build_name, build_no), + { + "comment": "Build objects are building that are created for living and store purposes" + }, + ) + + @property + def management_room(self): + if management_room := BuildParts.filter_by_one( + system=True, id=self.management_room_id, build_id=self.id + ).data: + return management_room + return None + # + # @classmethod + # def create_action(cls, data: InsertBuild, token): + # from Schemas import Addresses + # + # data_dict = data.excluded_dump() + # data_dict["address_id"] = None + # if data.address_uu_id: + # official_address = Addresses.filter_one( + # Addresses.uu_id == data.address_uu_id, + # ).data + # data_dict["address_id"] = official_address.id + # data_dict["build_no"] = str(official_address.build_number) + # if not data_dict["address_id"]: + # raise HTTPException( + # status_code=status.HTTP_404_NOT_FOUND, + # detail="Address is not found in database. 
Re-enter address record then try again.", + # ) + # build_type = BuildTypes.filter_by_one( + # system=True, uu_id=str(data.build_types_uu_id) + # ).data + # data_dict["build_types_id"] = build_type.id + # build_created = cls.find_or_create(**data_dict) + # created_build_relation = cls.__many__table__.find_or_create( + # company_id=token.selected_company.company_id, + # employee_id=token.selected_company.employee_id, + # member_id=build_created.id, + # ) + # build_created.save() + # build_created.update(is_confirmed=True) + # build_created.save() + # created_build_relation.update(is_confirmed=True) + # created_build_relation.save() + # return build_created + # + # @classmethod + # def update_action(cls, data: UpdateBuild, build_uu_id: str, token): + # from Schemas import Addresses + # + # data_dict = data.excluded_dump() + # db = Addresses.new_session() + # if data.address_uu_id: + # official_address = Addresses.filter_one( + # Addresses.uu_id == data.address_uu_id, db=db + # ).first + # data_dict["address_id"] = official_address.id if official_address else None + # if build_to_update := cls.filter_one(cls.uu_id == build_uu_id, db=db).first: + # updated_build = build_to_update.update(**data_dict) + # updated_build.save() + # return updated_build + + @property + def top_flat(self): + max_flat_no = 0 + for part in self.parts: + if part.part_no > self.max_floor: + max_flat_no = part.part_no + return max_flat_no + + @property + def bottom_flat(self): + min_flat_no = 0 + for part in self.parts: + if part.part_no < self.max_floor: + min_flat_no = part.part_no + return min_flat_no + + @property + def human_livable_parts(self) -> tuple: + parts = list(part for part in self.parts if part.human_livable) + return parts, len(parts) + + @property + def livable_part_count(self): + db = BuildParts.new_session() + livable_parts = BuildParts.filter_all( + BuildParts.build_id == self.id, + BuildParts.human_livable == True, + db=db, + ) + if not livable_parts.data: + raise 
HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="There is no livable part in this building.", + ) + return livable_parts.count + + @property + def part_type_count(self): + building_types, db = None, BuildParts.new_session() + for part in self.parts: + building_types = {} + build_type = BuildTypes.filter_by_one( + system=True, id=part.build_part_type_id, db=db + ).data + if build_type.type_code in building_types: + building_types[build_type.type_code]["list"].append(part.part_no) + else: + building_types[build_type.type_code] = {"list": [part.part_no]} + + # for key, val in building_types.items(): + # list_parts = val["list"] + # building_types[key] = { + # "list": list_parts, + # "min": min(list_parts), + # "max": max(list_parts), + # "count": len(list_parts), + # } + return building_types + + +class BuildParts(CrudCollection): + """ + BuildParts class based on declarative_base and BaseMixin via session + Attentions: Part_no is unique for each building and Every building must have a management section.!!! 
default no 0 + """ + + __tablename__ = "build_parts" + __exclude__fields__ = [] + __include__fields__ = [] + __enum_list__ = [("part_direction", "Directions", "NN")] + + # https://adres.nvi.gov.tr/VatandasIslemleri/AdresSorgu + address_gov_code: Mapped[str] = mapped_column( + String, nullable=False, comment="Goverment Door Code" + ) + # part_name: Mapped[str] = mapped_column(String(24), server_default="", nullable=False, comment="Part Name") + part_no: Mapped[int] = mapped_column( + Integer, server_default="0", nullable=False, comment="Part Number" + ) + part_level: Mapped[int] = mapped_column( + Integer, server_default="0", comment="Building Part Level" + ) + part_code: Mapped[str] = mapped_column( + String, server_default="", nullable=False, comment="Part Code" + ) + part_gross_size: Mapped[int] = mapped_column( + Integer, server_default="0", comment="Part Gross Size" + ) + part_net_size: Mapped[int] = mapped_column( + Integer, server_default="0", comment="Part Net Size" + ) + default_accessory: Mapped[str] = mapped_column( + Text, server_default="0", comment="Default Accessory" + ) + human_livable: Mapped[bool] = mapped_column( + Boolean, server_default="1", comment="Human Livable" + ) + due_part_key: Mapped[str] = mapped_column( + String, server_default="", nullable=False, comment="Constant Payment Group" + ) + + build_id: Mapped[int] = mapped_column( + ForeignKey("build.id"), nullable=False, comment="Building ID" + ) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Building UUID" + ) + part_direction_id: Mapped[int] = mapped_column( + ForeignKey("api_enum_dropdown.id"), nullable=True + ) + part_direction_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Part Direction UUID" + ) + part_type_id: Mapped[int] = mapped_column( + ForeignKey("build_types.id"), nullable=False, comment="Building Part Type" + ) + part_type_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Building Part Type UUID" + ) 
+ + buildings: Mapped["Build"] = relationship( + "Build", back_populates="parts", foreign_keys=[build_id] + ) + + __table_args__ = ( + Index("build_parts_ndx_01", build_id, part_no, unique=True), + {"comment": "Part objects that are belong to building objects"}, + ) + # + # @classmethod + # def create_action(cls, data: InsertBuildParts, token): + # from Schemas import ApiEnumDropdown + # + # data_dict = data.dump() + # build_from_duty = Build.select_action( + # employee_id=token.selected_company.employee_id, + # filter_expr=[Build.uu_id == data.build_uu_id], + # ) + # building = build_from_duty.first() + # if not building: + # raise HTTPException( + # status_code=status.HTTP_406_NOT_ACCEPTABLE, + # detail="This Employee can not reach this building or building uu-id not found in database. " + # "Check with your supervisor.", + # ) + # + # if build_types := BuildTypes.filter_one( + # BuildTypes.uu_id == data.build_part_type_uu_id, + # ).data: + # part_direction = ApiEnumDropdown.get_by_uuid( + # uuid=str(data.part_direction_uu_id) + # ) + # + # data_dict["part_gross_size"] = data.part_gross_size + # data_dict["part_net_size"] = data.part_net_size + # data_dict["part_type_id"] = build_types.id + # data_dict["part_level"] = data.part_level + # data_dict["build_id"] = building.id + # data_dict["part_no"] = data.part_no + # data_dict["part_code"] = ( + # f"{build_types.type_code}:{str(data_dict['part_no']).zfill(2)}" + # ) + # data_dict["address_gov_code"] = data.address_gov_code + # data_dict["default_accessory"] = data.default_accessory + # data_dict["human_livable"] = bool(data.human_livable) + # + # data_dict["build_uu_id"] = str(data.build_uu_id) + # data_dict["part_type_id"] = build_types.id + # data_dict["part_type_uu_id"] = str(build_types.uu_id) + # data_dict["part_direction_id"] = part_direction.id + # data_dict["part_direction_uu_id"] = str(part_direction.uu_id) + # # data_dict["part_direction"] = str(data.part_direction_uu_id) + # + # if not 
data_dict["part_gross_size"]: + # raise HTTPException( + # status_code=status.HTTP_406_NOT_ACCEPTABLE, + # detail="Part Gross Size can not be empty.", + # ) + # + # if not data_dict["part_net_size"]: + # raise HTTPException( + # status_code=status.HTTP_406_NOT_ACCEPTABLE, + # detail="Part Net Size can not be empty.", + # ) + # pt = int(data_dict["part_net_size"]) + # data_dict["due_part_key"] = str(pt + (5 - (pt % 5))) + "M2" + # del data_dict["build_part_type_uu_id"] + # return cls.find_or_create(**data_dict) + # + # raise HTTPException( + # status_code=status.HTTP_418_IM_A_TEAPOT, + # detail="Build Part can not be created.", + # ) + + @property + def part_name(self): + db = BuildTypes.new_session() + if build_type := BuildTypes.filter_by_one( + system=True, id=self.part_type_id, db=db + ).data: + return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}" + return f"Undefined:{str(build_type.type_name).upper()}" + + +class BuildLivingSpace(CrudCollection): + """ + LivingSpace class based on declarative_base and BaseMixin via session + Owner or live person = Occupant of the build part + + Query OR(owner_person_id == person_id, life_person_id == person_id) AND (now(date)) + """ + + __tablename__ = "build_living_space" + __exclude__fields__ = [] + __include__fields__ = [] + + fix_value: Mapped[float] = mapped_column( + Numeric(20, 6), + server_default="0", + comment="Fixed value is deducted from debit.", + ) + fix_percent: Mapped[float] = mapped_column( + Numeric(6, 2), + server_default="0", + comment="Fixed percent is deducted from debit.", + ) + + agreement_no: Mapped[str] = mapped_column( + String, server_default="", comment="Agreement No" + ) + marketing_process: Mapped[bool] = mapped_column(Boolean, server_default="False") + marketing_layer: Mapped[int] = mapped_column(Integer, server_default="0") + + build_parts_id: Mapped[int] = mapped_column( + ForeignKey("build_parts.id"), + nullable=False, + index=True, + comment="Build Part ID", + ) + 
build_parts_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Build Part UUID" + ) + person_id: Mapped[int] = mapped_column( + ForeignKey("people.id"), + nullable=False, + index=True, + comment="Responsible People ID", + ) + person_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Responsible People UUID" + ) + occupant_type: Mapped[int] = mapped_column( + ForeignKey("occupant_types.id"), + nullable=False, + comment="Occupant Type", + ) + occupant_type_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Occupant Type UUID" + ) + + __table_args__ = ( + {"comment": "Living Space inside building parts that are related to people"}, + ) + # + # @classmethod + # def create_action( + # cls, + # data: dict, + # token_dict: Union[EmployeeTokenObject, OccupantTokenObject], + # ): + # from Schemas import Services, OccupantTypes + # from api_events.events.events.events_bind_modules import ( + # ModulesBindOccupantEventMethods, + # ) + # + # if data.get("expiry_starts"): + # data["expiry_starts"] = str(system_arrow.get(data["expiry_starts"])) + # if data.get("expiry_ends"): + # data["expiry_ends"] = str(system_arrow.get(data["expiry_ends"])) + # created_living_space = BuildLivingSpace.find_or_create(**data) + # occupant_type = OccupantTypes.filter_by_one( + # system=True, uu_id=created_living_space.occupant_type_uu_id + # ).data + # related_service = Services.filter_by_one( + # related_responsibility=occupant_type.occupant_code, + # ).data + # if not related_service: + # raise HTTPException( + # status_code=status.HTTP_418_IM_A_TEAPOT, + # detail="Service is not found in database. 
Re-enter service record then try again.", + # ) + # ModulesBindOccupantEventMethods.bind_default_module_for_first_init_occupant( + # build_living_space_id=created_living_space.id, + # ) + # created_living_space.save_and_confirm() + # return created_living_space + # + # @classmethod + # def find_living_from_customer_id( + # cls, customer_id, process_date, add_days: int = 32 + # ): + # from ApiLibrary.date_time_actions.date_functions import system_arrow + # + # formatted_date = system_arrow.get(str(process_date)) + # living_spaces = cls.filter_all( + # or_( + # cls.owner_person_id == customer_id, + # cls.life_person_id == customer_id, + # ), + # cls.start_date < formatted_date - timedelta(days=add_days), + # cls.stop_date > formatted_date + timedelta(days=add_days), + # ) + # return living_spaces.data, living_spaces.count + + +class BuildManagement(CrudCollection): + + __tablename__ = "build_management" + __exclude__fields__ = [] + + discounted_percentage: Mapped[float] = mapped_column( + Numeric(6, 2), server_default="0.00" + ) # %22 + discounted_price: Mapped[float] = mapped_column( + Numeric(20, 2), server_default="0.00" + ) # Normal: 78.00 TL + calculated_price: Mapped[float] = mapped_column( + Numeric(20, 2), server_default="0.00" + ) # sana düz 75.00 TL yapar + + occupant_type: Mapped[int] = mapped_column( + ForeignKey("occupant_types.id"), + nullable=False, + comment="Occupant Type", + ) + occupant_type_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Occupant Type UUID" + ) + build_id: Mapped[int] = mapped_column( + ForeignKey("build.id"), nullable=False, comment="Building ID" + ) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Building UUID" + ) + build_parts_id: Mapped[int] = mapped_column( + ForeignKey("build_parts.id"), + nullable=False, + index=True, + comment="Build Part ID", + ) + build_parts_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Build Part UUID" + ) + + 
__table_args__ = ( + Index( + "build_management_ndx_00", + build_parts_id, + occupant_type, + "expiry_starts", + unique=True, + ), + {"comment": "Management of the building parts that are related to people"}, + ) + + +class BuildArea(CrudCollection): + """ + Builds class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "build_area" + __exclude__fields__ = [] + + area_name: Mapped[str] = mapped_column(String, server_default="") + area_code: Mapped[str] = mapped_column(String, server_default="") + area_type: Mapped[str] = mapped_column(String, server_default="GREEN") + area_direction: Mapped[str] = mapped_column(String(2), server_default="NN") + area_gross_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + area_net_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0") + width = mapped_column(Integer, server_default="0") + size = mapped_column(Integer, server_default="0") + + build_id: Mapped[int] = mapped_column(ForeignKey("build.id")) + build_uu_id: Mapped[str] = mapped_column(String, comment="Building UUID") + part_type_id: Mapped[int] = mapped_column( + ForeignKey("build_types.id"), nullable=True, comment="Building Part Type" + ) + part_type_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Building Part Type UUID" + ) + + # buildings: Mapped["Build"] = relationship( + # "Build", back_populates="areas", foreign_keys=[build_id] + # ) + + _table_args_ = ( + Index("_edm_build_parts_area_ndx_00", build_id, area_code, unique=True), + ) + + +class BuildSites(CrudCollection): + """ + Builds class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "build_sites" + __exclude__fields__ = [] + __include__fields__ = [] + + site_name: Mapped[str] = mapped_column(String(24), nullable=False) + site_no: Mapped[str] = mapped_column(String(8), nullable=False) + + address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id")) + address_uu_id: Mapped[str] = 
mapped_column(String, comment="Address UUID") + + # addresses: Mapped["Address"] = relationship( + # "Address", back_populates="site", foreign_keys=[address_id] + # ) + # buildings: Mapped["Build"] = relationship( + # "Build", back_populates="sites", foreign_keys="Build.site_id" + # ) + + __table_args__ = ( + Index("_sites_ndx_01", site_no, site_name), + {"comment": "Sites that groups building objets"}, + ) + + +class BuildCompaniesProviding(CrudCollection): + """ """ + + __tablename__ = "build_companies_providing" + __exclude__fields__ = [] + __include__fields__ = [] + + build_id = mapped_column( + ForeignKey("build.id"), nullable=False, comment="Building ID" + ) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Providing UUID" + ) + company_id: Mapped[int] = mapped_column(ForeignKey("companies.id")) + company_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Providing UUID" + ) + provide_id: Mapped[int] = mapped_column( + ForeignKey("api_enum_dropdown.id"), nullable=True + ) + provide_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Providing UUID" + ) + contract_id: Mapped[int] = mapped_column( + Integer, ForeignKey("companies.id"), nullable=True + ) + + __table_args__ = ( + Index( + "_build_companies_providing_ndx_00", + build_id, + company_id, + provide_id, + unique=True, + ), + {"comment": "Companies providing services for building"}, + ) + + +class BuildPersonProviding(CrudCollection): + """ """ + + __tablename__ = "build_person_providing" + __exclude__fields__ = [] + __include__fields__ = [] + + build_id = mapped_column( + ForeignKey("build.id"), nullable=False, comment="Building ID" + ) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Providing UUID" + ) + people_id: Mapped[int] = mapped_column(ForeignKey("people.id")) + people_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="People UUID" + ) + provide_id: Mapped[int] = mapped_column( + 
ForeignKey("api_enum_dropdown.id"), nullable=True + ) + provide_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Providing UUID" + ) + contract_id: Mapped[int] = mapped_column( + Integer, ForeignKey("companies.id"), nullable=True + ) + + __table_args__ = ( + Index( + "_build_person_providing_ndx_00", + build_id, + people_id, + provide_id, + unique=True, + ), + {"comment": "People providing services for building"}, + ) diff --git a/Schemas/building/decision_book.py b/Schemas/building/decision_book.py new file mode 100644 index 0000000..81b4a50 --- /dev/null +++ b/Schemas/building/decision_book.py @@ -0,0 +1,1769 @@ +import arrow +import math +from datetime import datetime, timedelta +from decimal import Decimal +from fastapi import HTTPException, status +from typing import List + +from sqlalchemy import ( + String, + ForeignKey, + Index, + SmallInteger, + Boolean, + TIMESTAMP, + Text, + Numeric, + Integer, +) +from sqlalchemy.orm import Mapped, mapped_column, relationship + +# from ApiLayers.ApiValidations.Request import ( +# InsertDecisionBook, +# InsertBuildDecisionBookItems, +# InsertBuildDecisionBookItemDebits, +# InsertBuildDecisionBookProjects, +# ) +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + + +class BuildDecisionBook(CrudCollection): + """ + Builds class based on declarative_base and BaseMixin via session + The start dates of the decision log periods are determined from the 'decision_period_date' field in the decision log table within the building information. 
+ decision_period_date = Her yıl yapılan karar toplantısı + 365 gün her yıl tekrar eden + decision_book_pdf_path: Karar defteri pdf dosyasının yolu + resp_company_fix_wage: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık ücreti + is_out_sourced: Karar defterinin dışardan alınan hizmetle oluşturulup oluşturulmadığı + contact_agreement_path: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık anlaşması dosyasının yolu + contact_agreement_date: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık anlaşma tarihi + meeting_date: Karar defterinin oluşmasını sağlayan toplantı tarihi + decision_type: Karar defterinin tipi (Bina Yönetim Toplantısı (BYT), Yıllık Acil Toplantı (YAT) + """ + + __tablename__ = "build_decision_book" + __exclude__fields__ = [] + + decision_book_pdf_path: Mapped[str] = mapped_column( + String, server_default="", nullable=True + ) + resp_company_fix_wage: Mapped[float] = mapped_column( + Numeric(10, 2), server_default="0" + ) # + is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0") + meeting_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), server_default="1900-01-01" + ) + decision_type: Mapped[str] = mapped_column(String(3), server_default="RBM") + meeting_is_completed: Mapped[bool] = mapped_column(Boolean, server_default="0") + meeting_completed_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), nullable=True, comment="Meeting Completed Date" + ) + + build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=False) + build_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Build UUID" + ) + resp_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id")) + resp_company_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Company UUID" + ) + contact_id: Mapped[int] = mapped_column( + ForeignKey("contracts.id"), nullable=True, comment="Contract id" + ) + contact_uu_id: Mapped[str] = mapped_column( + String, 
nullable=True, comment="Contract UUID" + ) + + buildings: Mapped["Build"] = relationship( + "Build", + back_populates="decision_books", + foreign_keys=build_id, + ) + decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship( + "BuildDecisionBookItems", + back_populates="decision_books", + foreign_keys="BuildDecisionBookItems.build_decision_book_id", + ) + + __table_args__ = ( + Index("build_decision_book_ndx_011", meeting_date, build_id), + Index("build_decision_book_ndx_011", build_id, "expiry_starts", "expiry_ends"), + { + "comment": "Decision Book objects that are related to decision taken at building meetings" + }, + ) + # + # @classmethod + # def retrieve_active_rbm(cls): + # from Schemas.building.build import Build + # + # related_build = Build.find_one(id=cls.build_id) + # related_date = system_arrow.get(related_build.build_date) + # date_processed = related_date.replace( + # year=system_arrow.now().date().year, month=related_date.month, day=1 + # ) + # if system_arrow.now().date() <= date_processed: + # book = cls.filter_one( + # cls.expiry_ends <= date_processed, + # cls.decision_type == "RBM", + # cls.build_id == related_build.id, + # ).data + # if not book: + # cls.raise_http_exception( + # status_code="HTTP_404_NOT_FOUND", + # error_case="NOTFOUND", + # message=f"Decision Book is not found for {related_build.build_name}-RBM", + # data=dict( + # build_id=str(related_build.uu_id), + # build_name=related_build.build_name, + # decision_type="RBM", + # ), + # ) + # return book + # return + # + # @classmethod + # def select_action(cls, duty_id, token=None): + # from Schemas import Build, Companies + # + # related_companies = Companies.select_action(duty_id_list=[int(duty_id)]) + # related_companies_ids = list( + # related_.id for related_ in related_companies.all() + # ) + # related_building = Build.filter_all(Build.company_id.in_(related_companies_ids)) + # related_building_ids = list(related_.id for related_ in related_building.data) + # 
return cls.filter_all(cls.build_id.in_(related_building_ids)).query + # + # @classmethod + # def create_action(cls, data: InsertDecisionBook, token=None): + # from Schemas import ( + # Build, + # Companies, + # ) + # + # data_dict = data.model_dump() + # if building := Build.find_one(uu_id=data.build_uu_id): + # data_dict["build_id"] = building.id + # if response_company := Companies.find_one( + # uu_id=data_dict["resp_company_uu_id"] + # ): + # data_dict["resp_company_id"] = response_company.id + # if not building: + # raise HTTPException( + # status_code=status.HTTP_406_NOT_ACCEPTABLE, + # detail="Building must be given to create decision book.", + # ) + # expiry_starts = system_arrow.get(str(data_dict.get("expiry_starts"))).format( + # "%Y-%m-%d" + # ) + # data_dict["expiry_starts"] = str(expiry_starts) + # expiry_ends = system_arrow.get(str(data_dict.get("expiry_ends"))).format( + # "%Y-%m-%d" + # ) + # data_dict["expiry_ends"] = str( + # expiry_ends.replace(month=expiry_ends.month + 1, day=1) - timedelta(days=1) + # ) + # + # if decision_book := BuildDecisionBook.filter_one( + # BuildDecisionBook.build_id == building.id, + # BuildDecisionBook.expiry_ends > data_dict["expiry_starts"], + # BuildDecisionBook.decision_type == data_dict.get("decision_type"), + # ).data: # Decision book is already exist: + # cls.raise_http_exception( + # status_code=status.HTTP_409_CONFLICT, + # error_case="RECORDEXITS", + # message="Decision Book is already exist.", + # data=decision_book.get_dict(), + # ) + # + # data_dict["expiry_starts"] = str(expiry_starts.replace(day=1)) + # data_dict["expiry_ends"] = str( + # expiry_ends.replace(month=expiry_ends.month + 1, day=1) - timedelta(days=1) + # ) + # del data_dict["build_uu_id"], data_dict["resp_company_uu_id"] + # return cls.find_or_create(**data_dict) + # + # @property + # def semester(self): + # start_format = "".join( + # [str(self.expiry_starts.year), "-", str(self.expiry_starts.month)] + # ) + # end_format = "".join( + # 
class BuildDecisionBookInvitations(CrudCollection):
    """Invitation round issued for a building-meeting decision book.

    One row per invitation attempt for a decision book.  Quorum inputs are
    stored with the invitation: ``living_part_count`` is the number of living
    units invited and ``living_part_percentage`` is the fraction of them that
    must attend for the meeting to be valid (server default 0.51, i.e. a
    simple majority).

    NOTE(review): timestamp fields are annotated ``Mapped[TIMESTAMP]`` (a
    SQLAlchemy type object) rather than ``Mapped[datetime]``; kept as-is for
    consistency with the rest of the file — confirm project convention.
    """

    __tablename__ = "build_decision_book_invitations"
    __exclude__fields__ = []

    # Denormalized building reference: integer id plus its UUID mirror.
    build_id: Mapped[int] = mapped_column(Integer, nullable=False)
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build UUID"
    )
    decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book UUID"
    )

    invitation_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Invite Type"
    )
    # 1 for the first call; incremented for repeat calls (e.g. when quorum
    # was not reached on the first attempt).
    invitation_attempt: Mapped[int] = mapped_column(SmallInteger, server_default="1")
    living_part_count: Mapped[int] = mapped_column(SmallInteger, server_default="1")
    living_part_percentage: Mapped[float] = mapped_column(
        Numeric(10, 2), server_default="0.51"
    )

    message: Mapped[str] = mapped_column(
        Text, nullable=True, comment="Invitation Message"
    )
    planned_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date"
    )
    planned_date_expires: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date Expires"
    )

    __table_args__ = (
        # NOTE(review): this unique index does not include decision_book_id or
        # build_id, so two different decision books cannot share the same
        # (type, planned date, attempt) triple — confirm this is intended.
        Index(
            "_build_decision_book_invitations_ndx_01",
            invitation_type,
            planned_date,
            invitation_attempt,
            unique=True,
        ),
        {"comment": "People that are invited to building meetings."},
    )
class BuildDecisionBookPerson(CrudCollection):
    """Person (occupant or official) attending a decision-book meeting.

    Field notes (translated from the original Turkish comments):

    * ``dues_percent_discount`` -- percentage discount applied to the
      attendee's dues.
    * ``dues_fix_discount`` -- fixed amount discounted from the attendee's dues.
    * ``dues_discount_approval_date`` -- date this person's discount was
      approved.
    * ``management_typecode`` -- the person's duty at the meeting; mapped to
      the ``BuildManagementType`` enum via ``__enum_list__``.
    """

    __tablename__ = "build_decision_book_person"
    __exclude__fields__ = []
    __enum_list__ = [("management_typecode", "BuildManagementType", "bm")]

    dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    dues_fix_discount: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
    dues_discount_approval_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
    )
    # Fix for a copy-pasted column comment: this column holds when the
    # invitation was sent, not when it was confirmed (see confirmed_date).
    send_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Send Date"
    )
    is_attending: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Occupant is Attending to invitation"
    )
    confirmed_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=True, comment="Confirmation Date"
    )
    token: Mapped[str] = mapped_column(
        String, server_default="", comment="Invitation Token"
    )

    # Optional proxy: a person attending on behalf of somebody else.
    vicarious_person_id: Mapped[int] = mapped_column(
        ForeignKey("people.id"), nullable=True, comment="Vicarious Person ID"
    )
    vicarious_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Vicarious Person UUID"
    )

    invite_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_invitations.id"), nullable=False
    )
    invite_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Invite UUID"
    )

    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Decision Book UUID"
    )
    build_living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"), nullable=False
    )
    build_living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Living Space UUID"
    )
    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
    # person_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Person UUID")

    __table_args__ = (
        # A living space may appear at most once per (book, invitation round).
        Index(
            "_build_decision_book_person_ndx_01",
            build_decision_book_id,
            invite_id,
            build_living_space_id,
            unique=True,
        ),
        {"comment": "People that are attended to building meetings."},
    )
class BuildDecisionBookPersonOccupants(CrudCollection):
    """Occupant-type role(s) held by a decision-book meeting attendee.

    Link table between ``build_decision_book_person`` and ``occupant_types``;
    one row per role a person holds for a given meeting attendance.
    """

    __tablename__ = "build_decision_book_person_occupants"
    __exclude__fields__ = []

    build_decision_book_person_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_person.id"), nullable=False
    )
    build_decision_book_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Person UUID"
    )
    invite_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_invitations.id"), nullable=True
    )
    invite_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Invite UUID"
    )

    occupant_type_id: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"), nullable=False
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Occupant UUID"
    )

    __table_args__ = (
        # Each attendee may hold a given occupant type at most once.
        Index(
            "_build_decision_book_person_occupants_ndx_01",
            build_decision_book_person_id,
            occupant_type_id,
            unique=True,
        ),
        {"comment": "Occupant Types of People that are attended to building meetings."},
    )


class BuildDecisionBookItems(CrudCollection):
    """Single numbered item (agenda entry / resolution) of a decision book.

    Field notes (translated from the original Turkish comments):

    * ``item_order`` -- ordering number of the item within the book.
    * ``item_comment`` -- the item text itself.
    * ``item_objection`` -- free-text objection/annotation raised against the
      item.
    """

    __tablename__ = "build_decision_book_items"
    __exclude__fields__ = []

    item_order: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Order Number of Item"
    )
    item_comment: Mapped[str] = mapped_column(
        Text, nullable=False, comment="Comment Content"
    )
    item_objection: Mapped[str] = mapped_column(
        Text, nullable=True, comment="Objection Content"
    )
    info_is_completed: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Info process is Completed"
    )
    is_payment_created: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Are payment Records Created"
    )

    info_type_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    info_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Info Type UUID"
    )

    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book UUID"
    )
    item_short_comment: Mapped[str] = mapped_column(
        String(24),
        nullable=True,
        comment="This field is reserved for use in grouping data or in the pivot heading.",
    )

    decision_books: Mapped["BuildDecisionBook"] = relationship(
        "BuildDecisionBook",
        back_populates="decision_book_items",
        foreign_keys=[build_decision_book_id],
    )
    decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
        "BuildDecisionBookProjects",
        back_populates="build_decision_book_item",
        foreign_keys="BuildDecisionBookProjects.build_decision_book_item_id",
    )

    __table_args__ = (
        Index("_build_decision_book_item_ndx_01", build_decision_book_id),
        # Item numbers are unique within one decision book.
        Index(
            "_build_decision_book_item_ndx_02",
            build_decision_book_id,
            item_order,
            unique=True,
        ),
        {
            "comment": "Decision Book Items that are related to decision taken at building meetings"
        },
    )
class BuildDecisionBookItemsUnapproved(CrudCollection):
    """Objection (partial or complete non-approval) raised by a person
    against a decision-book item.
    """

    __tablename__ = "build_decision_book_items_unapproved"
    __exclude__fields__ = []

    item_objection: Mapped[str] = mapped_column(
        Text, nullable=False, comment="Objection Content"
    )
    item_order: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Order Number"
    )

    decision_book_item_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_items.id"), nullable=False
    )
    decision_book_item_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Item"
    )
    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
    person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Person UUID"
    )
    # NOTE(review): this column and decision_book_item_id above both reference
    # build_decision_book_items.id — apparently redundant; confirm which one
    # is canonical before consolidating (schema change required).
    build_decision_book_item: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_items.id"), nullable=False
    )
    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Item UUID"
    )

    __table_args__ = (
        Index("_build_decision_book_item_unapproved_ndx_01", build_decision_book_item),
        {
            "comment": "People that are unapproved partially or completely in decision book items"
        },
    )


class BuildDecisionBookPayments(CrudCollection):
    """Payment detail row generated for a decision-book item.

    ``period_time`` is populated by a database trigger as
    ``to_char(NEW.process_date, 'YYYY-MM')`` (per the original note);
    ``process_date_y`` / ``process_date_m`` mirror the year and month.

    NOTE(review): __enum_list__ maps a ``receive_debit`` column that is not
    declared on this model — confirm whether the column was removed on
    purpose or the enum entry is stale.
    """

    __tablename__ = "build_decision_book_payments"
    __exclude__fields__ = []
    __enum_list__ = [("receive_debit", "DebitTypes", "D")]

    payment_plan_time_periods: Mapped[str] = mapped_column(
        String(10), nullable=False, comment="Payment Plan Time Periods"
    )
    process_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Payment Due Date"
    )
    payment_amount: Mapped[float] = mapped_column(
        Numeric(16, 2), nullable=False, comment="Payment Amount"
    )
    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")

    payment_types_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    payment_types_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Dues Type UUID"
    )

    # Derived period columns (maintained by trigger, see class docstring).
    period_time: Mapped[str] = mapped_column(String(12))
    process_date_y: Mapped[int] = mapped_column(SmallInteger)
    process_date_m: Mapped[int] = mapped_column(SmallInteger)

    build_decision_book_item_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_items.id"),
        nullable=False,
        comment="Build Decision Book Item ID",
    )
    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Decision Book Item UUID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=False
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )
    decision_book_project_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_projects.id"),
        nullable=True,
        comment="Decision Book Project ID",
    )
    decision_book_project_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Project UUID"
    )
    account_records_id: Mapped[int] = mapped_column(
        ForeignKey("account_records.id"), nullable=True
    )
    account_records_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Account Record UU ID"
    )

    __table_args__ = (
        # One payment row per (item, part, plan period, due date, type, record).
        Index(
            "build_decision_book_payments_detail_ndx_00",
            build_decision_book_item_id,
            build_parts_id,
            payment_plan_time_periods,
            process_date,
            payment_types_id,
            account_records_id,
            unique=True,
        ),
        Index("build_decision_book_payments_detail_ndx_01", account_records_id),
        {"comment": "Payment Details of Decision Book Payments"},
    )


class BuildDecisionBookLegal(CrudCollection):
    """Legal case (lawsuit or mediation) attached to a decision-book item.

    ``lawsuits_type`` (translated from the original Turkish note):
    ``"C"`` = court case, ``"M"`` = mediator.
    """

    __tablename__ = "build_decision_book_legal"
    __exclude__fields__ = []

    period_start_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Start Date of Legal Period"
    )
    lawsuits_decision_number: Mapped[str] = mapped_column(
        String, nullable=False, comment="Lawsuits Decision Number"
    )
    lawsuits_decision_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Lawsuits Decision Date"
    )

    period_stop_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
    )
    decision_book_pdf_path: Mapped[str] = mapped_column(
        String, server_default="", nullable=True
    )
    resp_company_total_wage: Mapped[float] = mapped_column(
        Numeric(10, 2), server_default="0", nullable=True
    )
    contact_agreement_path: Mapped[str] = mapped_column(
        String, server_default="", nullable=True
    )
    contact_agreement_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", nullable=True
    )
    meeting_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
    )
    lawsuits_type: Mapped[str] = mapped_column(String(1), server_default="C")
    lawsuits_name: Mapped[str] = mapped_column(String(128))
    lawsuits_note: Mapped[str] = mapped_column(String(512))
    lawyer_cost: Mapped[float] = mapped_column(Numeric(20, 2))
    mediator_lawyer_cost: Mapped[float] = mapped_column(Numeric(20, 2))
    other_cost: Mapped[float] = mapped_column(Numeric(20, 2))
    legal_cost: Mapped[float] = mapped_column(Numeric(20, 2))
    approved_cost: Mapped[float] = mapped_column(Numeric(20, 2))
    total_price: Mapped[float] = mapped_column(Numeric(20, 2))

    build_db_item_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_items.id"), nullable=False
    )
    build_db_item_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Item UUID"
    )
    resp_attorney_id: Mapped[int] = mapped_column(
        ForeignKey("people.id"), nullable=False
    )
    resp_attorney_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Attorney UUID"
    )
    resp_attorney_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    resp_attorney_company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UUID"
    )
    mediator_lawyer_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
    mediator_lawyer_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Mediator Lawyer UUID"
    )

    __table_args__ = (
        Index("_build_decision_book_legal_ndx_00", meeting_date),
        {
            # Typo fix: "recoreded" -> "recorded".
            "comment": "Legal items related to decision book items recorded at building meetings"
        },
    )


class BuildDecisionBookProjects(CrudCollection):
    """Project (planned work) attached to a decision-book item.

    ``project_type`` (translated from the original Turkish note):
    ``"C"`` = court, ``"M"`` = mediator.

    NOTE(review): a unique index on (project_no, project_start_date) was
    drafted but left disabled in an earlier revision — confirm before adding
    table args here.
    """

    __tablename__ = "build_decision_book_projects"
    __exclude__fields__ = []

    project_no: Mapped[str] = mapped_column(
        String(12), nullable=True, comment="Project Number of Decision Book"
    )
    project_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Project Name"
    )
    project_start_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Project Start Date"
    )
    project_stop_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
    )
    project_type: Mapped[str] = mapped_column(String, server_default="C")
    project_note: Mapped[str] = mapped_column(Text)

    decision_book_pdf_path: Mapped[str] = mapped_column(
        String, server_default="", nullable=True
    )
    is_completed: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Project is Completed"
    )
    status_code: Mapped[int] = mapped_column(SmallInteger, nullable=True)
    resp_company_fix_wage: Mapped[float] = mapped_column(
        Numeric(10, 2), server_default="0"
    )
    is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")

    meeting_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", index=True
    )
    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
    bid_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
    approved_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
    final_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")

    contact_id: Mapped[int] = mapped_column(
        ForeignKey("contracts.id"), nullable=True, comment="Contract id"
    )
    contact_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Contract UUID"
    )
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book UUID"
    )
    build_decision_book_item_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_items.id"), nullable=False
    )
    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Item UUID"
    )
    project_response_living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"),
        nullable=True,
        comment="Project Response Person ID",
    )
    project_response_living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Project Response Person UUID"
    )
    resp_company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=True
    )
    resp_company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UUID"
    )

    build_decision_book_item: Mapped["BuildDecisionBookItems"] = relationship(
        "BuildDecisionBookItems",
        back_populates="decision_book_project",
        foreign_keys=[build_decision_book_item_id],
    )
Companies, + # ) + # + # data_dict = data.dump() + # BuildDecisionBookItems.pre_query = BuildDecisionBookItems.select_action( + # duty_id=token.duty_list["duty_id"] + # ) + # People.pre_query = People.select_action( + # duty_id_list=[token.duty_list["duty_id"]] + # ) + # decision_book_project_item = BuildDecisionBookItems.find_one_or_abort( + # uu_id=data_dict.get("build_decision_book_item_uu_id") + # ) + # project_response_person = People.find_one_or_abort( + # uu_id=data_dict.get("project_response_person_uu_id") + # ) + # data_dict["build_decision_book_item_id"] = decision_book_project_item.id + # data_dict["project_response_person_id"] = project_response_person.id + # if data.resp_company_uu_id: + # resp_company = Companies.find_one(uu_id=data.resp_company_uu_id) + # data_dict["resp_company_id"] = resp_company.id + # del ( + # data_dict["build_decision_book_item_uu_id"], + # data_dict["project_response_person_uu_id"], + # ) + # del data_dict["resp_company_uu_id"] + # data_dict["is_confirmed"] = True + # return cls.find_or_create(**data_dict) + # + # __table_args__ = ( + # Index( + # "_build_decision_book_project_ndx_00", + # project_no, + # project_start_date, + # unique=True, + # ), + # { + # "comment": "Project related to decision taken at building meetings on book items" + # }, + # ) + # + # @property + # def get_project_year(self): + # return self.decision_book_items.decision_books.period_start_date.year + # + # @property + # def get_project_no(self): + # return f"{self.get_project_year}-{str(self.id)[-4:].zfill(4)}" + + +class BuildDecisionBookProjectPerson(CrudCollection): + """ + Builds class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "build_decision_book_project_person" + __exclude__fields__ = [] + # __enum_list__ = [("management_typecode", "ProjectTeamTypes", "PTT-EMP")] + + dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0") + job_fix_wage: Mapped[float] = mapped_column(Numeric(10, 2), 
server_default="0") + bid_price: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0") + decision_price: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0") + + build_decision_book_project_id: Mapped[int] = mapped_column( + ForeignKey("build_decision_book_projects.id"), nullable=False + ) + build_decision_book_project_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Decision Book Project UUID" + ) + living_space_id: Mapped[int] = mapped_column( + ForeignKey("build_living_space.id"), nullable=False + ) + living_space_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Living Space UUID" + ) + + __table_args__ = ( + {"comment": "People that are attended to building project meetings."}, + ) + + +class BuildDecisionBookProjectItems(CrudCollection): + """ + Builds class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "build_decision_book_project_items" + __exclude__fields__ = [] + + item_header: Mapped[str] = mapped_column( + String, nullable=False, comment="Item Header" + ) + item_comment: Mapped[str] = mapped_column( + Text, nullable=False, comment="Item Comment" + ) + attachment_pdf_path: Mapped[str] = mapped_column( + String, server_default="", nullable=True, comment="Attachment PDF Path" + ) + item_estimated_cost: Mapped[float] = mapped_column( + Numeric(16, 2), server_default="0", comment="Estimated Cost" + ) + item_short_comment: Mapped[str] = mapped_column( + String(24), + nullable=True, + comment="This field is reserved for use in grouping data or in the pivot heading.", + ) + + build_decision_book_project_id: Mapped[int] = mapped_column( + ForeignKey("build_decision_book_projects.id"), nullable=False + ) + build_decision_book_project_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Decision Book Project UUID" + ) + + __table_args__ = ( + {"comment": "Project Items related to decision taken at building meetings"}, + ) + + +# +# class 
BuildDecisionBookPaymentsMaster(CrudCollection): +# """ +# Builds class based on declarative_base and BaseMixin via session +# """ +# +# __tablename__ = "build_decision_book_payments_master" +# __exclude__fields__ = [] +# __enum_list__ = [("dues_types", "BuildDuesTypes", "D")] +# +# payment_plan_time_periods = mapped_column( +# String(8), nullable=False, comment="Payment Plan Time Periods" +# ) +# default_payment_amount = mapped_column( +# Numeric(20, 2), nullable=False, comment="Default Payment Amount" +# ) +# +# dues_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True) +# dues_types_uu_id = mapped_column(String, nullable=True, comment="Dues Type UUID") +# build_decision_book_item_debits_id = mapped_column( +# ForeignKey("build_decision_book_item_debits.id"), nullable=False +# ) +# build_decision_book_item_debits_uu_id = mapped_column( +# String, nullable=True, comment="Decision Book Item Debit UUID" +# ) +# build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"), nullable=False) +# build_parts_uu_id = mapped_column(String, nullable=True, comment="Build Part UUID") +# +# # decision_books_item_debits: Mapped["BuildDecisionBookItemDebits"] = relationship( +# # "BuildDecisionBookItemDebits", +# # back_populates="decision_book_payment_masters", +# # foreign_keys=[build_decision_book_item_debits_id], +# # ) +# # parts: Mapped["BuildParts"] = relationship( +# # "BuildParts", +# # back_populates="decision_book_payment_master", +# # foreign_keys=[build_parts_id], +# # ) +# # decision_book_payment_detail: Mapped[List["BuildDecisionBookPaymentsDetail"]] = ( +# # relationship( +# # "BuildDecisionBookPaymentsDetail", +# # back_populates="decision_book_master", +# # foreign_keys="BuildDecisionBookPaymentsDetail.build_decision_book_payments_master_id", +# # ) +# # ) +# +# __table_args__ = ( +# Index( +# "_build_decision_book_payments_master_ndx_00", +# build_decision_book_item_debits_id, +# build_parts_id, +# dues_types_id, 
+# unique=True, +# ), +# { +# "comment": "Master Payment Items related to decision taken at building meetings" +# }, +# ) +# +# # @classmethod +# # def pay_dues_of_build_part( +# # cls, +# # budget_records_id, +# # build_decision_book_id, +# # build_parts_id, +# # start_date, +# # paid_value, +# # is_all=False, +# # is_limited=False, +# # ): +# # +# # book_payment_master = cls.find_one( +# # build_decision_book_id=build_decision_book_id, +# # build_parts_id=build_parts_id, +# # dues_types=BuildDuesTypes.D.name, +# # ) +# # paid_amount = 0 +# # if book_payment_master: +# # month_start_date = ( +# # find_first_day_of_month(start_date) +# # if not is_all +# # else datetime(1900, 1, 1) +# # ) +# # last_date = ( +# # find_last_day_of_month(start_date) if not is_limited else start_date +# # ) +# # payment_dues, count = BuildDecisionBookPaymentsDetail.filter( +# # and_( +# # BuildDecisionBookPaymentsDetail.build_decision_book_payments_master_id +# # == book_payment_master.id, +# # BuildDecisionBookPaymentsDetail.process_date >= month_start_date, +# # BuildDecisionBookPaymentsDetail.process_date <= last_date, +# # ) +# # ) +# # period_amount = {} +# # for payment_due in payment_dues: +# # if payment_due.period_time not in period_amount: +# # period_amount[payment_due.period_time] = 0 +# # period_amount[payment_due.period_time] += float( +# # payment_due.payment_amount +# # ) +# # paid_amount += payment_due.payment_amount +# # print( +# # "period_amount", +# # period_amount, +# # "paid_amount", +# # paid_amount, +# # "paid_value", +# # paid_value, +# # ) +# # if paid_amount > 0: +# # return float(paid_value) +# # period_amounts = sorted( +# # period_amount.items(), key=lambda x: x[0], reverse=False +# # ) +# # for period_amount in period_amounts: +# # if period_amount[1] >= 0: +# # continue +# # if not paid_value > 0: +# # break +# # if budget_record := CompanyBudgetRecords.find_one(id=budget_records_id): +# # debit_to_pay = abs(float(period_amount[1])) +# # debit_to_pay = 
( +# # paid_value if debit_to_pay > paid_value else debit_to_pay +# # ) +# # budget_record.remainder_balance = float(debit_to_pay) + float( +# # budget_record.remainder_balance +# # ) +# # budget_record.save() +# # BuildDecisionBookPaymentsDetail.find_or_create( +# # build_decision_book_payments_master_id=book_payment_master.id, +# # budget_records_id=budget_records_id, +# # process_date=str(start_date), +# # receive_debit=DebitTypes.R.name, +# # period_time=str(period_amount[0]), +# # process_date_y=str(period_amount[0]).split("-")[0], +# # process_date_m=str(period_amount[0]).split("-")[1], +# # payment_amount=abs(debit_to_pay), +# # ) +# # paid_value = float(paid_value) - float(debit_to_pay) +# # return float(paid_value) +# # + +# +# class BuildDecisionBookItemDebits(CrudCollection): +# """ +# Builds class based on declarative_base and BaseMixin via session +# dues_values = due_key, due_value +# """ +# +# __tablename__ = "build_decision_book_item_debits" +# __exclude__fields__ = [] +# __enum_list__ = [("dues_types", "BuildDuesTypes", "D")] +# +# dues_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True) +# dues_types_uu_id = mapped_column(String, nullable=True, comment="Dues Type UUID") +# # dues_values = mapped_column( +# # MutableDict.as_mutable(JSONB()), +# # nullable=False, +# # comment="Due Part Key Description of inner parts", +# # ) +# flat_type = mapped_column( +# String, nullable=True, comment="Flat Type of Building Part" +# ) +# flat_payment = mapped_column( +# Numeric(20, 2), nullable=True, comment="Flat Payment Amount" +# ) +# decision_taken: Mapped[bool] = mapped_column(Boolean, server_default="0") +# +# build_decision_book_item_id = mapped_column( +# ForeignKey("build_decision_book_items.id"), nullable=False +# ) +# build_decision_book_item_uu_id = mapped_column( +# String, nullable=True, comment="Decision Book Item UUID" +# ) +# +# @classmethod +# def select_action(cls, duty_id, token=None): +# from 
database_sql_models import Companies +# +# related_companies = Companies.select_action(duty_id=duty_id) +# related_companies_ids = list( +# related_.id for related_ in related_companies.all() +# ) +# related_building = Build.query.filter( +# Build.company_id.in_(related_companies_ids) +# ) +# related_building_ids = list(related_.id for related_ in related_building.all()) +# related_decision_books = BuildDecisionBook.query.filter( +# BuildDecisionBook.build_id.in_(related_building_ids) +# ) +# related_decision_books_ids = list( +# related_.id for related_ in related_decision_books.all() +# ) +# related_decision_books_items = BuildDecisionBookItems.query.filter( +# BuildDecisionBookItems.build_decision_book_id.in_( +# related_decision_books_ids +# ) +# ) +# related_decision_books_items_ids = list( +# related_.id for related_ in related_decision_books_items.all() +# ) +# return cls.query.filter( +# cls.build_decision_book_item_id.in_(related_decision_books_items_ids) +# ) +# +# @classmethod +# def create_action(cls, data: InsertBuildDecisionBookItemDebits, token): +# from database_sql_models import ApiEnumDropdown +# from application.shared_functions import find_last_day_of_month +# +# data_dict = data.dump() +# BuildDecisionBookItems.pre_query = BuildDecisionBookItems.select_action( +# duty_id=token.duty_list["duty_id"] +# ) +# cls.pre_query = cls.select_action(duty_id=token.duty_list["duty_id"]) +# if decision_book_item := BuildDecisionBookItems.find_one_or_abort( +# uu_id=data.build_decision_book_item_uu_id +# ): +# data_dict["build_decision_book_item_id"] = decision_book_item.id +# dues_values, payment_master_list = data_dict["dues_values"], [] +# data_dict["is_confirmed"] = True +# del data_dict["build_decision_book_item_uu_id"] +# item_debits = cls.find_or_create(**data_dict) +# debit_dropdown = ApiEnumDropdown.find_one( +# enum_class="DebitTypes", value="Debit" +# ) +# for dues_key, dues_value in dues_values.items(): +# building_parts = 
decision_book_item.decision_books.buildings.parts +# decision_book = decision_book_item.decision_books +# for building_part in building_parts: +# detail_list = [] +# if str(building_part.due_part_key) == str(dues_key): +# book_master = BuildDecisionBookPaymentsMaster.create( +# build_decision_book_item_debits_id=item_debits.id, +# build_parts_id=building_part.id, +# dues_types=debit_dropdown.uu_id, +# payment_plan_time_periods="M", +# default_payment_amount=dues_value, +# is_confirmed=True, +# ) +# if book_master: +# start_date = decision_book.expiry_starts +# while start_date <= decision_book.expiry_ends: +# start_date = find_last_day_of_month(start_date) +# data_detail = BuildDecisionBookPaymentsDetail.find_or_create( +# build_decision_book_payments_master_id=book_master.id, +# budget_records_id=None, +# process_date=start_date, +# receive_debit=debit_dropdown.uu_id, +# period_time=start_date.strftime("%Y-%m"), +# process_date_y=start_date.year, +# process_date_m=start_date.month, +# accounting_id=None, +# payment_amount=float(dues_value) * -1, +# is_confirmed=True, +# ) +# start_date = start_date + timedelta(days=2) +# detail_list.append(data_detail.get_dict()) +# payment_master_list.append( +# {**book_master.get_dict(), "detail_list": detail_list} +# ) +# return_dict = { +# **item_debits.get_dict(), +# "debit_lists": payment_master_list, +# } +# return return_dict +# +# __table_args__ = ( +# { +# "comment": "Debits of Decision Book Items that are related to decision taken at building meetings" +# }, +# ) + + +# +# class BuildDecisionBookBudget(CrudCollection): +# """ +# Builds class based on declarative_base and BaseMixin via session +# """ +# +# __tablename__ = "build_decision_book_budget" +# +# item_order = mapped_column(SmallInteger, nullable=False, comment="Order Number") +# budget_type = mapped_column(String, nullable=False, comment="Budget Type") +# plan_value: Mapped[float] = mapped_column(Numeric(10, 2), nullable=False, comment="Plan Value") +# +# 
line_comment = mapped_column(String(32), server_default="") +# process_date_y: Mapped[int] = mapped_column(SmallInteger) +# process_date_m: Mapped[int] = mapped_column(SmallInteger) +# process_date_w: Mapped[int] = mapped_column(SmallInteger) +# period_time = mapped_column(String(12), server_default="") +# +# build_decision_book_id: Mapped[int] = mapped_column(ForeignKey("build_decision_book.id")) +# accounting_id = mapped_column(ForeignKey("account_detail.id")) +# +# __table_args__ = ( +# Index("_build_decision_book_budget_ndx_01", accounting_id), +# {"comment": "Budget Items related to decision taken at building meetings"}, +# ) +# +# +# class BuildDecisionBookBudgetItem(CrudCollection): +# """ +# Builds class based on declarative_base and BaseMixin via session +# """ +# +# __tablename__ = "build_decision_book_budget_item" +# __exclude__fields__ = [] +# +# paid_date = mapped_column(TIMESTAMP, nullable=False, comment="Payment Due Date") +# period_time = mapped_column(String(12), server_default="") +# paid_value: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0") +# +# build_decision_book_budget_id = mapped_column( +# ForeignKey("build_decision_book_budget.id"), nullable=False +# ) +# +# __table_args__ = ( +# Index( +# "_build_decision_book_budget_item_ndx_01", +# build_decision_book_budget_id, +# paid_date, +# ), +# ) +# + + +# buildings: Mapped["Build"] = relationship( +# "Build", back_populates="decision_books", foreign_keys=[build_id] +# ) +# companies: Mapped[List["Company"]] = relationship( +# "Company", back_populates="decision_books", foreign_keys=[resp_company_id] +# ) +# budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship( +# "CompanyBudgetRecords", +# back_populates="decision_books", +# foreign_keys="CompanyBudgetRecords.build_decision_book_id", +# ) +# decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship( +# "BuildDecisionBookItems", +# back_populates="decision_books", +# 
foreign_keys="BuildDecisionBookItems.build_decision_book_id", +# ) +# +# decision_book_management: Mapped["BuildDecisionBookManagement"] = relationship( +# "BuildDecisionBookManagement", +# back_populates="decision_book", +# foreign_keys="BuildDecisionBookManagement.build_decision_book_id", +# ) +# +# decision_book_people: Mapped[List["BuildDecisionBookPerson"]] = relationship( +# "BuildDecisionBookPerson", +# back_populates="decision_books", +# foreign_keys="BuildDecisionBookPerson.build_decision_book_id", +# ) +# +# # decision_book_projects: Mapped[List["DecisionBookProjects"]] = relationship( +# # "DecisionBookProjects", +# # back_populates="decision_books", +# # foreign_keys="DecisionBookProjects.build_decision_book_id", +# # ) +# # decision_book_project_people: Mapped[List["BuildDecisionBookProjectPerson"]] = ( +# # relationship( +# # "BuildDecisionBookProjectPerson", +# # back_populates="decision_books", +# # foreign_keys="BuildDecisionBookProjectPerson.build_decision_book_id", +# # ) +# # ) +# decision_book_legal_people: Mapped["BuildDecisionBookProjectsLegal"] = relationship( +# "BuildDecisionBookProjectsLegal", +# back_populates="decision_books", +# foreign_keys="BuildDecisionBookProjectsLegal.build_decision_book_id", +# ) +# +# decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship( +# "BuildDecisionBookBudget", +# back_populates="decision_book", +# foreign_keys="BuildDecisionBookBudget.build_decision_book_id", +# ) + +# decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship( +# "BuildDecisionBookItems", +# back_populates="decision_book_item_debits", +# foreign_keys=[build_decision_book_item_id], +# ) +# decision_book_payment_masters: Mapped[List["BuildDecisionBookPaymentsMaster"]] = relationship( +# "BuildDecisionBookPaymentsMaster", +# back_populates="decision_books_item_debits", +# foreign_keys="BuildDecisionBookPaymentsMaster.build_decision_book_item_debits_id", +# ) +# +# decision_books: Mapped["BuildDecisionBook"] 
= relationship( +# "BuildDecisionBook", +# back_populates="decision_book_items", +# foreign_keys=[build_decision_book_id], +# ) +# decision_book_item_debits: Mapped[List["BuildDecisionBookItemDebits"]] = ( +# relationship( +# "BuildDecisionBookItemDebits", +# back_populates="decision_book_items", +# foreign_keys="BuildDecisionBookItemDebits.build_decision_book_item_id", +# ) +# ) +# decision_book_projects: Mapped["DecisionBookProjects"] = relationship( +# "DecisionBookProjects", +# back_populates="decision_book_items", +# foreign_keys="DecisionBookProjects.build_decision_book_item_id", +# ) +# decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship( +# "BuildDecisionBookLegal", +# back_populates="decision_books_items", +# foreign_keys="BuildDecisionBookLegal.build_db_item_id", +# ) +# +# build_decision_book_item_unapproved: Mapped[ +# List["BuildDecisionBookItemsUnapproved"] +# ] = relationship( +# "BuildDecisionBookItemsUnapproved", +# back_populates="decision_book_items", +# foreign_keys="BuildDecisionBookItemsUnapproved.build_decision_book_item", +# ) + +# decision_books_items: Mapped["BuildDecisionBookItems"] = relationship( +# "BuildDecisionBookItems", +# back_populates="decision_book_legal", +# foreign_keys=[build_db_item_id], +# ) +# attorney_companies: Mapped["Companies"] = relationship( +# "Company", +# back_populates="decision_book_legal", +# foreign_keys=[resp_attorney_company], +# ) +# attorney_persons: Mapped["People"] = relationship( +# "People", +# back_populates="attorney_decision_book_legal", +# foreign_keys=[resp_attorney_id], +# ) +# lawyer_persons: Mapped["People"] = relationship( +# "People", +# back_populates="lawyer_decision_book_legal", +# foreign_keys=[mediator_lawyer_person_id], +# ) + +# decision_books: Mapped["BuildDecisionBook"] = relationship( +# "BuildDecisionBook", +# back_populates="decision_book_people", +# foreign_keys=[build_decision_book_id], +# ) +# people: Mapped["People"] = relationship( +# "People", 
back_populates="decision_book_people", foreign_keys=[person_id] +# ) + +# decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship( +# "BuildDecisionBookBudget", +# back_populates="decision_book_budget_item", +# foreign_keys=[build_decision_book_budget_id], +# ) + +# accounting: Mapped["AccountDetail"] = relationship( +# "AccountDetail", +# back_populates="decision_book_budget", +# foreign_keys=[accounting_id], +# ) +# decision_book: Mapped["BuildDecisionBook"] = relationship( +# "BuildDecisionBook", +# back_populates="decision_book_budget", +# foreign_keys=[build_decision_book_id], +# ) +# decision_book_budget_item: Mapped["BuildDecisionBookBudgetItem"] = relationship( +# "BuildDecisionBookBudgetItem", +# back_populates="decision_book_budget", +# foreign_keys="BuildDecisionBookBudgetItem.build_decision_book_budget_id", +# ) + +# decision_book_items: Mapped["BuildDecisionBookItems"] = relationship( +# "BuildDecisionBookItems", +# back_populates="build_decision_book_item_unapproved", +# foreign_keys=[build_decision_book_item], +# ) +# +# peoples: Mapped["People"] = relationship( +# "People", +# back_populates="build_decision_book_item_unapproved", +# foreign_keys=[person_id], +# ) +# +# class BuildDecisionBookInvitationsPerson(CrudCollection): +# """ +# Builds class based on declarative_base and BaseMixin via session +# """ +# +# __tablename__ = "build_decision_book_invitations_person" +# __exclude__fields__ = [] +# +# invite_id = mapped_column(ForeignKey("build_decision_book_invitations.id"), nullable=False) +# invite_uu_id = mapped_column(String, nullable=True, comment="Invite UUID") +# person_id = mapped_column(ForeignKey("people.id"), nullable=False) +# person_uu_id = mapped_column(String, nullable=False, comment="Person UUID") +# +# send_date = mapped_column(TIMESTAMP, nullable=False, comment="Confirmation Date") +# is_confirmed: Mapped[bool] = mapped_column(Boolean, server_default="0", comment="Message is Confirmed") +# confirmed_date = 
mapped_column(TIMESTAMP, nullable=True, comment="Confirmation Date") +# token = mapped_column(String, server_default="", comment="Invitation Token") +# +# __table_args__ = ( +# Index( +# "decision_book_invitations_person_ndx_01", +# invite_id, +# person_id, +# unique=True, +# ), +# {"comment": "People that are invited to building meetings."}, +# ) diff --git a/Schemas/company/company.py b/Schemas/company/company.py new file mode 100644 index 0000000..7601402 --- /dev/null +++ b/Schemas/company/company.py @@ -0,0 +1,575 @@ +from fastapi.exceptions import HTTPException +from sqlalchemy import ( + String, + Integer, + Boolean, + ForeignKey, + Index, + Identity, + TIMESTAMP, + func, +) +from sqlalchemy.orm import mapped_column, relationship, Mapped + +from Commons.select_functions import SelectAction +# from ApiLayers.ApiValidations.Custom.token_objects import EmployeeTokenObject +# from ApiLayers.ApiValidations.Request import ( +# InsertCompany, +# UpdateCompany, +# MatchCompany2Company, +# ) + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + +class RelationshipDutyCompany(CrudCollection): + """ + CompanyRelationship class based on declarative_base and CrudCollection via session + Company -> Sub Company -> Sub-Sub Company + + if owner_id == parent_id: can manipulate data of any record + else: Read-Only + duty_id = if relationship_type == base An organization / not operational / no responsible person + + relationship = company_id filter -> Action filter(company_id) relationship_type = Organization + relationship = company_id filter -> Action filter(company_id) relationship_type = Commercial + """ + + __tablename__ = "relationship_duty_company" + __exclude__fields__ = [] + + owner_id: Mapped[int] = mapped_column( + ForeignKey("companies.id"), nullable=False + ) # 1 + duties_id: Mapped[int] = mapped_column( + ForeignKey("duties.id"), nullable=False + ) # duty -> (n)employee Evyos LTD + + member_id: Mapped[int] = mapped_column( + 
ForeignKey("companies.id"), nullable=False + ) # 2, 3, 4 + parent_id: Mapped[int] = mapped_column( + ForeignKey("companies.id"), nullable=True + ) # None + + relationship_type: Mapped[str] = mapped_column( + String, nullable=True, server_default="Commercial" + ) # Commercial, Organization # Bulk + child_count: Mapped[int] = mapped_column(Integer) # 0 + show_only: Mapped[bool] = mapped_column(Boolean, server_default="0") + + # related_company: Mapped[List["Companies"]] = relationship( + # "Companies", + # back_populates="related_companies", + # foreign_keys=[related_company_id], + # ) + # + # @classmethod + # def match_company_to_company_commercial(cls, data: MatchCompany2Company, token): + # from Schemas import ( + # Duties, + # ) + # + # token_duties_id, token_company_id = token.get("duty_id"), token.get( + # "company_id" + # ) + # list_match_company_id = [] + # send_duties = Duties.filter_one( + # Duties.uu_id == data.duty_uu_id, + # ) + # send_user_duties = Duties.filter_one( + # Duties.duties_id == send_duties.id, + # Duties.company_id == token_duties_id, + # ) + # if not send_user_duties: + # raise Exception( + # "Send Duty is not found in company. Please check duty uuid and try again." + # ) + # + # for company_uu_id in list(data.match_company_uu_id): + # company = Companies.filter_one( + # Companies.uu_id == company_uu_id, + # ) + # bulk_company = RelationshipDutyCompany.filter_one( + # RelationshipDutyCompany.owner_id == token_company_id, + # RelationshipDutyCompany.relationship_type == "Bulk", + # RelationshipDutyCompany.member_id == company.id, + # ) + # if not bulk_company: + # raise Exception( + # f"Bulk Company is not found in company. " + # f"Please check company uuid {bulk_company.uu_id} and try again." 
+ # ) + # list_match_company_id.append(bulk_company) + # + # for match_company_id in list_match_company_id: + # RelationshipDutyCompany.find_or_create( + # owner_id=token_company_id, + # duties_id=send_user_duties.id, + # member_id=match_company_id.id, + # parent_id=match_company_id.parent_id, + # relationship_type="Commercial", + # show_only=False, + # ) + # + # @classmethod + # def match_company_to_company_organization(cls, data: MatchCompany2Company, token): + # from Schemas import ( + # Duties, + # ) + # + # token_duties_id, token_company_id = token.get("duty_id"), token.get( + # "company_id" + # ) + # list_match_company_id = [] + # send_duties = Duties.filter_one( + # Duties.uu_id == data.duty_uu_id, + # ) + # send_user_duties = Duties.filter_one( + # Duties.duties_id == send_duties.id, + # Duties.company_id == token_duties_id, + # ) + # if not send_user_duties: + # raise Exception( + # "Send Duty is not found in company. Please check duty uuid and try again." + # ) + # + # for company_uu_id in list(data.match_company_uu_id): + # company = Companies.filter_one( + # Companies.uu_id == company_uu_id, + # ) + # bulk_company = RelationshipDutyCompany.filter_one( + # RelationshipDutyCompany.owner_id == token_company_id, + # RelationshipDutyCompany.relationship_type == "Bulk", + # RelationshipDutyCompany.member_id == company.id, + # ) + # if not bulk_company: + # raise Exception( + # f"Bulk Company is not found in company. " + # f"Please check company uuid {bulk_company.uu_id} and try again." 
 + # ) + # list_match_company_id.append(bulk_company) + # + # for match_company_id in list_match_company_id: + # Duties.init_a_company_default_duties( + # company_id=match_company_id.id, + # company_uu_id=str(match_company_id.uu_id), + # ) + # RelationshipDutyCompany.find_or_create( + # owner_id=token_company_id, + # duties_id=send_user_duties.id, + # member_id=match_company_id.id, + # parent_id=match_company_id.parent_id, + # relationship_type="Organization", + # show_only=False, + # ) + # + # __table_args__ = ( + # Index( + # "_company_relationship_ndx_01", + # duties_id, + # owner_id, + # member_id, + # relationship_type, + # unique=True, + # ), + # {"comment": "Company Relationship Information"}, + # ) + + +class Companies(CrudCollection, SelectAction): + """ + Company class based on declarative_base and CrudCollection via session + formal_name = Government register name by official + public_name = Public registered name by User + nick_name = Search by nickname, commercial_type = legal entity or individual + """ + + __tablename__ = "companies" + + __exclude__fields__ = ["is_blacklist", "is_commercial"] + __access_by__ = [] + __many__table__ = RelationshipDutyCompany + # __explain__ = AbstractCompany() + + formal_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Formal Name" + ) + company_type: Mapped[str] = mapped_column( + String, nullable=False, comment="Company Type" + ) + commercial_type: Mapped[str] = mapped_column( + String, nullable=False, comment="Commercial Type" + ) + tax_no: Mapped[str] = mapped_column( + String, index=True, unique=True, nullable=False, comment="Tax No" + ) + + public_name: Mapped[str] = mapped_column(String, comment="Public Name of a company") + company_tag: Mapped[str] = mapped_column(String, comment="Company Tag") + default_lang_type: Mapped[str] = mapped_column(String, server_default="TR") + default_money_type: Mapped[str] = mapped_column(String, server_default="TL") + is_commercial: Mapped[bool] = mapped_column(Boolean, 
server_default="False") + is_blacklist: Mapped[bool] = mapped_column(Boolean, server_default="False") + parent_id = mapped_column(Integer, nullable=True) + workplace_no: Mapped[str] = mapped_column(String, nullable=True) + + official_address_id: Mapped[int] = mapped_column( + ForeignKey("addresses.id"), nullable=True + ) + official_address_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Official Address UUID" + ) + top_responsible_company_id: Mapped[int] = mapped_column( + ForeignKey("companies.id"), nullable=True + ) + top_responsible_company_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Top Responsible Company UUID" + ) + + # buildings: Mapped[List["Build"]] = relationship( + # "Build", + # back_populates="companies", + # foreign_keys="Build.company_id", + # ) + + __table_args__ = ( + Index("_company_ndx_01", tax_no, unique=True), + Index("_company_ndx_02", formal_name, public_name), + {"comment": "Company Information"}, + ) + # + # @classmethod + # def create_action(cls, data: InsertCompany, token: EmployeeTokenObject): + # from Schemas import Addresses, Duties + # + # data_dict = data.model_dump() + # if cls.filter_one(cls.tax_no == str(data.tax_no).strip(), system=True).data: + # raise HTTPException( + # status_code=400, + # detail="Company already exists. Please ask supervisor to make company visible for your duty.", + # ) + # + # official_address = Addresses.filter_one( + # Addresses.uu_id == data.official_address_uu_id, + # ).data + # # if not official_address: + # # raise HTTPException( + # # status_code=400, + # # detail="Official address is not found. 
Please check address uuid and try again.", + # # ) + # + # bulk_duties = Duties.get_bulk_duties_of_a_company( + # company_id=token.selected_company.company_id + # ) + # + # if official_address: + # data_dict["official_address_id"] = official_address.id + # data_dict["official_address_uu_id"] = str(official_address.uu_id) + # + # data_dict["parent_id"] = token.selected_company.company_id + # data_dict["top_responsible_company_id"] = token.selected_company.company_id + # data_dict["top_responsible_company_uu_id"] = ( + # token.selected_company.company_uu_id + # ) + # company_created = cls.find_or_create(**data_dict) + # company_created.save_and_confirm() + # company_relationship_created = RelationshipDutyCompany.find_or_create( + # owner_id=token.selected_company.company_id, + # duties_id=bulk_duties.id, + # member_id=company_created.id, + # parent_id=company_created.parent_id, + # child_count=0, + # relationship_type="Bulk", + # show_only=False, + # ) + # company_relationship_created.save_and_confirm() + # return company_created + # + # @classmethod + # def update_action(cls, data: UpdateCompany, token): + # from Schemas import ( + # Addresses, + # ) + # + # data_dict = data.excluded_dump() + # duty_id = token.get("duty_id") + # company_id = token.get("company_id") + # if data.official_address_uu_id: + # official_address = Addresses.filter_one( + # Addresses.uu_id == data.official_address_uu_id, + # *Addresses.valid_record_args(Addresses), + # ).data + # data_dict["official_address_id"] = official_address.id + # del data_dict["official_address_uu_id"], data_dict["company_uu_id"] + # company_to_update = cls.select_action( + # duty_id_list=[duty_id], + # filter_expr=[ + # cls.uu_id == data.company_uu_id, + # RelationshipDutyCompany.parent_id == company_id, + # ], + # ) + # return company_to_update.update(**data_dict) + + # parent_id = mapped_column(ForeignKey("companies.id")) + # if data.parent_uu_id: + # company = Companies.find_one(uu_id=data.parent_uu_id) + # 
data_dict["parent_id"] = company.id + # def is_access_valid(self, endpoint_ext: str): + # try: + # if ( + # not arrow.get(self.stop_date) + # > arrow.utcnow() + # > arrow.get(self.start_date) + # ): + # message = f"Kullanıcı yetkileri süresi dolmuştur. {self.endpoint_name} için supervisor ile görüşünüz." + # SystemLogs.create_log( + # log_type="ERROR", + # log_code="ACCESS_EXPIRED", + # log_action=self.__tablename__, + # log_message=message, + # ) + # return False + # except Exception as e: + # SystemLogs.create_log( + # log_type="ERROR", + # log_code="ACCESS_EXPIRED", + # log_action=self.__tablename__, + # log_message=e, + # ) + # return False + # + # access_dict = { + # "LIST": self.access_read, + # "INSERT": self.access_write, + # "UPDATE": self.access_update, + # "DELETE": self.access_delete, + # "ACTIVE": self.access_update, + # "PRINT": self.report_print, + # "EXPORT": self.report_export, + # } + # return access_dict.get(endpoint_ext.upper(), False) + + # official_address: Mapped[List["Address"]] = relationship( + # "Address", + # back_populates="official_companies", + # foreign_keys=[official_address_id], + # ) + # + # emails: Mapped[List["UsersEmails"]] = relationship( + # "UsersEmails", back_populates="companies", foreign_keys="UsersEmails.company_id" + # ) + # phones: Mapped[List["UsersPhones"]] = relationship( + # "UsersPhones", back_populates="company", foreign_keys="UsersPhones.company_id" + # ) + # buildings: Mapped[List["Build"]] = relationship( + # "Build", + # back_populates="companies", + # foreign_keys="Build.company_id", + # ) + # response_buildings: Mapped[List["Build"]] = relationship( + # "Build", + # back_populates="response_companies", + # foreign_keys="Build.response_company_id", + # ) + # departments: Mapped[List["CompanyDepartments"]] = relationship( + # "CompanyDepartments", + # back_populates="company", + # foreign_keys="CompanyDepartments.company_id", + # ) + # budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship( + # 
"CompanyBudgetRecords", + # back_populates="companies", + # foreign_keys="CompanyBudgetRecords.company_id", + # ) + # send_budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship( + # "CompanyBudgetRecords", + # back_populates="send_companies", + # foreign_keys="CompanyBudgetRecords.send_company_id", + # ) + # decision_books: Mapped[List["BuildDecisionBook"]] = relationship( + # "BuildDecisionBook", + # back_populates="companies", + # foreign_keys="BuildDecisionBook.resp_company_id", + # ) + # decision_book_projects: Mapped[List["BuildDecisionBookProjects"]] = relationship( + # "BuildDecisionBookProjects", + # back_populates="companies", + # foreign_keys="BuildDecisionBookProjects.resp_company_id", + # ) + # decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship( + # "BuildDecisionBookLegal", + # back_populates="attorney_companies", + # foreign_keys="BuildDecisionBookLegal.resp_attorney_company", + # ) + # + # company_account_books: Mapped["AccountBooks"] = relationship( + # "AccountBooks", + # back_populates="company", + # foreign_keys="AccountBooks.company_id", + # ) + # branch_account_books: Mapped["AccountBooks"] = relationship( + # "AccountBooks", + # back_populates="branch", + # foreign_keys="AccountBooks.branch_id", + # ) + # account_codes: Mapped["AccountCodes"] = relationship( + # "AccountCodes", back_populates="company", foreign_keys="AccountCodes.company_id" + # ) + # search_iban_description: Mapped["BuildIbanDescription"] = relationship( + # "BuildIbanDescription", + # back_populates="company", + # foreign_keys="BuildIbanDescription.company_id", + # ) + # related_companies: Mapped[List["CompanyRelationship"]] = relationship( + # "CompanyRelationship", + # back_populates="related_company", + # foreign_keys="CompanyRelationship.related_company_id", + # ) + + +# +# class AbstractCompany: +# """ +# Abstract and explanation of Company class for end-user guide +# """ +# +# formal_name = Explanation( +# explanation="Devletin resmi 
kayıtlarında bulunan şirket ünvanıdır.", +# usage="Devletin resmi kayıtlarında bulunan şirket adı istendiğinde kullanılır.", +# alias="Resmi Ünvan", +# example=["X Şirketi LTD", "Y Şirketi A.Ş."], +# ) +# company_type = Explanation( +# explanation="Şirketin türüdür.", +# usage="Şirketin türü istendiğinde kullanılır.", +# alias="Şirket Türü", +# example=[ +# "Şahıs", +# "Limited", +# "Anonim", +# "Kolektif", +# "Komandit", +# "Kooperatif", +# "Serbest Meslek", +# "Adi Ortaklık", +# ], +# ) +# commercial_type = Explanation( +# explanation="Şirketin ticari türüdür.", +# usage="Şirketin ticari türü istendiğinde kullanılır.", +# alias="Ticari Tür", +# example=["Tüzel", "Birey"], +# ) +# tax_no = Explanation( +# explanation="Şirketin vergi numarasıdır.", +# usage="Şirketin vergi numarası istendiğinde kullanılır.", +# alias="Vergi No", +# example=["1234567890"], +# ) +# public_name = Explanation( +# explanation="Şirketin kamuoyunda bilinen adıdır.", +# usage="Şirketin kamuoyunda bilinen adı istendiğinde kullanılır.", +# alias="Piyasada Bilinen Adı", +# example=["X Şirketi", "Y Şirketi"], +# ) +# company_tag = Explanation( +# explanation="Şirketin takma adı veya etiketidir.", +# usage="Şirketin yöneticisin karar verdiği takma adı veya etiketi istendiğinde kullanılır.", +# alias="Şirket Etiketi veya Takma Adı", +# example=["X", "Y"], +# ) +# default_lang_type = Explanation( +# explanation="Şirketin varsayılan dil türüdür.", +# usage="Şirketin varsayılan dil türü istendiğinde kullanılır.", +# alias="Şirketin Dil Türü", +# example=["TR", "EN"], +# ) +# default_money_type = Explanation( +# explanation="Şirketin varsayılan para birimi türüdür.", +# usage="Şirketin varsayılan para birimi türü istendiğinde kullanılır.", +# alias="Şirketin Para Birimi Türü", +# example=["TL", "USD", "EUR"], +# ) +# is_commercial = Explanation( +# explanation="Şirketin ticari olup olmadığını belirtir.", +# usage="Şirketin ticari olup olmadığını applikasyonun anlaması için kullanılır.", +# 
condition=lambda commercial_type: True if commercial_type == "Şahıs" else False, +# alias="Şirket Ticari mi?", +# ) +# is_blacklist = Explanation( +# explanation="Şirketin kara listeye alınıp alınmadığını belirtir.", +# usage="Şirketin kara listeye alınıp alınmadığını applikasyonun anlaması için kullanılır.", +# alias="Kara Listeye alınsın mı?", +# example=[True, False], +# ) +# parent_id = Explanation( +# explanation="Şirketin sorumlu olduğu şirketin ID'sidir.", +# usage="Şirketin sorumlu olduğu şirketin ID'si istendiğinde kullanılır.", +# alias="Sorumlu Şirket", +# example=[ +# "Bir şirketin sorumlu şirketi hangisi olduğunu bulmak için kullanılır.", +# ], +# ) +# workplace_no = Explanation( +# explanation="Şirketin iş yeri numarasıdır.", +# usage="Şirketin iş yeri numarası istendiğinde kullanılır.", +# alias="İş Yeri No", +# example=["1234567890"], +# ) +# official_address_id = Explanation( +# explanation="Şirketin resmi adresidi.", +# usage="Şirketin resmi adresinin ne olduğunu bulmak için kullanılır.", +# alias="Resmi Adres", +# example=[ +# "Bu şirketin adresi nedir sorusuna cevap vermek için kullanılır.", +# ], +# ) +# top_responsible_company_id = Explanation( +# explanation="Şirketin en üst sorumlu şirketin ID'sidir.", +# usage="Şirketin en üst sorumlu şirketin hangisi olduğunu bulmak için kullanılır.", +# alias="Ana Yetkili Şirket", +# example=[ +# "Bölge veya ülke genelinde en üst sorumlu şirketin hangisi olduğunu belirtmek için kullanılır.", +# ], +# ) +# buildings = Explanation( +# explanation="Şirketin sahip olduğu binaların listesidir.", +# usage="Şirketin sahip olduğu binaların listesini bulmak için kullanılır.", +# alias="Sorumlu olduğu binalar Binalar", +# example=[ +# "Şirketin sahip olduğu binaların listesini bulmak için kullanılır.", +# ], +# ) +# +# def wag_create_company(self): +# """ +# Er kişiye wag_create_company fonksiyonu = fieldları manipule edebilir? 
+# 78 ile oluşturulan bir user için wag_create_company fonksiyonu = fieldları manipule edebilir? +# """ +# return { +# "commercial_type": self.commercial_type, +# "formal_name": self.formal_name, +# "public_name": self.public_name, +# "company_type": self.company_type, +# "tax_no": self.tax_no, +# "workplace_no": self.workplace_no, +# "company_tag": self.company_tag, +# "default_lang_type": self.default_lang_type, +# "default_money_type": self.default_money_type, +# "official_address_id": self.official_address_id, +# } +# +# def wag_update_company(self): +# return { +# "commercial_type": self.commercial_type, +# "formal_name": self.formal_name, +# "public_name": self.public_name, +# "company_type": self.company_type, +# "tax_no": self.tax_no, +# "workplace_no": self.workplace_no, +# "company_tag": self.company_tag, +# "default_lang_type": self.default_lang_type, +# "default_money_type": self.default_money_type, +# "official_address_id": self.official_address_id, +# } diff --git a/Schemas/company/department.py b/Schemas/company/department.py new file mode 100644 index 0000000..6e3f55e --- /dev/null +++ b/Schemas/company/department.py @@ -0,0 +1,232 @@ +from sqlalchemy import String, Integer, ForeignKey, Index, Boolean, Identity +from sqlalchemy.orm import mapped_column, Mapped + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + +class Departments(CrudCollection): + + __tablename__ = "departments" + __exclude__fields__ = [] + + parent_department_id = mapped_column(Integer, server_default="0") + department_code = mapped_column( + String(16), nullable=False, index=True, comment="Department Code" + ) + department_name: Mapped[str] = mapped_column( + String(128), nullable=False, comment="Department Name" + ) + department_description: Mapped[str] = mapped_column(String, server_default="") + + company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False) + company_uu_id: Mapped[str] = mapped_column( + String, 
nullable=False, comment="Company UUID" + ) + + # @classmethod + # def create_action(cls, data: DepartmentsPydantic, token): + # data_dict = data.model_dump() + # data_dict["company_id"] = token.selected_company.company_id + # return cls.find_or_create(**data_dict) + + __table_args__ = {"comment": "Departments Information"} + + +class Duty(CrudCollection): + + __tablename__ = "duty" + __exclude__fields__ = [] + + duty_name: Mapped[str] = mapped_column( + String, unique=True, nullable=False, comment="Duty Name" + ) + duty_code: Mapped[str] = mapped_column(String, nullable=False, comment="Duty Code") + duty_description: Mapped[str] = mapped_column(String, comment="Duty Description") + + # @classmethod + # def create_action(cls, data: InsertCompanyDuty, token): + # # if not cls.__is_super__: + # # raise HTTPException( + # # status_code=401, detail="You are not authorized to create a duty." + # # ) + # data_dict = data.model_dump() + # + # return cls.find_or_create(**data_dict) + + __table_args__ = ({"comment": "Duty Information"},) + + +class Duties(CrudCollection): + + __tablename__ = "duties" + __exclude__fields__ = [] + + users_default_duty = mapped_column( + ForeignKey("duty.id"), nullable=True, comment="Default Duty for Users" + ) + company_id: Mapped[int] = mapped_column(Integer) + company_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Company UUID" + ) + duties_id: Mapped[int] = mapped_column(ForeignKey("duty.id"), nullable=False) + duties_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Duty UUID" + ) + department_id = mapped_column( + ForeignKey("departments.id"), nullable=False, comment="Department ID" + ) + department_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Department UUID" + ) + # priority_id: Mapped[int] = mapped_column(ForeignKey("priority.id"), nullable=True) + management_duty = mapped_column( + Boolean, server_default="0" + ) # is this a prime Company Duty ??? 
+ + @classmethod + def init_a_company_default_duties(cls, company_id, company_uu_id): + __default_init__ = ["Execution Office", "IT Department"] + + active_row = dict( + is_confirmed=True, active=True, deleted=False, is_notification_send=True + ) + list_of_created = [] + + execution = Departments.find_or_create( + department_name="Execution Office", + department_code="EO001", + company_id=company_id, + company_uu_id=str(company_uu_id), + **active_row, + ) + list_of_created.append(execution) + it_dept = Departments.find_or_create( + department_name="IT Department", + department_code="ITD001", + company_id=company_id, + company_uu_id=str(company_uu_id), + **active_row, + ) + list_of_created.append(it_dept) + bm_duty = Duty.find_or_create( + duty_name="Business Manager", + duty_code="BM0001", + duty_description="Business Manager", + **active_row, + ) + list_of_created.append(bm_duty) + it_duty = Duty.find_or_create( + duty_name="IT Manager", + duty_code="IT0001", + duty_description="IT Manager", + **active_row, + ) + list_of_created.append(it_duty) + bulk_duty = Duty.find_or_create( + duty_name="BULK", + duty_code="BULK", + duty_description="BULK RECORDS OF THE COMPANY", + **active_row, + ) + list_of_created.append(bulk_duty) + occu_duty = Duty.find_or_create( + duty_name="OCCUPANT", + duty_code="OCCUPANT", + duty_description="OCCUPANT RECORDS OF THE COMPANY", + **active_row, + ) + list_of_created.append(occu_duty) + duties_created_bm = cls.find_or_create( + company_id=company_id, + company_uu_id=str(company_uu_id), + duties_id=bm_duty.id, + duties_uu_id=str(bm_duty.uu_id), + department_id=execution.id, + department_uu_id=str(execution.uu_id), + **active_row, + ) + list_of_created.append(duties_created_bm) + duties_created_it = cls.find_or_create( + company_id=company_id, + company_uu_id=str(company_uu_id), + duties_id=it_duty.id, + duties_uu_id=str(it_duty.uu_id), + department_id=it_dept.id, + department_uu_id=str(it_dept.uu_id), + **active_row, + ) + 
list_of_created.append(duties_created_it) + duties_created__ex = cls.find_or_create( + company_id=company_id, + company_uu_id=str(company_uu_id), + duties_id=bulk_duty.id, + duties_uu_id=str(bulk_duty.uu_id), + department_id=execution.id, + department_uu_id=str(execution.uu_id), + **active_row, + ) + list_of_created.append(duties_created__ex) + duties_created_at = cls.find_or_create( + company_id=company_id, + company_uu_id=str(company_uu_id), + duties_id=occu_duty.id, + duties_uu_id=str(occu_duty.uu_id), + department_id=execution.id, + department_uu_id=str(execution.uu_id), + **active_row, + ) + list_of_created.append(duties_created_at) + return list_of_created + + @classmethod + def get_bulk_duties_of_a_company(cls, company_id): + duties_id = Duty.filter_by_one(system=True, duty_code="BULK").data + if bulk_duties := Duties.filter_by_one( + duties_id=getattr(duties_id, "id", None), + company_id=company_id, + **Duties.valid_record_dict, + ).data: + return bulk_duties + raise Exception("Bulk Duty not found. 
Please contact with supervisor.") + + # @classmethod + # def create_action(cls, data: InsertCompanyDuty): + # data_dict = data.model_dump() + # if department := Departments.find_one(uu_id=data.department_uu_id): + # data_dict["department_id"] = department.id + # del data_dict["department_uu_id"] + # return cls.find_or_create(**data_dict) + + __table_args__ = ( + Index("duty_ndx_00", company_id, duties_id, department_id, unique=True), + {"comment": "Duty & Company & Department Information"}, + ) + + # department: Mapped[List["CompanyDepartments"]] = relationship( + # "CompanyDepartments", back_populates="duties", foreign_keys=[department_id] + # ) + # employees: Mapped[List["CompanyEmployees"]] = relationship( + # "CompanyEmployees", + # back_populates="duty", + # foreign_keys="CompanyEmployees.duty_id", + # ) + # duty_app: Mapped["CompanyDutyApp"] = relationship( + # "CompanyDutyApp", back_populates="duties", foreign_keys="CompanyDutyApp.company_duty_id" + # ) + + # def get_language_of_duty(self, lang): + # if erp_text := ErpText.find_one(lang=lang, text_code=self.duty_code): + # return erp_text.text_name, erp_text.text_description + # return None, None + + # company: Mapped["Companies"] = relationship( + # "Company", back_populates="departments", foreign_keys=[company_id] + # ) + # duties: Mapped[List["CompanyDuty"]] = relationship( + # "CompanyDuty", + # back_populates="department", + # foreign_keys="CompanyDuty.department_id", + # ) + # app_item: Mapped["AppItems"] = relationship( + # "AppItems", back_populates="department", foreign_keys="AppItems.department_id" + # ) diff --git a/Schemas/company/employee.py b/Schemas/company/employee.py new file mode 100644 index 0000000..ac6de75 --- /dev/null +++ b/Schemas/company/employee.py @@ -0,0 +1,143 @@ +from sqlalchemy import ( + String, + ForeignKey, + Index, + Numeric, +) +from sqlalchemy.orm import mapped_column, Mapped + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + +# from 
ApiLayers.ApiValidations.Request import InsertCompanyEmployees + + +class Staff(CrudCollection): + + __tablename__ = "staff" + __exclude__fields__ = [] + + staff_description: Mapped[str] = mapped_column( + String, server_default="", comment="Staff Description" + ) + staff_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Staff Name" + ) + staff_code: Mapped[str] = mapped_column( + String, nullable=False, comment="Staff Code" + ) + + duties_id: Mapped[int] = mapped_column(ForeignKey("duties.id"), nullable=False) + duties_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Duty UUID" + ) + + # people: Mapped["People"] = relationship( + # "People", back_populates="employees", foreign_keys=[people_id], uselist=True + # ) + # duty: Mapped["CompanyDuty"] = relationship( + # "CompanyDuty", back_populates="employees", foreign_keys=[duty_id] + # ) + # + # @classmethod + # def create_action(cls, data: InsertCompanyEmployees): + # from Schemas import Duties + # + # data_dict = data.model_dump() + # if duty := Duties.find_one(uu_id=data.duty_uu_id): + # data_dict["duty_id"] = duty.id + # # if person := People.find_one(uu_id=data.person_uu_id): + # # data_dict["people_id"] = person.id + # if data.start_date: + # data_dict["expiry_starts"] = data.start_date + # if data.stop_date: + # data_dict["expiry_ends"] = data.stop_date + # # del data_dict["duty_uu_id"], data_dict["person_uu_id"] + # del data_dict["start_date"], data_dict["stop_date"], data_dict["duty_uu_id"] + # return cls.find_or_create(**data_dict) + # + # __table_args__ = ({"comment": "Staff Information"},) + + +class Employees(CrudCollection): + + __tablename__ = "employees" + __exclude__fields__ = [] + + staff_id: Mapped[int] = mapped_column(ForeignKey("staff.id")) + staff_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Staff UUID" + ) + people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True) + people_uu_id: Mapped[str] = 
mapped_column( + String, nullable=True, comment="People UUID" + ) + + __table_args__ = ( + Index("employees_ndx_00", people_id, staff_id, unique=True), + {"comment": "Employee Person Information"}, + ) + + +class EmployeeHistory(CrudCollection): + + __tablename__ = "employee_history" + __exclude__fields__ = [] + + staff_id: Mapped[int] = mapped_column( + ForeignKey("staff.id"), nullable=False, comment="Staff ID" + ) + staff_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Staff UUID" + ) + people_id: Mapped[int] = mapped_column( + ForeignKey("people.id"), nullable=False, comment="People ID" + ) + people_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="People UUID" + ) + + __table_args__ = ( + Index("_employee_history_ndx_00", people_id, staff_id), + {"comment": "Employee History Information"}, + ) + + +class EmployeesSalaries(CrudCollection): + + __tablename__ = "employee_salaries" + __exclude__fields__ = [] + + gross_salary: Mapped[float] = mapped_column( + Numeric(20, 6), nullable=False, comment="Gross Salary" + ) + net_salary: Mapped[float] = mapped_column( + Numeric(20, 6), nullable=False, comment="Net Salary" + ) + + people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False) + people_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="People UUID" + ) + + # people: Mapped["People"] = relationship( + # "People", back_populates="employee_salaries", foreign_keys=[people_id] + # ) + + __table_args__ = ( + Index("_employee_salaries_ndx_00", people_id, "expiry_starts"), + {"comment": "Employee Salaries Information"}, + ) + + +# class Events2Employees(CrudCollection): +# +# __tablename__ = "events2employees" +# __exclude__fields__ = [] +# +# event_id = mapped_column(ForeignKey("events.id"), nullable=False) +# employees_id = mapped_column(ForeignKey("employees.id"), nullable=False) +# +# __table_args__ = ( +# Index("_events2employees_ndx_00", event_id, employees_id), +# {"comment": 
"Events2Employees Information"}, +# ) diff --git a/Schemas/event/event.py b/Schemas/event/event.py new file mode 100644 index 0000000..2394625 --- /dev/null +++ b/Schemas/event/event.py @@ -0,0 +1,429 @@ +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + +from sqlalchemy import ( + String, + ForeignKey, + Numeric, + SmallInteger, + Boolean, + Integer, + Index, +) +from sqlalchemy.orm import mapped_column, Mapped + + +class Events(CrudCollection): + """ + Events class based on declarative_base and BaseMixin via session + If Events2Occupants and Events2Employees are not found for user request, response 401 Unauthorized + """ + + __tablename__ = "events" + __exclude__fields__ = [] + + event_type: Mapped[str] = mapped_column( + String, nullable=False, comment="Event Type" + ) + function_code: Mapped[str] = mapped_column( + String, nullable=False, comment="function code" + ) + function_class: Mapped[str] = mapped_column( + String, nullable=False, comment="class name" + ) + + # name: Mapped[str] = mapped_column(String, nullable=True) # form or page title + description: Mapped[str] = mapped_column( + String, server_default="" + ) # form or page description + property_description: Mapped[str] = mapped_column(String, server_default="") + + marketing_layer = mapped_column(SmallInteger, server_default="3") + cost: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00") + unit_price: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00") + + endpoint_id: Mapped[int] = mapped_column( + ForeignKey("endpoint_restriction.id"), nullable=True + ) + endpoint_uu_id: Mapped[str] = mapped_column( + String, nullable=True, comment="Endpoint UUID" + ) + + __table_args__ = ({"comment": "Events Information"},) + + +class Modules(CrudCollection): + """ + Modules class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "modules" + __exclude__fields__ = [] + + module_name: Mapped[str] = mapped_column( + 
String, nullable=False, comment="Module Name" + ) + module_description: Mapped[str] = mapped_column(String, server_default="") + module_code: Mapped[str] = mapped_column( + String, nullable=False, comment="Module Code" + ) + module_layer = mapped_column(Integer, nullable=False, comment="Module Layer") + is_default_module = mapped_column(Boolean, server_default="0") + + def retrieve_services(self): + services = Services.filter_all(Services.module_id == self.id).data + if not services: + self.raise_http_exception( + status_code="HTTP_404_NOT_FOUND", + error_case="RECORD_NOT_FOUND", + message=f"No services found for this module : {str(self.uu_id)}", + data={ + "module_uu_id": str(self.uu_id), + }, + ) + return services + + __table_args__ = ({"comment": "Modules Information"},) + + +class Services(CrudCollection): + """ + Services class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "services" + __exclude__fields__ = [] + + module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False) + module_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Module UUID" + ) + service_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Service Name" + ) + service_description: Mapped[str] = mapped_column(String, server_default="") + service_code: Mapped[str] = mapped_column( + String, nullable=True, comment="Service Code" + ) + related_responsibility: Mapped[str] = mapped_column(String, server_default="") + + @classmethod + def retrieve_service_via_occupant_code(cls, occupant_code): + from Schemas import OccupantTypes + + occupant_type = OccupantTypes.filter_by_one( + system=True, + occupant_code=occupant_code, + ).data + if not occupant_type: + cls.raise_http_exception( + status_code="HTTP_404_NOT_FOUND", + error_case="RECORD_NOT_FOUND", + message=f"No occupant type found for this code : {occupant_code}", + data={ + "occupant_code": occupant_code, + }, + ) + return cls.filter_one( + 
cls.related_responsibility == occupant_type.occupant_code + ).data + + __table_args__ = ({"comment": "Services Information"},) + + +class Service2Events(CrudCollection): + """ + Service2Actions class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "services2events" + __exclude__fields__ = [] + + service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False) + service_uu_id = mapped_column(String, nullable=False, comment="Service UUID") + event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False) + event_uu_id = mapped_column(String, nullable=False, comment="Event UUID") + + __table_args__ = ({"comment": "Service2Events Information"},) + + +class Event2OccupantExtra(CrudCollection): + + __tablename__ = "event2occupant_extra" + __exclude__fields__ = [] + + build_living_space_id: Mapped[int] = mapped_column( + ForeignKey("build_living_space.id"), nullable=False + ) + build_living_space_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Build Living Space UUID" + ) + event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False) + event_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Event UUID" + ) + + __table_args__ = ( + Index( + "event2occupant_extra_bind_event_to_occupant", + build_living_space_id, + event_id, + unique=True, + ), + {"comment": "Occupant2Event Information"}, + ) + + +class Event2EmployeeExtra(CrudCollection): + """ + Employee2Event class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "event2employee_extra" + __exclude__fields__ = [] + + employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False) + employee_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Employee UUID" + ) + + event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False) + event_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Event UUID" + 
) + + __table_args__ = ( + Index( + "event2employee_extra_employee_to_event", + employee_id, + event_id, + unique=True, + ), + {"comment": "Employee to Event Information"}, + ) + + +class Event2Employee(CrudCollection): + """ + Employee2Event class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "event2employee" + __exclude__fields__ = [] + + employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False) + employee_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Employee UUID" + ) + event_service_id: Mapped[int] = mapped_column( + ForeignKey("services.id"), nullable=False + ) + event_service_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Event Cluster UUID" + ) + + __table_args__ = ( + Index( + "event2employee_employee_to_event", + employee_id, + event_service_id, + unique=True, + ), + {"comment": "Employee to Event Information"}, + ) + + @classmethod + def get_event_codes(cls, employee_id: int) -> list: + db = cls.new_session() + employee_events = cls.filter_all( + cls.employee_id == employee_id, + db=db, + ).data + active_event_ids = Service2Events.filter_all_system( + Service2Events.service_id.in_( + [event.event_service_id for event in employee_events] + ), + db=db, + ).data + active_events = Events.filter_all( + Events.id.in_([event.event_id for event in active_event_ids]), + db=db, + ).data + if extra_events := Event2EmployeeExtra.filter_all( + Event2EmployeeExtra.employee_id == employee_id, + db=db, + ).data: + events_extra = Events.filter_all( + Events.id.in_([event.event_id for event in extra_events]), + db=db, + ).data + active_events.extend(events_extra) + return [event.function_code for event in active_events] + + # @classmethod + # def get_event_endpoints(cls, employee_id: int) -> list: + # from Schemas import EndpointRestriction + # + # db = cls.new_session() + # employee_events = cls.filter_all( + # cls.employee_id == employee_id, + # db=db, + # ).data 
+ # active_event_ids = Service2Events.filter_all( + # Service2Events.service_id.in_( + # [event.event_service_id for event in employee_events] + # ), + # db=db, + # system=True, + # ).data + # active_events = Events.filter_all( + # Events.id.in_([event.event_id for event in active_event_ids]), + # db=db, + # ).data + # if extra_events := Event2EmployeeExtra.filter_all( + # Event2EmployeeExtra.employee_id == employee_id, + # db=db, + # ).data: + # events_extra = Events.filter_all( + # Events.id.in_([event.event_id for event in extra_events]), + # db=db, + # ).data + # active_events.extend(events_extra) + # endpoint_restrictions = EndpointRestriction.filter_all( + # EndpointRestriction.id.in_([event.endpoint_id for event in active_events]), + # db=db, + # ).data + # return [event.endpoint_name for event in endpoint_restrictions] + # + + +class Event2Occupant(CrudCollection): + """ + Occupant2Event class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "event2occupant" + __exclude__fields__ = [] + + build_living_space_id: Mapped[str] = mapped_column( + ForeignKey("build_living_space.id"), nullable=False + ) + build_living_space_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Build Living Space UUID" + ) + event_service_id: Mapped[int] = mapped_column( + ForeignKey("services.id"), nullable=False + ) + event_service_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Event Cluster UUID" + ) + # event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False) + # event_uu_id = mapped_column(String, nullable=False, comment="Event UUID") + + __table_args__ = ( + Index( + "event2occupant_bind_event_to_occupant", + build_living_space_id, + event_service_id, + unique=True, + ), + {"comment": "Occupant2Event Information"}, + ) + + @classmethod + def get_event_codes(cls, build_living_space_id) -> list: + db = cls.new_session() + occupant_events = cls.filter_all( + cls.build_living_space_id == 
build_living_space_id, + db=db, + ).data + active_event_ids = Service2Events.filter_all_system( + Service2Events.service_id.in_( + [event.event_service_id for event in occupant_events] + ), + db=db, + ).data + active_events = Events.filter_all( + Events.id.in_([event.event_id for event in active_event_ids]), + db=db, + ).data + if extra_events := Event2OccupantExtra.filter_all( + Event2OccupantExtra.build_living_space_id == build_living_space_id, + db=db, + ).data: + events_extra = Events.filter_all( + Events.id.in_([event.event_id for event in extra_events]), + db=db, + ).data + active_events.extend(events_extra) + return [event.function_code for event in active_events] + + # @classmethod + # def get_event_endpoints(cls, build_living_space_id) -> list: + # from Schemas import EndpointRestriction + # + # db = cls.new_session() + # occupant_events = cls.filter_all( + # cls.build_living_space_id == build_living_space_id, + # db=db, + # ).data + # active_event_ids = Service2Events.filter_all( + # Service2Events.service_id.in_( + # [event.event_service_id for event in occupant_events] + # ), + # db=db, + # system=True, + # ).data + # active_events = Events.filter_all( + # Events.id.in_([event.event_id for event in active_event_ids]), + # db=db, + # ).data + # if extra_events := Event2OccupantExtra.filter_all( + # Event2OccupantExtra.build_living_space_id == build_living_space_id, + # db=db, + # ).data: + # events_extra = Events.filter_all( + # Events.id.in_([event.event_id for event in extra_events]), + # db=db, + # ).data + # active_events.extend(events_extra) + # endpoint_restrictions = EndpointRestriction.filter_all( + # EndpointRestriction.id.in_([event.endpoint_id for event in active_events]), + # db=db, + # ).data + # return [event.endpoint_name for event in endpoint_restrictions] + + +class ModulePrice(CrudCollection): + """ + ModulePrice class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "module_price" + __exclude__fields__ = [] 
+ + campaign_code: Mapped[str] = mapped_column( + String, nullable=False, comment="Campaign Code" + ) + module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False) + module_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Module UUID" + ) + service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False) + service_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Service UUID" + ) + event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False) + event_uu_id: Mapped[str] = mapped_column( + String, nullable=False, comment="Event UUID" + ) + is_counted_percentage: Mapped[float] = mapped_column( + Numeric(6, 2), server_default="0.00" + ) # %22 + discounted_price: Mapped[float] = mapped_column( + Numeric(20, 2), server_default="0.00" + ) # Normal: 78.00 TL + calculated_price: Mapped[float] = mapped_column( + Numeric(20, 2), server_default="0.00" + ) # sana düz 75.00 TL yapar + + __table_args__ = ({"comment": "ModulePrice Information"},) diff --git a/Schemas/identity/identity.py b/Schemas/identity/identity.py new file mode 100644 index 0000000..20e71bf --- /dev/null +++ b/Schemas/identity/identity.py @@ -0,0 +1,1034 @@ +import arrow + +from Commons.select_functions import SelectAction, SelectActionWithEmployee +from Configs.api import Auth, ApiStatic + +from datetime import timedelta +from fastapi import HTTPException + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection +from sqlalchemy import ( + String, + Integer, + Boolean, + ForeignKey, + Index, + TIMESTAMP, + Text, + BigInteger, + Numeric, + func, + or_, +) +from sqlalchemy.orm import mapped_column, relationship, Mapped + + +class UsersTokens(CrudCollection): + + __tablename__ = "users_tokens" + __exclude__fields__ = [] + + user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False) + + token_type: Mapped[str] = mapped_column(String(16), server_default="RememberMe") + 
token: Mapped[str] = mapped_column(String, server_default="") + domain: Mapped[str] = mapped_column(String, server_default="") + expires_at: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), default=str(arrow.now().shift(days=3)), + ) + + # users = relationship("Users", back_populates="tokens", foreign_keys=[user_id]) + + +class Users(CrudCollection, SelectAction): + """ + Application User frame to connect to API with assigned token-based HTTP connection + """ + + __tablename__ = "users" + __exclude__fields__ = [ + "hash_password", + "password_token", + "expiry_begins", + "related_company", + ] + + user_tag: Mapped[str] = mapped_column( + String(64), server_default="", comment="Unique tag for the user", index=True + ) + email: Mapped[str] = mapped_column( + String(128), server_default="", comment="Email address of the user", index=True + ) + phone_number: Mapped[str] = mapped_column( + String, server_default="", comment="Phone number of the user", index=True + ) + via: Mapped[str] = mapped_column( + String, + server_default="111", + comment="Email 1/ Phone 2/ User Tag 3 All 111 Only 100", + ) + + avatar: Mapped[str] = mapped_column( + String, server_default="", comment="Avatar URL for the user" + ) + hash_password: Mapped[str] = mapped_column( + String(256), server_default="", comment="Hashed password for security" + ) + password_token: Mapped[str] = mapped_column( + String(256), server_default="", comment="Token for password reset" + ) + remember_me: Mapped[bool] = mapped_column( + Boolean, server_default="0", comment="Flag to remember user login" + ) + + password_expires_day: Mapped[int] = mapped_column( + Integer, + server_default=str(Auth.PASSWORD_EXPIRE_DAY.days), + comment="Password expires in days", + ) + password_expiry_begins: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), + server_default=func.now(), + comment="Timestamp when password expiry begins", + ) + related_company: Mapped[str] = mapped_column(String, comment="Related 
Company UUID") + + person_id: Mapped[int] = mapped_column( + ForeignKey("people.id"), nullable=False, comment="Foreign key to person table" + ) + person_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Person UUID", index=True + ) + local_timezone = mapped_column( + String, server_default="GMT+3", comment="Local timezone of user" + ) + person = relationship("People", back_populates="user", foreign_keys=[person_id]) + + @property + def is_occupant(self): + return not str(self.email).split("@")[1] == Auth.ACCESS_EMAIL_EXT + + @property + def is_employee(self): + return str(self.email).split("@")[1] == Auth.ACCESS_EMAIL_EXT + + @property + def user_type(self): + return "Occupant" if self.is_occupant else "Employee" + + @classmethod + def credentials(cls): + db_session = cls.new_session() + person_object: People = People.filter_by_one( + db=db_session, system=True, id=cls.person_id + ).data + if person_object: + return { + "person_id": person_object.id, + "person_uu_id": str(person_object.uu_id), + } + return { + "person_id": None, + "person_uu_id": None, + } + + @property + def password_expiry_ends(self): + """Calculates the expiry end date based on expiry begins and expires day""" + return self.password_expiry_begins + timedelta( + days=int( + "".join( + [ + _ + for _ in str(self.password_expires_day).split(",")[0] + if _.isdigit() + ] + ) + ) + ) + # + # @classmethod + # def create_action(cls, create_user: InsertUsers, token_dict): + # db_session = cls.new_session() + # found_person = People.filter_one( + # People.uu_id == create_user.people_uu_id, + # db=db_session, + # ).data + # + # if not found_person: + # raise HTTPException(status_code=400, detail="Person not found.") + # if ( + # not any(i in str(create_user.email) for i in ["@", "."]) + # and not len(str(create_user.phone_number)) >= 10 + # ): + # raise HTTPException( + # status_code=400, + # detail="Please enter at least one valid email or phone number.", + # ) + # if not 
create_user.avatar: + # create_user.avatar = ApiStatic.PLACEHOLDER + # create_dict = create_user.model_dump() + # del create_dict["people_uu_id"] + # create_dict["person_id"] = found_person.id + # create_dict["person_uu_id"] = str(found_person.uu_id) + # create_dict["related_company"] = token_dict.selected_company.company_uu_id + # created_user = cls.find_or_create(**create_dict) + # created_user.reset_password_token(found_user=created_user) + # return created_user + # + # def get_employee_and_duty_details(self): + # from ApiLayers.Schemas import Employees, Duties + # + # db_session = self.new_session() + # found_person = People.filter_one( + # People.id == self.person_id, + # db=db_session, + # ) + # found_employees = Employees.filter_by_active( + # people_id=found_person.id, is_confirmed=True, db=db_session + # ) + # found_duties = Duties.filter_all( + # Duties.is_confirmed == True, + # Duties.id.in_( + # list(found_employee.duty_id for found_employee in found_employees.data) + # ), + # db=db_session, + # ) + # if not found_employees.count: + # raise HTTPException( + # status_code=401, + # detail={ + # "message": "Person has no confirmed duty. No employee match please register " + # "your super admin", + # "completed": False, + # }, + # ) + # return { + # "duty_list": [ + # { + # "duty_id": duty.id, + # "duty_uu_id": duty.uu_id.__str__(), + # "duty_code": duty.duty_code, + # "duty_name": duty.duty_name, + # "duty_description": duty.duty_description, + # } + # for duty in found_duties.data + # ], + # } + # + # def get_main_domain_and_other_domains(self, get_main_domain: bool = True): + # from ApiLayers.Schemas import MongoQueryIdentity + # + # query_engine = MongoQueryIdentity(company_uuid=self.related_company) + # domain_via_user = query_engine.get_domain_via_user(user_uu_id=str(self.uu_id)) + # if not domain_via_user: + # raise HTTPException( + # status_code=401, + # detail="Domain not found. 
Please contact the admin.", + # ) + # domain_via_user = domain_via_user[0] + # if get_main_domain: + # return domain_via_user.get("main_domain", None) + # return domain_via_user.get("other_domains_list", None) + + +class RelationshipDutyPeople(CrudCollection): + + __tablename__ = "relationship_duty_people" + __exclude__fields__ = [] + + company_id: Mapped[int] = mapped_column( + ForeignKey("companies.id"), nullable=False + ) # 1, 2, 3 + duties_id: Mapped[int] = mapped_column( + ForeignKey("duties.id"), nullable=False + ) # duty -> (n)person Evyos LTD + member_id: Mapped[int] = mapped_column( + ForeignKey("people.id"), nullable=False + ) # 2, 3, 4 + + relationship_type: Mapped[str] = mapped_column( + String, nullable=True, server_default="Employee" + ) # Commercial + show_only: Mapped[bool] = mapped_column(Boolean, server_default="0") + + # related_company: Mapped[List["Company"]] = relationship( + # "Company", + # back_populates="related_companies", + # foreign_keys=[related_company_id], + # ) + + __table_args__ = ( + Index( + "person_relationship_ndx_01", + company_id, + duties_id, + member_id, + relationship_type, + unique=True, + ), + {"comment": "Person Relationship Information"}, + ) + + +class People(CrudCollection, SelectAction): + """ + People that are related to users in the application + """ + + __tablename__ = "people" + __exclude__fields__ = [] + __many__table__ = RelationshipDutyPeople + __encrypt_list__ = [ + "father_name", + "mother_name", + "country_code", + "national_identity_id", + "birth_place", + "birth_date", + "tax_no", + ] + + firstname: Mapped[str] = mapped_column( + String, nullable=False, comment="First name of the person" + ) + surname: Mapped[str] = mapped_column( + String(24), nullable=False, comment="Surname of the person" + ) + middle_name: Mapped[str] = mapped_column( + String, server_default="", comment="Middle name of the person" + ) + sex_code: Mapped[str] = mapped_column( + String(1), nullable=False, comment="Sex code of the 
person (e.g., M/F)" + ) + person_ref: Mapped[str] = mapped_column( + String, server_default="", comment="Reference ID for the person" + ) + person_tag: Mapped[str] = mapped_column( + String, server_default="", comment="Unique tag for the person" + ) + + # ENCRYPT DATA + father_name: Mapped[str] = mapped_column( + String, server_default="", comment="Father's name of the person" + ) + mother_name: Mapped[str] = mapped_column( + String, server_default="", comment="Mother's name of the person" + ) + country_code: Mapped[str] = mapped_column( + String(4), server_default="TR", comment="Country code of the person" + ) + national_identity_id: Mapped[str] = mapped_column( + String, server_default="", comment="National identity ID of the person" + ) + birth_place: Mapped[str] = mapped_column( + String, server_default="", comment="Birth place of the person" + ) + birth_date: Mapped[TIMESTAMP] = mapped_column( + TIMESTAMP(timezone=True), + server_default="1900-01-01", + comment="Birth date of the person", + ) + tax_no: Mapped[str] = mapped_column( + String, server_default="", comment="Tax number of the person" + ) + # Receive at Create person + # language = mapped_column( + # String, comment="Language code of the person" + # ) + # currency = mapped_column( + # String, comment="Currency code of the person" + # ) + + # ENCRYPT DATA + user = relationship( + "Users", back_populates="person", foreign_keys="Users.person_id" + ) + + __table_args__ = ( + Index( + "person_ndx_001", + national_identity_id, + unique=True, + ), + {"comment": "Person Information"}, + ) + + @property + def full_name(self): + if self.middle_name: + return f"{self.firstname} {self.middle_name} {self.surname}" + return f"{self.firstname} {self.surname}" + # + # @classmethod + # def create_action(cls, data: InsertPerson, token): + # from ApiLayers.Schemas import Duties + # + # token_duties_id, token_company_id = ( + # token.selected_company.duty_id, + # token.selected_company.company_id, + # ) + # bulk_duty = 
Duties.get_bulk_duties_of_a_company(company_id=token_company_id) + # + # if str(data.country_code) == "TR": + # if not len(data.national_identity_id) == 11: + # raise HTTPException( + # status_code=400, + # detail="Please enter a valid national identity number.", + # ) + # if data.tax_no and not len(str(data.tax_no)) == 10: + # raise HTTPException( + # status_code=400, + # detail="Please enter a valid tax number.", + # ) + # + # create_dict = data.model_dump() + # create_dict["firstname"] = str(create_dict["firstname"]).capitalize() + # create_dict["middle_name"] = str(create_dict["middle_name"]).capitalize() + # create_dict["surname"] = str(create_dict["surname"]).upper() + # create_dict["birth_place"] = str(create_dict["birth_place"]).upper() + # created_people = cls.find_or_create(**create_dict) + # created_people.update(is_confirmed=True) + # duty_people = RelationshipDutyPeople.find_or_create( + # company_id=token.selected_company.company_id, + # duties_id=bulk_duty.id, + # member_id=created_people.id, + # ) + # duty_people.update(is_confirmed=True) + # return created_people + + +class RelationshipEmployee2PostCode(CrudCollection): + """ + Build2EmployeeRelationship class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "relationship_employee2postcode" + __exclude__fields__ = [] + __include__fields__ = [] + + company_id: Mapped[int] = mapped_column( + ForeignKey("companies.id"), nullable=True + ) # 1, 2, 3 + employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False) + member_id: Mapped[int] = mapped_column( + ForeignKey("address_postcode.id"), nullable=False + ) + + relationship_type: Mapped[str] = mapped_column( + String, nullable=True, server_default="Employee" + ) # Commercial + show_only: Mapped[bool] = mapped_column(Boolean, server_default="0") + + __table_args__ = ({"comment": "Build2Employee Relationship Information"},) + + +class AddressPostcode(CrudCollection, SelectActionWithEmployee): + """ + 
Postcode class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_postcode" + __exclude__fields__ = [] + __access_by__ = [] + __many__table__ = RelationshipEmployee2PostCode + + street_id: Mapped[int] = mapped_column(ForeignKey("address_street.id")) + street_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Street UUID" + ) + postcode: Mapped[str] = mapped_column( + String(32), nullable=False, comment="Postcode" + ) + + __table_args__ = ({"comment": "Postcode Information"},) + + +class Addresses(CrudCollection): + """ + Address class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "addresses" + __exclude__fields__ = [] + + build_number: Mapped[str] = mapped_column( + String(24), nullable=False, comment="Build Number" + ) + door_number: Mapped[str] = mapped_column( + String(24), nullable=True, comment="Door Number" + ) + floor_number: Mapped[str] = mapped_column( + String(24), nullable=True, comment="Floor Number" + ) + + comment_address: Mapped[str] = mapped_column( + String, nullable=False, comment="Address" + ) + letter_address: Mapped[str] = mapped_column( + String, nullable=False, comment="Address" + ) + short_letter_address: Mapped[str] = mapped_column( + String, nullable=False, comment="Address" + ) + + latitude: Mapped[float] = mapped_column(Numeric(20, 12), server_default="0") + longitude: Mapped[float] = mapped_column(Numeric(20, 12), server_default="0") + + street_id: Mapped[int] = mapped_column( + ForeignKey("address_street.id"), nullable=False + ) + street_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Street UUID" + ) + + @classmethod + def list_via_employee(cls, token_dict, filter_expr=None): + post_code_list = RelationshipEmployee2PostCode.filter_all( + RelationshipEmployee2PostCode.employee_id + == token_dict.selected_company.employee_id, + ).data + post_code_id_list = [post_code.member_id for post_code in post_code_list] + if not 
post_code_id_list: + raise HTTPException( + status_code=404, + detail="User has no post code registered. User can not list addresses.", + ) + cls.pre_query = cls.filter_all(cls.post_code_id.in_(post_code_id_list)).query + filter_cls = cls.filter_all(*filter_expr or []) + cls.pre_query = None + return filter_cls.data + + # buildings: Mapped["Build"] = relationship( + # "Build", back_populates="addresses", foreign_keys="Build.address_id" + # ) + # site: Mapped["BuildSites"] = relationship( + # "BuildSites", back_populates="addresses", foreign_keys="BuildSites.address_id" + # ) + # official_companies: Mapped["Companies"] = relationship( + # "Company", + # back_populates="official_address", + # foreign_keys="Company.official_address_id", + # ) + + # @classmethod + # def create_action(cls, request, create_address: InsertAddress): + # from services.redis.auth_actions.token import parse_token_object_to_dict + # + # token_dict = parse_token_object_to_dict(request=request) + # data_dict = create_address.model_dump() + # post_code = AddressPostcode.find_one(uu_id=create_address.post_code_uu_id) + # if not post_code: + # raise HTTPException( + # status_code=404, + # detail="Post code not found.", + # ) + # if Employee2AddressRelationship.post_code_id.find_one( + # employee_id=token_dict.selected_company.employee_id, + # post_code_id=post_code.id, + # ): + # data_dict["post_code_id"] = post_code.id + # del data_dict["post_code_uu_id"] + # return cls.find_or_create(**create_address.model_dump()) + # raise HTTPException( + # status_code=401, + # detail=f"User is not qualified to create address at this post code {post_code.postcode}", + # ) + + # __table_args__ = ( + # Index("_address_ndx_00", country_code, b_state, city, district), + # {"comment": "Address Information"}, + # ) + + +class AddressGeographicLocations(CrudCollection): + """ + Country class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_geographic_locations" + 
__exclude__fields__ = [] + + geo_table: Mapped[str] = mapped_column( + String, nullable=False, comment="Address Table Name" + ) + geo_id: Mapped[int] = mapped_column( + Integer, nullable=False, comment="Address Table ID" + ) + geo_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Geographic Location Name" + ) + geo_latitude: Mapped[float] = mapped_column( + Numeric(20, 6), server_default="0", comment="Geographic Location Name" + ) + geo_longitude: Mapped[float] = mapped_column( + Numeric(20, 6), server_default="0", comment="Geographic Location Latitude" + ) + geo_altitude: Mapped[float] = mapped_column( + Numeric(20, 6), server_default="0", comment="Geographic Location Longitude" + ) + geo_description: Mapped[str] = mapped_column( + Text, nullable=False, comment="Geographic Location Description" + ) + geo_area_size: Mapped[float] = mapped_column( + Numeric(20, 2), + nullable=True, + server_default="0", + comment="Geographic Location Area Size", + ) + geo_population: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Geographic Location Population" + ) + # geo_geom_point = mapped_column(Geometry('POINT', srid=4326), nullable=True, comment="Geographic Location Points") + # geo_geom_polygon = mapped_column(Geometry('POLYGON', srid=4326), nullable=True, + # comment="Geographic Location Vector geographic information (polygon)") + # geo_centroid = mapped_column( GEOMETRY(POINT, 4326), nullable=True, + # comment="Geographic Location center of gravity of the region(points)") + + __table_args__ = ( + Index("_address_geographic_locations_ndx_00", geo_table, geo_id), + Index("_address_geographic_locations_ndx_01", geo_latitude, geo_longitude), + {"comment": "Geographic Location Information"}, + ) + + +class AddressCountry(CrudCollection): + """ + Country class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_country" + __exclude__fields__ = [] + + country_code: Mapped[str] = mapped_column( + String(16), 
nullable=False, comment="Country Code" + ) + country_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Country Name" + ) + money_code: Mapped[str] = mapped_column( + String(12), nullable=True, comment="Money Code" + ) + language: Mapped[str] = mapped_column( + String, nullable=True, comment="Language Code" + ) + address_geographic_id: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + + __table_args__ = ( + Index("_address_country_ndx_00", money_code), + Index("_address_country_ndx_01", country_code, unique=True), + {"comment": "Country Information"}, + ) + + +class AddressState(CrudCollection): + """ + State class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_state" + __exclude__fields__ = [] + + state_code: Mapped[str] = mapped_column( + String(16), nullable=False, comment="State Code" + ) + state_name: Mapped[str] = mapped_column( + String, nullable=False, comment="State Name" + ) + licence_plate: Mapped[str] = mapped_column( + String(24), nullable=True, comment="Sign Code" + ) + phone_code: Mapped[str] = mapped_column( + String(36), nullable=True, comment="Phone Code" + ) + gov_code: Mapped[str] = mapped_column( + String(128), nullable=True, comment="Government Code" + ) + address_geographic_id: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + + country_id: Mapped[int] = mapped_column(ForeignKey("address_country.id")) + country_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Country UUID" + ) + + __table_args__ = ( + Index( + "_address_state_ndx_01", + country_id, + state_code, + unique=True, + ), + {"comment": "State Information"}, + ) + + +class AddressCity(CrudCollection): + """ + City class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_city" + __exclude__fields__ = [] + + city_code: Mapped[str] = mapped_column( + String(24), nullable=False, 
comment="City Code" + ) + city_name: Mapped[str] = mapped_column(String, nullable=False, comment="City Name") + licence_plate: Mapped[str] = mapped_column( + String(24), nullable=True, comment="Sign Code" + ) + phone_code: Mapped[str] = mapped_column( + String(36), nullable=True, comment="Phone Code" + ) + gov_code: Mapped[str] = mapped_column( + String(128), nullable=True, comment="Government Code" + ) + address_geographic_id: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + + state_id: Mapped[int] = mapped_column(ForeignKey("address_state.id")) + state_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="State UUID" + ) + + __table_args__ = ( + Index( + "_address_city_ndx_01", + state_id, + city_code, + unique=True, + ), + {"comment": "City Information"}, + ) + + +class AddressDistrict(CrudCollection): + """ + District class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_district" + __exclude__fields__ = [] + + district_code: Mapped[str] = mapped_column( + String(16), nullable=False, comment="District Code" + ) + district_name: Mapped[str] = mapped_column( + String, nullable=False, comment="District Name" + ) + phone_code: Mapped[str] = mapped_column( + String(36), nullable=True, comment="Phone Code" + ) + gov_code: Mapped[str] = mapped_column( + String(128), nullable=True, comment="Government Code" + ) + address_geographic_id: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + + city_id: Mapped[int] = mapped_column( + ForeignKey("address_city.id"), nullable=False, comment="City ID" + ) + city_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="City UUID" + ) + + __table_args__ = ( + Index( + "_address_district_ndx_01", + city_id, + district_code, + unique=True, + ), + {"comment": "District Information"}, + ) + + +class AddressLocality(CrudCollection): + """ + Locality class based on 
declarative_base and BaseMixin via session + """ + + __tablename__ = "address_locality" + __exclude__fields__ = [] + + locality_code: Mapped[str] = mapped_column( + String(16), nullable=False, comment="Locality Code" + ) + locality_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Locality Name" + ) + type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name") + type_description: Mapped[str] = mapped_column( + String, nullable=True, comment="Type Name" + ) + gov_code: Mapped[str] = mapped_column( + String(128), nullable=True, comment="Government Code" + ) + address_show: Mapped[bool] = mapped_column(Boolean, server_default="1") + address_geographic_id: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + + district_id: Mapped[int] = mapped_column( + ForeignKey("address_district.id"), nullable=False, comment="District ID" + ) + district_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="District UUID" + ) + + __table_args__ = ( + Index( + "_address_locality_ndx_01", + district_id, + locality_code, + unique=True, + ), + {"comment": "Locality Information"}, + ) + + +class AddressNeighborhood(CrudCollection): + """ + Neighborhood class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_neighborhood" + __exclude__fields__ = [] + + neighborhood_code: Mapped[str] = mapped_column( + String(16), nullable=False, comment="Neighborhood Code" + ) + neighborhood_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Neighborhood Name" + ) + type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name") + type_description: Mapped[str] = mapped_column( + String, nullable=True, comment="Type Name" + ) + gov_code: Mapped[str] = mapped_column( + String(128), nullable=True, comment="Government Code" + ) + address_show: Mapped[bool] = mapped_column(Boolean, server_default="1") + address_geographic_id: 
Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + + district_id: Mapped[int] = mapped_column( + ForeignKey("address_district.id"), nullable=True, comment="District ID" + ) + district_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="District UUID" + ) + locality_id: Mapped[int] = mapped_column( + ForeignKey("address_locality.id"), nullable=True, comment="Locality ID" + ) + locality_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Locality UUID" + ) + + __table_args__ = ( + Index( + "_address_neighborhood_ndx_01", + locality_id, + neighborhood_code, + unique=True, + ), + {"comment": "Neighborhood Information"}, + ) + + +class AddressStreet(CrudCollection): + """ + Street class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "address_street" + __exclude__fields__ = [] + + street_code: Mapped[str] = mapped_column( + String(16), nullable=False, comment="Street Code" + ) + street_name: Mapped[str] = mapped_column( + String, nullable=False, comment="Street Name" + ) + type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name") + type_description: Mapped[str] = mapped_column( + String, nullable=True, comment="Type Name" + ) + gov_code: Mapped[str] = mapped_column( + String(128), nullable=True, comment="Government Code" + ) + + address_geographic_id: Mapped[int] = mapped_column( + BigInteger, nullable=True, comment="Address Geographic Id" + ) + neighborhood_id: Mapped[int] = mapped_column( + ForeignKey("address_neighborhood.id"), nullable=False, comment="Neighborhood ID" + ) + neighborhood_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Neighborhood UUID" + ) + + __table_args__ = ( + Index("_address_street_ndx_01", neighborhood_id, street_code, unique=True), + {"comment": "Street Information"}, + ) + + @classmethod + def search_address_text(cls, search_text, token_dict=None): + field_dict = { + 
"AddressStreet.uu_id": cls.uu_id, + "AddressCountry.uu_id": AddressCountry.uu_id, + "AddressState.uu_id": AddressState.uu_id, + "AddressCity.uu_id": AddressCity.uu_id, + "AddressDistrict.uu_id": AddressDistrict.uu_id, + "AddressLocality.uu_id": AddressLocality.uu_id, + "AddressNeighborhood.uu_id": AddressNeighborhood.uu_id, + "AddressCountry.country_name": AddressCountry.country_name, + "AddressState.state_name": AddressState.state_name, + "AddressCity.city_name": AddressCity.city_name, + "AddressDistrict.district_name": AddressDistrict.district_name, + "AddressLocality.locality_name": AddressLocality.locality_name, + "AddressNeighborhood.neighborhood_name": AddressNeighborhood.neighborhood_name, + "AddressStreet.street_name": cls.street_name, + } + joined_data = ( + cls.session.query(*list(field_dict.values())) + .select_from(cls) + .join(AddressNeighborhood, AddressNeighborhood.id == cls.neighborhood_id) + .join( + AddressLocality, AddressLocality.id == AddressNeighborhood.locality_id + ) + .join(AddressDistrict, AddressDistrict.id == AddressLocality.district_id) + .join(AddressCity, AddressCity.id == AddressDistrict.city_id) + .join(AddressState, AddressState.id == AddressCity.state_id) + .join(AddressCountry, AddressCountry.id == AddressState.country_id) + .filter( + or_( + AddressNeighborhood.neighborhood_name.ilike( + f"%{str(search_text).upper()}%" + ), + AddressLocality.locality_name.ilike( + f"%{str(search_text).upper()}%" + ), + AddressDistrict.district_name.ilike( + f"%{str(search_text).upper()}%" + ), + # AddressCity.city_name.ilike(f"%{str(search_text).upper()}%"), + # AddressState.state_name.ilike(f"%{str(search_text).upper()}%"), + # AddressCountry.country_name.ilike(f"%{str(search_text).upper()}%"), + cls.street_name.ilike(f"%{str(search_text).upper()}%"), + ), + ) + ) + # select([mytable.c.id]).where( + # func.to_tsvector('english', mytable.c.title) \ + # .match('somestring', postgresql_regconfig='english') + # ) + joined_statement = joined_data + 
joined_data = joined_data.first() + if not joined_data: + raise HTTPException( + status_code=404, + detail="No address found with the given search text.", + ) + return dict( + query=joined_statement, + schema=list(field_dict.keys()), + ) + + +class OccupantTypes(CrudCollection): + """ + Occupant Types class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "occupant_types" + __exclude__fields__ = [] + + occupant_type: Mapped[str] = mapped_column( + String, nullable=False, comment="Occupant Type" + ) + occupant_description: Mapped[str] = mapped_column(String, server_default="") + occupant_code: Mapped[str] = mapped_column(String, server_default="") + occupant_category: Mapped[str] = mapped_column(String, server_default="") + occupant_category_type: Mapped[str] = mapped_column(String, server_default="") + occupant_is_unique: Mapped[bool] = mapped_column(Boolean, server_default="0") + + __table_args__ = ({"comment": "Occupant Types Information"},) + + @classmethod + def get_manager_occupant_type(cls): + if occupant_types := cls.filter_all( + cls.occupant_is_unique == True, cls.occupant_category_type == "MT" + ).data: + return [occupant.uu_id.__str__() for occupant in occupant_types] + raise HTTPException( + status_code=404, + detail="No manager type found.", + ) + + +class Contracts(CrudCollection): + """ + Contract class based on declarative_base and BaseMixin via session + """ + + __tablename__ = "contracts" + __exclude__fields__ = [] + + contract_type: Mapped[str] = mapped_column( + String(5), + nullable=False, + comment="The code for personnel is P and the code for companies is C.", + ) + contract_title: Mapped[str] = mapped_column(String(255)) + contract_details: Mapped[str] = mapped_column(Text) + contract_terms: Mapped[str] = mapped_column(Text) + + contract_code: Mapped[str] = mapped_column( + String(100), + nullable=False, + comment="contract_code is the unique code given by the system.", + ) + contract_date: Mapped[TIMESTAMP] = 
mapped_column( + TIMESTAMP(timezone=True), + server_default="2099-12-31 23:59:59", + comment="contract date is the date the contract is made. " + "expire start is the start date of the contract, expire en is the end date of the contract.", + ) + + company_id: Mapped[int] = mapped_column( + Integer, ForeignKey("companies.id"), nullable=True + ) + company_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Company UUID" + ) + + person_id: Mapped[int] = mapped_column( + Integer, ForeignKey("people.id"), nullable=True + ) + person_uu_id: Mapped[str] = mapped_column( + String, server_default="", comment="Person UUID" + ) + + @classmethod + def retrieve_contact_no(cls): + # from api_library.date_time_actions.date_functions import system_arrow + + # todo When create record contract_code == below string + related_date, counter = Contracts.client_arrow.now(), 1 + return ( + f"{related_date.date().year}{str(cls.contract_type)}{str(counter).zfill(6)}" + ) + + __table_args__ = ( + Index("_contract_ndx_01", contract_code, unique=True), + {"comment": "Contract Information"}, + ) diff --git a/Schemas/others/enums.py b/Schemas/others/enums.py new file mode 100644 index 0000000..71dc380 --- /dev/null +++ b/Schemas/others/enums.py @@ -0,0 +1,103 @@ +from fastapi.exceptions import HTTPException + +from sqlalchemy import ( + UUID, + String, + text, +) +from sqlalchemy.orm import ( + Mapped, + mapped_column, +) +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + +class ApiEnumDropdown(CrudCollection): + __tablename__ = "api_enum_dropdown" + __exclude__fields__ = ["enum_class"] + __language_model__ = None + + id: Mapped[int] = mapped_column(primary_key=True) + uu_id: Mapped[str] = mapped_column( + UUID, server_default=text("gen_random_uuid()"), index=True, unique=True + ) + enum_class: Mapped[str] = mapped_column( + String, nullable=False, comment="Enum Constant Name" + ) + key: Mapped[str] = mapped_column(String, nullable=False, 
comment="Enum Key") + value: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Value") + description: Mapped[str] = mapped_column(String, nullable=True) + + __table_args__ = ({"comment": "Enum objets that are linked to tables"},) + + @classmethod + def get_by_uuid(cls, uuid: str): + return cls.filter_by_one(system=True, uu_id=str(uuid)).data + + @classmethod + def get_debit_search(cls, search_debit: str = None, search_uu_id: str = None): + if search_uu_id: + if search := cls.filter_one( + cls.enum_class.in_(["DebitTypes"]), + cls.uu_id == search_uu_id, + system=True, + ).data: + return search + elif search_debit: + if search := cls.filter_one( + cls.enum_class.in_(["DebitTypes"]), cls.key == search_debit, system=True + ).data: + return search + return cls.filter_all(cls.enum_class.in_(["DebitTypes"]), system=True).data + + @classmethod + def get_due_types(cls): + if due_list := cls.filter_all( + cls.enum_class == "BuildDuesTypes", + cls.key.in_(["BDT-A", "BDT-D"]), + system=True, + ).data: + return [due.uu_id.__str__() for due in due_list] + raise HTTPException( + status_code=404, + detail="No dues types found", + ) + + @classmethod + def due_type_search(cls, search_management: str = None, search_uu_id: str = None): + if search_uu_id: + if search := cls.filter_one( + cls.enum_class.in_(["BuildDuesTypes"]), + cls.uu_id == search_uu_id, + system=True, + ).data: + return search + elif search_management: + if search := cls.filter_one( + cls.enum_class.in_(["BuildDuesTypes"]), + cls.key == search_management, + system=True, + ).data: + return search + return cls.filter_all(cls.enum_class.in_(["BuildDuesTypes"]), system=True).data + + def get_enum_dict(self): + return { + "uu_id": str(self.uu_id), + "enum_class": self.enum_class, + "key": self.key, + "value": self.value, + "description": self.description, + } + + @classmethod + def uuid_of_enum(cls, enum_class: str, key: str): + return str( + getattr( + cls.filter_one( + cls.enum_class == enum_class, 
cls.key == key, system=True + ).data, + "uu_id", + None, + ) + ) diff --git a/Schemas/rules/rules.py b/Schemas/rules/rules.py new file mode 100644 index 0000000..8e77d01 --- /dev/null +++ b/Schemas/rules/rules.py @@ -0,0 +1,29 @@ +from sqlalchemy import String +from sqlalchemy.orm import mapped_column, Mapped + +from Services.PostgresService.controllers.mixin_controllers import CrudCollection + + +class EndpointRestriction(CrudCollection): + """ + Initialize Endpoint Restriction with default values + """ + + __tablename__ = "endpoint_restriction" + __exclude__fields__ = [] + + endpoint_function: Mapped[str] = mapped_column( + String, server_default="", comment="Function name of the API endpoint" + ) + endpoint_name: Mapped[str] = mapped_column( + String, server_default="", comment="Name of the API endpoint" + ) + endpoint_method: Mapped[str] = mapped_column( + String, server_default="", comment="HTTP method used by the endpoint" + ) + endpoint_desc: Mapped[str] = mapped_column( + String, server_default="", comment="Description of the endpoint" + ) + endpoint_code: Mapped[str] = mapped_column( + String, server_default="", unique=True, comment="Unique code for the endpoint" + ) diff --git a/Services/MongoService/handlers.py b/Services/MongoService/handlers.py new file mode 100644 index 0000000..c210f3e --- /dev/null +++ b/Services/MongoService/handlers.py @@ -0,0 +1,25 @@ +from typing import Any, Dict, List, Optional +from functools import wraps +# from pymongo.errors import ( +# ConnectionFailure, +# OperationFailure, +# ServerSelectionTimeoutError, +# PyMongoError, +# ) + + +def mongo_error_wrapper(func): + """Decorator to handle MongoDB operation errors. + + Catches MongoDB-specific errors and converts them to HTTPExceptionApi. 
def mongo_error_wrapper(func):
    """Decorator intended to translate MongoDB driver errors into API errors.

    NOTE(review): the pymongo exception imports above are commented out, so
    this is currently a transparent pass-through. The previous docstring
    claimed errors were converted to ``HTTPExceptionApi``, which was not
    true -- re-enable the imports and add the ``except`` clauses before
    relying on this wrapper for error handling.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # No translation yet: simply delegate to the wrapped callable.
        return func(*args, **kwargs)

    return wrapper
class MongoAggregateMixin(MongoBase):
    """Mixin for MongoDB aggregation operations."""

    @mongo_error_wrapper
    def aggregate(
        self,
        collection: Optional[Collection] = None,
        pipeline: Optional[List[Dict[str, Any]]] = None,
    ):
        """Execute an aggregation pipeline.

        Args:
            collection: Collection to run against. Defaults to
                ``self.collection`` so this mixin behaves like its sibling
                mixins, which all operate on the bound collection; existing
                callers that pass a collection positionally keep working.
            pipeline: Aggregation pipeline stages.

        Returns:
            The pymongo cursor of aggregation results.
        """
        target = collection if collection is not None else self.collection
        return target.aggregate(pipeline)
+ + Args: + client: MongoDB client + database: Database name to use + storage_reason: Storage reason for collection naming + """ + self.delimiter = "|" + self._client = client + self._database = database + self._storage_reason: list[str] = storage_reason + self._collection = None + self.use_collection(storage_reason) + + @staticmethod + @contextmanager + def mongo_client() -> Iterator[MongoClient]: + """ + Context provider for MongoDB test client. + # Example Usage + with mongo_client() as client: + db = client["your_database"] + print(db.list_collection_names()) + """ + client = MongoClient(MongoConfig.URL) + try: + client.admin.command("ping") # Test connection + yield client + finally: + client.close() # Ensure proper cleanup + + @property + def collection(self) -> Collection: + """Get current MongoDB collection.""" + return self._collection + + def use_collection(self, storage_name_list: list[str]) -> None: + """Switch to a different collection. + + Args: + storage_name_list: New storage reason for collection naming + """ + collection_name = "" + for each_storage_reason in storage_name_list: + if self.delimiter in str(each_storage_reason): + raise ValueError(f"Storage reason cannot contain delimiter : {self.delimiter}") + collection_name += f"{self.delimiter}{each_storage_reason}" + collection_name = collection_name[1:] + self._collection = self._client[self._database][collection_name] diff --git a/Services/PostgresService/controllers/core_controllers.py b/Services/PostgresService/controllers/core_controllers.py new file mode 100644 index 0000000..a13cbfe --- /dev/null +++ b/Services/PostgresService/controllers/core_controllers.py @@ -0,0 +1,108 @@ +from typing import Type, TypeVar + +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.orm import Session + +from fastapi import status +from fastapi.exceptions import HTTPException + +from Services.PostgresService.database import get_db + + +# Type variable for class methods returning self +T = 
# Type variable for class methods returning self
T = TypeVar("T", bound="FilterAttributes")


class BaseAlchemyModel:
    """Controller for SQLAlchemy-to-database transactions.

    Provides session access plus the save/flush/rollback/destroy primitives
    shared by every model in the service.
    """

    __abstract__ = True

    @classmethod
    def new_session(cls) -> Session:
        """Return a database session.

        NOTE(review): ``get_db`` is used as a context manager, so the session
        is finalized the moment this ``with`` block returns -- callers may
        receive an already-closed session. Confirm ``get_db`` semantics and
        consider returning an unmanaged session instead.
        """
        with get_db() as session:
            return session

    @classmethod
    def flush(cls: Type[T], db: Session) -> T:
        """Flush pending changes on *db* without committing.

        Args:
            db: Database session.

        Returns:
            The class itself (for chaining).

        Raises:
            HTTPException: 406 when the flush fails.
        """
        try:
            db.flush()
            return cls
        except SQLAlchemyError as e:
            # Include the underlying error, matching save()'s detail shape
            # (the original dropped it here).
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail={
                    "message": "Database operation failed",
                    "error": str(e),
                },
            )

    def destroy(self: Type[T], db: Session) -> None:
        """Mark this record for deletion on *db* (takes effect at flush/commit).

        Args:
            db: Database session.
        """
        db.delete(self)

    @classmethod
    def save(cls: Type[T], db: Session) -> None:
        """Commit pending changes, rolling back and raising on failure.

        Args:
            db: Database session.

        Raises:
            HTTPException: 406 for SQLAlchemy errors, 500 for anything else.
        """
        try:
            db.commit()
            db.flush()
        except SQLAlchemyError as e:
            db.rollback()
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail={
                    "message": "Alchemy save operation failed",
                    "error": str(e),
                },
            )
        except Exception as e:
            db.rollback()
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail={
                    "message": "Unknown exception raised.",
                    "error": str(e),
                },
            )

    @classmethod
    def rollback(cls: Type[T], db: Session) -> None:
        """Roll back the current transaction on *db*.

        Args:
            db: Database session.
        """
        db.rollback()
+ + Args: + db: Database session + """ + db.rollback() diff --git a/Services/PostgresService/controllers/crud_controllers.py b/Services/PostgresService/controllers/crud_controllers.py new file mode 100644 index 0000000..9fdfee2 --- /dev/null +++ b/Services/PostgresService/controllers/crud_controllers.py @@ -0,0 +1,249 @@ +import arrow +import datetime + +from typing import Optional, Any, Dict +from sqlalchemy.orm import Session, Mapped +from pydantic import BaseModel +from fastapi.exceptions import HTTPException + +from decimal import Decimal +from sqlalchemy import TIMESTAMP, NUMERIC +from sqlalchemy.orm.attributes import InstrumentedAttribute + + +class Credentials(BaseModel): + """ + Class to store user credentials. + """ + person_id: int + person_name: str + full_name: Optional[str] = None + + +class MetaData: + """ + Class to store metadata for a query. + """ + created: bool = False + updated: bool = False + + +class CRUDModel: + + __abstract__ = True + + creds: Credentials = None + meta_data: MetaData = MetaData() + + @classmethod + def create_credentials(cls, record_created) -> None: + """ + Save user credentials for tracking. + + Args: + record_created: Record that created or updated + """ + if getattr(cls.creds, "person_id", None) and getattr(cls.creds, "person_name", None): + record_created.created_by_id = cls.creds.person_id + record_created.created_by = cls.creds.person_name + return + + @classmethod + def raise_exception(cls): + raise HTTPException( + status_code=400, + detail={ + "message": "Exception raised.", + }, + ) + + @classmethod + def create_or_abort(cls, db: Session, **kwargs): + """ + Create a new record or abort if it already exists. 
+ + Args: + db: Database session + **kwargs: Record fields + + Returns: + New record if successfully created + """ + + # Search for existing record + query = db.query(cls).filter( + cls.expiry_ends > str(arrow.now()), cls.expiry_starts <= str(arrow.now()), + ) + + for key, value in kwargs.items(): + if hasattr(cls, key): + query = query.filter(getattr(cls, key) == value) + + already_record = query.first() + # Handle existing record + if already_record or already_record.deleted: + cls.raise_exception() + + # Create new record + created_record = cls() + for key, value in kwargs.items(): + setattr(created_record, key, value) + cls.create_credentials(created_record) + db.add(created_record) + db.flush() + return created_record + + @classmethod + def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]: + """ + Process a field value based on its type and convert it to the appropriate format. + + Args: + val: Field value + key: Field name + + Returns: + Tuple of (should_include, processed_value) + """ + key_ = cls.__annotations__.get(key, None) + is_primary = key in cls.primary_keys + row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None)) + + # Skip primary keys and foreign keys + if is_primary or row_attr: + return False, None + + if val is None: # Handle None values + return True, None + + if str(key[-5:]).lower() == "uu_id": # Special handling for UUID fields + return True, str(val) + + if key_: # Handle typed fields + if key_ == Mapped[int]: + return True, int(val) + elif key_ == Mapped[bool]: + return True, bool(val) + elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]: + return True, round(float(val), 3) + elif key_ == Mapped[TIMESTAMP]: + return True, str(arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")) + elif key_ == Mapped[str]: + return True, str(val) + else: # Handle based on Python types + if isinstance(val, datetime.datetime): + return True, str(arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")) + elif 
isinstance(val, bool): + return True, bool(val) + elif isinstance(val, (float, Decimal)): + return True, round(float(val), 3) + elif isinstance(val, int): + return True, int(val) + elif isinstance(val, str): + return True, str(val) + elif val is None: + return True, None + + return False, None + + def get_dict(self, exclude_list: Optional[list[InstrumentedAttribute]] = None) -> Dict[str, Any]: + """ + Convert model instance to dictionary with customizable fields. + Returns: + Dictionary representation of the model + Dictionary returns only UUID fields and fields that are not in exclude_list + """ + return_dict: Dict[str, Any] = {} # Handle default field selection + exclude_list = exclude_list or [] + exclude_list = [exclude_arg.key for exclude_arg in exclude_list] + + columns_set = set(self.columns) + columns_list = set([col for col in list(columns_set) if str(col)[-2:] != "id"]) + columns_extend = set(col for col in list(columns_set) if str(col)[-5:].lower() == "uu_id") + columns_list = set(columns_list) | set(columns_extend) + columns_list = list(set(columns_list) - set(exclude_list)) + + for key in columns_list: + val = getattr(self, key) + correct, value_of_database = self.iterate_over_variables(val, key) + if correct: + return_dict[key] = value_of_database + + return return_dict + + @classmethod + def find_or_create( + cls, db: Session, exclude_args: Optional[list[InstrumentedAttribute]] = None, **kwargs + ): + """ + Find an existing record matching the criteria or create a new one. 
+ + Args: + db: Database session + exclude_args: Keys to exclude from search + **kwargs: Search/creation criteria + + Returns: + Existing or newly created record + """ + # Search for existing record + query = db.query(cls).filter( + cls.expiry_ends > str(arrow.now()), cls.expiry_starts <= str(arrow.now()), + ) + exclude_args = exclude_args or [] + exclude_args = [exclude_arg.key for exclude_arg in exclude_args] + for key, value in kwargs.items(): + if hasattr(cls, key) and key not in exclude_args: + query = query.filter(getattr(cls, key) == value) + + already_record = query.first() + if already_record: # Handle existing record + cls.meta_data.created = False + return already_record + + # Create new record + created_record = cls() + for key, value in kwargs.items(): + setattr(created_record, key, value) + cls.create_credentials(created_record) + + db.add(created_record) + db.flush() + cls.meta_data.created = True + return created_record + + def update(self, db: Session, **kwargs): + """ + Update the record with new values. + + Args: + db: Database session + **kwargs: Fields to update + + Returns: + Updated record + """ + for key, value in kwargs.items(): + setattr(self, key, value) + + self.update_credentials() + try: + db.flush() + self.meta_data.updated = True + except Exception as e: + print('Error:', e) + self.meta_data.updated = False + db.rollback() + return self + + def update_credentials(self) -> None: + """ + Save user credentials for tracking. 
+ """ + # Update confirmation or modification tracking + person_id = getattr(self.creds, "person_id", None) + person_name = getattr(self.creds, "person_name", None) + if person_id and person_name: + self.updated_by_id = self.creds.person_id + self.updated_by = self.creds.person_name + return diff --git a/Services/PostgresService/controllers/filter_controllers.py b/Services/PostgresService/controllers/filter_controllers.py new file mode 100644 index 0000000..254ac38 --- /dev/null +++ b/Services/PostgresService/controllers/filter_controllers.py @@ -0,0 +1,202 @@ +""" +Advanced filtering functionality for SQLAlchemy models. + +This module provides a comprehensive set of filtering capabilities for SQLAlchemy models, +including pagination, ordering, and complex query building. +""" + +from __future__ import annotations +import arrow + +from typing import Any, TypeVar, Type, Union, Optional + +from sqlalchemy import ColumnExpressionArgument +from sqlalchemy.orm import Query, Session +from sqlalchemy.sql.elements import BinaryExpression + +from Services.PostgresService.controllers.response_controllers import PostgresResponse + + +T = TypeVar("T", bound="QueryModel") + + +class QueryModel: + + pre_query = None + __abstract__ = True + + @classmethod + def _query(cls: Type[T], db: Session) -> Query: + """Returns the query to use in the model.""" + return cls.pre_query if cls.pre_query else db.query(cls) + + @classmethod + def add_new_arg_to_args(cls: Type[T], args_list, argument, value): + new_arg_list = list( + set( + args_ + for args_ in list(args_list) + if isinstance(args_, BinaryExpression) + ) + ) + arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None) + # arg_right = lambda arg_obj: getattr(getattr(arg_obj, "right", None), "value", None) + if not any(True for arg in new_arg_list if arg_left(arg_obj=arg) == argument): + new_arg_list.append(value) + return tuple(new_arg_list) + + @classmethod + def get_not_expired_query_arg(cls: Type[T], arg): 
+ """Add expiry_starts and expiry_ends to the query.""" + starts = cls.expiry_starts <= str(arrow.now()) + ends = cls.expiry_ends > str(arrow.now()) + arg = cls.add_new_arg_to_args(arg, "expiry_ends", ends) + arg = cls.add_new_arg_to_args(arg, "expiry_starts", starts) + return arg + + @classmethod + def produce_query_to_add(cls: Type[T], filter_list): + """ + Adds query to main filter options + Args: + filter_list: + """ + if filter_list.get("query"): + for smart_iter in cls.filter_expr(**filter_list["query"]): + if key := getattr(getattr(smart_iter, "left", None), "key", None): + args = cls.add_new_arg_to_args(args, key, smart_iter) + + @classmethod + def convert( + cls: Type[T], smart_options: dict, validate_model: Any = None + ) -> Optional[tuple[BinaryExpression]]: + if not validate_model: + return tuple(cls.filter_expr(**smart_options)) + + @classmethod + def filter_by_one( + cls: Type[T], db: Session, system: bool = False, **kwargs + ) -> PostgresResponse: + """ + Filter single record by keyword arguments. + + Args: + db: Database session + system: If True, skip status filtering + **kwargs: Filter criteria + + Returns: + Query response with single record + """ + if "is_confirmed" not in kwargs and not system: + kwargs["is_confirmed"] = True + kwargs.pop("system", None) + query = cls._query(db).filter_by(**kwargs) + return PostgresResponse( + model=cls, pre_query=cls._query(db), query=query, is_array=False + ) + + @classmethod + def filter_one( + cls: Type[T], + *args: Union[BinaryExpression, ColumnExpressionArgument], + db: Session, + ) -> PostgresResponse: + """ + Filter single record by expressions. 
+ + Args: + db: Database session + args: Filter expressions + + Returns: + Query response with single record + """ + args = cls.get_not_expired_query_arg(args) + query = db.query(cls).filter(*args) + pre_query = cls._query(db=db).filter(*args) + return PostgresResponse( + model=cls, pre_query=pre_query, query=query, is_array=False + ) + + @classmethod + def filter_one_system( + cls, + *args: Union[BinaryExpression, ColumnExpressionArgument], + db: Session, + ): + """ + Filter single record by expressions without status filtering + Args: + *args: + db: + + Returns: + Query response with single record + """ + query = cls._query(db=db).filter(*args) + return PostgresResponse( + model=cls, pre_query=cls._query(db=db), query=query, is_array=False + ) + + @classmethod + def filter_all_system( + cls: Type[T], + *args: Union[BinaryExpression, ColumnExpressionArgument], + db: Session, + ) -> PostgresResponse: + """ + Filter multiple records by expressions without status filtering. + + Args: + db: Database session + args: Filter expressions + + Returns: + Query response with matching records + """ + + query = cls._query(db) + query = query.filter(*args) + return PostgresResponse( + model=cls, pre_query=cls._query(db), query=query, is_array=True + ) + + @classmethod + def filter_all( + cls: Type[T], + *args: Union[BinaryExpression, ColumnExpressionArgument], + db: Session, + ) -> PostgresResponse: + """ + Filter multiple records by expressions. + + Args: + db: Database session + args: Filter expressions + Returns: + Query response with matching records + """ + args = cls.get_not_expired_query_arg(args) + pre_query = cls._query(db=db) + query = cls._query(db=db).filter(*args) + return PostgresResponse( + model=cls, pre_query=pre_query, query=query, is_array=True + ) + + @classmethod + def filter_by_all_system(cls: Type[T], db: Session, **kwargs) -> PostgresResponse: + """ + Filter multiple records by keyword arguments. 
class BasicMixin(
    Base,
    BaseAlchemyModel,
    CRUDModel,
    SerializeMixin,
    ReprMixin,
    SmartQueryMixin,
    QueryModel,
):
    """Abstract base aggregating the service's transaction (BaseAlchemyModel),
    CRUD (CRUDModel), serialization/repr, smart-query, and filter (QueryModel)
    behaviours into one declarative base class."""

    __abstract__ = True
    # Pin ReprMixin's __repr__ explicitly; with this many bases the MRO
    # choice is otherwise non-obvious to readers.
    __repr__ = ReprMixin.__repr__
class CrudMixin(BasicMixin):
    """
    Base mixin providing common identity and validity fields for PostgreSQL models.

    Features:
    - Integer primary key plus UUID surrogate key
    - Record validity window (expiry_starts / expiry_ends) used by the
      filter controllers' non-expired checks
    """

    __abstract__ = True

    # Primary and reference fields
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    uu_id: Mapped[str] = mapped_column(
        UUID,
        server_default=text("gen_random_uuid()"),
        index=True,
        unique=True,
        comment="Unique identifier UUID",
    )

    # Common timestamp fields for all models
    expiry_starts: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        comment="Record validity start timestamp",
    )
    expiry_ends: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        default=str(arrow.get("2099-12-31")),
        # BUG FIX: server_default was func.now(), which made any row relying
        # on the server-side default expire immediately -- contradicting the
        # Python-side 2099 default and matching instead the far-future
        # pattern used by the Contracts model. NOTE(review): requires an
        # alembic revision if the table already exists.
        server_default="2099-12-31 23:59:59",
        comment="Record validity end timestamp",
    )
class PaginationConfig(BaseModel):
    """
    Configuration for pagination settings.

    Attributes:
        page: Current page number (default: 1)
        size: Items per page (default: 10)
        order_field: Fields to order by (default: ["uu_id"])
        order_type: Order directions (default: ["asc"])
    """

    page: int = 1
    size: int = 10
    order_field: Optional[Union[tuple[str], list[str]]] = None
    order_type: Optional[Union[tuple[str], list[str]]] = None

    def __init__(self, **data):
        super().__init__(**data)
        # Fill in the conventional defaults when the caller supplied None
        # (an explicit empty list is preserved).
        for attr, fallback in (("order_field", ["uu_id"]), ("order_type", ["asc"])):
            if getattr(self, attr) is None:
                setattr(self, attr, fallback)
+ + Attributes: + DEFAULT_SIZE: Default number of items per page + MIN_SIZE: Minimum allowed page size + MAX_SIZE: Maximum allowed page size + """ + + DEFAULT_SIZE: int = int(ApiConfigs.DEFAULT_SIZE or 10) + MIN_SIZE: int = int(ApiConfigs.MIN_SIZE or 5) + MAX_SIZE: int = int(ApiConfigs.MAX_SIZE or 50) + + def __init__(self, data: PostgresResponse): + self._data = data + self.size: int = self.DEFAULT_SIZE + self.page: int = 1 + self.orderField: Optional[Union[tuple[str], list[str]]] = ["uu_id"] + self.orderType: Optional[Union[tuple[str], list[str]]] = ["asc"] + self.page_count: int = 1 + self.total_pages: int = 1 + self.total_count: int = self._data.count + self.all_count: int = self._data.total_count + self._update_page_counts() + + def change(self, config: PaginationConfig) -> None: + """Update pagination settings from config.""" + self.size = ( + config.size + if self.MIN_SIZE <= config.size <= self.MAX_SIZE + else self.DEFAULT_SIZE + ) + self.page = config.page + self.orderField = config.order_field + self.orderType = config.order_type + self._update_page_counts() + + def feed(self, data: PostgresResponse) -> None: + """Calculate pagination based on data source.""" + self._data = data + self._update_page_counts() + + def _update_page_counts(self) -> None: + """Update page counts and validate current page.""" + self.total_count = self._data.count + self.all_count = self._data.total_count + + self.size = ( + self.size + if self.MIN_SIZE <= self.size <= self.MAX_SIZE + else self.DEFAULT_SIZE + ) + self.total_pages = max(1, (self.total_count + self.size - 1) // self.size) + self.page = max(1, min(self.page, self.total_pages)) + self.page_count = ( + self.total_count % self.size + if self.page == self.total_pages and self.total_count % self.size + else self.size + ) + + def refresh(self) -> None: + """Reset pagination state to defaults.""" + self._update_page_counts() + + def reset(self) -> None: + """Reset pagination state to defaults.""" + self.size = 
self.DEFAULT_SIZE + self.page = 1 + self.orderField = "uu_id" + self.orderType = "asc" + + def as_dict(self) -> Dict[str, Any]: + """Convert pagination state to dictionary format.""" + self.refresh() + return { + "size": self.size, + "page": self.page, + "allCount": self.all_count, + "totalCount": self.total_count, + "totalPages": self.total_pages, + "pageCount": self.page_count, + "orderField": self.orderField, + "orderType": self.orderType, + } + + +class PaginationResult: + """ + Result of a paginated query. + + Contains the query result and pagination state. + data: PostgresResponse of query results + pagination: Pagination state + + Attributes: + _query: Original query object + pagination: Pagination state + """ + + def __init__( + self, data: PostgresResponse, pagination: Pagination, response_model: Any = None + ): + self._data = data + self._query = data.query + self._core_query = data.core_query + self.pagination = pagination + self.response_type = data.is_list + self.limit = self.pagination.size + self.offset = self.pagination.size * (self.pagination.page - 1) + self.order_by = self.pagination.orderField + self.order_type = self.pagination.orderType + self.response_model = response_model + + def dynamic_order_by(self): + """ + Dynamically order a query by multiple fields. + Returns: + Ordered query object. + """ + if not len(self.order_by) == len(self.order_type): + raise ValueError( + "Order by fields and order types must have the same length." 
+ ) + order_criteria = zip(self.order_by, self.order_type) + print('order_criteria', order_criteria) + if not self._data.data: + return self._core_query + + for field, direction in order_criteria: + print('field', field, direction) + columns = self._data.data[0].filterable_attributes + print('columns', columns) + if field in columns: + if direction.lower().startswith("d"): + self._core_query = self._core_query.order_by( + desc( + getattr(self._core_query.column_descriptions[0]["entity"], field) + ) + ) + else: + self._core_query = self._core_query.order_by( + asc( + getattr(self._core_query.column_descriptions[0]["entity"], field) + ) + ) + return self._core_query + + @property + def data(self) -> Union[list | dict]: + """Get query object.""" + query_ordered = self.dynamic_order_by() + query_paginated = query_ordered.limit(self.limit).offset(self.offset) + queried_data = ( + query_paginated.all() if self.response_type else query_paginated.first() + ) + data = ( + [result.get_dict() for result in queried_data] + if self.response_type + else queried_data.get_dict() + ) + if self.response_model: + return [self.response_model(**item).model_dump() for item in data] + return data + + +class QueryOptions: + + def __init__( + self, + table, + data: Union[dict, ListOptions] = None, + model_query: Optional[Any] = None, + ): + self.table = table + self.data = data + self.model_query = model_query + if isinstance(data, dict): + self.data = ListOptions(**data) + self.validate_query() + if not self.data.order_type: + self.data.order_type = ["created_at"] + if not self.data.order_field: + self.data.order_field = ["uu_id"] + + def validate_query(self): + if not self.data.query or not self.model_query: + return () + cleaned_query, cleaned_query_by_model, last_dict = {}, {}, {} + for key, value in self.data.query.items(): + cleaned_query[str(str(key).split("__")[0])] = value + cleaned_query_by_model[str(str(key).split("__")[0])] = (key, value) + cleaned_model = 
self.model_query(**cleaned_query) + for i in cleaned_query: + if hasattr(cleaned_model, i): + last_dict[str(cleaned_query_by_model[str(i)][0])] = str( + cleaned_query_by_model[str(i)][1] + ) + self.data.query = last_dict + + def convert(self) -> tuple: + """ + self.table.convert(query) + (, ) + """ + if not self.data or self.data.query: + return () + return tuple(self.table.convert(self.data.query)) diff --git a/Services/PostgresService/controllers/response_controllers.py b/Services/PostgresService/controllers/response_controllers.py new file mode 100644 index 0000000..dc752dc --- /dev/null +++ b/Services/PostgresService/controllers/response_controllers.py @@ -0,0 +1,109 @@ +""" +Response handler for PostgreSQL query results. + +This module provides a wrapper class for SQLAlchemy query results, +adding convenience methods for accessing data and managing query state. +""" + +from typing import Any, Dict, Optional, TypeVar, Generic, Union +from sqlalchemy.orm import Query + + +T = TypeVar("T") + + +class PostgresResponse(Generic[T]): + """ + Wrapper for PostgreSQL/SQLAlchemy query results. 
+ + Attributes: + metadata: Additional metadata for the query + + Properties: + count: Total count of results + query: Get query object + as_dict: Convert response to dictionary format + """ + + def __init__( + self, + pre_query: Query, + query: Query, + model, + is_array: bool = True, + metadata: Any = None, + ): + self._core_class = model + self._is_list = is_array + self._query = query + self._pre_query = pre_query + self._count: Optional[int] = None + self.metadata = metadata + + @property + def core_class(self): + """Get query object.""" + return self._core_class + + @property + def data(self) -> Union[T, list[T]]: + """Get query results.""" + if not self.is_list: + first_item = self._query.first() + return first_item if first_item else None + return self._query.all() if self._query.all() else [] + + @property + def data_as_dict(self) -> Union[Dict[str, Any], list[Dict[str, Any]]] | None: + """Get query results as dictionary.""" + if not self.count: + return None + if self.is_list: + first_item = self._query.first() + return first_item.get_dict() if first_item.first() else None + all_items = self._query.all() + return [result.get_dict() for result in all_items] if all_items else [] + + @property + def total_count(self) -> int: + """Lazy load and return total count of results.""" + return self._pre_query.count() if self._pre_query else 0 + + @property + def count(self) -> int: + """Lazy load and return total count of results.""" + return self._query.count() + + @property + def query(self) -> str: + """Get query object.""" + return str(self._query) + + @property + def core_query(self) -> Query: + """Get query object.""" + return self._query + + @property + def is_list(self) -> bool: + """Check if response is a list.""" + return self._is_list + + @property + def as_dict(self) -> Dict[str, Any]: + """Convert response to dictionary format.""" + if isinstance(self.data, list): + return { + "metadata": self.metadata, + "is_list": self._is_list, + "query": 
str(self.query), + "count": self.count, + "data": [result.get_dict() for result in self.data], + } + return { + "metadata": self.metadata, + "is_list": self._is_list, + "query": str(self.query), + "count": self.count, + "data": self.data.get_dict() if self.data else {}, + } diff --git a/Services/PostgresService/database.py b/Services/PostgresService/database.py new file mode 100644 index 0000000..b0025d7 --- /dev/null +++ b/Services/PostgresService/database.py @@ -0,0 +1,62 @@ +from contextlib import contextmanager +from functools import lru_cache +from typing import Generator + +from Configs.postgres import Database + +from sqlalchemy import create_engine +from sqlalchemy.orm import declarative_base, sessionmaker, scoped_session, Session + + +# Configure the database engine with proper pooling +engine = create_engine( + Database.DATABASE_URL, + pool_pre_ping=True, # Verify connection before using + pool_size=20, # Maximum number of permanent connections + max_overflow=10, # Maximum number of additional connections + pool_recycle=3600, # Recycle connections after 1 hour + pool_timeout=30, # Wait up to 30 seconds for a connection + echo=True, # Set to True for debugging SQL queries +) + +Base = declarative_base() + + +# Create a cached session factory +@lru_cache() +def get_session_factory() -> scoped_session: + """Create a thread-safe session factory.""" + session_local = sessionmaker( + bind=engine, + autocommit=False, + autoflush=False, + expire_on_commit=False, # Prevent expired object issues + ) + return scoped_session(session_local) + + +# Get database session with proper connection management +@contextmanager +def get_db() -> Generator[Session, None, None]: + """Get database session with proper connection management. 
+ + This context manager ensures: + - Proper connection pooling + - Session cleanup + - Connection return to pool + - Thread safety + + Yields: + Session: SQLAlchemy session object + """ + session_factory = get_session_factory() + session = session_factory() + try: + yield session + session.commit() + except Exception: + session.rollback() + raise + finally: + session.close() + session_factory.remove() # Clean up the session from the registry diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..069386c --- /dev/null +++ b/alembic.ini @@ -0,0 +1,118 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library. 
+# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +version_path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..edd5c39 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context +from Schemas import * +from Services.PostgresService.database import Base + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..480b130 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..8d508e8 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,198 @@ +services: + + commercial_mongo_service: + container_name: commercial_mongo_service + image: "bitnami/mongodb:latest" +# image: "bitnami/mongodb:4.4.1-debian-10-r3" + networks: + - network_store_services + environment: + - MONGODB_DISABLE_ENFORCE_AUTH=true + - MONGODB_ROOT_PASSWORD=root + - MONGODB_DATABASE=mongo_database + - MONGODB_USERNAME=mongo_user + - MONGODB_PASSWORD=mongo_password + - MONGO_INITDB_ROOT_USERNAME=mongo_user + - MONGO_INITDB_ROOT_PASSWORD=mongo_password + - MONGO_INITDB_DATABASE=mongo_database + volumes: + - 
wag_commercial_mongodb_data:/bitnami/mongodb + ports: + - "11110:27017" + + commercial_memory_service: + container_name: commercial_memory_service + image: 'bitnami/redis:latest' + restart: on-failure + environment: + - REDIS_HOST=commercial_redis_service + - REDIS_PASSWORD=commercial_redis_password + - REDIS_PORT=6379 + - REDIS_DB=0 + networks: + - network_store_services + ports: + - "11112:6379" + + postgres_commercial: + image: 'bitnami/postgresql:latest' + container_name: postgres_commercial + restart: on-failure + networks: + - network_store_services + environment: + - POSTGRES_DB=wag_database + - POSTGRES_USER=berkay_wag_user + - POSTGRES_PASSWORD=berkay_wag_user_password + depends_on: + - commercial_mongo_service + ports: + - "5434:5432" + volumes: + - wag_postgres_commercial_data:/bitnami/postgresql + + test_server: + container_name: test_server + build: + context: . + dockerfile: EmptyRunner/Dockerfile + networks: + - network_store_services + depends_on: + - postgres_commercial + +# wag_management_service: +# container_name: wag_management_service +# restart: on-failure +# build: +# context: . +# dockerfile: service_app/Dockerfile +# ports: +# - "41575:41575" +# networks: +# - network_store_services +# depends_on: +# - wag_management_init_service +# - grafana +# +# wag_management_service_second: +# container_name: wag_management_service_second +# restart: on-failure +# build: +# context: . +# dockerfile: service_app/Dockerfile +# ports: +# - "41576:41575" +# networks: +# - network_store_services +# depends_on: +# - wag_management_init_service +# - grafana + +# wag_management_init_service: +# container_name: wag_management_init_service +# build: +# context: . +# dockerfile: service_app_init/Dockerfile +# networks: +# - network_store_services +# depends_on: +# - postgres_commercial +# +# wag_bank_services: +# container_name: wag_bank_services +# restart: on-failure +# build: +# context: . 
+# dockerfile: service_app_banks/mailService.Dockerfile +# networks: +# - network_store_services +# depends_on: +# - postgres_commercial +# environment: +# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database +# - PYTHONPATH=/service_app_banks +# +# wag_account_services: +# container_name: wag_account_services +# restart: on-failure +# build: +# context: . +# dockerfile: service_account_records/account.Dockerfile +# networks: +# - network_store_services +# depends_on: +# - postgres_commercial +# environment: +# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database +# - PYTHONPATH=/ +# +# prometheus: +# image: prom/prometheus +# container_name: prometheus +# ports: +# - "9090:9090" +# volumes: +# - ./prometheus_data/prometheus.yml:/etc/prometheus/prometheus.yml +# command: +# - '--config.file=/etc/prometheus/prometheus.yml' +# networks: +# - network_store_services +# +# grafana: +# image: grafana/grafana +# container_name: grafana +# ports: +# - "3030:3000" +# depends_on: +# - prometheus +# networks: +# - network_store_services +# environment: +# - GF_SECURITY_ADMIN_USER=admin +# - GF_SECURITY_ADMIN_PASSWORD=admin +# - GF_USERS_ALLOW_SIGN_UP=false +# - GF_USERS_ALLOW_ORG_CREATE=false +# volumes: +# - grafana_data:/var/lib/grafana +# +# wag_management_test_service: +# container_name: wag_management_test_service +# build: +# context: . 
+# dockerfile: service_app_test/Dockerfile +# networks: +# - network_store_services +# depends_on: +# - wag_management_init_service + +# nginx-proxy-wag: +# container_name: nginx-proxy-wag +# image: 'jc21/nginx-proxy-manager:latest' +# restart: unless-stopped +# networks: +# - network_store_services +# depends_on: +# - wag_management_service +# ports: +# - '80:80' # Public HTTP Port +# - '443:443' # Public HTTPS Port +# - '81:81' # Admin Web Port +# volumes: +# - ./data:/data +# - ./letsencrypt:/etc/letsencrypt + +networks: + network_store_services: + +volumes: + grafana_data: + wag_postgres_commercial_data: + wag_commercial_mongodb_data: + +# environment: +# - DATABASE_URL=postgresql+psycopg2://berkay_wag_user:berkay_wag_user_password@postgres_commercial:5432/wag_database +# - REDIS_HOST=commercial_memory_service +# - REDIS_PASSWORD=commercial_redis_password +# - REDIS_PORT=6379 +# - REDIS_DB=0 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a7793df --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,23 @@ +[project] +name = "wag-services-and-backend-latest" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [ + "alembic>=1.15.1", + "arrow>=1.3.0", + "cryptography>=44.0.2", + "faker>=37.0.2", + "fastapi>=0.115.11", + "pandas>=2.2.3", + "psycopg2-binary>=2.9.10", + "pymongo>=4.11.3", + "redis>=5.2.1", + "redmail>=0.6.0", + "requests>=2.32.3", + "sqlalchemy-mixins>=2.0.5", + "textdistance>=4.6.3", + "unidecode>=1.3.8", + "uvicorn>=0.34.0", +] diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..f6e08a0 --- /dev/null +++ b/uv.lock @@ -0,0 +1,773 @@ +version = 1 +requires-python = ">=3.12" + +[[package]] +name = "alembic" +version = "1.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/4a/ed/901044acb892caa5604bf818d2da9ab0df94ef606c6059fdf367894ebf60/alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49", size = 1924789 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/f7/d398fae160568472ddce0b3fde9c4581afc593019a6adc91006a66406991/alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe", size = 231753 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, +] + +[[package]] +name = "arrow" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = 
[ + { name = "python-dateutil" }, + { name = "types-python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419 }, +] + +[[package]] +name = "certifi" +version = "2025.1.31" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = 
"https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size 
= 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = 
"https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = 
"https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { 
url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "cryptography" +version = "44.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 }, + { url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 }, + { url = "https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 }, + { url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 }, + { url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 }, + { url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 }, + { url = 
"https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 }, + { url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 }, + { url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 }, + { url = "https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 }, + { url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 }, + { url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 }, + { url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 }, + { url = 
"https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 }, + { url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 }, + { url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 }, + { url = "https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 }, + { url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 }, + { url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 }, + { url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 }, + { url = 
"https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 }, + { url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 }, + { url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 }, + { url = "https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, +] + +[[package]] +name = "faker" +version = "37.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/37/62/80f15fe1b5abf3e5b09815178d7eb63a150fc7fcfebd5271ca4aab1d885a/faker-37.0.2.tar.gz", hash = 
"sha256:948bd27706478d3aa0b6f9f58b9f25207098f6ca79852c7b49c44a8ced2bc59b", size = 1875441 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/8b/b738d3d79ee4502ca966a2a4fa6833c11f50130127bdd57729e9b29c6d2f/faker-37.0.2-py3-none-any.whl", hash = "sha256:8955706c56c28099585e9e2b6f814eb0a3a227eb36a2ee3eb9ab577c4764eacc", size = 1918397 }, +] + +[[package]] +name = "fastapi" +version = "0.115.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/28/c5d26e5860df807241909a961a37d45e10533acef95fc368066c7dd186cd/fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f", size = 294441 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/5d/4d8bbb94f0dbc22732350c06965e40740f4a92ca560e90bb566f4f73af41/fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64", size = 94926 }, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, + { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", 
size = 649064 }, + { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, + { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, + { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, + { url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, + { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, + { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, + { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, + { 
url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, + { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, + { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, + { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, + { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, + { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, + { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 
1133039 }, + { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, + { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, + { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, + { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, + { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, + { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", 
size = 602753 }, + { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, + { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, +] + +[[package]] +name = "mako" +version = "1.3.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/4f/ddb1965901bc388958db9f0c991255b2c469349a741ae8c9cd8a562d70a6/mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac", size = 392195 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/83/de0a49e7de540513f53ab5d2e105321dedeb08a8f5850f0208decf4390ec/Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1", size = 78456 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = 
"sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { 
url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, +] + +[[package]] +name = "numpy" +version = 
"2.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/78/31103410a57bc2c2b93a3597340a8119588571f6a4539067546cb9a0bfac/numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f", size = 20270701 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/30/182db21d4f2a95904cec1a6f779479ea1ac07c0647f064dea454ec650c42/numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4", size = 20947156 }, + { url = "https://files.pythonhosted.org/packages/24/6d/9483566acfbda6c62c6bc74b6e981c777229d2af93c8eb2469b26ac1b7bc/numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854", size = 14133092 }, + { url = "https://files.pythonhosted.org/packages/27/f6/dba8a258acbf9d2bed2525cdcbb9493ef9bae5199d7a9cb92ee7e9b2aea6/numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24", size = 5163515 }, + { url = "https://files.pythonhosted.org/packages/62/30/82116199d1c249446723c68f2c9da40d7f062551036f50b8c4caa42ae252/numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee", size = 6696558 }, + { url = "https://files.pythonhosted.org/packages/0e/b2/54122b3c6df5df3e87582b2e9430f1bdb63af4023c739ba300164c9ae503/numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba", size = 14084742 }, + { url = "https://files.pythonhosted.org/packages/02/e2/e2cbb8d634151aab9528ef7b8bab52ee4ab10e076509285602c2a3a686e0/numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592", size = 16134051 }, + { url = 
"https://files.pythonhosted.org/packages/8e/21/efd47800e4affc993e8be50c1b768de038363dd88865920439ef7b422c60/numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb", size = 15578972 }, + { url = "https://files.pythonhosted.org/packages/04/1e/f8bb88f6157045dd5d9b27ccf433d016981032690969aa5c19e332b138c0/numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f", size = 17898106 }, + { url = "https://files.pythonhosted.org/packages/2b/93/df59a5a3897c1f036ae8ff845e45f4081bb06943039ae28a3c1c7c780f22/numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00", size = 6311190 }, + { url = "https://files.pythonhosted.org/packages/46/69/8c4f928741c2a8efa255fdc7e9097527c6dc4e4df147e3cadc5d9357ce85/numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146", size = 12644305 }, + { url = "https://files.pythonhosted.org/packages/2a/d0/bd5ad792e78017f5decfb2ecc947422a3669a34f775679a76317af671ffc/numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7", size = 20933623 }, + { url = "https://files.pythonhosted.org/packages/c3/bc/2b3545766337b95409868f8e62053135bdc7fa2ce630aba983a2aa60b559/numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0", size = 14148681 }, + { url = "https://files.pythonhosted.org/packages/6a/70/67b24d68a56551d43a6ec9fe8c5f91b526d4c1a46a6387b956bf2d64744e/numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392", size = 5148759 }, + { url = 
"https://files.pythonhosted.org/packages/1c/8b/e2fc8a75fcb7be12d90b31477c9356c0cbb44abce7ffb36be39a0017afad/numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc", size = 6683092 }, + { url = "https://files.pythonhosted.org/packages/13/73/41b7b27f169ecf368b52533edb72e56a133f9e86256e809e169362553b49/numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298", size = 14081422 }, + { url = "https://files.pythonhosted.org/packages/4b/04/e208ff3ae3ddfbafc05910f89546382f15a3f10186b1f56bd99f159689c2/numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7", size = 16132202 }, + { url = "https://files.pythonhosted.org/packages/fe/bc/2218160574d862d5e55f803d88ddcad88beff94791f9c5f86d67bd8fbf1c/numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6", size = 15573131 }, + { url = "https://files.pythonhosted.org/packages/a5/78/97c775bc4f05abc8a8426436b7cb1be806a02a2994b195945600855e3a25/numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd", size = 17894270 }, + { url = "https://files.pythonhosted.org/packages/b9/eb/38c06217a5f6de27dcb41524ca95a44e395e6a1decdc0c99fec0832ce6ae/numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c", size = 6308141 }, + { url = "https://files.pythonhosted.org/packages/52/17/d0dd10ab6d125c6d11ffb6dfa3423c3571befab8358d4f85cd4471964fcd/numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3", size = 12636885 }, + { url = 
"https://files.pythonhosted.org/packages/fa/e2/793288ede17a0fdc921172916efb40f3cbc2aa97e76c5c84aba6dc7e8747/numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8", size = 20961829 }, + { url = "https://files.pythonhosted.org/packages/3a/75/bb4573f6c462afd1ea5cbedcc362fe3e9bdbcc57aefd37c681be1155fbaa/numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39", size = 14161419 }, + { url = "https://files.pythonhosted.org/packages/03/68/07b4cd01090ca46c7a336958b413cdbe75002286295f2addea767b7f16c9/numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd", size = 5196414 }, + { url = "https://files.pythonhosted.org/packages/a5/fd/d4a29478d622fedff5c4b4b4cedfc37a00691079623c0575978d2446db9e/numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0", size = 6709379 }, + { url = "https://files.pythonhosted.org/packages/41/78/96dddb75bb9be730b87c72f30ffdd62611aba234e4e460576a068c98eff6/numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960", size = 14051725 }, + { url = "https://files.pythonhosted.org/packages/00/06/5306b8199bffac2a29d9119c11f457f6c7d41115a335b78d3f86fad4dbe8/numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8", size = 16101638 }, + { url = "https://files.pythonhosted.org/packages/fa/03/74c5b631ee1ded596945c12027649e6344614144369fd3ec1aaced782882/numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc", size = 15571717 }, + { url = 
"https://files.pythonhosted.org/packages/cb/dc/4fc7c0283abe0981e3b89f9b332a134e237dd476b0c018e1e21083310c31/numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff", size = 17879998 }, + { url = "https://files.pythonhosted.org/packages/e5/2b/878576190c5cfa29ed896b518cc516aecc7c98a919e20706c12480465f43/numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286", size = 6366896 }, + { url = "https://files.pythonhosted.org/packages/3e/05/eb7eec66b95cf697f08c754ef26c3549d03ebd682819f794cb039574a0a6/numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d", size = 12739119 }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 }, + { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 }, + { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 }, + { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 }, + { url = 
"https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 }, + { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 }, + { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 }, + { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 }, + { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 }, + { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699 }, + { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245 }, + { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631 }, + { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140 }, + { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762 }, + { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967 }, + { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326 }, + { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712 }, + { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155 }, + { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356 }, + { url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = 
"sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, + { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, + { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, + { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, + { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, + { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, + { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", 
size = 1885092 }, + { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, + { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, + { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, + { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, + { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, + { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, + { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, + { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, + { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, + { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, + { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, + { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, + { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, + { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", 
size = 1885186 }, +] + +[[package]] +name = "pymongo" +version = "4.11.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/e6/cdb1105c14a86aa2b1663a6cccc6bf54722bb12fb5d479979628142dde42/pymongo-4.11.3.tar.gz", hash = "sha256:b6f24aec7c0cfcf0ea9f89e92b7d40ba18a1e18c134815758f111ecb0122e61c", size = 2054848 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/cf/c606c9d889d8f34dcf80455e045854ef2fa187c439b22a6d30357790c12a/pymongo-4.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5f48b7faf4064e5f484989608a59503b11b7f134ca344635e416b1b12e7dc255", size = 895374 }, + { url = "https://files.pythonhosted.org/packages/c6/f5/287e84ba6c8e34cb13f798e7e859b4dcbc5fab99261f91202a8027f62ba6/pymongo-4.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:722f22bf18d208aa752591bde93e018065641711594e7a2fef0432da429264e8", size = 895063 }, + { url = "https://files.pythonhosted.org/packages/0e/ba/fe8964ec3f8d7348e9cd6a11864e1e84b2be62ea98ca0ba01a4f5b4d417d/pymongo-4.11.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5be1b35c4897626327c4e8bae14655807c2bc710504fa790bc19a72403142264", size = 1673722 }, + { url = "https://files.pythonhosted.org/packages/92/89/925b7160c517b66c80d05b36f63d4cc0d0ff23f01b5150b55936b5fab097/pymongo-4.11.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f9e4d2172545798738d27bc6293b972c4f1f98cce248aa56e1e62c4c258ca7", size = 1737946 }, + { url = "https://files.pythonhosted.org/packages/f8/97/bcedba78ddbc1b8837bf556da55eb08a055e93b331722ecd1dad602a3427/pymongo-4.11.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd3f7bafe441135f58d2b91a312714f423e15fed5afe3854880c8c61ad78d3ce", size = 1706981 }, + { url = 
"https://files.pythonhosted.org/packages/d7/ce/63719be395ec29b8f71fd267014af4957736b5297a1f51f76ef32d05a0cf/pymongo-4.11.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73de1b9f416a2662ba95b4b49edc963d47b93760a7e2b561b932c8099d160151", size = 1676948 }, + { url = "https://files.pythonhosted.org/packages/c1/36/de366cee39e6c2e64d824d1f2e5672381ec766c51224304d1aebf7db3507/pymongo-4.11.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e24268e2d7ae96eab12161985b39e75a75185393134fc671f4bb1a16f50bf6f4", size = 1636072 }, + { url = "https://files.pythonhosted.org/packages/07/48/34751291a152e8098b4cf6f467046f00edd71b695d5cf6be1b15778cda63/pymongo-4.11.3-cp312-cp312-win32.whl", hash = "sha256:33a936d3c1828e4f52bed3dad6191a3618cc28ab056e2770390aec88d9e9f9ea", size = 864025 }, + { url = "https://files.pythonhosted.org/packages/96/8a/604fab1e1f45deb0dc19e06053369e7db44e3d1359a39e0fe376bdb95b41/pymongo-4.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:c4673d8ef0c8ef712491a750adf64f7998202a82abd72be5be749749275b3edb", size = 882290 }, + { url = "https://files.pythonhosted.org/packages/01/f1/19f8a81ca1ef180983b89e24f8003863612aea358a06d7685566ccc18a87/pymongo-4.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5e53b98c9700bb69f33a322b648d028bfe223ad135fb04ec48c0226998b80d0e", size = 949622 }, + { url = "https://files.pythonhosted.org/packages/67/9a/ae232aa9379a9e6cf325facf0f65176d70520d6a16807f4de2e1ccfb76ec/pymongo-4.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8464aff011208cf86eae28f4a3624ebc4a40783634e119b2b35852252b901ef3", size = 949299 }, + { url = "https://files.pythonhosted.org/packages/70/6d/1ddef8b6c6d598fe21c917d93c49a6304611a252a07e98a9b7e70e1b995b/pymongo-4.11.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3742ffc1951bec1450a5a6a02cfd40ddd4b1c9416b36c70ae439a532e8be0e05", size = 1937616 }, + { url = 
"https://files.pythonhosted.org/packages/13/9c/e735715789a876140f453def1b2015948708d224f1728f9b8412b6e495d2/pymongo-4.11.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a29294b508975a5dfd384f4b902cd121dc2b6e5d55ea2be2debffd2a63461cd9", size = 2015041 }, + { url = "https://files.pythonhosted.org/packages/fc/d3/cf41e9ce81644de9d8db54cc039823863e7240e021466ae093edc061683a/pymongo-4.11.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:051c741586ab6efafe72e027504ac4e5f01c88eceec579e4e1a438a369a61b0c", size = 1978716 }, + { url = "https://files.pythonhosted.org/packages/be/c8/c3f15c6cc5a9e0a75d18ae86209584cb14fdca017197def9741bff19c151/pymongo-4.11.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b05e03a327cdef28ec2bb72c974d412d308f5cf867a472ef17f9ac95d18ec05", size = 1939524 }, + { url = "https://files.pythonhosted.org/packages/1b/0d/613cd91c736325d05d2d5d389d06ed899bcdce5a265cb486b948729bf1eb/pymongo-4.11.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dafeddf1db51df19effd0828ae75492b15d60c7faec388da08f1fe9593c88e7a", size = 1888960 }, + { url = "https://files.pythonhosted.org/packages/e7/eb/b1e9cf2e03a47c4f35ffc5db1cb0ed0f92c5fe58c6f5f04d5a2da9d6bb77/pymongo-4.11.3-cp313-cp313-win32.whl", hash = "sha256:40c55afb34788ae6a6b8c175421fa46a37cfc45de41fe4669d762c3b1bbda48e", size = 910370 }, + { url = "https://files.pythonhosted.org/packages/77/f3/023f12ee9028f341880016fd6251255bf755f70730440ad11bf745f5f9e4/pymongo-4.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:a5b8b7ba9614a081d1f932724b7a6a20847f6c9629420ae81ce827db3b599af2", size = 932930 }, + { url = "https://files.pythonhosted.org/packages/d3/c7/0a145cc66fc756cea547b948150583357e5518cfa60b3ad0d3266d3ee168/pymongo-4.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0f23f849693e829655f667ea18b87bf34e1395237eb45084f3495317d455beb2", size = 1006138 }, + { url = 
"https://files.pythonhosted.org/packages/81/88/4ed3cd03d2f7835393a72ed87f5e9186f6fc54bcb0e9b7f718424c0b5db8/pymongo-4.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:62bcfa88deb4a6152a7c93bedd1a808497f6c2881424ca54c3c81964a51c5040", size = 1006125 }, + { url = "https://files.pythonhosted.org/packages/91/a9/d86844a9aff958c959e84b8223b9d226c3b39a71f2f2fbf2aa3a4a748212/pymongo-4.11.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eaa0233858f72074bf0319f5034018092b43f19202bd7ecb822980c35bfd623", size = 2266315 }, + { url = "https://files.pythonhosted.org/packages/1d/06/fff82b09382a887dab6207bb23778395c5986a5ddab6f55905ebdd82e10c/pymongo-4.11.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a434e081017be360595237cd1aeac3d047dd38e8785c549be80748608c1d4ca", size = 2353538 }, + { url = "https://files.pythonhosted.org/packages/5d/f7/ff5399baee5888eb686c1508d28b4e9d82b9da5ca63215f958356dee4016/pymongo-4.11.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e8aa65a9e4a989245198c249816d86cb240221861b748db92b8b3a5356bd6f1", size = 2312410 }, + { url = "https://files.pythonhosted.org/packages/b0/4d/1746ee984b229eddf5f768265b553a90b31b2395fb5ae1d30d28e430a862/pymongo-4.11.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0a91004029d1fc9e66a800e6da4170afaa9b93bcf41299e4b5951b837b3467a", size = 2263706 }, + { url = "https://files.pythonhosted.org/packages/1c/dc/5d4154c5baf62af9ffb9391cf41848a87cda97798f92e4336730690be7d5/pymongo-4.11.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b992904ac78cb712b42c4b7348974ba1739137c1692cdf8bf75c3eeb22881a4", size = 2202724 }, + { url = "https://files.pythonhosted.org/packages/72/15/c18fcc456fdcb793714776da273fc4cba4579f21818f2219e23ff9512314/pymongo-4.11.3-cp313-cp313t-win32.whl", hash = 
"sha256:45e18bda802d95a2aed88e487f06becc3bd0b22286a25aeca8c46b8c64980dbb", size = 959256 }, + { url = "https://files.pythonhosted.org/packages/7d/64/11d87df61cdca4fef90388af592247e17f3d31b15a909780f186d2739592/pymongo-4.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:07d40b831590bc458b624f421849c2b09ad2b9110b956f658b583fe01fe01c01", size = 987855 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "pytz" +version = "2025.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/57/df1c9157c8d5a05117e455d66fd7cf6dbc46974f832b1058ed4856785d8a/pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e", size = 319617 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57", size = 507930 }, +] + +[[package]] +name = "redis" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/da/d283a37303a995cd36f8b92db85135153dc4f7a8e4441aa827721b442cfb/redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f", 
size = 4608355 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/5f/fa26b9b2672cbe30e07d9a5bdf39cf16e3b80b42916757c5f92bca88e4ba/redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4", size = 261502 }, +] + +[[package]] +name = "redmail" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jinja2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/96/36c740474cadc1b8a6e735334a0c67c02ea7169d29ffde48eb6c74f3abaa/redmail-0.6.0.tar.gz", hash = "sha256:0447cbd76deb2788b2d831c12e22b513587e99f725071d9951a01b0f2b8d0a72", size = 448832 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/67/3e0005b255a9d02448c5529af450b6807403e9af7b82636123273906ea37/redmail-0.6.0-py3-none-any.whl", hash = "sha256:8e64a680ffc8aaf8054312bf8b216da8fed20669181b77b1f1ccbdf4ee064427", size = 46948 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.39" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/8e/e77fcaa67f8b9f504b4764570191e291524575ddbfe78a90fc656d671fdc/sqlalchemy-2.0.39.tar.gz", hash = "sha256:5d2d1fe548def3267b4c70a8568f108d1fed7cbbeccb9cc166e05af2abc25c22", size = 9644602 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/98/86/b2cb432aeb00a1eda7ed33ce86d943c2452dc1642f3ec51bfe9eaae9604b/sqlalchemy-2.0.39-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c457a38351fb6234781d054260c60e531047e4d07beca1889b558ff73dc2014b", size = 2107210 }, + { url = "https://files.pythonhosted.org/packages/bf/b0/b2479edb3419ca763ba1b587161c292d181351a33642985506a530f9162b/sqlalchemy-2.0.39-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:018ee97c558b499b58935c5a152aeabf6d36b3d55d91656abeb6d93d663c0c4c", size = 2097599 }, + { url = "https://files.pythonhosted.org/packages/58/5e/c5b792a4abcc71e68d44cb531c4845ac539d558975cc61db1afbc8a73c96/sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a8120d6fc185f60e7254fc056a6742f1db68c0f849cfc9ab46163c21df47", size = 3247012 }, + { url = "https://files.pythonhosted.org/packages/e0/a8/055fa8a7c5f85e6123b7e40ec2e9e87d63c566011d599b4a5ab75e033017/sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2cf5b5ddb69142511d5559c427ff00ec8c0919a1e6c09486e9c32636ea2b9dd", size = 3257851 }, + { url = "https://files.pythonhosted.org/packages/f6/40/aec16681e91a22ddf03dbaeb3c659bce96107c5f47d2a7c665eb7f24a014/sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f03143f8f851dd8de6b0c10784363712058f38209e926723c80654c1b40327a", size = 3193155 }, + { url = "https://files.pythonhosted.org/packages/21/9d/cef697b137b9eb0b66ab8e9cf193a7c7c048da3b4bb667e5fcea4d90c7a2/sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06205eb98cb3dd52133ca6818bf5542397f1dd1b69f7ea28aa84413897380b06", size = 3219770 }, + { url = "https://files.pythonhosted.org/packages/57/05/e109ca7dde837d8f2f1b235357e4e607f8af81ad8bc29c230fed8245687d/sqlalchemy-2.0.39-cp312-cp312-win32.whl", hash = "sha256:7f5243357e6da9a90c56282f64b50d29cba2ee1f745381174caacc50d501b109", size = 2077567 }, + { url = 
"https://files.pythonhosted.org/packages/97/c6/25ca068e38c29ed6be0fde2521888f19da923dbd58f5ff16af1b73ec9b58/sqlalchemy-2.0.39-cp312-cp312-win_amd64.whl", hash = "sha256:2ed107331d188a286611cea9022de0afc437dd2d3c168e368169f27aa0f61338", size = 2103136 }, + { url = "https://files.pythonhosted.org/packages/32/47/55778362642344324a900b6b2b1b26f7f02225b374eb93adc4a363a2d8ae/sqlalchemy-2.0.39-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe193d3ae297c423e0e567e240b4324d6b6c280a048e64c77a3ea6886cc2aa87", size = 2102484 }, + { url = "https://files.pythonhosted.org/packages/1b/e1/f5f26f67d095f408138f0fb2c37f827f3d458f2ae51881546045e7e55566/sqlalchemy-2.0.39-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:79f4f502125a41b1b3b34449e747a6abfd52a709d539ea7769101696bdca6716", size = 2092955 }, + { url = "https://files.pythonhosted.org/packages/c5/c2/0db0022fc729a54fc7aef90a3457bf20144a681baef82f7357832b44c566/sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10ca7f8a1ea0fd5630f02feb055b0f5cdfcd07bb3715fc1b6f8cb72bf114e4", size = 3179367 }, + { url = "https://files.pythonhosted.org/packages/33/b7/f33743d87d0b4e7a1f12e1631a4b9a29a8d0d7c0ff9b8c896d0bf897fb60/sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6b0a1c7ed54a5361aaebb910c1fa864bae34273662bb4ff788a527eafd6e14d", size = 3192705 }, + { url = "https://files.pythonhosted.org/packages/c9/74/6814f31719109c973ddccc87bdfc2c2a9bc013bec64a375599dc5269a310/sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52607d0ebea43cf214e2ee84a6a76bc774176f97c5a774ce33277514875a718e", size = 3125927 }, + { url = "https://files.pythonhosted.org/packages/e8/6b/18f476f4baaa9a0e2fbc6808d8f958a5268b637c8eccff497bf96908d528/sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c08a972cbac2a14810463aec3a47ff218bb00c1a607e6689b531a7c589c50723", size = 3154055 }, + { url = 
"https://files.pythonhosted.org/packages/b4/60/76714cecb528da46bc53a0dd36d1ccef2f74ef25448b630a0a760ad07bdb/sqlalchemy-2.0.39-cp313-cp313-win32.whl", hash = "sha256:23c5aa33c01bd898f879db158537d7e7568b503b15aad60ea0c8da8109adf3e7", size = 2075315 }, + { url = "https://files.pythonhosted.org/packages/5b/7c/76828886d913700548bac5851eefa5b2c0251ebc37921fe476b93ce81b50/sqlalchemy-2.0.39-cp313-cp313-win_amd64.whl", hash = "sha256:4dabd775fd66cf17f31f8625fc0e4cfc5765f7982f94dc09b9e5868182cb71c0", size = 2099175 }, + { url = "https://files.pythonhosted.org/packages/7b/0f/d69904cb7d17e65c65713303a244ec91fd3c96677baf1d6331457fd47e16/sqlalchemy-2.0.39-py3-none-any.whl", hash = "sha256:a1c6b0a5e3e326a466d809b651c63f278b1256146a377a528b6938a279da334f", size = 1898621 }, +] + +[[package]] +name = "sqlalchemy-mixins" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/90/a920aa06a038677dde522dd8d7bc168eedd5fd3331ba1c759bf91ccd28d3/sqlalchemy_mixins-2.0.5.tar.gz", hash = "sha256:85197fc3682c4bf9c35671fb3d10282a0973b19cd2ff2b6791d601cbfb0fb89e", size = 20186 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/4d/8d97e3ec646e8732ea8d33fb33068cab97d0bc5a0e3f46c93174e2d3d3eb/sqlalchemy_mixins-2.0.5-py3-none-any.whl", hash = "sha256:9067b630744741b472aa91d92494cc5612ed2d29c66729a5a4a1d3fbbeccd448", size = 17578 }, +] + +[[package]] +name = "starlette" +version = "0.46.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 }, +] + +[[package]] +name = "textdistance" +version = "4.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/68/97ac72dd781301d6a52140066c68400c96f1a91f69737959e414844749b0/textdistance-4.6.3.tar.gz", hash = "sha256:d6dabc50b4ea832cdcf0e1e6021bd0c7fcd9ade155888d79bb6a3c31fce2dc6f", size = 32710 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/c2/c62601c858010b0513a6434b9be19bd740533a6e861eddfd30b7258d92a0/textdistance-4.6.3-py3-none-any.whl", hash = "sha256:0cb1b2cc8e3339ddc3e0f8c870e49fb49de6ecc42a718917308b3c971f34aa56", size = 31263 }, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241206" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/60/47d92293d9bc521cd2301e423a358abfac0ad409b3a1606d8fbae1321961/types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb", size = 13802 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/b3/ca41df24db5eb99b00d97f89d7674a90cb6b3134c52fb8121b6d8d30f15c/types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53", size = 14384 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "tzdata" +version = "2025.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/0f/fa4723f22942480be4ca9527bbde8d43f6c3f2fe8412f00e7f5f6746bc8b/tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", size = 194950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639", size = 346762 }, +] + +[[package]] +name = "unidecode" +version = "1.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/89/19151076a006b9ac0dd37b1354e031f5297891ee507eb624755e58e10d3e/Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4", size = 192701 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/b7/6ec57841fb67c98f52fc8e4a2d96df60059637cba077edc569a302a8ffc7/Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39", size = 235494 }, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, +] + +[[package]] +name = "uvicorn" +version = "0.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, +] + +[[package]] +name = "wag-services-and-backend-latest" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "alembic" }, + { name = "arrow" }, + { name = "cryptography" }, + { name = "faker" }, + { name = "fastapi" }, + { name = "pandas" }, + { name = "psycopg2-binary" }, + { name = "pymongo" }, + { name = "redis" }, + { name = "redmail" }, + { name = "requests" }, + { name = "sqlalchemy-mixins" }, + { name = "textdistance" }, + { name = "unidecode" }, + { name = "uvicorn" }, +] + +[package.metadata] +requires-dist = [ + { name = "alembic", specifier = ">=1.15.1" }, + { name = "arrow", specifier = ">=1.3.0" }, + { name = "cryptography", specifier = ">=44.0.2" }, + { name = "faker", specifier = ">=37.0.2" }, + { name = "fastapi", specifier = ">=0.115.11" }, + { name = "pandas", specifier = ">=2.2.3" }, + { name = "psycopg2-binary", specifier = ">=2.9.10" }, + { name = "pymongo", specifier = ">=4.11.3" }, + { name = "redis", specifier = ">=5.2.1" }, + { name = "redmail", specifier = ">=0.6.0" }, + { name = "requests", specifier = ">=2.32.3" }, + { name = "sqlalchemy-mixins", specifier = ">=2.0.5" }, + { name = "textdistance", specifier = ">=4.6.3" }, + { name = "unidecode", specifier = ">=1.3.8" }, + { name = "uvicorn", specifier = ">=0.34.0" }, +]