diff --git a/.gitignore copy b/.gitignore copy
new file mode 100644
index 0000000..5d381cc
--- /dev/null
+++ b/.gitignore copy
@@ -0,0 +1,162 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
diff --git a/ApiLayers/AllConfigs/Email/configs.py b/ApiLayers/AllConfigs/Email/configs.py
new file mode 100644
index 0000000..ce92382
--- /dev/null
+++ b/ApiLayers/AllConfigs/Email/configs.py
@@ -0,0 +1,18 @@
+from ApiLayers.AllConfigs import HostConfig
+
+
class EmailConfig:
    """SMTP connection settings for outgoing mail."""

    # NOTE(review): credentials are hard-coded in source; move them to
    # environment variables / a secret store before production use.
    EMAIL_HOST: str = HostConfig.EMAIL_HOST
    EMAIL_USERNAME: str = "karatay@mehmetkaratay.com.tr"
    EMAIL_PASSWORD: str = "system"
    EMAIL_PORT: int = 587
    # Master switch: when False, callers should skip sending entirely.
    EMAIL_SEND: bool = False

    @classmethod
    def as_dict(cls):
        """Return the settings as keyword arguments for an SMTP client.

        Uses ``cls`` (not the hard-coded class name) so subclasses that
        override any field are honoured.
        """
        return dict(
            host=cls.EMAIL_HOST,
            port=cls.EMAIL_PORT,
            username=cls.EMAIL_USERNAME,
            password=cls.EMAIL_PASSWORD,
        )
diff --git a/ApiLayers/AllConfigs/Email/email_send_model.py b/ApiLayers/AllConfigs/Email/email_send_model.py
new file mode 100644
index 0000000..3108072
--- /dev/null
+++ b/ApiLayers/AllConfigs/Email/email_send_model.py
@@ -0,0 +1,13 @@
+from pydantic import BaseModel
+from typing import List, Dict, Optional
+
+
class EmailSendModel(BaseModel):
    """Payload describing a single outgoing e-mail.

    ``receivers`` is the only required recipient list; ``cc``/``bcc`` are
    optional extra recipients.  ``attachments`` presumably maps filename to
    content — TODO confirm against the mail-sender implementation.
    """

    subject: str
    html: str = ""
    receivers: List[str]
    text: Optional[str] = ""
    cc: Optional[List[str]] = None
    bcc: Optional[List[str]] = None
    headers: Optional[Dict] = None
    attachments: Optional[Dict] = None
diff --git a/ApiLayers/AllConfigs/NoSqlDatabase/configs.py b/ApiLayers/AllConfigs/NoSqlDatabase/configs.py
new file mode 100644
index 0000000..813b12f
--- /dev/null
+++ b/ApiLayers/AllConfigs/NoSqlDatabase/configs.py
@@ -0,0 +1,10 @@
+from ApiLayers.AllConfigs import HostConfig
+
+
class MongoConfig:
    """Connection settings for the MongoDB instance."""

    # NOTE(review): credentials are hard-coded; move to env vars / secrets.
    PASSWORD = "mongo_password"
    USER_NAME = "mongo_user"
    DATABASE_NAME = "mongo_database"
    HOST = HostConfig.MAIN_HOST
    PORT = 11777
    # NOTE(review): USER_NAME/PASSWORD are interpolated without URL-escaping;
    # if they ever contain special characters the URI breaks — consider
    # urllib.parse.quote_plus.
    URL = f"mongodb://{USER_NAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}?retryWrites=true&w=majority"
diff --git a/ApiLayers/AllConfigs/Redis/configs.py b/ApiLayers/AllConfigs/Redis/configs.py
new file mode 100644
index 0000000..978028f
--- /dev/null
+++ b/ApiLayers/AllConfigs/Redis/configs.py
@@ -0,0 +1,125 @@
+from ApiLayers.AllConfigs import HostConfig
+
+
class WagRedis:
    """Redis connection settings for the WAG services."""

    REDIS_HOST = HostConfig.MAIN_HOST
    # NOTE(review): hard-coded password; move to a secret store.
    REDIS_PASSWORD: str = "commercial_redis_password"
    REDIS_PORT: int = 11222
    REDIS_DB: int = 0

    @classmethod
    def as_dict(cls):
        """Return the settings as keyword arguments for a Redis client.

        Uses ``cls`` (not the hard-coded class name) so subclasses that
        override any field are honoured.
        """
        return dict(
            host=cls.REDIS_HOST,
            password=cls.REDIS_PASSWORD,
            port=cls.REDIS_PORT,
            db=cls.REDIS_DB,
        )
+
+
class RedisValidationKeys:
    """Key segments composed into Redis cache keys for validation,
    header and language-model data (see RedisValidationKeysAction)."""

    ENDPOINTS: str = "ENDPOINTS"
    VALIDATIONS: str = "VALIDATIONS"
    HEADERS: str = "HEADERS"
    ERRORCODES: str = "ERRORCODES"
    RESPONSES: str = "RESPONSES"
    REQUESTS: str = "REQUESTS"
    RESPONSE: str = "RESPONSE"
    LANGUAGE_MODELS: str = "LANGUAGE_MODELS"
    STATIC: str = "STATIC"
    DYNAMIC: str = "DYNAMIC"
    # REQUEST: str = "REQUEST"
    # VALIDATION_USER: str = "VALIDATION_USER"
+
+
class RedisAuthKeys:
    """Key segments for authentication-related Redis entries."""

    AUTH: str = "AUTH"
    OCCUPANT: str = "OCCUPANT"
    EMPLOYEE: str = "EMPLOYEE"
    CACHE: str = "CACHE"
+
+
class RedisCategoryKeys:
    """Key segments for category / menu cluster caches in Redis."""

    REBUILD: str = "REBUILD"
    ENDPOINT2CLASS: str = "ENDPOINT2CLASS"
    CLUSTER_INDEX: str = "CLUSTER_INDEX"
    CLUSTER_FUNCTION_CODES: str = "CLUSTER_FUNCTION_CODES"
    METHOD_FUNCTION_CODES: str = "METHOD_FUNCTION_CODES"
    MENU_FIRST_LAYER: str = "MENU_FIRST_LAYER"
    PAGE_MAPPER: str = "PAGE_MAPPER"
    MENU_MAPPER: str = "MENU_MAPPER"
+
+
class RedisCategoryPageInfoKeys:
    """Key segments describing cached page / menu metadata.

    Examples of the cached shapes (from the original author's notes):
    ### /create?site=BuildingCluster, #/update?site=BuildingCluster, #/dashboard?site=BuildingCluster
    PAGE_URL: /dashboard?site=BuildingCluster
    PAGE_NAME: BuildingCluster
    PAGE_INFO: {LANGUAGE_MODELS: "", ICON: "", URL: ""}
    PAGE_MENU_INDEX: 1 # {build_living_space: "uuid4", LAYER: 1, MENU_INDEX: 1}

    PAGE_MENU_COMPONENT: {..., lang: {"tr"}: {...}, lang: {"en"}: {...}}
    PAGE_LANGUAGE: {{"tr"}: {...},{"en"}: {...}}
    """

    PAGE_URL: str = "PAGE_URL"
    PAGE_NAME: str = "PAGE_NAME"
    PAGE_INFO: str = "PAGE_INFO"
    PAGE_COMPONENT: str = "PAGE_COMPONENT"
    PAGE_MENU_INDEX: str = "PAGE_MENU_INDEX"
    PAGE_MENU_COMPONENT: str = "PAGE_MENU_COMPONENT"
    PAGE_LANGUAGE: str = "PAGE_LANGUAGE"
+
+
class RedisCategoryPageInfoKeysAction:
    """Pre-composed Redis key templates built from RedisCategoryPageInfoKeys.

    PAGE_MAPPER: {PAGE_URL: /dashboard?site=BuildingCluster, PAGE_NAME: BuildingCluster, PAGE_INFO: {LANGUAGE_MODELS: "", ICON: "", URL: ""}}
    value : {RedisCategoryPageInfoKeys.PAGE_INFO}
    MENU_MAPPER: {PAGE_MENU_INDEX: 1, PAGE_MENU_COMPONENT: {..., lang: {"tr"}: {...}, lang: {"en"}: {...}}}
    value : {RedisCategoryPageInfoKeys.PAGE_INFO}
    """

    # PAGE_MENU_INDEX:PAGE_URL
    page_index: str = ":".join(
        (
            RedisCategoryPageInfoKeys.PAGE_MENU_INDEX,
            RedisCategoryPageInfoKeys.PAGE_URL,
        )
    )
    # PAGE_MENU_INDEX:PAGE_URL:PAGE_LANGUAGE — built in a single expression
    # instead of the original two-step re-assignment of the same attribute,
    # which obscured the final value.
    page_mapper_key: str = ":".join(
        (
            RedisCategoryPageInfoKeys.PAGE_MENU_INDEX,
            RedisCategoryPageInfoKeys.PAGE_URL,
            RedisCategoryPageInfoKeys.PAGE_LANGUAGE,
        )
    )
    # PAGE_URL:PAGE_MENU_INDEX:PAGE_MENU_COMPONENT
    menu_mapper_key: str = ":".join(
        (
            RedisCategoryPageInfoKeys.PAGE_URL,
            RedisCategoryPageInfoKeys.PAGE_MENU_INDEX,
            RedisCategoryPageInfoKeys.PAGE_MENU_COMPONENT,
        )
    )
+
+
class RedisValidationKeysAction:
    """Fully-qualified Redis key prefixes for validation, header and
    language-model caches, composed from RedisValidationKeys segments."""

    # Shared prefixes (non-public helpers).
    _DYNAMIC = f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.DYNAMIC}"
    _STATIC = f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.STATIC}"

    # LANGUAGE_MODELS:DYNAMIC:VALIDATIONS
    dynamic_validation_key: str = f"{_DYNAMIC}:{RedisValidationKeys.VALIDATIONS}"
    # LANGUAGE_MODELS:DYNAMIC:HEADERS:REQUESTS
    dynamic_header_request_key: str = (
        f"{_DYNAMIC}:{RedisValidationKeys.HEADERS}:{RedisValidationKeys.REQUESTS}"
    )
    # LANGUAGE_MODELS:DYNAMIC:HEADERS:RESPONSES
    dynamic_header_response_key: str = (
        f"{_DYNAMIC}:{RedisValidationKeys.HEADERS}:{RedisValidationKeys.RESPONSES}"
    )
    # LANGUAGE_MODELS:STATIC:ERRORCODES
    static_error_code_key: str = f"{_STATIC}:{RedisValidationKeys.ERRORCODES}"
    # LANGUAGE_MODELS:STATIC:RESPONSES
    static_response_key: str = f"{_STATIC}:{RedisValidationKeys.RESPONSES}"
    # LANGUAGE_MODELS:STATIC:REQUESTS
    static_request_key: str = f"{_STATIC}:{RedisValidationKeys.REQUESTS}"
diff --git a/ApiLayers/AllConfigs/SqlDatabase/configs.py b/ApiLayers/AllConfigs/SqlDatabase/configs.py
new file mode 100644
index 0000000..c36f662
--- /dev/null
+++ b/ApiLayers/AllConfigs/SqlDatabase/configs.py
@@ -0,0 +1,17 @@
+from ApiLayers.AllConfigs import HostConfig
+
+
class WagDatabase:
    """PostgreSQL (psycopg2) connection settings for the WAG database.

    NOTE(review): credentials are hard-coded in source; move to env vars /
    a secret store.
    """

    HOST: str = HostConfig.MAIN_HOST
    PORT: str = "5444"
    SQL: str = "postgresql+psycopg2"  # dialect+driver prefix (presumably for SQLAlchemy — confirm)
    USERNAME: str = "berkay_wag_user"
    PASSWORD: str = "berkay_wag_user_password"
    DATABASE_NAME: str = "wag_database"
    DATABASE_URL: str = f"{SQL}://{USERNAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}"
+
+
class PaginateConfig:
    """Page-size bounds for paginated list endpoints."""

    DEFAULT_SIZE: int = 10  # used when the caller does not specify a size
    MIN_SIZE: int = 10
    MAX_SIZE: int = 50
diff --git a/ApiLayers/AllConfigs/Templates/password_templates.py b/ApiLayers/AllConfigs/Templates/password_templates.py
new file mode 100644
index 0000000..1f35b31
--- /dev/null
+++ b/ApiLayers/AllConfigs/Templates/password_templates.py
@@ -0,0 +1,243 @@
+import datetime
+
+
def change_your_password_template(**kwargs):
    """Build the password-reset e-mail body.

    Expected kwargs:
        user_name: display name of the recipient.
        forgot_link: URL the user must visit to reset the password.

    Returns:
        str: a self-contained HTML document.
    """
    user_name = kwargs["user_name"]
    forgot_link = kwargs["forgot_link"]
    current_year = str(datetime.datetime.now().year)
    # NOTE(review): the original template's markup was corrupted and its
    # ``%s`` placeholders no longer matched the 3-element argument tuple
    # (``%`` formatting would raise TypeError); rebuilt as minimal valid HTML
    # with one placeholder per argument, preserving the original wording.
    template = """\
<!DOCTYPE html>
<html>
  <head><meta charset="utf-8"><title>Reset Your Password</title></head>
  <body>
    <p>Dear %s,</p>
    <p>We have received a request to reset your password for your account with
       Let's Program Blog. To complete the password reset process, please click
       on the button below:</p>
    <p>Please note that this link is only valid for a day only. If you did not
       request a password reset, please disregard this message.</p>
    <p><a href="%s">Reset Password</a></p>
    <p>&copy; %s</p>
  </body>
</html>
""" % (
        user_name,
        forgot_link,
        current_year,
    )

    return template
+
+
def password_is_changed_template(**kwargs):
    """Build the 'your password was changed' notification e-mail body.

    Expected kwargs:
        user_name: display name of the recipient.

    Returns:
        str: a self-contained HTML document.
    """
    user_name = kwargs["user_name"]
    current_year = str(datetime.datetime.now().year)
    # NOTE(review): original markup was corrupted; rebuilt as minimal valid
    # HTML with a placeholder per argument, preserving the original wording.
    template = """\
<!DOCTYPE html>
<html>
  <head><meta charset="utf-8"><title>Thank You for Changing Your Password</title></head>
  <body>
    <p>Dear %s,</p>
    <p>We wanted to let you know that your password has been successfully updated.
       If you did not make this change or if you believe an unauthorized person
       has accessed your account, please contact our support team immediately.</p>
    <p>Thank you for helping us keep your account secure.</p>
    <p>&copy; %s</p>
  </body>
</html>
""" % (
        user_name,
        current_year,
    )

    return template
+
+
def invalid_ip_or_address_found(**kwargs):
    """Build the 'unusual login attempt' warning e-mail body.

    Expected kwargs:
        user_name: display name of the recipient.
        address: mapping with "city", "zip", "country", "region" and
            "regionName" keys (shape of an ip-api.com style geo lookup —
            TODO confirm against the caller).
        notice_link: URL the user can open to report the login as not theirs.

    Returns:
        str: a self-contained HTML document.
    """
    user_name = kwargs["user_name"]
    current_year = str(datetime.datetime.now().year)
    address = kwargs.get("address")
    # NOTE(review): original markup was corrupted and its ``%s`` placeholders
    # no longer matched the 8-element argument tuple; rebuilt as minimal valid
    # HTML with one placeholder per argument, in the original argument order.
    template = """\
<!DOCTYPE html>
<html>
  <head><meta charset="utf-8"><title>Unusual Login Attempt</title></head>
  <body>
    <p>Dear %s,</p>
    <p>We wanted to let you know that an unusual login attempt has been tried
       from the address below. If you have logged in from the address below,
       please ignore this message.</p>
    <p>Thank you for helping us keep your account secure.</p>
    <p>Address of ip attempt</p>
    <p>City : %s</p>
    <p>Zip Code : %s</p>
    <p>Country : %s</p>
    <p>Region : %s</p>
    <p>Region Name : %s</p>
    <p>If you did not log in from this address, please let us know by clicking
       the link below.</p>
    <p><a href="%s">Reset Password</a></p>
    <p>&copy; %s</p>
  </body>
</html>
""" % (
        user_name,
        address["city"],
        address["zip"],
        address["country"],
        address["region"],
        address["regionName"],
        kwargs["notice_link"],
        current_year,
    )
    return template
diff --git a/ApiLayers/AllConfigs/Token/config.py b/ApiLayers/AllConfigs/Token/config.py
new file mode 100644
index 0000000..9793055
--- /dev/null
+++ b/ApiLayers/AllConfigs/Token/config.py
@@ -0,0 +1,27 @@
+import datetime
+
+
class Auth:
    """Token/session constants: tag names, signing secrets and lifetimes.

    NOTE(review): the SECRET_KEY_* values are hard-coded in source; they
    should live in environment variables or a secret store, and rotating
    them currently requires a code change.
    """

    # presumably the corporate e-mail domain granted special access — confirm usage
    ACCESS_EMAIL_EXT = "evyos.com.tr"
    # Cookie/header tag names carrying the session tokens.
    ACCESS_TOKEN_TAG = "evyos-session-key"
    REFRESHER_TOKEN_TAG = "eys-session-refresher"
    SECRET_KEY_72 = (
        "t3sUAmjTGeTgDc6dAUrB41u2SNg0ZHzj4HTjem95y3fRH1nZXOHIBj163kib6iLybT0gLaxq"
    )
    SECRET_KEY_96 = "7ct8VpiwaP1hR2bVSet4dEEAgepuTZUOnO1QxOgKyDqBR2PkqNhcubSrbUUigQKoQA1PBoeeQn5ZCo24pESmVtKs76nA4EKq"
    SECRET_KEY_144 = (
        "R2p5Rq6KCr6PCfjFYUeH1keF2VWHFEuqINVjBGGnvRA2m10pYUKqfOtIGBcaj2v5wZmElDndzSHGOS7roQsoTelPSok0"
        + "qqMucurMWE0FGexGpFuJkfPEm9tH2OjMOqegvEetpSVywH0W4Kh4"
    )

    ALGORITHM = "HS256"
    # Byte lengths fed to token generation for the two token kinds.
    ACCESS_TOKEN_LENGTH: int = 90
    REFRESHER_TOKEN_LENGTH: int = 144
    PASSWORD_EXPIRE_DAY = datetime.timedelta(days=30)
    # Pre-built expiry spans used when stamping tokens.
    TOKEN_EXPIRE_MINUTES_1 = datetime.timedelta(minutes=1)
    TOKEN_EXPIRE_MINUTES_15 = datetime.timedelta(minutes=15)
    TOKEN_EXPIRE_MINUTES_30 = datetime.timedelta(minutes=30)
    TOKEN_EXPIRE_DAY_1 = datetime.timedelta(days=1)
    TOKEN_EXPIRE_DAY_5 = datetime.timedelta(days=5)
    TOKEN_EXPIRE_DAY_15 = datetime.timedelta(days=15)
    TOKEN_EXPIRE_DAY_30 = datetime.timedelta(days=30)
diff --git a/ApiLayers/AllConfigs/__init__.py b/ApiLayers/AllConfigs/__init__.py
new file mode 100644
index 0000000..7712fd9
--- /dev/null
+++ b/ApiLayers/AllConfigs/__init__.py
@@ -0,0 +1,3 @@
+from .main import HostConfig
+
+__all__ = ["HostConfig"]
diff --git a/ApiLayers/AllConfigs/main.py b/ApiLayers/AllConfigs/main.py
new file mode 100644
index 0000000..80d7281
--- /dev/null
+++ b/ApiLayers/AllConfigs/main.py
@@ -0,0 +1,31 @@
class HostConfig:
    """Base host addresses shared by all service configs."""

    MAIN_HOST = "10.10.2.36"  # http://10.10.2.36
    EMAIL_HOST = "10.10.2.34"  # http://10.10.2.34
+
+
class MainConfig:
    """Application identity, datetime formatting and timezone settings."""

    APP_NAME = "evyos-web-api-gateway"
    TITLE = "WAG API Web Api Gateway"
    # Fixed grammar of the user-facing description ("is serves as" -> "serves as").
    DESCRIPTION = "This api serves as web api gateway only to evyos web services."
    APP_URL = "https://www.wag.eys.gen.tr"

    DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss Z"
    DATETIME_FORMAT_JS = "YYYY-MM-DD HH:mm:ss +0"

    # Timezone Configuration
    DEFAULT_TIMEZONE = "GMT+3"  # Default timezone for the application
    SYSTEM_TIMEZONE = "GMT+0"  # System timezone (used for internal operations)
    SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"]  # List of supported timezones
+
+
class LanguageConfig:
    """Supported content languages and the fallback default."""

    SUPPORTED_LANGUAGES = ["en", "tr"]
    DEFAULT_LANGUAGE = "tr"
+
+
class ValidationsConfig:
    """Supported request-validation modes and the default mode."""

    SUPPORTED_VALIDATIONS = ["header", "validation", "all"]
    DEFAULT_VALIDATION = "all"
diff --git a/ApiLayers/ApiLibrary/__init__.py b/ApiLayers/ApiLibrary/__init__.py
new file mode 100644
index 0000000..b64093b
--- /dev/null
+++ b/ApiLayers/ApiLibrary/__init__.py
@@ -0,0 +1,19 @@
+from ApiLayers.ApiLibrary.date_time_actions.date_functions import (
+ DateTimeLocal,
+ system_arrow,
+ client_arrow,
+)
+from ApiLayers.ApiLibrary.extensions.select import (
+ SelectActionWithEmployee,
+ SelectAction,
+)
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+
+__all__ = [
+ "DateTimeLocal",
+ "system_arrow",
+ "client_arrow",
+ "get_line_number_for_error",
+ "SelectActionWithEmployee",
+ "SelectAction",
+]
diff --git a/ApiLayers/ApiLibrary/common/line_number.py b/ApiLayers/ApiLibrary/common/line_number.py
new file mode 100644
index 0000000..383b830
--- /dev/null
+++ b/ApiLayers/ApiLibrary/common/line_number.py
@@ -0,0 +1,14 @@
+"""Utility functions for getting line numbers and file locations."""
+
+from inspect import currentframe, getframeinfo, stack
+
+
def get_line_number_for_error() -> str:
    """Return ``"<filename> | <line>"`` identifying the call site.

    Intended for tagging error messages with their origin location.
    """
    # stack()[1] is the immediate caller's FrameInfo; resolve its raw frame
    # into filename/line-number details.
    caller_frame = stack()[1].frame
    location = getframeinfo(caller_frame)
    return f"{location.filename} | {location.lineno}"
diff --git a/ApiLayers/ApiLibrary/date_time_actions/date_functions.py b/ApiLayers/ApiLibrary/date_time_actions/date_functions.py
new file mode 100644
index 0000000..3071d23
--- /dev/null
+++ b/ApiLayers/ApiLibrary/date_time_actions/date_functions.py
@@ -0,0 +1,117 @@
+import arrow
+import calendar
+
+from ApiLayers.AllConfigs.main import MainConfig as Config
+
+
class DateTimeLocal:
    """Arrow-based datetime helper pinned to one configured timezone.

    ``is_client=True`` uses the client-facing default timezone; otherwise the
    internal system timezone is used.  All returned values are arrow objects
    converted to ``self.timezone`` unless stated otherwise.
    """

    def __init__(self, timezone: str = None, is_client: bool = True):
        # Reject anything outside the small supported set early.
        if timezone and timezone not in Config.SUPPORTED_TIMEZONES:
            raise ValueError(
                f"Unsupported timezone: {timezone}. Must be one of {Config.SUPPORTED_TIMEZONES}"
            )

        self.timezone = Config.SYSTEM_TIMEZONE
        if is_client:
            # NOTE(review): the "-"->"+" replace suggests callers may send
            # "GMT-3"-style names; the supported values contain no "-" — confirm.
            self.timezone = (timezone or Config.DEFAULT_TIMEZONE).replace("-", "+")

    def find_last_day_of_month(self, date_value):
        # 23:59:59 on the calendar-last day of the month containing date_value.
        today = self.get(date_value).date()
        _, last_day = calendar.monthrange(today.year, today.month)
        return self.get(today.year, today.month, last_day, 23, 59, 59).to(self.timezone)

    def find_first_day_of_month(self, date_value):
        # Midnight on the 1st of the month containing date_value.
        today = self.get(date_value).date()
        return self.get(today.year, today.month, 1).to(self.timezone)

    def get(self, *args):
        # Thin wrapper over arrow.get that normalises to self.timezone.
        return arrow.get(*args).to(str(self.timezone))

    def now(self):
        return arrow.now().to(str(self.timezone))

    def shift(self, date, **kwargs):
        # e.g. shift(date, days=1, hours=-2)
        return self.get(date).shift(**kwargs)

    def date(self, date):
        return self.get(date).date()

    def time(self, date):
        return self.get(date).time()

    def string_date(self, date, splitter: str = "-"):
        # ISO date with a custom separator, e.g. "2024/01/31" for splitter="/".
        return str(self.get(date).date()).replace("-", splitter)

    def string_time_only(self, date):
        return self.get(date).format("HH:mm:ss")

    def string_date_only(self, date):
        return self.get(date).format("YYYY-MM-DD")

    def to_timestamp(self, date):
        """Convert datetime to UTC timestamp"""
        return self.get(date).timestamp()

    def from_timestamp(self, timestamp):
        """Convert timestamp to timezone-aware datetime"""
        return arrow.get(timestamp).to(str(self.timezone))

    def is_timezone_aware(self, date):
        """Check if a date is timezone-aware"""
        # NOTE(review): arrow objects carry a tzinfo after .to(), so this
        # presumably always returns True — confirm the intended use.
        return self.get(date).tzinfo is not None

    def standardize_timezone(self, date):
        """Ensure date is in the correct timezone"""
        if not self.is_timezone_aware(date):
            return self.get(date).to(str(self.timezone))
        return self.get(date)

    def get_expiry_time(self, **kwargs):
        """Get future time for cache expiry
        Example: get_expiry_time(hours=1, minutes=30)
        """
        return self.now().shift(**kwargs)

    def is_expired(self, timestamp):
        """Check if a timestamp is expired"""
        # A falsy timestamp (None/0) is treated as already expired.
        if not timestamp:
            return True
        return self.from_timestamp(timestamp) < self.now()

    def get_cache_key(self, base_key, *args):
        """Generate a cache key with timezone info
        Example: get_cache_key('user_profile', user_id, 'details')
        """
        components = [str(base_key)]
        components.extend(str(arg) for arg in args)
        # Suffix with the timezone so entries differ per timezone context.
        components.append(f"tz_{self.timezone}")
        return ":".join(components)

    def format_for_db(self, date):
        """Format date for database storage"""
        return self.get(date).format("YYYY-MM-DD HH:mm:ss.SSSZZ")

    def parse_from_db(self, date_str):
        """Parse date from database format"""
        if not date_str:
            return None
        return self.get(date_str)

    def get_day_boundaries(self, date=None):
        """Get start and end of day in current timezone"""
        dt = self.get(date) if date else self.now()
        start = dt.floor("day")
        end = dt.ceil("day")
        return start, end

    def get_month_boundaries(self, date=None):
        """Get start and end of month in current timezone"""
        dt = self.get(date) if date else self.now()
        start = dt.floor("month")
        end = dt.ceil("month")
        return start, end
+
+
# Shared module-level singletons: ``client_arrow`` uses the client default
# timezone, ``system_arrow`` the internal system timezone.
client_arrow = DateTimeLocal(is_client=True)
system_arrow = DateTimeLocal(is_client=False)
diff --git a/ApiLayers/ApiLibrary/extensions/select.py b/ApiLayers/ApiLibrary/extensions/select.py
new file mode 100644
index 0000000..b94da17
--- /dev/null
+++ b/ApiLayers/ApiLibrary/extensions/select.py
@@ -0,0 +1,76 @@
class SelectorsBase:
    @classmethod
    def add_confirmed_filter(cls, first_table, second_table) -> tuple:
        """Return filter clauses restricting both tables to rows that are
        active, confirmed and not deleted.

        The ``== True`` / ``== False`` comparisons are intentional: they build
        column comparison expressions (presumably SQLAlchemy — confirm), so
        ``is True`` must NOT be used here.
        """
        return (
            first_table.active == True,
            first_table.is_confirmed == True,
            first_table.deleted == False,
            second_table.active == True,
            second_table.is_confirmed == True,
            second_table.deleted == False,
        )
+
+
class SelectActionWithEmployee:
    """Mixin adding an employee-scoped selection query.

    Expects the host class to provide ``session``, ``query``, ``id`` and
    ``__many__table__`` (an association table with ``member_id`` and
    ``employee_id`` columns) — presumably a SQLAlchemy declarative model.
    """

    @classmethod
    def select_action(cls, employee_id, filter_expr: list = None):
        """Return a query over rows linked to *employee_id*.

        Args:
            employee_id: matched against ``__many__table__.employee_id``.
            filter_expr: optional extra filter clauses ANDed into the join.
        """
        # The original duplicated the whole query in both branches; fold them
        # into one query with an optional extra clause tuple (same order:
        # employee clause, extras, confirmed-filter clauses).
        extra = tuple(filter_expr) if filter_expr is not None else ()
        data = (
            cls.session.query(cls.id)
            .select_from(cls)
            .join(cls.__many__table__, cls.__many__table__.member_id == cls.id)
            .filter(
                cls.__many__table__.employee_id == employee_id,
                *extra,
                *SelectorsBase.add_confirmed_filter(
                    first_table=cls, second_table=cls.__many__table__
                ),
            )
        )
        return cls.query.filter(cls.id.in_([comp[0] for comp in data.all()]))
+
+
class SelectAction:
    """Mixin adding a duty-scoped selection query.

    Expects the host class to provide ``session``, ``query``, ``id`` and
    ``__many__table__`` (an association table with ``member_id`` and
    ``duties_id`` columns) — presumably a SQLAlchemy declarative model.
    """

    @classmethod
    def select_action(cls, duty_id_list: list, filter_expr: list = None):
        """Return a query over rows linked to any duty in *duty_id_list*.

        Args:
            duty_id_list: ids matched against ``__many__table__.duties_id``.
            filter_expr: optional extra filter clauses ANDed into the join.
        """
        # The original duplicated the whole query in both branches; fold them
        # into one query with an optional extra clause tuple (same order:
        # duties clause, confirmed-filter clauses, extras).
        extra = tuple(filter_expr) if filter_expr is not None else ()
        data = (
            cls.session.query(cls.id)
            .select_from(cls)
            .join(cls.__many__table__, cls.__many__table__.member_id == cls.id)
            .filter(
                cls.__many__table__.duties_id.in_(duty_id_list),
                *SelectorsBase.add_confirmed_filter(
                    first_table=cls, second_table=cls.__many__table__
                ),
                *extra,
            )
        )
        return cls.query.filter(cls.id.in_([comp[0] for comp in data.all()]))
diff --git a/ApiLayers/ApiLibrary/token/password_module.py b/ApiLayers/ApiLibrary/token/password_module.py
new file mode 100644
index 0000000..c6367d3
--- /dev/null
+++ b/ApiLayers/ApiLibrary/token/password_module.py
@@ -0,0 +1,43 @@
+import hashlib
+import uuid
+import secrets
+import random
+
+from ApiLayers.AllConfigs.Token.config import Auth
+
+
class PasswordModule:
    """Helpers for random ids, opaque tokens and password hashing."""

    @staticmethod
    def generate_random_uu_id(str_std: bool = True):
        """Return a random UUID4: ``str`` by default, ``UUID`` if str_std is False."""
        return str(uuid.uuid4()) if str_std else uuid.uuid4()

    @staticmethod
    def generate_token(length=32) -> str:
        """Return a random, letters-only token.

        A ``secrets.token_urlsafe`` string is generated first, then every
        non-letter character is replaced with a random letter so the result
        is purely alphabetic.
        """
        letters = "abcdefghijklmnopqrstuvwxyz"
        letters += letters.upper()
        token_generated = secrets.token_urlsafe(length)
        # Security fix: use ``secrets.choice`` (CSPRNG) for the replacement
        # letters instead of ``random.choice`` — the ``random`` module is not
        # suitable for security-sensitive tokens.
        for char in token_generated:
            if char not in letters:
                token_generated = token_generated.replace(
                    char, secrets.choice(letters), 1
                )
        return token_generated

    @staticmethod
    def generate_access_token() -> str:
        """Return a URL-safe access token of the configured byte length."""
        return secrets.token_urlsafe(Auth.ACCESS_TOKEN_LENGTH)

    @staticmethod
    def generate_refresher_token() -> str:
        """Return a URL-safe refresher token of the configured byte length."""
        return secrets.token_urlsafe(Auth.REFRESHER_TOKEN_LENGTH)

    @staticmethod
    def create_hashed_password(domain: str, id_: str, password: str) -> str:
        """Return the SHA-256 hex digest of ``"<domain>:<id_>:<password>"``.

        NOTE(review): unsalted single-round SHA-256 is weak for password
        storage; a KDF (e.g. ``hashlib.pbkdf2_hmac``) would be stronger, but
        stored hashes depend on this exact scheme, so it is kept as-is.
        """
        return hashlib.sha256(f"{domain}:{id_}:{password}".encode("utf-8")).hexdigest()

    @classmethod
    def check_password(cls, domain, id_, password, password_hashed) -> bool:
        """Return True when the credentials hash to ``password_hashed``."""
        return cls.create_hashed_password(domain, id_, password) == password_hashed
diff --git a/ApiLayers/ApiServices/Cluster/create_router.py b/ApiLayers/ApiServices/Cluster/create_router.py
new file mode 100644
index 0000000..2eaacd6
--- /dev/null
+++ b/ApiLayers/ApiServices/Cluster/create_router.py
@@ -0,0 +1,39 @@
+from fastapi import APIRouter
+import uuid
+from Events.Engine.abstract_class import CategoryCluster, MethodToEvent
+
+
class CreateRouterFromCluster:
    """Thin factory wrapping a FastAPI ``APIRouter``.

    Keyword args:
        prefix: URL prefix applied to every route on the router.
        tags: OpenAPI tags for the router.
        include_in_schema: whether routes appear in the OpenAPI schema
            (defaults to True).
    """

    def __init__(self, **kwargs):
        self.prefix = kwargs.get("prefix")
        self.tags = kwargs.get("tags")
        self.include_in_schema = bool(kwargs.get("include_in_schema", True))
        self.router = APIRouter(
            prefix=self.prefix, tags=self.tags, include_in_schema=self.include_in_schema
        )
+
+
class CreateEndpointFromCluster:
    """Registers a cluster method definition as a route on a router."""

    def __init__(self, **kwargs):
        self.router: CategoryCluster = kwargs.get("router")
        self.method_endpoint: MethodToEvent = kwargs.get("method_endpoint")
        self.attach_router()

    def attach_router(self):
        """Bind the endpoint callable to the router using the endpoint's
        declared HTTP method, path and metadata."""
        endpoint = self.method_endpoint
        # Resolve the router decorator for the declared HTTP verb (get/post/...).
        register = getattr(self.router, endpoint.METHOD.lower())

        route_kwargs = {
            "path": endpoint.URL,
            "summary": endpoint.SUMMARY,
            "description": endpoint.DESCRIPTION,
        }
        # Attach a response model only when one is declared and non-None.
        response_model = getattr(endpoint, "RESPONSE_MODEL", None)
        if response_model is not None:
            route_kwargs["response_model"] = response_model

        register(**route_kwargs)(endpoint.endpoint_callable)
diff --git a/ApiLayers/ApiServices/Cluster/handle_cluster.py b/ApiLayers/ApiServices/Cluster/handle_cluster.py
new file mode 100644
index 0000000..865ea1b
--- /dev/null
+++ b/ApiLayers/ApiServices/Cluster/handle_cluster.py
@@ -0,0 +1,4 @@
+from Services.Redis import RedisActions, AccessToken
+from Services.Redis.Models.cluster import RedisList
+
# Module-level RedisList bound to the literal key "test".
# NOTE(review): this runs at import time and looks like leftover scaffolding —
# confirm whether this module/key is still needed.
redis_list = RedisList(redis_key="test")
diff --git a/ApiLayers/ApiServices/Login/user_login_handler.py b/ApiLayers/ApiServices/Login/user_login_handler.py
new file mode 100644
index 0000000..aec97ce
--- /dev/null
+++ b/ApiLayers/ApiServices/Login/user_login_handler.py
@@ -0,0 +1,95 @@
+from ApiLayers.ApiValidations.Request.authentication import Login
+from ApiLayers.ApiLibrary.token.password_module import PasswordModule
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ErrorHandlers import HTTPExceptionApi
+
+
class UserLoginModule:
    """Credential-based login flow: look the user up, verify the password,
    then issue access/refresh tokens via the token service.

    Results are accumulated on the instance and exposed via ``as_dict``.
    """

    def __init__(self, request: "Request"):
        # Incoming request; forwarded to the token service (presumably so it
        # can record client metadata — confirm).
        self.request = request
        # Populated by ``login_user_via_credentials``.
        self.user = None
        self.access_object = None
        self.access_token = None
        self.refresh_token = None

    @property
    def as_dict(self) -> dict:
        """Login result bundle for the response layer."""
        return {
            "user": self.user,
            "access_object": self.access_object,
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
        }

    @staticmethod
    def check_user_exists(access_key: str):
        from ApiLayers.Schemas import Users

        """
        Check if the user exists in the database.
        """
        # ``access_key`` is an e-mail when it contains "@", otherwise it is
        # treated as a phone number (spaces stripped).
        db_session = Users.new_session()  # Check if user exists.
        if "@" in access_key:
            found_user: Users = Users.filter_one(
                Users.email == access_key.lower(), db=db_session
            ).data
        else:
            found_user: Users = Users.filter_one(
                Users.phone_number == access_key.replace(" ", ""), db=db_session
            ).data
        if not found_user:
            # NOTE(review): error language is hard-coded to "en" here but "tr"
            # in login_user_via_credentials — confirm the intended default.
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="check_user_exists: User not found",
            )
        return found_user

    def login_user_via_credentials(self, access_data: "Login") -> None:
        from ApiLayers.ApiServices.Token.token_handler import TokenService
        from ApiLayers.Schemas import Users

        """
        Login the user via the credentials.
        """

        # Get the actual data from the BaseRequestModel if needed
        found_user: Users = self.check_user_exists(access_key=access_data.access_key)
        # A stored hash shorter than 5 characters is treated as "no password set".
        if len(found_user.hash_password) < 5:
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang=found_user.lang,
                loc=get_line_number_for_error(),
                sys_msg="login_user_via_credentials: Invalid password create a password to user first",
            )
        # Check if the password is correct
        if PasswordModule.check_password(
            domain=access_data.domain,
            id_=found_user.uu_id,
            password=access_data.password,
            password_hashed=found_user.hash_password,
        ):
            # Set the access token to the redis
            token_response = TokenService.set_access_token_to_redis(
                request=self.request,
                user=found_user,
                domain=access_data.domain,
                remember=access_data.remember_me,
            )
            # Set the user and token information to the instance
            self.user = found_user.get_dict()
            self.access_token = token_response.get("access_token")
            self.refresh_token = token_response.get("refresh_token")
            self.access_object = {
                "user_type": token_response.get("user_type", None),
                "selection_list": token_response.get("selection_list", {}),
            }
            return None
        # Wrong password (or any other fall-through) yields a generic 400.
        raise HTTPExceptionApi(
            error_code="HTTP_400_BAD_REQUEST",
            lang="tr",
            loc=get_line_number_for_error(),
            sys_msg="login_user_via_credentials: raised an unknown error",
        )
diff --git a/ApiLayers/ApiServices/Token/token_handler.py b/ApiLayers/ApiServices/Token/token_handler.py
new file mode 100644
index 0000000..9618401
--- /dev/null
+++ b/ApiLayers/ApiServices/Token/token_handler.py
@@ -0,0 +1,457 @@
+"""Token service for handling authentication tokens and user sessions."""
+
+from typing import List, Union, TypeVar, Dict, Any, TYPE_CHECKING
+
+import arrow
+
+from ApiLayers.AllConfigs.Token.config import Auth
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ApiLibrary.date_time_actions.date_functions import DateTimeLocal
+from ApiLayers.ApiLibrary.token.password_module import PasswordModule
+from ApiLayers.ErrorHandlers import HTTPExceptionApi
+
+from ApiLayers.ApiValidations.Custom.token_objects import (
+ EmployeeTokenObject,
+ OccupantTokenObject,
+ UserType,
+ CompanyToken,
+ OccupantToken,
+)
+from ApiLayers.Schemas import (
+ Users,
+ BuildLivingSpace,
+ BuildParts,
+ Employees,
+ Addresses,
+ Companies,
+ Staff,
+ Duty,
+ Duties,
+ Departments,
+ OccupantTypes,
+)
+from Services.Redis.Models.response import RedisResponse
+from Services.Redis import RedisActions, AccessToken
+
+
+if TYPE_CHECKING:
+ from fastapi import Request
+
+
+T = TypeVar("T", EmployeeTokenObject, OccupantTokenObject)
+
+
class TokenService:
    """Service class for handling authentication tokens and user sessions.

    Tokens are stored as JSON values in Redis, keyed by an AccessToken key
    pair (access token string + user UUID); the payloads are serialized
    EmployeeTokenObject / OccupantTokenObject models.
    """

    @classmethod
    def _create_access_token(cls, access: bool = True) -> str:
        """Generate a new random token string.

        With access=True (default) an access token of ACCESS_TOKEN_LENGTH is
        produced; otherwise a refresher token of REFRESHER_TOKEN_LENGTH.
        """
        if not access:
            return PasswordModule.generate_token(Auth.REFRESHER_TOKEN_LENGTH)
        return PasswordModule.generate_token(Auth.ACCESS_TOKEN_LENGTH)

    @classmethod
    def _get_user_tokens(cls, user: Users) -> RedisResponse:
        """Get all tokens for a user from Redis.

        Matches every key ending with this user's UUID via the wildcard
        pattern "*:<user uuid>".
        """
        return RedisActions.get_json(list_keys=[f"*:{str(user.uu_id)}"])

    @classmethod
    def do_employee_login(
        cls, request: "Request", user: Users, domain: str
    ) -> Dict[str, Any]:
        """Handle employee login process and return login information.

        Walks the Employee -> Staff -> Duties -> Duty / Department -> Company
        hierarchy for each employee row of this user, collects company and
        duty id lists, builds an EmployeeTokenObject and stores it in Redis.

        Returns:
            Dict with "access_token", "user_type" and the company
            "selection_list" the client can choose from.

        Raises:
            HTTPExceptionApi: when the token could not be stored.
        """
        # NOTE(review): UsersTokens is imported but never used here — confirm
        # it can be dropped.
        from ApiLayers.Schemas.identity.identity import UsersTokens, People

        db_session = Employees.new_session()
        list_employee = Employees.filter_all(
            Employees.people_id == user.person_id, db=db_session
        ).data

        companies_uu_id_list: List[str] = []
        companies_id_list: List[int] = []
        companies_list: List[Dict[str, Any]] = []
        duty_uu_id_list: List[str] = []
        duty_id_list: List[int] = []

        for employee in list_employee:
            staff = Staff.filter_one(Staff.id == employee.staff_id, db=db_session).data
            if duties := Duties.filter_one(
                Duties.id == staff.duties_id, db=db_session
            ).data:
                if duty_found := Duty.filter_by_one(
                    id=duties.duties_id, db=db_session
                ).data:
                    duty_uu_id_list.append(str(duty_found.uu_id))
                    duty_id_list.append(duty_found.id)

                department = Departments.filter_one(
                    Departments.id == duties.department_id, db=db_session
                ).data

                if company := Companies.filter_one(
                    Companies.id == department.company_id, db=db_session
                ).data:
                    companies_uu_id_list.append(str(company.uu_id))
                    companies_id_list.append(company.id)
                    company_address = Addresses.filter_by_one(
                        id=company.official_address_id, db=db_session
                    ).data
                    # One selectable entry per company the employee belongs to.
                    companies_list.append(
                        {
                            "uu_id": str(company.uu_id),
                            "public_name": company.public_name,
                            "company_type": company.company_type,
                            "company_address": company_address,
                        }
                    )
        person = People.filter_one(People.id == user.person_id, db=db_session).data
        # NOTE(review): lang is hard-coded to "tr" — confirm it should not come
        # from the user record.
        model_value = EmployeeTokenObject(
            domain=domain,
            user_type=UserType.employee.value,
            user_uu_id=str(user.uu_id),
            credentials=user.credentials(),
            user_id=user.id,
            person_id=person.id,
            person_uu_id=str(person.uu_id),
            full_name=person.full_name,
            request=dict(request.headers),
            companies_uu_id_list=companies_uu_id_list,
            companies_id_list=companies_id_list,
            duty_uu_id_list=duty_uu_id_list,
            duty_id_list=duty_id_list,
            timezone=user.local_timezone or "GMT+0",
            lang="tr",
        ).model_dump()
        if access_token := cls.set_object_to_redis(user, model_value):
            return {
                "access_token": access_token,
                "user_type": UserType.employee.name,
                "selection_list": companies_list,
            }
        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Creating Token failed...",
        )

    @classmethod
    def do_occupant_login(
        cls, request: "Request", user: Users, domain: str
    ) -> Dict[str, Any]:
        """Handle occupant login process and return login information.

        Collects every living space of the user's person, groups the occupant
        types per building, builds an OccupantTokenObject and stores it in
        Redis.

        Returns:
            Dict with "access_token", "user_type" and "available_occupants"
            (occupant choices grouped per building).

        Raises:
            HTTPExceptionApi: when the user has no living space, a living
                space has no build part, or the token could not be stored.
        """
        db_session = BuildLivingSpace.new_session()
        living_spaces: list[BuildLivingSpace] = BuildLivingSpace.filter_all(
            BuildLivingSpace.person_id == user.person_id, db=db_session
        ).data
        if not living_spaces:
            raise HTTPExceptionApi(
                error_code="",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="User does not have any living space",
            )

        occupants_selection_dict: Dict[str, Any] = {}
        for living_space in living_spaces:
            build_parts_selection = BuildParts.filter_all(
                BuildParts.id == living_space.build_parts_id,
                db=db_session,
            ).data
            if not build_parts_selection:
                raise HTTPExceptionApi(
                    error_code="",
                    lang="en",
                    loc=get_line_number_for_error(),
                    sys_msg="User does not have any living space",
                )

            # NOTE(review): presumably .get(1) returns the first row of the
            # project's (1-indexed) result collection — confirm.
            build_part = build_parts_selection.get(1)
            build = build_part.buildings
            occupant_type = OccupantTypes.filter_by_one(
                id=living_space.occupant_type,
                db=db_session,
                system=True,
            ).data

            occupant_data = {
                "part_uu_id": str(build_part.uu_id),
                "part_name": build_part.part_name,
                "part_level": build_part.part_level,
                "uu_id": str(occupant_type.uu_id),
                "description": occupant_type.occupant_description,
                "code": occupant_type.occupant_code,
            }

            # Group occupant entries by building UUID.
            build_key = str(build.uu_id)
            if build_key not in occupants_selection_dict:
                occupants_selection_dict[build_key] = {
                    "build_uu_id": build_key,
                    "build_name": build.build_name,
                    "build_no": build.build_no,
                    "occupants": [occupant_data],
                }
            else:
                occupants_selection_dict[build_key]["occupants"].append(occupant_data)

        person = user.person
        model_value = OccupantTokenObject(
            domain=domain,
            user_type=UserType.occupant.value,
            user_uu_id=str(user.uu_id),
            credentials=user.credentials(),
            user_id=user.id,
            person_id=person.id,
            person_uu_id=str(person.uu_id),
            full_name=person.full_name,
            request=dict(request.headers),
            available_occupants=occupants_selection_dict,
            timezone=user.local_timezone or "GMT+0",
            lang="tr",
        ).model_dump()
        if access_token := cls.set_object_to_redis(user, model_value):
            return {
                "access_token": access_token,
                "user_type": UserType.occupant.name,
                "available_occupants": occupants_selection_dict,
            }
        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Creating Token failed...",
        )

    @classmethod
    def set_object_to_redis(cls, user, model: Dict):
        """Create a fresh access token for *user* and store *model* under it.

        Any existing token for the same domain is removed first, so a user
        holds at most one session per domain.

        Returns the new access token string; raises HTTPExceptionApi when the
        Redis write fails.
        """
        access_object = AccessToken(
            userUUID=user.uu_id,
            accessToken=cls._create_access_token(),
        )
        cls.remove_token_with_domain(user=user, domain=model.get("domain"))
        # NOTE(review): if TOKEN_EXPIRE_MINUTES_30 is a timedelta, .seconds
        # drops whole days; total_seconds() may be intended — confirm.
        redis_action = RedisActions.set_json(
            list_keys=access_object.to_list(),
            value=model,
            expires={"seconds": int(Auth.TOKEN_EXPIRE_MINUTES_30.seconds)},
        )
        if redis_action.status:
            return access_object.accessToken
        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Saving Token failed...",
        )

    @classmethod
    def update_object_to_redis(cls, access_token: str, user_uu_id: str, model: Dict):
        """Overwrite the Redis payload of an existing token.

        Unlike set_object_to_redis this keeps the existing token string and
        resets the expiry window. Returns the token string on success; raises
        HTTPExceptionApi when the write fails.
        """
        access_object = AccessToken(
            userUUID=user_uu_id,
            accessToken=access_token,
        )
        redis_action = RedisActions.set_json(
            list_keys=access_object.to_list(),
            value=model,
            expires={"seconds": int(Auth.TOKEN_EXPIRE_MINUTES_30.seconds)},
        )
        if redis_action.status:
            return access_object.accessToken
        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Saving Token failed...",
        )

    @classmethod
    def remove_token_with_domain(cls, user: Users, domain: str) -> None:
        """Remove all tokens for a user with specific domain."""
        redis_rows = cls._get_user_tokens(user)
        for redis_row in redis_rows.all:
            # Only sessions created for the given domain are evicted.
            if redis_row.row.get("domain") == domain:
                RedisActions.delete([redis_row.key])

    @classmethod
    def remove_all_token(cls, user: Users) -> None:
        """Remove all tokens for a user."""
        redis_rows = cls._get_user_tokens(user)
        RedisActions.delete([redis_row.key for redis_row in redis_rows.all])

    @classmethod
    def set_access_token_to_redis(
        cls,
        request: "Request",
        user: Users,
        domain: str,
        remember: bool,
    ) -> Dict[str, Any]:
        """Set access token to redis and handle user session.

        Dispatches to the occupant or employee login flow, then manages the
        "remember me" refresh token in the UsersTokens table: creates or
        rotates it when *remember* is set, deletes any existing one otherwise.

        Returns the login dict (access/refresh token, selections) merged with
        the serialized user record.
        """
        from ApiLayers.AllConfigs.Token.config import Auth
        from ApiLayers.Schemas.identity.identity import UsersTokens, People

        cls.remove_token_with_domain(user=user, domain=domain)
        # Users.client_arrow = DateTimeLocal(is_client=True, timezone=user.local_timezone)
        # NOTE(review): login_dict stays empty when the user is neither
        # occupant nor employee — confirm that case cannot reach this point.
        login_dict, db_session = {}, UsersTokens.new_session()
        if user.is_occupant:  # Handle login based on user type
            login_dict = cls.do_occupant_login(
                request=request, user=user, domain=domain
            )
        elif user.is_employee:
            login_dict = cls.do_employee_login(
                request=request, user=user, domain=domain
            )

        # Handle remember me functionality
        user.remember_me = bool(remember)
        if remember:
            users_token_created = cls._create_access_token(access=False)
            login_dict["refresh_token"] = users_token_created
            users_token = UsersTokens.find_or_create(
                db=db_session,
                user_id=user.id,
                token_type="RememberMe",
                domain=domain,
            )
            if users_token.meta_data.created:
                # Newly created row: store the fresh refresh token.
                # NOTE(review): expires_at is not set on this branch — confirm
                # the model supplies a default expiry.
                users_token.token = users_token_created
                users_token.save(db=db_session)
            else:
                if arrow.now() > arrow.get(
                    str(users_token.expires_at)
                ):  # Check if token is expired
                    # Expired: rotate the token and push the expiry forward.
                    users_token.token = users_token_created
                    users_token.expires_at = str(
                        arrow.now().datetime + Auth.TOKEN_EXPIRE_DAY_1
                    )
                    users_token.save(db=db_session)
                else:
                    # Still valid: reuse the stored refresh token.
                    login_dict["refresh_token"] = users_token.token
        else:
            # Remember-me disabled: drop any stored refresh token for this domain.
            already_refresher = UsersTokens.filter_all(
                UsersTokens.user_id == user.id,
                UsersTokens.token_type == "RememberMe",
                UsersTokens.domain == domain,
                db=db_session,
            )
            if already_refresher.count:
                already_refresher.query.delete(synchronize_session=False)
        user.save(db=db_session)
        return {**login_dict, "user": user.get_dict()}

    @classmethod
    def update_token_at_redis(
        cls, request: "Request", add_payload: Union[CompanyToken, OccupantToken]
    ) -> Dict[str, Any]:
        """Update token at Redis.

        Attaches the selected company (employee) or selected occupant
        (occupant) to the caller's token payload and persists it. The payload
        type must match the token's user type, otherwise an error is raised.
        """
        access_token = cls.get_access_token_from_request(request=request)
        token_object = cls.get_object_via_access_key(access_token=access_token)
        if isinstance(token_object, EmployeeTokenObject) and isinstance(
            add_payload, CompanyToken
        ):
            token_object.selected_company = add_payload
            cls.update_object_to_redis(
                access_token=access_token,
                user_uu_id=token_object.user_uu_id,
                model=token_object.model_dump(),
            )
            return token_object.selected_company.model_dump()
        elif isinstance(token_object, OccupantTokenObject) and isinstance(
            add_payload, OccupantToken
        ):
            token_object.selected_occupant = add_payload
            cls.update_object_to_redis(
                access_token=access_token,
                user_uu_id=token_object.user_uu_id,
                model=token_object.model_dump(),
            )
            return token_object.selected_occupant.model_dump()
        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Token not found",
        )

    @classmethod
    def raise_error_if_request_has_no_token(cls, request: "Request") -> None:
        """Validate request has required token headers."""
        if not hasattr(request, "headers"):
            raise HTTPExceptionApi(
                error_code="",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="Request has no headers",
            )
        if not request.headers.get(Auth.ACCESS_TOKEN_TAG):
            raise HTTPExceptionApi(
                error_code="",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="Request has no access token presented",
            )

    @classmethod
    def access_token_is_valid(cls, request: "Request") -> bool:
        """Check if access token in request is valid (i.e. present in Redis)."""
        access_token = cls.get_access_token_from_request(request=request)
        return RedisActions.get_json(
            list_keys=AccessToken(accessToken=access_token).to_list()
        ).status

    @classmethod
    def get_access_token_from_request(cls, request: "Request") -> str:
        """Extract access token from request headers (raises when missing)."""
        cls.raise_error_if_request_has_no_token(request=request)
        return request.headers.get(Auth.ACCESS_TOKEN_TAG)

    @classmethod
    def _process_redis_object(cls, redis_object: Dict[str, Any]) -> T:
        """Process Redis object and return appropriate token object.

        Normalizes absent selections to None, then rebuilds the concrete
        token model according to the stored user_type.
        """
        if not redis_object.get("selected_company"):
            redis_object["selected_company"] = None
        if not redis_object.get("selected_occupant"):
            redis_object["selected_occupant"] = None
        if redis_object.get("user_type") == UserType.employee.value:
            return EmployeeTokenObject(**redis_object)
        elif redis_object.get("user_type") == UserType.occupant.value:
            return OccupantTokenObject(**redis_object)
        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Unknown user type",
        )

    @classmethod
    def get_object_via_access_key(cls, access_token: str) -> T:
        """Get token object using access key.

        NOTE(review): when redis_response.status is truthy but .first is
        falsy this method implicitly returns None, unlike its annotation —
        confirm whether that can happen and should raise instead.
        """
        access_token_obj = AccessToken(accessToken=access_token)
        redis_response = RedisActions.get_json(list_keys=access_token_obj.to_list())
        if not redis_response.status:
            raise HTTPExceptionApi(
                error_code="",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="Access token token is not found or unable to retrieve",
            )
        if redis_object := redis_response.first:
            access_token_obj.userUUID = redis_object.get("user_uu_id")
            return cls._process_redis_object(redis_object)

    @classmethod
    def get_object_via_user_uu_id(cls, user_id: str) -> T:
        """Get token object using user UUID.

        NOTE(review): accesses .first.row (not .first as above) without a
        status check, so an empty response would raise AttributeError before
        the HTTPExceptionApi below — confirm.
        """
        access_token = AccessToken(userUUID=user_id)
        redis_response = RedisActions.get_json(list_keys=access_token.to_list())

        if redis_object := redis_response.first.row:
            access_token.userUUID = redis_object.get("user_uu_id")
            return cls._process_redis_object(redis_object)

        raise HTTPExceptionApi(
            error_code="",
            lang="en",
            loc=get_line_number_for_error(),
            sys_msg="Invalid access token",
        )
diff --git a/ApiLayers/ApiServices/__init__.py b/ApiLayers/ApiServices/__init__.py
new file mode 100644
index 0000000..7dec7ae
--- /dev/null
+++ b/ApiLayers/ApiServices/__init__.py
@@ -0,0 +1,5 @@
+from ApiLayers.ApiServices.Token.token_handler import TokenService
+
+__all__ = [
+ "TokenService",
+]
diff --git a/ApiLayers/ApiValidations/Custom/__init__.py b/ApiLayers/ApiValidations/Custom/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/ApiValidations/Custom/token_objects.py b/ApiLayers/ApiValidations/Custom/token_objects.py
new file mode 100644
index 0000000..3bf3d40
--- /dev/null
+++ b/ApiLayers/ApiValidations/Custom/token_objects.py
@@ -0,0 +1,119 @@
+import enum
+from typing import Optional, List, Any
+from pydantic import BaseModel
+
+
+# Company / Priority / Department / Duty / Employee / Occupant / Module / Endpoint are changeable dynamics
+
+
class UserType(enum.Enum):
    """Kind of authenticated user a token represents.

    The numeric values are persisted in token payloads, so they must remain
    stable: employee == 1, occupant == 2.
    """

    employee = enum.auto()  # auto() starts at 1
    occupant = enum.auto()  # == 2
+
+
class Credentials(BaseModel):
    """Minimal person identity pair (id + name) carried inside token payloads."""

    person_id: int
    person_name: str
+
+
class ApplicationToken(BaseModel):
    """Base token payload shared by all user types.

    Holds the identity of the logged-in user/person plus request metadata;
    EmployeeTokenObject and OccupantTokenObject extend it with type-specific
    selections.
    """

    domain: Optional[str] = "app.evyos.com.tr"
    lang: Optional[str] = "TR"
    timezone: Optional[str] = "GMT+3"

    user_type: int = UserType.occupant.value
    # Fix: the field is nullable, so declare it Optional[dict] instead of the
    # original `dict = None`, which paired a non-optional annotation with an
    # invalid (unvalidated) None default.
    credentials: Optional[dict] = None

    user_uu_id: str
    user_id: int

    person_id: int
    person_uu_id: str
    full_name: Optional[str] = None

    request: Optional[dict] = None  # Request info of client (headers dict)
    expires_at: Optional[float] = None  # Expiry timestamp
+
+
class OccupantToken(BaseModel):
    """The occupant-type-per-build-part selection made by the user after login."""

    living_space_id: int  # Internal use
    living_space_uu_id: str  # Outer use

    occupant_type_id: int
    occupant_type_uu_id: str
    occupant_type: str

    build_id: int
    build_uuid: str
    build_part_id: int
    build_part_uuid: str

    # Management company / employee responsible for the building, if any.
    responsible_company_id: Optional[int] = None
    responsible_company_uuid: Optional[str] = None
    responsible_employee_id: Optional[int] = None
    responsible_employee_uuid: Optional[str] = None

    reachable_event_codes: Optional[list[str]] = None  # ID list of reachable modules
+
+
class CompanyToken(BaseModel):
    """The company selection made by an employee user after login.

    Pins the full Employee -> Staff -> Duty -> Department -> Company chain
    for the chosen company.
    """

    company_id: int
    company_uu_id: str

    department_id: int  # ID list of departments
    department_uu_id: str  # ID list of departments

    duty_id: int
    duty_uu_id: str

    staff_id: int
    staff_uu_id: str

    employee_id: int
    employee_uu_id: str

    bulk_duties_id: int

    reachable_event_codes: Optional[list[str]] = None  # ID list of reachable modules
+
+
class OccupantTokenObject(ApplicationToken):
    """Token payload for an occupant user.

    Carries the occupant choices available per building; the user then picks
    one, which is stored in selected_occupant.
    """

    # Fix: nullable field — declare Optional[dict] instead of the original
    # `dict = None` (non-optional annotation with an invalid None default).
    available_occupants: Optional[dict] = None
    selected_occupant: Optional[OccupantToken] = None  # Selected Occupant Type

    @property
    def is_employee(self) -> bool:
        """Always False for occupant tokens."""
        return False

    @property
    def is_occupant(self) -> bool:
        """Always True for occupant tokens."""
        return True
+
+
class EmployeeTokenObject(ApplicationToken):
    """Token payload for an employee user.

    Full hierarchy: Employee[staff_id] -> Staff -> Duty -> Department ->
    Company. Holds the company/duty id lists the employee may select from.
    """

    companies_id_list: List[int]  # List of company objects
    companies_uu_id_list: List[str]  # List of company objects

    duty_id_list: List[int]  # List of duty objects
    duty_uu_id_list: List[str]  # List of duty objects

    selected_company: Optional[CompanyToken] = None  # Selected Company Object

    @property
    def is_employee(self) -> bool:
        # Counterpart of OccupantTokenObject.is_employee.
        return True

    @property
    def is_occupant(self) -> bool:
        return False
diff --git a/ApiLayers/ApiValidations/Custom/validation_response.py b/ApiLayers/ApiValidations/Custom/validation_response.py
new file mode 100644
index 0000000..82e6109
--- /dev/null
+++ b/ApiLayers/ApiValidations/Custom/validation_response.py
@@ -0,0 +1,83 @@
+import json
+from typing import Any, ClassVar, TypeVar, Dict, Tuple, List
+from pydantic import BaseModel
+
+from ErrorHandlers import HTTPExceptionApi
+from ApiLibrary.common.line_number import get_line_number_for_error
+from ApiValidations.Request.base_validations import CrudRecords, PydanticBaseModel
+
+
class ValidationParser:
    """Derive a flat {field: {type, required, default}} schema from a pydantic model."""

    def __init__(self, active_validation: BaseModel):
        self.core_validation = active_validation
        self.annotations = active_validation.model_json_schema()
        # JSON round-trip: yields a plain, deeply-copied dict of the schema.
        self.annotations = json.loads(json.dumps(self.annotations))
        self.schema = {}
        self.parse()

    def parse(self):
        """Populate self.schema from the model's JSON-schema properties."""
        # Local import — presumably avoids a circular import at load time.
        from ApiValidations.Request.base_validations import (
            CrudRecords,
            PydanticBaseModel,
        )

        properties = dict(self.annotations.get("properties")).items()
        # Merge the concrete model's annotations with the shared base models'.
        total_class_annotations = {
            **self.core_validation.__annotations__,
            **PydanticBaseModel.__annotations__,
            **CrudRecords.__annotations__,
        }
        for key, value in properties:
            default, required, possible_types = (
                dict(value).get("default", None),
                True,
                [],
            )
            # "anyOf" marks an Optional/union field: keep the non-null types.
            if dict(value).get("anyOf", None):
                for _ in dict(value).get("anyOf") or []:
                    type_opt = json.loads(json.dumps(_))
                    if not type_opt.get("type") == "null":
                        possible_types.append(type_opt.get("type"))
                field_type = possible_types[0]
                required = False
            else:
                field_type = dict(value).get("type", "string")
            attribute_of_class = total_class_annotations.get(key, None)
            aoc = str(attribute_of_class) if attribute_of_class else None
            # NOTE(review): the `""` entries and `aoc == ""` tests below can
            # never match (aoc is either None or a non-empty string), so
            # `required` always becomes False in these branches — confirm the
            # intended semantics before simplifying.
            if attribute_of_class:
                if aoc in ("", "typing.Optional[str]"):
                    field_type, required = "string", aoc == ""
                elif aoc in ("", "typing.Optional[int]"):
                    field_type, required = "integer", aoc == ""
                elif aoc in ("", "typing.Optional[bool]"):
                    field_type, required = "boolean", aoc == ""
                elif aoc in ("", "typing.Optional[float]"):
                    field_type, required = "float", aoc == ""
                elif aoc in (
                    "",
                    "typing.Optional[datetime.datetime]",
                ):
                    field_type, required = (
                        "datetime",
                        aoc == "",
                    )
            self.schema[key] = {
                "type": field_type,
                "required": required,
                "default": default,
            }
+
+
class ValidationModel:
    """Bundle a response model with its parsed validation schema and headers."""

    def __init__(self, response_model: BaseModel, language_model, language_models):
        self.response_model = response_model
        self.validation = None
        # NOTE(review): headers is set to language_model here but immediately
        # overwritten with language_models in get_validation() — confirm which
        # one is intended.
        self.headers = language_model
        self.language_models = language_models
        self.get_validation()

    def get_validation(self) -> Tuple:
        # NOTE(review): annotated -> Tuple but returns None; it only mutates
        # self.headers and self.validation.
        self.headers = self.language_models
        self.validation = ValidationParser(self.response_model).schema
diff --git a/ApiLayers/ApiValidations/Custom/wrapper_contexts.py b/ApiLayers/ApiValidations/Custom/wrapper_contexts.py
new file mode 100644
index 0000000..f453931
--- /dev/null
+++ b/ApiLayers/ApiValidations/Custom/wrapper_contexts.py
@@ -0,0 +1,28 @@
+from typing import Optional, Any
+from pydantic import BaseModel
+
+
# Empty marker base class; concrete wrapper contexts subclass it.
class DefaultContext(BaseModel): ...
+
+
class EventContext(DefaultContext):
    """Context passed to event-endpoint wrappers: auth payload, event code, URL."""

    auth: Any
    code: str
    url: str
    request: Optional[Any] = None

    @property
    def base(self) -> dict[str, Any]:
        """Minimal identifying info (url + event code) for logging/errors."""
        return {"url": self.url, "code": self.code}
+
+
class AuthContext(DefaultContext):
    """Context passed to auth-endpoint wrappers; like EventContext but without a code."""

    auth: Any
    url: str
    request: Optional[Any] = None

    @property
    def base(self) -> dict[str, Any]:
        """Minimal identifying info (url only) for logging/errors."""
        return {"url": self.url}
diff --git a/ApiLayers/ApiValidations/Queries/__init__.py b/ApiLayers/ApiValidations/Queries/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/ApiValidations/Queries/account.py b/ApiLayers/ApiValidations/Queries/account.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/ApiValidations/Queries/address.py b/ApiLayers/ApiValidations/Queries/address.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/ApiValidations/Request/__init__.py b/ApiLayers/ApiValidations/Request/__init__.py
new file mode 100644
index 0000000..d312c43
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/__init__.py
@@ -0,0 +1,239 @@
+from .base_validations import (
+ BaseModelRegular,
+ PydanticBaseModel,
+ ListOptions,
+ CrudRecords,
+)
+from .address import (
+ InsertAddress,
+ UpdateAddress,
+ UpdatePostCode,
+ InsertPostCode,
+ SearchAddress,
+)
+from .application import (
+ SingleEnumUUID,
+ SingleEnumClassKey,
+ SingleEnumOnlyClass,
+ SingleOccupantTypeUUID,
+ SingleOccupantTypeClassKey,
+)
+from .area import (
+ InsertBuildArea,
+ InsertBuildSites,
+ UpdateBuildArea,
+ UpdateBuildSites,
+)
+from .authentication import (
+ Login,
+ Logout,
+ ChangePassword,
+ Remember,
+ Forgot,
+ CreatePassword,
+ OccupantSelection,
+ EmployeeSelection,
+)
+from .account_records import (
+ InsertAccountRecord,
+ UpdateAccountRecord,
+)
+
+from .build_living_space import (
+ InsertBuildLivingSpace,
+ UpdateBuildLivingSpace,
+)
+from .build_part import (
+ InsertBuildParts,
+ InsertBuildTypes,
+ UpdateBuildParts,
+ UpdateBuildTypes,
+)
+from .building import (
+ InsertBuild,
+ UpdateBuild,
+)
+from .company import (
+ MatchCompany2Company,
+ InsertCompany,
+ UpdateCompany,
+)
+from .decision_book import (
+ DecisionBookDecisionBookInvitations,
+ DecisionBookDecisionBookInvitationsUpdate,
+ DecisionBookDecisionBookInvitationsAttend,
+ DecisionBookDecisionBookInvitationsAssign,
+ UpdateDecisionBook,
+ UpdateBuildDecisionBookItems,
+ UpdateBuildDecisionBookItemDebits,
+ InsertBuildDecisionBookItems,
+ InsertBuildDecisionBookItemDebits,
+ InsertDecisionBookCompleted,
+ InsertDecisionBook,
+ InsertDecisionBookPerson,
+ ListDecisionBook,
+ RemoveDecisionBookPerson,
+)
+from .departments import (
+ DepartmentsPydantic,
+)
+from .employee import (
+ InsertDuties,
+ UpdateDuties,
+ InsertEmployees,
+ SelectDuties,
+ UnBindEmployees2People,
+ BindEmployees2People,
+ UpdateCompanyEmployees,
+ InsertCompanyEmployees,
+ InsertCompanyEmployeesSalaries,
+ InsertCompanyDuty,
+ UpdateCompanyEmployeesSalaries,
+ UpdateCompanyDuty,
+)
+from .events import (
+ # CreateEvents,
+ RegisterEvents2Employee,
+ RegisterEvents2Occupant,
+)
+from .people import (
+ UpdatePerson,
+ InsertPerson,
+)
+from .project_decision_book import (
+ InsertBuildDecisionBookProjectItemDebits,
+ UpdateBuildDecisionBookProjectItemDebits,
+ InsertBuildDecisionBookProjects,
+ UpdateBuildDecisionBookProjects,
+ InsertBuildDecisionBookProjectPerson,
+ UpdateBuildDecisionBookProjectPerson,
+ InsertBuildDecisionBookProjectItems,
+ UpdateBuildDecisionBookProjectItems,
+ ApprovalsBuildDecisionBookProjects,
+)
+from .rules import (
+ UpdateEndpointAccess,
+ UpdateEndpointAccessList,
+ InsertEndpointAccess,
+ CheckEndpointAccess,
+)
+from .services import (
+ RegisterServices2Employee,
+ RegisterServices2Occupant,
+)
+from .staff import (
+ InsertStaff,
+ SelectStaff,
+)
+from .user import (
+ InsertUsers,
+ UpdateUsers,
+ QueryUsers,
+ # ActiveUsers,
+ # ListUsers,
+ # DeleteUsers,
+)
+from .modules import (
+ RegisterModules2Occupant,
+ RegisterModules2Employee,
+)
+
+
# Public API of this package.
# Fix: removed duplicate entries — "ListOptions", "CrudRecords",
# "PydanticBaseModel" and "BaseModelRegular" were each listed twice.
__all__ = [
    "BaseModelRegular",
    "PydanticBaseModel",
    "ListOptions",
    "CrudRecords",
    "InsertAddress",
    "UpdateAddress",
    "UpdatePostCode",
    "InsertPostCode",
    "SearchAddress",
    "SingleEnumUUID",
    "SingleEnumClassKey",
    "SingleEnumOnlyClass",
    "SingleOccupantTypeUUID",
    "SingleOccupantTypeClassKey",
    "InsertBuildArea",
    "InsertBuildSites",
    "UpdateBuildArea",
    "UpdateBuildSites",
    "Login",
    "Logout",
    "ChangePassword",
    "Remember",
    "Forgot",
    "CreatePassword",
    "OccupantSelection",
    "EmployeeSelection",
    "InsertAccountRecord",
    "UpdateAccountRecord",
    "InsertBuildLivingSpace",
    "UpdateBuildLivingSpace",
    "InsertBuildParts",
    "InsertBuildTypes",
    "UpdateBuildParts",
    "UpdateBuildTypes",
    "InsertBuild",
    "UpdateBuild",
    "MatchCompany2Company",
    "InsertCompany",
    "UpdateCompany",
    "DecisionBookDecisionBookInvitations",
    "DecisionBookDecisionBookInvitationsUpdate",
    "DecisionBookDecisionBookInvitationsAttend",
    "DecisionBookDecisionBookInvitationsAssign",
    "UpdateDecisionBook",
    "UpdateBuildDecisionBookItems",
    "UpdateBuildDecisionBookItemDebits",
    "InsertBuildDecisionBookItems",
    "InsertBuildDecisionBookItemDebits",
    "InsertDecisionBookCompleted",
    "InsertDecisionBook",
    "InsertDecisionBookPerson",
    "ListDecisionBook",
    "RemoveDecisionBookPerson",
    "DepartmentsPydantic",
    "InsertDuties",
    "UpdateDuties",
    "InsertEmployees",
    "SelectDuties",
    "UnBindEmployees2People",
    "BindEmployees2People",
    "UpdateCompanyEmployees",
    "InsertCompanyEmployees",
    "InsertCompanyEmployeesSalaries",
    "InsertCompanyDuty",
    "UpdateCompanyEmployeesSalaries",
    "UpdateCompanyDuty",
    "RegisterEvents2Employee",
    "RegisterEvents2Occupant",
    "UpdatePerson",
    "InsertPerson",
    "InsertBuildDecisionBookProjectItems",
    "UpdateBuildDecisionBookProjectItems",
    "ApprovalsBuildDecisionBookProjects",
    "InsertBuildDecisionBookProjectItemDebits",
    "UpdateBuildDecisionBookProjectItemDebits",
    "InsertBuildDecisionBookProjects",
    "UpdateBuildDecisionBookProjects",
    "InsertBuildDecisionBookProjectPerson",
    "UpdateBuildDecisionBookProjectPerson",
    "UpdateEndpointAccess",
    "UpdateEndpointAccessList",
    "InsertEndpointAccess",
    "CheckEndpointAccess",
    "RegisterServices2Employee",
    "RegisterServices2Occupant",
    "InsertStaff",
    "SelectStaff",
    "InsertUsers",
    "UpdateUsers",
    "QueryUsers",
    "RegisterModules2Occupant",
    "RegisterModules2Employee",
]
diff --git a/ApiLayers/ApiValidations/Request/account_records.py b/ApiLayers/ApiValidations/Request/account_records.py
new file mode 100644
index 0000000..cdbf515
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/account_records.py
@@ -0,0 +1,159 @@
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+from typing import Optional
+
+
class AccountValidation:
    """Turkish/English display labels for account-record fields.

    Mixed into the Insert/Update models below; keys mirror the pydantic field
    names one-to-one so validation errors can be localized.
    """

    # Turkish labels.
    tr = {
        "iban": "IBAN Numarası",
        "bank_date": "Bank Tarih",
        "currency_value": "Para Değeri",
        "bank_balance": "Banka Bakiye",
        "currency": "Para Birimi",
        "additional_balance": "Ek Bakiye",
        "channel_branch": "Kanal Şubesi",
        "process_name": "İşlem Adı",
        "process_type": "İşlem Tipi",
        "process_comment": "İşlem Yorum",
        "bank_reference_code": "Banka Referans Kodu",
        "add_comment_note": "Yorum Not",
        "is_receipt_mail_send": "Fiş Mail Gönderildi",
        "found_from": "Bulunduğu Yer",
        "similarity": "Benzerlik",
        "remainder_balance": "Kalan Bakiye",
        "bank_date_y": "Bank Tarih Yıl",
        "bank_date_m": "Bank Tarih Ay",
        "bank_date_w": "Bank Tarih Hafta",
        "bank_date_d": "Bank Tarih Gün",
        "approving_accounting_record": "Onaylayan Muhasebe Kaydı",
        "accounting_receipt_date": "Muhasebe Fiş Tarihi",
        "accounting_receipt_number": "Muhasebe Fiş Numarası",
        "approved_record": "Onaylanmış Kayıt",
        "import_file_name": "İçe Aktarım Dosya Adı",
        "receive_debit_uu_id": "Alacak UUID",
        "budget_type_uu_id": "Bütçe Tipi UUID",
        "company_uu_id": "Şirket UUID",
        "send_company_uu_id": "Gönderen Şirket UUID",
        "customer_id": "Müşteri ID",
        "customer_uu_id": "Müşteri UUID",
        "send_person_uu_id": "Gönderen Kişi UUID",
        "approving_accounting_person_uu_id": "Onaylayan Muhasebe Kişi UUID",
        "build_parts_uu_id": "Daire UUID",
        "build_decision_book_uu_id": "Karar Defteri UUID",
    }
    # English labels (same key set as `tr`).
    en = {
        "iban": "IBAN Number",
        "bank_date": "Bank Date",
        "currency_value": "Currency Value",
        "bank_balance": "Bank Balance",
        "currency": "Currency",
        "additional_balance": "Additional Balance",
        "channel_branch": "Channel Branch",
        "process_name": "Process Name",
        "process_type": "Process Type",
        "process_comment": "Process Comment",
        "bank_reference_code": "Bank Reference Code",
        "add_comment_note": "Comment Note",
        "is_receipt_mail_send": "Receipt Mail Send",
        "found_from": "Found From",
        "similarity": "Similarity",
        "remainder_balance": "Remainder Balance",
        "bank_date_y": "Bank Date Year",
        "bank_date_m": "Bank Date Month",
        "bank_date_w": "Bank Date Week",
        "bank_date_d": "Bank Date Day",
        "approving_accounting_record": "Approving Accounting Record",
        "accounting_receipt_date": "Accounting Receipt Date",
        "accounting_receipt_number": "Accounting Receipt Number",
        "approved_record": "Approved Record",
        "import_file_name": "Import File Name",
        "receive_debit_uu_id": "Receive Debit UUID",
        "budget_type_uu_id": "Budget Type UUID",
        "company_uu_id": "Company UUID",
        "send_company_uu_id": "Send Company UUID",
        "customer_id": "Customer ID",
        "customer_uu_id": "Customer UUID",
        "send_person_uu_id": "Send Person UUID",
        "approving_accounting_person_uu_id": "Approving Accounting Person UUID",
        "build_parts_uu_id": "Build Parts UUID",
        "build_decision_book_uu_id": "Build Decision Book UUID",
    }
+
+
class InsertAccountRecord(BaseModelRegular, AccountValidation):
    """Request payload for creating a bank account record.

    The first group of fields is mandatory; everything after is optional
    bookkeeping / linkage metadata.
    """

    iban: str
    bank_date: str
    currency_value: float
    bank_balance: float
    currency: str
    additional_balance: float
    channel_branch: str
    process_name: str
    process_type: str
    process_comment: str
    bank_reference_code: str

    add_comment_note: Optional[str] = None
    is_receipt_mail_send: Optional[bool] = None
    found_from: Optional[str] = None
    similarity: Optional[float] = None
    remainder_balance: Optional[float] = None
    bank_date_y: Optional[int] = None
    bank_date_m: Optional[int] = None
    bank_date_w: Optional[int] = None
    bank_date_d: Optional[int] = None
    approving_accounting_record: Optional[bool] = None
    accounting_receipt_date: Optional[str] = None
    accounting_receipt_number: Optional[int] = None
    approved_record: Optional[bool] = None
    import_file_name: Optional[str] = None
    # NOTE(review): receive_debit_uu_id is commented out here but present in
    # UpdateAccountRecord (and in the label dicts) — confirm this asymmetry
    # is intentional.
    # receive_debit_uu_id: Optional[str] = None
    budget_type_uu_id: Optional[str] = None
    company_uu_id: Optional[str] = None
    send_company_uu_id: Optional[str] = None
    customer_id: Optional[str] = None
    customer_uu_id: Optional[str] = None
    send_person_uu_id: Optional[str] = None
    approving_accounting_person_uu_id: Optional[str] = None
    build_parts_uu_id: Optional[str] = None
    build_decision_book_uu_id: Optional[str] = None
+
+
class UpdateAccountRecord(PydanticBaseModel, AccountValidation):
    """Request payload for partially updating a bank account record.

    Same field set as InsertAccountRecord but everything is optional, so
    callers send only the fields they want to change.
    """

    iban: Optional[str] = None
    bank_date: Optional[str] = None
    currency_value: Optional[float] = None
    bank_balance: Optional[float] = None
    currency: Optional[str] = None
    additional_balance: Optional[float] = None
    channel_branch: Optional[str] = None
    process_name: Optional[str] = None
    process_type: Optional[str] = None
    process_comment: Optional[str] = None
    bank_reference_code: Optional[str] = None

    add_comment_note: Optional[str] = None
    is_receipt_mail_send: Optional[bool] = None
    found_from: Optional[str] = None
    similarity: Optional[float] = None
    remainder_balance: Optional[float] = None
    bank_date_y: Optional[int] = None
    bank_date_m: Optional[int] = None
    bank_date_w: Optional[int] = None
    bank_date_d: Optional[int] = None
    approving_accounting_record: Optional[bool] = None
    accounting_receipt_date: Optional[str] = None
    accounting_receipt_number: Optional[int] = None
    approved_record: Optional[bool] = None
    import_file_name: Optional[str] = None
    receive_debit_uu_id: Optional[str] = None
    budget_type_uu_id: Optional[str] = None
    company_uu_id: Optional[str] = None
    send_company_uu_id: Optional[str] = None
    customer_id: Optional[str] = None
    customer_uu_id: Optional[str] = None
    send_person_uu_id: Optional[str] = None
    approving_accounting_person_uu_id: Optional[str] = None
    build_parts_uu_id: Optional[str] = None
    build_decision_book_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/address.py b/ApiLayers/ApiValidations/Request/address.py
new file mode 100644
index 0000000..5482b5e
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/address.py
@@ -0,0 +1,128 @@
+from typing import Optional
+
+from ApiLayers.ApiValidations.Request import PydanticBaseModel, ListOptions
+from ApiLayers.ApiValidations.handler import BaseModelRegular
+
+
+class PostCodeValidation:
+ tr = {
+ "post_code": "Posta Kodu",
+ "street_uu_id": "Sokak UUID",
+ }
+ en = {
+ "post_code": "Post Code",
+ "street_uu_id": "Street UUID",
+ }
+
+
+class InsertPostCode(BaseModelRegular, PostCodeValidation):
+ street_uu_id: str
+ post_code: str
+
+
+class UpdatePostCode(PydanticBaseModel, PostCodeValidation):
+ street_uu_id: Optional[str] = None
+ post_code: Optional[str] = None
+
+
+class SearchAddressValidation:
+ tr = {
+ "search": "Ara",
+ "list_options": "Liste Seçenekleri",
+ }
+ en = {
+ "search": "Search",
+ "list_options": "List Options",
+ }
+
+
+class SearchAddress(PydanticBaseModel, SearchAddressValidation):
+ search: str
+ list_options: ListOptions
+
+
+class StreetValidation:
+ tr = {
+ "street_code": "Sokak Kodu",
+ "street_name": "Sokak Adı",
+ "postcode": "Posta Kodu",
+ "type_code": "Tip Kodu",
+ "type_description": "Tip Açıklaması",
+ "gov_code": "Devlet Kodu",
+ "address_geographic_uu_id": "Coğrafi UUID",
+ }
+ en = {
+ "street_code": "Street Code",
+ "street_name": "Street Name",
+ "postcode": "Post Code",
+ "type_code": "Type Code",
+ "type_description": "Type Description",
+ "gov_code": "Government Code",
+ "address_geographic_uu_id": "Address Geographic UUID",
+ }
+
+
+class InsertStreet(PydanticBaseModel, StreetValidation):
+ street_code: str
+ street_name: str
+ postcode: str
+
+ type_code: Optional[str] = None
+ type_description: Optional[str] = None
+ gov_code: Optional[str] = None
+ address_geographic_uu_id: Optional[str] = None
+
+
+class AddressValidation:
+ tr = {
+ "post_code_uu_id": "Posta Kodu UUID",
+ "comment_address": "Adres Yorumu",
+ "letter_address": "Mektup Adresi",
+ "build_number": "Bina Numarası",
+ "door_number": "Kapı Numarası",
+ "floor_number": "Kat Numarası",
+ "short_letter_address": "Kısa Mektup Adresi",
+ "latitude": "Enlem",
+ "longitude": "Boylam",
+ }
+ en = {
+ "post_code_uu_id": "Post Code UUID",
+ "comment_address": "Address Comment",
+ "letter_address": "Letter Address",
+ "build_number": "Build Number",
+ "door_number": "Door Number",
+ "floor_number": "Floor Number",
+ "short_letter_address": "Short Letter Address",
+ "latitude": "Latitude",
+ "longitude": "Longitude",
+ }
+
+
+class InsertAddress(BaseModelRegular, AddressValidation):
+ post_code_uu_id: str
+
+ comment_address: Optional[str] = None
+ letter_address: Optional[str] = None
+
+ build_number: str
+ door_number: Optional[str] = None
+ floor_number: Optional[str] = None
+
+ short_letter_address: Optional[str] = None
+ latitude: Optional[float] = None
+ longitude: Optional[float] = None
+
+
+class UpdateAddress(PydanticBaseModel, AddressValidation):
+ post_code_uu_id: Optional[str] = None
+
+ comment_address: Optional[str] = None
+ letter_address: Optional[str] = None
+
+ build_number: Optional[str] = None
+ door_number: Optional[str] = None
+ floor_number: Optional[str] = None
+
+ short_letter_address: Optional[str] = None
+ latitude: Optional[float] = None
+ longitude: Optional[float] = None
diff --git a/ApiLayers/ApiValidations/Request/application.py b/ApiLayers/ApiValidations/Request/application.py
new file mode 100644
index 0000000..f9e4739
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/application.py
@@ -0,0 +1,69 @@
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+
+class SingleEnumClassKeyValidation:
+ tr = {
+ "class_name": "Sınıf Adı",
+ "key_name": "Anahtar Adı",
+ }
+ en = {
+ "class_name": "Class Name",
+ "key_name": "Key Name",
+ }
+
+
+class SingleEnumClassKey(BaseModelRegular):
+ class_name: str
+ key_name: str
+
+
+class SingleEnumUUIDValidation:
+ tr = {
+ "uu_id": "UUID",
+ }
+ en = {
+ "uu_id": "UUID",
+ }
+
+
+class SingleEnumUUID(BaseModelRegular):
+ uu_id: str
+
+
+class SingleEnumOnlyClassValidation:
+ tr = {
+ "class_name": "Sınıf Adı",
+ }
+ en = {
+ "class_name": "Class Name",
+ }
+
+
+class SingleEnumOnlyClass(BaseModelRegular):
+ class_name: str
+
+
+class SingleOccupantTypeClassKeyValidation:
+ tr = {
+ "type_code": "Tip Kodu",
+ }
+ en = {
+ "type_code": "Type Code",
+ }
+
+
+class SingleOccupantTypeClassKey(BaseModelRegular):
+ type_code: str
+
+
+class SingleOccupantTypeUUIDValidation:
+ tr = {
+ "uu_id": "Görev UUID",
+ }
+ en = {
+ "uu_id": "Occupant UUID",
+ }
+
+
+class SingleOccupantTypeUUID(BaseModelRegular):
+ uu_id: str
diff --git a/ApiLayers/ApiValidations/Request/area.py b/ApiLayers/ApiValidations/Request/area.py
new file mode 100644
index 0000000..1cc63aa
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/area.py
@@ -0,0 +1,73 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class BuildAreaValidation:
+
+ tr = {
+ "area_name": "Alan Adı",
+ "area_code": "Alan Kodu",
+ "area_type": "Alan Tipi",
+ "area_direction": "Alan Yönü",
+ "area_gross_size": "Brüt Alan",
+ "area_net_size": "Net Alan",
+ "width": "Genişlik",
+ "size": "En",
+ }
+ en = {
+ "area_name": "Area Name",
+ "area_code": "Area Code",
+ "area_type": "Area Type",
+ "area_direction": "Area Direction",
+ "area_gross_size": "Gross Size",
+ "area_net_size": "Net Size",
+ "width": "Width",
+ "size": "Size",
+ }
+
+
+class InsertBuildArea(BaseModelRegular, BuildAreaValidation):
+
+ build_uu_id: str
+ area_name: str
+ area_code: str
+ area_type: str
+ area_direction: Optional[str] = None
+ area_gross_size: Optional[float] = None
+ area_net_size: Optional[float] = None
+ width: Optional[int] = None
+ size: Optional[int] = None
+
+
+class UpdateBuildArea(PydanticBaseModel, BuildAreaValidation):
+
+ area_name: Optional[str] = None
+ area_code: Optional[str] = None
+ area_type: Optional[str] = None
+ area_direction: Optional[str] = None
+ area_gross_size: Optional[float] = None
+ area_net_size: Optional[float] = None
+ width: Optional[int] = None
+ size: Optional[int] = None
+
+
+class BuildSites:
+ tr = {"address_uu_id": "Adres UU ID", "site_name": "Site Adı", "site_no": "Site No"}
+ en = {
+ "address_uu_id": "Address UU ID",
+ "site_name": "Site Name",
+ "site_no": "Site No",
+ }
+
+
+class InsertBuildSites(BaseModelRegular, BuildSites):
+
+ address_uu_id: str
+ site_name: str
+ site_no: str
+
+
+class UpdateBuildSites(PydanticBaseModel, BuildSites):
+
+ site_name: Optional[str] = None
+ site_no: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/authentication.py b/ApiLayers/ApiValidations/Request/authentication.py
new file mode 100644
index 0000000..7338095
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/authentication.py
@@ -0,0 +1,174 @@
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+from typing import Optional
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class ChangePasswordValidation:
+ tr = {"old_password": "Eski Şifre", "new_password": "Yeni Şifre"}
+ en = {"old_password": "Old Password", "new_password": "New Password"}
+
+
+class ChangePassword(BaseModelRegular, ChangePasswordValidation):
+ old_password: str = Field(..., example="current123")
+ new_password: str = Field(..., example="newpass456")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {"old_password": "current123", "new_password": "newpass456"}
+ }
+ )
+
+
+class CreatePasswordValidation:
+ tr = {
+ "password_token": "Şifre Token",
+ "password": "Şifre",
+ "re_password": "Şifre Tekrar",
+ }
+ en = {
+ "password_token": "Password Token",
+ "password": "Password",
+ "re_password": "Re-Password",
+ }
+
+
+class CreatePassword(BaseModelRegular, CreatePasswordValidation):
+ password_token: str = Field(..., example="abc123token")
+ password: str = Field(..., example="newpass123")
+ re_password: str = Field(..., example="newpass123")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "password_token": "abc123token",
+ "password": "newpass123",
+ "re_password": "newpass123",
+ }
+ }
+ )
+
+
+class OccupantSelection(BaseModel):
+
+ build_living_space_uu_id: str = Field(
+ ..., example="987fcdeb-51a2-43e7-9876-543210987654"
+ )
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": [
+ {"company_uu_id": "abcdef12-3456-7890-abcd-ef1234567890"},
+ {"build_living_space_uu_id": "987fcdeb-51a2-43e7-9876-543210987654"},
+ ],
+ }
+ )
+
+ @property
+ def is_employee(self):
+ return False
+
+ @property
+ def is_occupant(self):
+ return True
+
+
+class EmployeeSelection(BaseModel):
+
+ company_uu_id: str = Field(..., example="abcdef12-3456-7890-abcd-ef1234567890")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": [
+ {"company_uu_id": "abcdef12-3456-7890-abcd-ef1234567890"},
+ {"build_living_space_uu_id": "987fcdeb-51a2-43e7-9876-543210987654"},
+ ],
+ }
+ )
+
+ @property
+ def is_employee(self):
+ return True
+
+ @property
+ def is_occupant(self):
+ return False
+
+
+class LoginValidation:
+ tr = {
+ "domain": "Domain",
+ "access_key": "Erişim Anahtarı",
+ "password": "Şifre",
+ "remember_me": "Beni Hatırla",
+ }
+ en = {
+ "domain": "Domain",
+ "access_key": "Access Key",
+ "password": "Password",
+ "remember_me": "Remember Me",
+ }
+
+
+class Login(BaseModelRegular, LoginValidation):
+ domain: str = Field(..., example="example.com")
+ access_key: str = Field(..., example="user@example.com")
+ password: str = Field(..., example="password123")
+ remember_me: Optional[bool] = Field(False, example=True)
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "domain": "evyos.com.tr",
+ "access_key": "karatay.berkay.sup@evyos.com.tr",
+ "password": "string",
+ "remember_me": False,
+ }
+ }
+ )
+
+
+class LogoutValidation:
+ tr = {"domain": "Domain"}
+ en = {"domain": "Domain"}
+
+
+class Logout(BaseModelRegular, LogoutValidation):
+ domain: str = Field(..., example="example.com")
+
+ model_config = ConfigDict(json_schema_extra={"example": {"domain": "example.com"}})
+
+
+class RememberValidation:
+ tr = {"domain": "Domain", "refresh_token": "Yenileme Anahtarı"}
+ en = {"domain": "Domain", "refresh_token": "Refresh Token"}
+
+
+class Remember(BaseModelRegular, RememberValidation):
+ domain: str = Field(..., example="example.com")
+ refresh_token: str = Field(..., example="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {
+ "domain": "example.com",
+ "refresh_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
+ }
+ }
+ )
+
+
+class ForgotValidation:
+ tr = {"domain": "Domain", "access_key": "Erişim Anahtarı"}
+ en = {"domain": "Domain", "access_key": "Access Key"}
+
+
+class Forgot(BaseModelRegular, ForgotValidation):
+ domain: str = Field(..., example="example.com")
+ access_key: str = Field(..., example="user@example.com")
+
+ model_config = ConfigDict(
+ json_schema_extra={
+ "example": {"domain": "example.com", "access_key": "user@example.com"}
+ }
+ )
diff --git a/ApiLayers/ApiValidations/Request/base_validations.py b/ApiLayers/ApiValidations/Request/base_validations.py
new file mode 100644
index 0000000..b1eb0b8
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/base_validations.py
@@ -0,0 +1,32 @@
+from typing import Optional
+from ApiLayers.ApiValidations.handler import BaseModelRegular
+
+
+class ListOptions(BaseModelRegular):
+ page: Optional[int] = 1
+ size: Optional[int] = 10
+ order_field: Optional[list[str]] = None
+ order_type: Optional[list[str]] = None
+ query: Optional[dict] = None
+
+
+class CrudRecords:
+ uu_id: Optional[str] = None
+ created_at: Optional[str] = None
+ updated_at: Optional[str] = None
+ created_by: Optional[str] = None
+ updated_by: Optional[str] = None
+ confirmed_by: Optional[str] = None
+ is_confirmed: Optional[bool] = None
+ active: Optional[bool] = None
+ is_notification_send: Optional[bool] = None
+ is_email_send: Optional[bool] = None
+
+
+class PydanticBaseModel(BaseModelRegular):
+
+ active: Optional[bool] = None
+ deleted: Optional[bool] = None
+ expiry_starts: Optional[str] = None
+ # expiry_ends: Optional[str] = None
+ is_confirmed: Optional[bool] = None
diff --git a/ApiLayers/ApiValidations/Request/build_living_space.py b/ApiLayers/ApiValidations/Request/build_living_space.py
new file mode 100644
index 0000000..b13e503
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/build_living_space.py
@@ -0,0 +1,22 @@
+from typing import Optional
+# NOTE: BaseModelRegular is imported together with PydanticBaseModel below
+
+# from api_validations.validations_request import (
+# PydanticBaseModel,
+# PydanticBaseModelValidation,
+# )
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertBuildLivingSpace(BaseModelRegular):
+ person_uu_id: str
+ build_parts_uu_id: str
+ occupant_type_uu_id: str
+ expiry_starts: str
+ expiry_ends: Optional[str] = None
+
+
+class UpdateBuildLivingSpace(PydanticBaseModel):
+ is_tenant_live: Optional[bool] = None
+ build_parts_uu_id: Optional[str] = None
+ person_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/build_part.py b/ApiLayers/ApiValidations/Request/build_part.py
new file mode 100644
index 0000000..9d9d2bb
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/build_part.py
@@ -0,0 +1,43 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertBuildTypes(BaseModelRegular):
+ function_code: str
+ type_code: str
+ lang: str
+ type_name: str
+
+
+class UpdateBuildTypes(PydanticBaseModel): ...
+
+
+class InsertBuildParts(BaseModelRegular):
+ build_uu_id: str
+ address_gov_code: str
+ part_no: int
+ part_level: int
+ build_part_type_uu_id: str
+
+ part_gross_size: Optional[int] = None
+ part_net_size: Optional[int] = None
+ default_accessory: Optional[str] = None
+ human_livable: Optional[bool] = False
+ part_direction_uu_id: Optional[str] = None
+ ref_id: Optional[str] = None
+
+
+class UpdateBuildParts(PydanticBaseModel):
+ address_gov_code: Optional[str] = None
+ part_no: Optional[int] = None
+ part_level: Optional[int] = None
+ build_part_type_uu_id: Optional[str] = None
+
+ part_code: Optional[int] = None
+ part_gross_size: Optional[int] = None
+ part_net_size: Optional[int] = None
+ default_accessory: Optional[str] = None
+ human_livable: Optional[bool] = False
+ part_direction: Optional[str] = None
+ current_owner_person_uu_id: Optional[str] = None
+ current_tenant_person_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/building.py b/ApiLayers/ApiValidations/Request/building.py
new file mode 100644
index 0000000..042b62e
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/building.py
@@ -0,0 +1,43 @@
+from typing import Optional
+from datetime import datetime
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertBuild(BaseModelRegular):
+
+ gov_address_code: str
+ build_name: str
+ build_types_uu_id: str
+ max_floor: int
+ underground_floor: int
+ address_uu_id: str
+ build_date: datetime
+ decision_period_date: datetime
+
+ tax_no: Optional[str] = None
+ lift_count: Optional[int] = None
+ heating_system: Optional[bool] = None
+ cooling_system: Optional[bool] = None
+ hot_water_system: Optional[bool] = None
+ block_service_man_count: Optional[int] = None
+ security_service_man_count: Optional[int] = None
+ garage_count: Optional[int] = None
+
+
+class UpdateBuild(PydanticBaseModel):
+ gov_address_code: Optional[str] = None
+ build_name: Optional[str] = None
+ build_no: Optional[str] = None
+ build_types_uu_id: Optional[str] = None
+ max_floor: Optional[int] = None
+ underground_floor: Optional[int] = None
+ build_date: Optional[datetime] = None
+ tax_no: Optional[str] = None
+ lift_count: Optional[int] = None
+ heating_system: Optional[bool] = None
+ cooling_system: Optional[bool] = None
+ hot_water_system: Optional[bool] = None
+ block_service_man_count: Optional[int] = None
+ security_service_man_count: Optional[int] = None
+ garage_count: Optional[int] = None
+ address_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/company.py b/ApiLayers/ApiValidations/Request/company.py
new file mode 100644
index 0000000..50b394a
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/company.py
@@ -0,0 +1,32 @@
+from typing import Optional, List
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertCompany(BaseModelRegular):
+ formal_name: str
+ company_type: str
+ commercial_type: str
+ tax_no: str
+ public_name: Optional[str] = None
+ company_tag: Optional[str] = None
+ default_lang_type: Optional[str] = None
+ default_money_type: Optional[str] = None
+ official_address_uu_id: Optional[str] = None
+ # parent_uu_id: Optional[int] = None
+
+
+class UpdateCompany(PydanticBaseModel):
+ company_uu_id: str
+ public_name: Optional[str] = None
+ formal_name: Optional[str] = None
+ tax_no: Optional[str] = None
+ company_tag: Optional[str] = None
+ default_lang_type: Optional[str] = None
+ default_money_type: Optional[str] = None
+ official_address_uu_id: Optional[str] = None
+
+
+class MatchCompany2Company(PydanticBaseModel):
+ match_company_uu_id: List[str]
+ duty_uu_id: str
+ show_only: Optional[bool] = None
diff --git a/ApiLayers/ApiValidations/Request/core_request_validations.py b/ApiLayers/ApiValidations/Request/core_request_validations.py
new file mode 100644
index 0000000..aa08831
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/core_request_validations.py
@@ -0,0 +1,116 @@
+from typing import Optional
+
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+
+class ListOptionsValidation:
+ tr = {
+ "page": "Sayfa",
+ "size": "Boyut",
+ "order_field": "Sıralama Alanı",
+ "order_type": "Sıralama Türü",
+ "include_joins": "Alt İçerikleri",
+ "query": "Sorgu",
+ }
+ en = {
+ "page": "Page",
+ "size": "Size",
+ "order_field": "Order Field",
+ "order_type": "Order Type",
+ "include_joins": "Include Joins",
+ "query": "Query",
+ }
+
+
+class ListOptions(BaseModelRegular, ListOptionsValidation):
+ page: Optional[int] = 1
+ size: Optional[int] = 10
+ order_field: Optional[str] = "id"
+ order_type: Optional[str] = "asc"
+ include_joins: Optional[list] = None
+ query: Optional[dict] = None
+
+
+class CrudRecordValidation:
+ tr = {
+ "uu_id": "UUID",
+ "created_at": "Oluşturulma Tarihi",
+ "updated_at": "Güncellenme Tarihi",
+ "created_by": "Oluşturan",
+ "updated_by": "Güncelleyen",
+ "confirmed_by": "Onaylayan",
+ "is_confirmed": "Onay",
+ "expiry_starts": "Geçerlilik Başlangıç Tarihi",
+ "expiry_ends": "Geçerlilik Bitiş Tarihi",
+ "active": "Aktif",
+ "is_notification_send": "Bildirim Gönderildi",
+ "is_email_send": "E-posta Gönderildi",
+ }
+ en = {
+ "uu_id": "UUID",
+ "created_at": "Created At",
+ "updated_at": "Updated At",
+ "created_by": "Created By",
+ "updated_by": "Updated By",
+ "confirmed_by": "Confirmed By",
+ "is_confirmed": "Confirmed",
+ "expiry_starts": "Expiry Starts",
+ "expiry_ends": "Expiry Ends",
+ "active": "Active",
+ "is_notification_send": "Notification Send",
+ "is_email_send": "Email Send",
+ }
+
+
+class CrudRecords:
+ uu_id: Optional[str] = None
+ created_at: Optional[str] = None
+ updated_at: Optional[str] = None
+ created_by: Optional[str] = None
+ updated_by: Optional[str] = None
+ confirmed_by: Optional[str] = None
+ is_confirmed: Optional[bool] = None
+ active: Optional[bool] = None
+ is_notification_send: Optional[bool] = None
+ is_email_send: Optional[bool] = None
+
+
+class PydanticBaseModelValidation:
+ tr = {
+ "active": "Aktif",
+ "deleted": "Silinmiş",
+ "expiry_starts": "Geçerlilik Başlangıç Tarihi",
+ "expiry_ends": "Geçerlilik Bitiş Tarihi",
+ "is_confirmed": "Onay",
+ }
+ en = {
+ "active": "Active",
+ "deleted": "Deleted",
+ "expiry_starts": "Expiry Starts",
+ "expiry_ends": "Expiry Ends",
+ "is_confirmed": "Confirmed",
+ }
+
+
+class PydanticBaseModel(BaseModelRegular):
+
+ active: Optional[bool] = None
+ deleted: Optional[bool] = None
+ expiry_starts: Optional[str] = None
+ # expiry_ends: Optional[str] = None
+ is_confirmed: Optional[bool] = None
+
+
+class EndpointPydantic(BaseModelRegular):
+ data: Optional[dict] = None
+
+
+class EndpointValidation(BaseModelRegular):
+ endpoint: Optional[str] = None
+
+
+class PatchRecord(BaseModelRegular):
+
+ confirm: Optional[bool] = None
+ delete: Optional[bool] = None
+ active: Optional[bool] = None
diff --git a/ApiLayers/ApiValidations/Request/create_model.py b/ApiLayers/ApiValidations/Request/create_model.py
new file mode 100644
index 0000000..b16da10
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/create_model.py
@@ -0,0 +1,73 @@
+import typing
+
+from datetime import datetime
+
+
+class ConvertField:
+
+ def __init__(self, match, default_val=None):
+ self.match = match
+ self.default_val = default_val
+
+ def typing_return(self):
+ typing_dict = {
+            "float": float,
+            "bool": bool,
+            "int": int,
+            "str": str,
+            "dict": dict,
+            "list": list,
+            "datetime": datetime,
+ "typing.Optional[datetime.datetime]": typing.Optional[datetime],
+ "typing.Optional[bool]": typing.Optional[bool],
+ "typing.Optional[list]": typing.Optional[list],
+ "typing.Optional[str]": typing.Optional[str],
+ "typing.Optional[int]": typing.Optional[int],
+ "typing.Optional[float]": typing.Optional[float],
+ "typing.Optional[dict]": typing.Optional[dict],
+ }
+ matches_with = typing_dict.get(self.match, typing.Optional[str])
+ default_value = getattr(self.default_val, "field_default_value", None)
+ return matches_with, default_value
+
+
+#
+# def create_model_from_database(model_id: typing.Union[int, str]):
+# if isinstance(model_id, int):
+# selected_model = Models.find_one(id=model_id)
+# else:
+# selected_model = Models.find_one(uu_id=str(model_id))
+#
+# if not selected_model:
+# raise HTTPException(
+# status_code=202,
+# detail=f"Model {selected_model.model_name} not found in database. Please add model to api.",
+# )
+# pydantic_class = getattr(root_validates, selected_model.model_type, None)
+# if not pydantic_class:
+# raise HTTPException(
+# status_code=202,
+# detail=f"Pydantic class {selected_model.model_type} not found in database. Please add model to api.",
+# )
+#
+# model_entities_records = ModelEntities.filter_all(
+# ModelEntities.model_id == selected_model.id
+# ).data
+#
+# if not model_entities_records:
+# raise HTTPException(
+# status_code=202,
+# detail="Model has no entities registered. Please add entities to model.",
+# )
+#
+# fields = {}
+# for entity in model_entities_records:
+# fields[entity.field_name] = ConvertField(
+# entity.field_type, entity.field_default_value
+# ).typing_return()
+#
+# return create_model(
+# __model_name=selected_model.model_name, # pydantic_name(User)
+# __module__=pydantic_class.__module__, # field_name(uu_id)
+# **fields, # field_name = (field_type (Optional[str]), default_value(None))
+# )
diff --git a/ApiLayers/ApiValidations/Request/decision_book.py b/ApiLayers/ApiValidations/Request/decision_book.py
new file mode 100644
index 0000000..af6e9a5
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/decision_book.py
@@ -0,0 +1,114 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import (
+ BaseModelRegular,
+ PydanticBaseModel,
+ ListOptions,
+)
+
+
+class DecisionBookDecisionBookInvitations(BaseModelRegular):
+ build_decision_book_uu_id: str
+ message: str
+ planned_date: str
+
+
+class DecisionBookDecisionBookInvitationsAttend(BaseModelRegular):
+ token: str
+ is_attend: bool
+
+
+class DecisionBookDecisionBookInvitationsAssign(BaseModelRegular):
+ token: str
+ build_living_space_uu_id: str
+ occupant_type_uu_id: str
+
+
+class DecisionBookDecisionBookInvitationsUpdate(PydanticBaseModel):
+ token: str
+ occupant_type_uu_id: Optional[str] = None
+
+
+class ListDecisionBook(ListOptions):
+ build_decision_book_uu_id: Optional[str] = None
+
+
+class InsertDecisionBook(PydanticBaseModel):
+ build_uu_id: str
+ decision_type: str
+ meeting_date: str
+ is_out_sourced: bool
+
+ resp_company_fix_wage: Optional[float] = None
+ resp_company_uu_id: Optional[str] = None
+
+
+class InsertDecisionBookCompleted(BaseModelRegular):
+ build_decision_book_uu_id: str
+ meeting_completed_date: str
+
+
+class InsertDecisionBookPerson(BaseModelRegular):
+ person_uu_id: str
+ build_decision_book_uu_id: str
+ management_typecode_uu_id: str
+
+ dues_discount_approval_date: Optional[str] = None
+ dues_fix_discount: Optional[float] = None
+ dues_percent_discount: Optional[int] = None
+
+
+class UpdateDecisionBookPerson(PydanticBaseModel):
+
+ dues_fix_discount: Optional[float] = None
+ dues_percent_discount: Optional[int] = None
+
+
+class RemoveDecisionBookPerson(PydanticBaseModel):
+ person_uu_id: str
+ build_decision_book_person_uu_id: str
+
+
+class UpdateDecisionBook(PydanticBaseModel):
+ decision_book_pdf_path: Optional[str] = None
+ is_out_sourced: Optional[bool] = None
+ contact_agreement_path: Optional[str] = None
+ contact_agreement_date: Optional[str] = None
+ meeting_date: Optional[str] = None
+ decision_type: Optional[str] = None
+
+ resp_company_fix_wage: Optional[float] = None
+ resp_company_uu_id: Optional[str] = None
+
+
+class InsertBuildDecisionBookItems(BaseModelRegular):
+ token: str
+ info_type_uu_id: str
+ item_comment: str
+
+ currency: Optional[str] = "TL"
+ unit_type: Optional[str] = "M2"
+ debit_start_date: Optional[str] = None
+ debit_end_date: Optional[str] = None
+ unit_price_is_fixed: Optional[bool] = False
+ unit_price: Optional[float] = 0.00
+
+ # build_decision_book_uu_id: str
+ # item_objection: Optional[str] = None
+
+
+class UpdateBuildDecisionBookItems(PydanticBaseModel):
+ item_comment: Optional[str] = None
+ item_objection: Optional[str] = None
+
+
+class InsertBuildDecisionBookItemDebits(BaseModelRegular):
+ build_decision_book_item_uu_id: str
+ dues_values: dict
+ # dues_types_uu_id: str
+ # decision_taken: Optional[bool] = None
+
+
+class UpdateBuildDecisionBookItemDebits(PydanticBaseModel):
+ dues_types_uu_id: Optional[str] = None
+ dues_values: Optional[dict] = None
+ decision_taken: Optional[bool] = None
diff --git a/ApiLayers/ApiValidations/Request/departments.py b/ApiLayers/ApiValidations/Request/departments.py
new file mode 100644
index 0000000..dbdaa01
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/departments.py
@@ -0,0 +1,20 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import PydanticBaseModel
+
+
+class DepartmentsPydantic(PydanticBaseModel):
+
+    department_code: Optional[str] = None
+    department_name: Optional[str] = None
+ department_description: Optional[str] = None
+ company_uu_id: Optional[str] = None
+ parent_department_uu_id: Optional[int] = None
+
+
+# class UpdateDepartments(PydanticBaseModel):
+#
+# department_code: Optional[str] = None
+# department_name: Optional[str] = None
+# department_description: Optional[str] = None
+# company_uu_id: Optional[str] = None
+# parent_department_uu_id: Optional[int] = None
diff --git a/ApiLayers/ApiValidations/Request/employee.py b/ApiLayers/ApiValidations/Request/employee.py
new file mode 100644
index 0000000..9a68b46
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/employee.py
@@ -0,0 +1,77 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class BindEmployees2People(PydanticBaseModel):
+ staff_uu_id: str
+ people_uu_id: str
+ expiry_starts: Optional[str] = None
+
+
+class UnBindEmployees2People(PydanticBaseModel):
+ people_uu_id: str
+ expiry_ends: str
+
+
+class InsertEmployees(BaseModelRegular):
+ staff_uu_id: str
+ people_uu_id: Optional[str] = None
+
+
+class InsertCompanyDuty(BaseModelRegular):
+ duty_code: str
+ duty_name: str
+ duty_description: Optional[str] = None
+
+
+class SelectDuties(BaseModelRegular):
+ duty_uu_id: Optional[str] = None
+
+
+class InsertDuties(BaseModelRegular):
+ duties_uu_id: str
+ department_uu_id: str
+ is_default_duty: Optional[bool] = False
+
+
+class UpdateDuties(PydanticBaseModel):
+ duties_uu_id: Optional[str] = None
+ department_uu_id: Optional[str] = None
+ is_default_duty: Optional[bool] = None
+
+
+class UpdateCompanyDuty(PydanticBaseModel):
+ duty_code: Optional[str] = None
+ duty_name: Optional[str] = None
+ duty_description: Optional[str] = None
+
+
+class InsertCompanyEmployeesSalaries(BaseModelRegular):
+ gross_salary: float
+ net_salary: float
+ start_date: str
+ stop_date: Optional[str] = None
+ people_id: int
+
+
+class UpdateCompanyEmployeesSalaries(PydanticBaseModel):
+ gross_salary: Optional[float] = None
+ net_salary: Optional[float] = None
+ start_date: Optional[str] = None
+ stop_date: Optional[str] = None
+ people_id: Optional[int] = None
+
+
+class InsertCompanyEmployees(BaseModelRegular):
+
+ employee_description: Optional[str] = None
+ person_uu_id: str
+ duty_uu_id: str
+
+ start_date: Optional[str] = None
+ stop_date: Optional[str] = None
+
+
+class UpdateCompanyEmployees(PydanticBaseModel):
+ stop_date: Optional[str] = None
+ employee_description: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/events.py b/ApiLayers/ApiValidations/Request/events.py
new file mode 100644
index 0000000..83a5ba6
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/events.py
@@ -0,0 +1,37 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+
+class RegisterEvents2EmployeeValidation:
+ tr = {
+ "event_uu_id_list": "Etkinlikler Listesi",
+ "employee_uu_id": "Çalışan UU ID",
+ }
+ en = {
+ "event_uu_id_list": "Event List",
+ "employee_uu_id": "Employee UU ID",
+ }
+
+
+class RegisterEvents2Employee(BaseModelRegular, RegisterEvents2EmployeeValidation):
+    event_uu_id_list: Optional[list[str]] = None
+ employee_uu_id: Optional[str] = None
+
+
+class RegisterEvents2OccupantValidation:
+ tr = {
+ "event_uu_id_list": "Etkinlikler Listesi",
+ "build_part_uu_id": "Bina Parça UU ID",
+ "occupant_uu_id": "Apartman Sakini UU ID",
+ }
+ en = {
+ "event_uu_id_list": "Event List",
+ "build_part_uu_id": "Building Part UU ID",
+ "occupant_uu_id": "Occupant UU ID",
+ }
+
+
+class RegisterEvents2Occupant(BaseModelRegular, RegisterEvents2OccupantValidation):
+    event_uu_id_list: Optional[list[str]] = None
+ build_part_uu_id: Optional[str] = None
+ occupant_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Request/modules.py b/ApiLayers/ApiValidations/Request/modules.py
new file mode 100644
index 0000000..26658ca
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/modules.py
@@ -0,0 +1,36 @@
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+
+class RegisterModules2OccupantValidation:
+ tr = {
+ "modules_uu_id": "Modül Listesi",
+ "occupant_uu_id": "Mülk Sahibi",
+ "build_part_uu_id": "Daire UUID",
+ }
+ en = {
+ "modules_uu_id": "Module List",
+ "occupant_uu_id": "Occupant",
+ "build_part_uu_id": "Flat UUID",
+ }
+
+
+class RegisterModules2Occupant(BaseModelRegular, RegisterModules2OccupantValidation):
+ modules_uu_id: str
+ occupant_uu_id: str
+ build_part_uu_id: str
+
+
+class RegisterModules2EmployeeValidation:
+ tr = {
+ "modules_uu_id": "Modül Listesi",
+ "employee_uu_id": "Çalışan",
+ }
+ en = {
+ "modules_uu_id": "Module List",
+ "employee_uu_id": "Employee",
+ }
+
+
+class RegisterModules2Employee(BaseModelRegular, RegisterModules2EmployeeValidation):
+ modules_uu_id: str
+ employee_uu_id: str
diff --git a/ApiLayers/ApiValidations/Request/people.py b/ApiLayers/ApiValidations/Request/people.py
new file mode 100644
index 0000000..b898ab1
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/people.py
@@ -0,0 +1,66 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertPerson(BaseModelRegular):
+ firstname: str
+ surname: str
+ sex_code: str
+ national_identity_id: str
+ middle_name: Optional[str] = None
+ father_name: Optional[str] = None
+ mother_name: Optional[str] = None
+ country_code: Optional[str] = "TR"
+ birth_place: Optional[str] = None
+ birth_date: Optional[str] = None
+ tax_no: Optional[str] = None
+ ref_id: Optional[str] = None
+
+
+class UpdatePerson(PydanticBaseModel):
+ firstname: Optional[str] = None
+ surname: Optional[str] = None
+ middle_name: Optional[str]
+ father_name: Optional[str] = None
+ mother_name: Optional[str] = None
+ sex_code: Optional[str] = None
+ country_code: Optional[str] = None
+ national_identity_id: Optional[str] = None
+ birth_place: Optional[str] = None
+ birth_date: Optional[str] = None
+ tax_no: Optional[str] = None
+
+
+#
+# class QueryPeople(PydanticBaseModel):
+# uu_id: Optional[str] = None
+#
+#
+# class InsertPeople(PydanticBaseModel):
+# key_id: Optional[str] = None
+# query: Optional[dict] = None
+# data: Optional[_InsertPerson] = None
+#
+#
+# class UpdatePeople(PydanticBaseModel):
+# key_id: Optional[str] = None
+# query: Optional[QueryPeople] = None
+# data: Optional[_UpdatePerson] = None
+#
+#
+# class DeletePeople(PydanticBaseModel):
+# key_id: Optional[str] = None
+# query: Optional[List[QueryPeople]] = None
+# data: Optional[dict] = None
+#
+#
+# class ListPeople(PydanticBaseModel):
+# key_id: Optional[str] = None
+# query: Optional[QueryPeople] = None
+# data: Optional[ListOptions] = None
+#
+#
+# class ActivePeople(PydanticBaseModel):
+# key_id: Optional[str] = None
+# query: Optional[List[QueryPeople]] = None
+# data: Optional[dict] = None
diff --git a/ApiLayers/ApiValidations/Request/project_decision_book.py b/ApiLayers/ApiValidations/Request/project_decision_book.py
new file mode 100644
index 0000000..1bd4c2b
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/project_decision_book.py
@@ -0,0 +1,99 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertBuildDecisionBookProjectItems(BaseModelRegular):
+ build_decision_book_project_uu_id: str
+ item_header: str
+ item_comment: str
+ attachment_pdf_path: Optional[str] = None
+ item_objection: Optional[str] = None
+
+
+class UpdateBuildDecisionBookProjectItems(PydanticBaseModel):
+ item_header: Optional[str] = None
+ item_comment: Optional[str] = None
+ attachment_pdf_path: Optional[str] = None
+ item_estimated_cost: Optional[float] = None
+ build_decision_book_project_uu_id: Optional[str] = None
+
+
+class InsertBuildDecisionBookProjectPerson(BaseModelRegular):
+ dues_percent_discount: Optional[int] = None
+ job_fix_wage: Optional[float] = None
+ bid_price: Optional[float] = None
+ decision_price: Optional[float] = None
+ build_decision_book_project_uu_id: str
+ living_space_uu_id: str
+ project_team_type_uu_id: str
+
+
+class UpdateBuildDecisionBookProjectPerson(PydanticBaseModel):
+ dues_percent_discount: Optional[int] = None
+ job_fix_wage: Optional[float] = None
+ bid_price: Optional[float] = None
+ decision_price: Optional[float] = None
+ build_decision_book_project_uu_id: Optional[str] = None
+ living_space_uu_id: Optional[str] = None
+ project_team_type_uu_id: Optional[str] = None
+
+
+class InsertBuildDecisionBookProjects(BaseModelRegular):
+ build_decision_book_item_uu_id: str
+ project_responsible_person_uu_id: str
+ project_name: str
+ project_start_date: str
+ project_stop_date: str
+ project_type: str
+
+ is_out_sourced: Optional[bool] = False
+ project_note: Optional[str] = None
+ decision_book_pdf_path: Optional[str] = None
+ resp_company_fix_wage: Optional[float] = None
+ contact_agreement_path: Optional[str] = None
+ contact_agreement_date: Optional[str] = None
+ meeting_date: Optional[str] = None
+ currency: Optional[str] = None
+ bid_price: Optional[float] = None
+ resp_company_uu_id: Optional[str] = None
+
+
+class UpdateBuildDecisionBookProjects(PydanticBaseModel):
+ build_decision_book_project_uu_id: str
+ is_out_sourced: Optional[bool] = False
+ project_note: Optional[str] = None
+ # decision_book_pdf_path: Optional[str] = None
+ status_id: Optional[int] = None
+ resp_company_fix_wage: Optional[float] = None
+ contact_agreement_path: Optional[str] = None
+ contact_agreement_date: Optional[str] = None
+ contact_uu_id: Optional[str] = None
+ resp_company_uu_id: Optional[str] = None
+ approved_price: Optional[float] = None
+
+
+class ApprovalsBuildDecisionBookProjects(PydanticBaseModel):
+ build_decision_book_project_uu_id: str
+ project_stop_date: str
+ status_code: Optional[int] = None
+ final_price_list: Optional[list[dict]] = (
+ None # {"date": "2021-01-01", "price": 1000}
+ )
+
+
+class InsertBuildDecisionBookProjectItemDebits(PydanticBaseModel):
+ build_decision_book_project_item_uu_id: str
+ payment_date: str
+ dues_values: dict
+ is_official: Optional[bool] = False
+ discount_value: Optional[float] = None
+ discount_fix: Optional[float] = None
+ decision_taken: Optional[bool] = None
+
+
+class UpdateBuildDecisionBookProjectItemDebits(PydanticBaseModel):
+ dues_values: Optional[str] = None
+ discount_value: Optional[float] = None
+ discount_fix: Optional[float] = None
+ decision_taken: Optional[bool] = None
+ is_official: Optional[bool] = None
diff --git a/ApiLayers/ApiValidations/Request/rules.py b/ApiLayers/ApiValidations/Request/rules.py
new file mode 100644
index 0000000..cc8ef6f
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/rules.py
@@ -0,0 +1,22 @@
+from typing import Optional, List
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class CheckEndpointAccess(BaseModelRegular):
+ endpoint: str
+
+
+class InsertEndpointAccess(PydanticBaseModel):
+ duty_uu_id: str
+ endpoint_restriction_list_uu_ids: list
+
+
+class UpdateEndpointAccess(PydanticBaseModel):
+ endpoint_restriction_uu_id: Optional[str] = None
+ deleted: Optional[bool] = None
+ active: Optional[bool] = None
+ is_confirmed: Optional[bool] = None
+
+
+class UpdateEndpointAccessList(PydanticBaseModel):
+ endpoint_restriction_list: List[UpdateEndpointAccess]
diff --git a/ApiLayers/ApiValidations/Request/services.py b/ApiLayers/ApiValidations/Request/services.py
new file mode 100644
index 0000000..c5d71ef
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/services.py
@@ -0,0 +1,36 @@
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+
+class RegisterServices2OccupantValidation:
+ tr = {
+ "service_uu_id": "Hizmet UUID",
+ "occupant_uu_id": "Müşteri UUID",
+ "build_part_uu_id": "Bina Parça UUID",
+ }
+ en = {
+ "service_uu_id": "Service UUID",
+ "occupant_uu_id": "Occupant UUID",
+ "build_part_uu_id": "Building Part UUID",
+ }
+
+
+class RegisterServices2Occupant(BaseModelRegular, RegisterServices2OccupantValidation):
+ service_uu_id: str
+ occupant_uu_id: str
+ build_part_uu_id: str
+
+
+class RegisterServices2EmployeeValidation:
+ tr = {
+ "service_uu_id": "Hizmet UUID",
+ "employee_uu_id": "Personel UUID",
+ }
+ en = {
+ "service_uu_id": "Service UUID",
+ "employee_uu_id": "Employee UUID",
+ }
+
+
+class RegisterServices2Employee(BaseModelRegular, RegisterServices2EmployeeValidation):
+ service_uu_id: str
+ employee_uu_id: str
diff --git a/ApiLayers/ApiValidations/Request/staff.py b/ApiLayers/ApiValidations/Request/staff.py
new file mode 100644
index 0000000..a98af03
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/staff.py
@@ -0,0 +1,39 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import BaseModelRegular, PydanticBaseModel
+
+
+class InsertStaffValidation:
+ tr = {
+ "staff_name": "Kadro Adı",
+ "staff_description": "Kadro Açıklaması",
+ "staff_code": "Kadro Kodu",
+ "duties_uu_id": "Görev UUID",
+ }
+ en = {
+ "staff_name": "Staff Name",
+ "staff_description": "Staff Description",
+ "staff_code": "Staff Code",
+ "duties_uu_id": "Duties UUID",
+ }
+
+
+class InsertStaff(BaseModelRegular, InsertStaffValidation):
+
+ staff_name: str
+ staff_description: Optional[str] = None
+ staff_code: Optional[str] = None
+ duties_uu_id: str
+
+
+class SelectStaffValidation:
+ tr = {
+ "duties_uu_id": "Görev UUID",
+ }
+ en = {
+ "duties_uu_id": "Duties UUID",
+ }
+
+
+class SelectStaff(PydanticBaseModel, SelectStaffValidation):
+
+ duties_uu_id: str
diff --git a/ApiLayers/ApiValidations/Request/user.py b/ApiLayers/ApiValidations/Request/user.py
new file mode 100644
index 0000000..c527684
--- /dev/null
+++ b/ApiLayers/ApiValidations/Request/user.py
@@ -0,0 +1,68 @@
+from typing import Optional
+from ApiLayers.ApiValidations.Request import PydanticBaseModel
+
+
+class InsertUsersValidation:
+ tr = {
+ "people_uu_id": "Kişi UUID",
+ "user_tag": "Kullanıcı Etiketi",
+ "email": "E-posta",
+ "phone_number": "Telefon Numarası",
+ "avatar": "Avatar",
+ }
+ en = {
+ "people_uu_id": "People UUID",
+ "user_tag": "User Tag",
+ "email": "Email",
+ "phone_number": "Phone Number",
+ "avatar": "Avatar",
+ }
+
+
+class InsertUsers(PydanticBaseModel, InsertUsersValidation):
+ people_uu_id: str
+ user_tag: str
+ email: Optional[str] = None
+ phone_number: Optional[str] = None
+ avatar: Optional[str] = None
+
+
+class UpdateUsersValidation:
+ tr = {
+ "people_uu_id": "Kişi UUID",
+ "nick_name": "Kullanıcı Etiketi",
+ "domain_name": "Domain Adı",
+ "email": "E-posta",
+ "phone_number": "Telefon Numarası",
+ "avatar": "Avatar",
+ }
+ en = {
+ "people_uu_id": "People UUID",
+ "nick_name": "User Tag",
+ "domain_name": "Domain Name",
+ "email": "Email",
+ "phone_number": "Phone Number",
+ "avatar": "Avatar",
+ }
+
+
+class UpdateUsers(PydanticBaseModel, UpdateUsersValidation):
+ people_uu_id: Optional[str] = None
+ nick_name: Optional[str] = None
+ domain_name: Optional[str] = None
+ email: Optional[str] = None
+ phone_number: Optional[str] = None
+ avatar: Optional[str] = None
+
+
+class QueryUsersValidation:
+ tr = {
+ "uu_id": "UUID",
+ }
+ en = {
+ "uu_id": "UUID",
+ }
+
+
+class QueryUsers(PydanticBaseModel):
+ uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Response/__init__.py b/ApiLayers/ApiValidations/Response/__init__.py
new file mode 100644
index 0000000..6c6378b
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/__init__.py
@@ -0,0 +1,16 @@
+from .account_responses import AccountRecordResponse
+from .address_responses import ListAddressResponse
+from .auth_responses import (
+ AuthenticationLoginResponse,
+ AuthenticationRefreshResponse,
+ AuthenticationUserInfoResponse,
+)
+
+
+__all__ = [
+ "AccountRecordResponse",
+ "ListAddressResponse",
+ "AuthenticationLoginResponse",
+ "AuthenticationRefreshResponse",
+ "AuthenticationUserInfoResponse",
+]
diff --git a/ApiLayers/ApiValidations/Response/account_responses.py b/ApiLayers/ApiValidations/Response/account_responses.py
new file mode 100644
index 0000000..5e95080
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/account_responses.py
@@ -0,0 +1,259 @@
+from typing import Optional, List
+from datetime import datetime
+from decimal import Decimal
+from uuid import UUID
+from pydantic import BaseModel
+
+
+class AccountBooksResponse(BaseModel):
+ """Response model for account books"""
+
+ country: str
+ branch_type: int
+ company_id: int
+ company_uu_id: str
+ branch_id: Optional[int]
+ branch_uu_id: Optional[str]
+
+
+class AccountCodesResponse(BaseModel):
+ """Response model for account codes"""
+
+ account_code: str
+ comment_line: str
+ is_receive_or_debit: bool
+ product_id: int = 0
+ nvi_id: str = ""
+ status_id: int = 0
+ account_code_seperator: str = "."
+ system_id: int = 0
+ locked: bool = False
+ company_id: Optional[int]
+ company_uu_id: str
+ customer_id: Optional[int]
+ customer_uu_id: str
+ person_id: Optional[int]
+ person_uu_id: str
+
+
+class AccountCodeParserResponse(BaseModel):
+ """Response model for account code parser"""
+
+ account_code_1: str
+ account_code_2: str
+ account_code_3: str
+ account_code_4: str = ""
+ account_code_5: str = ""
+ account_code_6: str = ""
+ account_code_id: int
+ account_code_uu_id: str
+
+
+class AccountMasterResponse(BaseModel):
+ """Response model for account master"""
+
+ doc_date: datetime
+ plug_type: str
+ plug_number: int
+ special_code: str = ""
+ authorization_code: str = ""
+ doc_code: str = ""
+ doc_type: int = 0
+ comment_line1: str = ""
+ comment_line2: str = ""
+ comment_line3: str = ""
+ comment_line4: str = ""
+ comment_line5: str = ""
+ comment_line6: str = ""
+ project_code: str = ""
+ module_no: str = ""
+ journal_no: int = 0
+ status_id: int = 0
+ canceled: bool = False
+ print_count: int = 0
+ total_active: Decimal = Decimal("0")
+ total_passive: Decimal = Decimal("0")
+ total_active_1: Decimal = Decimal("0")
+ total_passive_1: Decimal = Decimal("0")
+ total_active_2: Decimal = Decimal("0")
+ total_passive_2: Decimal = Decimal("0")
+ total_active_3: Decimal = Decimal("0")
+ total_passive_3: Decimal = Decimal("0")
+ total_active_4: Decimal = Decimal("0")
+ total_passive_4: Decimal = Decimal("0")
+ cross_ref: int = 0
+ data_center_id: str = ""
+ data_center_rec_num: int = 0
+ account_header_id: int
+ account_header_uu_id: str
+ project_item_id: Optional[int]
+ project_item_uu_id: Optional[str]
+ department_id: Optional[int]
+ department_uu_id: Optional[str]
+
+
+class AccountDetailResponse(BaseModel):
+ """Response model for account detail"""
+
+ doc_date: datetime
+ line_no: int
+ receive_debit: str
+ debit: Decimal
+ department: str = ""
+ special_code: str = ""
+ account_ref: int = 0
+ account_fiche_ref: int = 0
+ center_ref: int = 0
+ general_code: str = ""
+ credit: Decimal = Decimal("0")
+ currency_type: str = "TL"
+ exchange_rate: Decimal = Decimal("0")
+ debit_cur: Decimal = Decimal("0")
+ credit_cur: Decimal = Decimal("0")
+ discount_cur: Decimal = Decimal("0")
+ amount: Decimal = Decimal("0")
+ cross_account_code: str = ""
+ inf_index: Decimal = Decimal("0")
+ not_inflated: int = 0
+ not_calculated: int = 0
+ comment_line1: str = ""
+ comment_line2: str = ""
+ comment_line3: str = ""
+ comment_line4: str = ""
+ comment_line5: str = ""
+ comment_line6: str = ""
+ owner_acc_ref: int = 0
+ from_where: int = 0
+ orj_eid: int = 0
+ canceled: int = 0
+ cross_ref: int = 0
+ data_center_id: str = ""
+ data_center_rec_num: str = "0"
+ status_id: int = 0
+ plug_type_id: Optional[int]
+ plug_type_uu_id: str
+ account_header_id: int
+ account_header_uu_id: str
+ account_code_id: int
+ account_code_uu_id: str
+ account_master_id: int
+ account_master_uu_id: str
+ project_id: Optional[int]
+ project_uu_id: Optional[str]
+
+
+class AccountRecordResponse(BaseModel):
+ """Response model for account financial records.
+
+ This model represents a financial transaction record in the system,
+ including bank transaction details, amounts, and related metadata.
+
+ Attributes:
+ iban (str): International Bank Account Number
+ bank_date (datetime): Date when the transaction occurred at the bank
+ currency_value (Decimal): Original transaction amount
+ bank_balance (Decimal): Account balance after the transaction
+ currency (str): Currency code (e.g., "TRY", "USD")
+ additional_balance (Decimal): Any additional balance adjustments
+ channel_branch (str): Bank branch or channel where transaction occurred
+ process_name (str): Name/type of the transaction
+ process_type (str): Classification of the transaction
+ process_comment (str): Additional transaction details or notes
+ bank_reference_code (str): Bank's reference code for the transaction
+ add_comment_note (Optional[str]): Additional internal notes
+ is_receipt_mail_send (Optional[bool]): Whether receipt was emailed
+ found_from (Optional[str]): Source of the transaction record
+ similarity (Optional[float]): Matching confidence for duplicate detection
+ remainder_balance (Optional[Decimal]): Remaining balance if partial
+ bank_date_y (Optional[int]): Year of bank transaction
+ bank_date_m (Optional[int]): Month of bank transaction
+ bank_date_w (Optional[int]): Week of bank transaction
+ bank_date_d (Optional[int]): Day of bank transaction
+ approving_accounting_record (Optional[bool]): Accounting approval status
+ accounting_receipt_date (Optional[datetime]): When receipt was processed
+ accounting_receipt_number (Optional[int]): Receipt reference number
+ approved_record (Optional[bool]): Whether record is approved
+ import_file_name (Optional[str]): Source file if imported
+ receive_debit_uu_id (Optional[str]): Related debit record ID
+ budget_type_uu_id (Optional[str]): Associated budget type ID
+ company_uu_id (Optional[str]): Associated company ID
+ send_company_uu_id (Optional[str]): Sending company ID
+ customer_id (Optional[str]): Associated customer ID
+ customer_uu_id (Optional[str]): Associated customer UUID
+ send_person_uu_id (Optional[str]): Sending person ID
+ approving_accounting_person_uu_id (Optional[str]): Approver ID
+ build_parts_uu_id (Optional[str]): Related building part ID
+ build_decision_book_uu_id (Optional[str]): Related decision book ID
+ """
+
+ iban: str
+ bank_date: datetime
+ currency_value: Decimal
+ bank_balance: Decimal
+ currency: str = "TRY"
+ additional_balance: Decimal = Decimal("0")
+ channel_branch: str
+ process_name: str
+ process_type: str
+ process_comment: str
+ bank_reference_code: str
+ add_comment_note: Optional[str]
+ is_receipt_mail_send: Optional[bool] = False
+ found_from: Optional[str]
+ similarity: Optional[float]
+ remainder_balance: Optional[Decimal]
+ bank_date_y: Optional[int]
+ bank_date_m: Optional[int]
+ bank_date_w: Optional[int]
+ bank_date_d: Optional[int]
+ approving_accounting_record: Optional[bool]
+ accounting_receipt_date: Optional[datetime]
+ accounting_receipt_number: Optional[int]
+ approved_record: Optional[bool]
+ import_file_name: Optional[str]
+ receive_debit_uu_id: Optional[str]
+ budget_type_uu_id: Optional[str]
+ company_uu_id: Optional[str]
+ send_company_uu_id: Optional[str]
+ customer_id: Optional[str]
+ customer_uu_id: Optional[str]
+ send_person_uu_id: Optional[str]
+ approving_accounting_person_uu_id: Optional[str]
+ build_parts_uu_id: Optional[str]
+ build_decision_book_uu_id: Optional[str]
+
+
+class AccountRecordExchangeResponse(BaseModel):
+ """Response model for currency exchange records.
+
+ This model represents a currency exchange transaction, tracking
+ exchange rates and converted amounts for financial records.
+
+ Attributes:
+ account_record_id (int): ID of the related account record
+ account_record_uu_id (str): UUID of the related account record
+ exchange_rate (Decimal): Applied exchange rate
+ exchange_currency (str): Target currency code
+ exchange_value (Decimal): Converted amount
+ exchange_date (datetime): When the exchange was calculated
+ """
+
+ account_record_id: int
+ account_record_uu_id: str
+ exchange_rate: Decimal
+ exchange_currency: str = "TRY"
+ exchange_value: Decimal
+ exchange_date: datetime
+
+
+class AccountRecordsListResponse(BaseModel):
+ """Response model for account records list endpoint"""
+
+ uu_id: UUID
+ account_name: str
+ account_code: str
+ company_id: int
+ company_uu_id: str
+ created_at: datetime
+ updated_at: Optional[datetime]
+ deleted: bool = False
diff --git a/ApiLayers/ApiValidations/Response/address_responses.py b/ApiLayers/ApiValidations/Response/address_responses.py
new file mode 100644
index 0000000..e96f371
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/address_responses.py
@@ -0,0 +1,20 @@
+from typing import Optional
+from pydantic import BaseModel
+
+
+class ListAddressResponse(BaseModel):
+ build_number: Optional[str] = None
+ door_number: Optional[str] = None
+ floor_number: Optional[str] = None
+ comment_address: Optional[str] = None
+ letter_address: Optional[str] = None
+ short_letter_address: Optional[str] = None
+ latitude: Optional[float] = None
+ longitude: Optional[float] = None
+ street_uu_id: Optional[str] = None
+
+
+class AddressPostCodeResponse:
+ street_id: Optional[int] = None
+ street_uu_id: Optional[str] = None
+ postcode: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Response/auth_responses.py b/ApiLayers/ApiValidations/Response/auth_responses.py
new file mode 100644
index 0000000..7e03182
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/auth_responses.py
@@ -0,0 +1,36 @@
+from pydantic import BaseModel
+from typing import Optional, List, Dict, Any
+from datetime import datetime
+from uuid import UUID
+
+
+class AuthenticationLoginResponse(BaseModel):
+ """Response model for authentication login endpoint"""
+
+ token: str
+ refresh_token: str
+ token_type: str
+ expires_in: int
+ user_info: Dict[str, Any]
+
+
+class AuthenticationRefreshResponse(BaseModel):
+ """Response model for authentication refresh endpoint"""
+
+ token: str
+ refresh_token: str
+ token_type: str
+ expires_in: int
+
+
+class AuthenticationUserInfoResponse(BaseModel):
+ """Response model for authentication user info endpoint"""
+
+ user_id: int
+ username: str
+ email: str
+ first_name: str
+ last_name: str
+ is_active: bool
+ created_at: datetime
+ updated_at: Optional[datetime]
diff --git a/ApiLayers/ApiValidations/Response/base_responses.py b/ApiLayers/ApiValidations/Response/base_responses.py
new file mode 100644
index 0000000..06e6c02
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/base_responses.py
@@ -0,0 +1,105 @@
+from pydantic import BaseModel
+from typing import Optional, TypeVar, Generic, List
+from datetime import datetime
+from uuid import UUID
+
+T = TypeVar("T")
+
+
+class BaseResponse(BaseModel):
+ """Base response model that all response models inherit from.
+
+ This model provides common fields that are present in all database records,
+ including tracking information (created/updated timestamps), user actions
+ (created by, updated by, confirmed by), and record status (active, deleted).
+
+ Attributes:
+ uu_id (str): Unique identifier for the record, typically a UUID
+ created_at (datetime): Timestamp when the record was created
+ updated_at (Optional[datetime]): Timestamp when the record was last updated
+ created_by (Optional[str]): Username or identifier of the user who created the record
+ updated_by (Optional[str]): Username or identifier of the user who last updated the record
+ confirmed_by (Optional[str]): Username or identifier of the user who confirmed the record
+ is_confirmed (Optional[bool]): Whether the record has been confirmed/approved
+ active (Optional[bool]): Whether the record is currently active
+ deleted (Optional[bool]): Whether the record has been marked as deleted
+ expiry_starts (Optional[datetime]): When the record becomes valid/active
+ expiry_ends (Optional[datetime]): When the record expires/becomes inactive
+ is_notification_send (Optional[bool]): Whether notifications have been sent for this record
+ is_email_send (Optional[bool]): Whether emails have been sent for this record
+ """
+
+ uu_id: str
+ created_at: datetime
+ updated_at: Optional[datetime]
+ created_by: Optional[str]
+ updated_by: Optional[str]
+ confirmed_by: Optional[str]
+ is_confirmed: Optional[bool] = None
+ active: Optional[bool] = True
+ deleted: Optional[bool] = False
+ expiry_starts: Optional[datetime]
+ expiry_ends: Optional[datetime]
+ is_notification_send: Optional[bool] = False
+ is_email_send: Optional[bool] = False
+
+ class Config:
+ """Pydantic configuration for the base response model.
+
+ Attributes:
+ from_attributes (bool): Enables ORM mode for SQLAlchemy integration
+ """
+
+ from_attributes = True
+
+
+class CrudCollection(BaseModel, Generic[T]):
+ """Base collection model for paginated responses.
+
+ This model is used to return collections of items with pagination information.
+ It is generic over the type of items in the collection, allowing it to be
+ used with any response model.
+
+ Type Parameters:
+ T: The type of items in the collection
+
+ Attributes:
+ page (int): Current page number, 1-based indexing
+ size (int): Number of items per page
+ total (int): Total number of items across all pages
+ order_field (str): Field used for sorting the collection
+ order_type (str): Sort direction ('asc' or 'desc')
+ items (List[T]): List of items in the current page
+
+ Example:
+ ```python
+ class UserResponse(BaseResponse):
+ name: str
+ email: str
+
+ users = CrudCollection[UserResponse](
+ page=1,
+ size=10,
+ total=100,
+ order_field="name",
+ order_type="asc",
+ items=[...]
+ )
+ ```
+ """
+
+ page: int = 1
+ size: int = 10
+ total: int = 0
+ order_field: str = "id"
+ order_type: str = "asc"
+ items: List[T] = []
+
+ class Config:
+ """Pydantic configuration for the collection model.
+
+ Attributes:
+ from_attributes (bool): Enables ORM mode for SQLAlchemy integration
+ """
+
+ from_attributes = True
diff --git a/ApiLayers/ApiValidations/Response/budget_responses.py b/ApiLayers/ApiValidations/Response/budget_responses.py
new file mode 100644
index 0000000..9ecd619
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/budget_responses.py
@@ -0,0 +1,90 @@
+from pydantic import BaseModel
+from typing import Optional, List
+from datetime import datetime
+from uuid import UUID
+from decimal import Decimal
+from .base_responses import BaseResponse, CrudCollection
+
+
+class DecisionBookBudgetBooksResponse(BaseResponse):
+ """Response model for decision book budget books"""
+
+ country: str
+ branch_type: int = 0
+ company_id: int
+ company_uu_id: str
+ branch_id: Optional[int]
+ branch_uu_id: Optional[str]
+ build_decision_book_id: int
+ build_decision_book_uu_id: Optional[str]
+
+
+class DecisionBookBudgetBooksCollection(
+ CrudCollection[DecisionBookBudgetBooksResponse]
+):
+ """Collection of decision book budget books"""
+
+ pass
+
+
+class DecisionBookBudgetCodesResponse(BaseResponse):
+ """Response model for decision book budget codes"""
+
+ budget_code: str
+ comment_line: str
+ budget_type: str
+ budget_code_seperator: str = "."
+ system_id: int = 0
+ locked: bool = False
+ company_id: Optional[int]
+ company_uu_id: str
+ customer_id: Optional[int]
+ customer_uu_id: str
+
+
+class DecisionBookBudgetCodesCollection(
+ CrudCollection[DecisionBookBudgetCodesResponse]
+):
+ """Collection of decision book budget codes"""
+
+ pass
+
+
+class DecisionBookBudgetMasterResponse(BaseResponse):
+ """Response model for decision book budget master"""
+
+ budget_type: str
+ currency: str = "TRY"
+ total_budget: Decimal
+ tracking_period_id: Optional[int]
+ tracking_period_uu_id: Optional[str]
+ budget_books_id: int
+ budget_books_uu_id: Optional[str]
+ department_id: int
+ department_uu_id: Optional[str]
+
+
+class DecisionBookBudgetMasterCollection(
+ CrudCollection[DecisionBookBudgetMasterResponse]
+):
+ """Collection of decision book budget masters"""
+
+ pass
+
+
+class DecisionBookBudgetsResponse(BaseResponse):
+ """Response model for decision book budgets"""
+
+ process_date: datetime
+ budget_codes_id: int
+ total_budget: Decimal
+ used_budget: Decimal = Decimal("0")
+ remaining_budget: Decimal = Decimal("0")
+ decision_book_budget_master_id: int
+ decision_book_budget_master_uu_id: Optional[str]
+
+
+class DecisionBookBudgetsCollection(CrudCollection[DecisionBookBudgetsResponse]):
+ """Collection of decision book budgets"""
+
+ pass
diff --git a/ApiLayers/ApiValidations/Response/building_responses.py b/ApiLayers/ApiValidations/Response/building_responses.py
new file mode 100644
index 0000000..b639f6e
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/building_responses.py
@@ -0,0 +1,306 @@
+from typing import Optional, List, Generic
+from datetime import datetime
+from uuid import UUID
+from decimal import Decimal
+
+from .base_responses import BaseResponse, CrudCollection
+from ApiLayers.ApiValidations.Request import PydanticBaseModel
+
+
+class ListBuildingResponse(PydanticBaseModel):
+
+ gov_address_code: str
+ build_name: str
+ build_types_uu_id: Optional[str] = None
+ build_no: Optional[str] = None
+ max_floor: Optional[int] = None
+ underground_floor: Optional[int] = None
+ address_uu_id: Optional[str] = None
+ build_date: Optional[str] = None
+ decision_period_date: Optional[str] = None
+ tax_no: Optional[str] = None
+ lift_count: Optional[int] = None
+ heating_system: Optional[bool] = None
+ cooling_system: Optional[bool] = None
+ hot_water_system: Optional[bool] = None
+ block_service_man_count: Optional[int] = None
+ security_service_man_count: Optional[int] = None
+ garage_count: Optional[int] = None
+ site_uu_id: Optional[str] = None
+
+
+class BuildAreaListResponse(BaseResponse):
+ """Response model for building area list endpoint"""
+
+ uu_id: UUID
+ build_id: int
+ build_uu_id: str
+ area_name: str
+ area_value: float
+ created_at: datetime
+ updated_at: Optional[datetime]
+ deleted: bool = False
+
+
+class BuildAreaListCollection(CrudCollection[BuildAreaListResponse]):
+ """Collection of building area list"""
+
+ pass
+
+
+class BuildSitesListResponse(BaseResponse):
+ """Response model for building sites list endpoint"""
+
+ uu_id: UUID
+ address_id: int
+ site_name: str
+ site_value: float
+ created_at: datetime
+ updated_at: Optional[datetime]
+ deleted: bool = False
+
+
+class BuildSitesListCollection(CrudCollection[BuildSitesListResponse]):
+ """Collection of building sites list"""
+
+ pass
+
+
+class BuildTypesListResponse(BaseResponse):
+ """Response model for building types list endpoint"""
+
+ uu_id: UUID
+ type_name: str
+ type_value: str
+ created_at: datetime
+ updated_at: Optional[datetime]
+ deleted: bool = False
+
+
+class BuildTypesListCollection(CrudCollection[BuildTypesListResponse]):
+ """Collection of building types list"""
+
+ pass
+
+
+class BuildTypesResponse(BaseResponse):
+ """Response model for building types"""
+
+ function_code: str
+ type_code: str
+ lang: str = "TR"
+
+
+class BuildTypesCollection(CrudCollection[BuildTypesResponse]):
+ """Collection of building types"""
+
+ pass
+
+
+class Part2EmployeeResponse(BaseResponse):
+ """Response model for part to employee mapping"""
+
+ build_id: int
+ part_id: int
+ employee_id: int
+
+
+class Part2EmployeeCollection(CrudCollection[Part2EmployeeResponse]):
+ """Collection of part to employee mappings"""
+
+ pass
+
+
+class RelationshipEmployee2BuildResponse(BaseResponse):
+ """Response model for employee to build relationship"""
+
+ company_id: int
+ employee_id: int
+ member_id: int
+ relationship_type: Optional[str] = "Employee"
+ show_only: bool = False
+
+
+class RelationshipEmployee2BuildCollection(
+ CrudCollection[RelationshipEmployee2BuildResponse]
+):
+ """Collection of employee to build relationships"""
+
+ pass
+
+
+class BuildResponse(BaseResponse):
+ """Response model for buildings"""
+
+ gov_address_code: str = ""
+ build_name: str
+ build_no: str
+ max_floor: int = 1
+ underground_floor: int = 0
+ build_date: datetime
+ decision_period_date: datetime
+ tax_no: str = ""
+ lift_count: int = 0
+ heating_system: bool = True
+ cooling_system: bool = False
+ hot_water_system: bool = False
+ block_service_man_count: int = 0
+ security_service_man_count: int = 0
+ garage_count: int = 0
+ management_room_id: Optional[int]
+ site_id: Optional[int]
+ site_uu_id: Optional[str]
+ address_id: int
+ address_uu_id: str
+ build_types_id: int
+ build_types_uu_id: Optional[str]
+
+
+class BuildCollection(CrudCollection[BuildResponse]):
+ """Collection of buildings"""
+
+ pass
+
+
+class BuildPartsResponse(BaseResponse):
+ """Response model for building parts"""
+
+ address_gov_code: str
+ part_no: int = 0
+ part_level: int = 0
+ part_code: str
+ part_gross_size: int = 0
+ part_net_size: int = 0
+ default_accessory: str = "0"
+ human_livable: bool = True
+ due_part_key: str
+ build_id: int
+ build_uu_id: str
+ part_direction_id: Optional[int]
+ part_direction_uu_id: Optional[str]
+ part_type_id: int
+ part_type_uu_id: str
+
+
+class BuildPartsCollection(CrudCollection[BuildPartsResponse]):
+ """Collection of building parts"""
+
+ pass
+
+
+class BuildLivingSpaceResponse(BaseResponse):
+ """Response model for building living space"""
+
+ fix_value: Decimal = Decimal("0")
+ fix_percent: Decimal = Decimal("0")
+ agreement_no: str = ""
+ marketing_process: bool = False
+ marketing_layer: int = 0
+ build_parts_id: int
+ build_parts_uu_id: str
+ person_id: int
+ person_uu_id: str
+ occupant_type: int
+ occupant_type_uu_id: str
+
+
+class BuildLivingSpaceCollection(CrudCollection[BuildLivingSpaceResponse]):
+ """Collection of building living spaces"""
+
+ pass
+
+
+class BuildManagementResponse(BaseResponse):
+ """Response model for building management"""
+
+ discounted_percentage: Decimal = Decimal("0.00")
+ discounted_price: Decimal = Decimal("0.00")
+ calculated_price: Decimal = Decimal("0.00")
+ occupant_type: int
+ occupant_type_uu_id: str
+ build_id: int
+ build_uu_id: str
+ build_parts_id: int
+ build_parts_uu_id: str
+
+
+class BuildManagementCollection(CrudCollection[BuildManagementResponse]):
+ """Collection of building management records"""
+
+ pass
+
+
+class BuildAreaResponse(BaseResponse):
+ """Response model for building area"""
+
+ area_name: str = ""
+ area_code: str = ""
+ area_type: str = "GREEN"
+ area_direction: str = "NN"
+ area_gross_size: Decimal = Decimal("0")
+ area_net_size: Decimal = Decimal("0")
+ width: int = 0
+ size: int = 0
+ build_id: int
+ build_uu_id: str
+ part_type_id: Optional[int]
+ part_type_uu_id: Optional[str]
+
+
+class BuildAreaCollection(CrudCollection[BuildAreaResponse]):
+ """Collection of building areas"""
+
+ pass
+
+
+class BuildSitesResponse(BaseResponse):
+ """Response model for building sites"""
+
+ site_name: str
+ site_no: str
+ address_id: int
+ address_uu_id: Optional[str]
+
+
+class BuildSitesCollection(CrudCollection[BuildSitesResponse]):
+ """Collection of building sites"""
+
+ pass
+
+
+class BuildCompaniesProvidingResponse(BaseResponse):
+ """Response model for building companies providing services"""
+
+ build_id: int
+ build_uu_id: Optional[str]
+ company_id: int
+ company_uu_id: Optional[str]
+ provide_id: Optional[int]
+ provide_uu_id: Optional[str]
+ contract_id: Optional[int]
+
+
+class BuildCompaniesProvidingCollection(
+ CrudCollection[BuildCompaniesProvidingResponse]
+):
+ """Collection of building companies providing services"""
+
+ pass
+
+
+class BuildPersonProvidingResponse(BaseResponse):
+ """Response model for building person providing services"""
+
+ build_id: int
+ build_uu_id: Optional[str]
+ people_id: int
+ people_uu_id: Optional[str]
+ provide_id: Optional[int]
+ provide_uu_id: Optional[str]
+ contract_id: Optional[int]
+
+
+class BuildPersonProvidingCollection(CrudCollection[BuildPersonProvidingResponse]):
+ """Collection of building person providing services"""
+
+ pass
diff --git a/ApiLayers/ApiValidations/Response/company_responses.py b/ApiLayers/ApiValidations/Response/company_responses.py
new file mode 100644
index 0000000..1c261c4
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/company_responses.py
@@ -0,0 +1,59 @@
+from pydantic import BaseModel
+from typing import Optional, List
+from datetime import datetime
+from uuid import UUID
+
+
class CompanyListResponse(BaseModel):
    """Response model for the company list endpoint."""

    uu_id: UUID
    company_name: str
    company_code: str
    company_email: str
    company_phone: str
    company_address: str
    created_at: datetime
    # Pydantic v2 treats Optional[...] without a default as required;
    # rows never updated must still validate.
    updated_at: Optional[datetime] = None
    deleted: bool = False
+
+
class CompanyDepartmentListResponse(BaseModel):
    """Response model for the company department list endpoint."""

    uu_id: UUID
    department_name: str
    department_code: str
    company_id: int
    company_uu_id: str
    created_at: datetime
    # Optional in pydantic v2 needs an explicit default to be optional.
    updated_at: Optional[datetime] = None
    deleted: bool = False
+
+
class CompanyDutyListResponse(BaseModel):
    """Response model for the company duty list endpoint."""

    uu_id: UUID
    duty_name: str
    duty_code: str
    department_id: int
    department_uu_id: str
    created_at: datetime
    # Optional in pydantic v2 needs an explicit default to be optional.
    updated_at: Optional[datetime] = None
    deleted: bool = False
+
+
class CompanyEmployeeListResponse(BaseModel):
    """Response model for the company employee list endpoint."""

    uu_id: UUID
    employee_id: int
    employee_uu_id: str
    company_id: int
    company_uu_id: str
    duty_id: int
    duty_uu_id: str
    created_at: datetime
    # Optional in pydantic v2 needs an explicit default to be optional.
    updated_at: Optional[datetime] = None
    deleted: bool = False
diff --git a/ApiLayers/ApiValidations/Response/decision_book_responses.py b/ApiLayers/ApiValidations/Response/decision_book_responses.py
new file mode 100644
index 0000000..53d504b
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/decision_book_responses.py
@@ -0,0 +1,204 @@
+from pydantic import BaseModel
+from typing import Optional, List
+from datetime import datetime
+from uuid import UUID
+from decimal import Decimal
+from .base_responses import BaseResponse, CrudCollection
+
+
class BuildDecisionBookResponse(BaseResponse):
    """Response model for a building decision book."""

    decision_book_pdf_path: Optional[str] = ""
    resp_company_fix_wage: float = 0
    contact_agreement_path: Optional[str] = ""
    # Pydantic v2 treats Optional[...] without a default as required;
    # default to None so these fields stay genuinely optional.
    contact_agreement_date: Optional[datetime] = None
    meeting_date: Optional[str] = None
    decision_type: Optional[str] = None
+
+
class BuildDecisionBookCollection(CrudCollection[BuildDecisionBookResponse]):
    """Typed CRUD collection over BuildDecisionBookResponse records."""
+
+
class BuildDecisionBookInvitationsResponse(BaseResponse):
    """Response model for decision-book meeting invitations."""

    build_id: int
    # Optional fields in pydantic v2 need an explicit None default.
    build_uu_id: Optional[str] = None
    decision_book_id: int
    decision_book_uu_id: Optional[str] = None
    invitation_type: str
    invitation_attempt: int = 1
    living_part_count: int = 1
    living_part_percentage: Decimal = Decimal("0.51")
    message: Optional[str] = None
    planned_date: datetime
    planned_date_expires: datetime
+
+
class BuildDecisionBookInvitationsCollection(
    CrudCollection[BuildDecisionBookInvitationsResponse]
):
    """Typed CRUD collection over BuildDecisionBookInvitationsResponse records."""
+
+
class BuildDecisionBookPersonResponse(BaseResponse):
    """Response model for a person attached to a decision book."""

    dues_percent_discount: int = 0
    dues_fix_discount: Decimal = Decimal("0")
    dues_discount_approval_date: datetime
    send_date: datetime
    is_attending: bool = False
    # Optional fields in pydantic v2 need an explicit None default.
    confirmed_date: Optional[datetime] = None
    token: str = ""
    vicarious_person_id: Optional[int] = None
    vicarious_person_uu_id: Optional[str] = None
    invite_id: int
    invite_uu_id: str
    build_decision_book_id: int
    build_decision_book_uu_id: str
    build_living_space_id: int
    build_living_space_uu_id: Optional[str] = None
    person_id: int
+
+
class BuildDecisionBookPersonCollection(
    CrudCollection[BuildDecisionBookPersonResponse]
):
    """Typed CRUD collection over BuildDecisionBookPersonResponse records."""
+
+
class BuildDecisionBookPersonOccupantsResponse(BaseResponse):
    """Response model for occupants of a decision-book person record."""

    build_decision_book_person_id: int
    # Optional fields in pydantic v2 need an explicit None default.
    build_decision_book_person_uu_id: Optional[str] = None
    invite_id: Optional[int] = None
    invite_uu_id: Optional[str] = None
    occupant_type_id: int
    occupant_type_uu_id: Optional[str] = None
+
+
class BuildDecisionBookPersonOccupantsCollection(
    CrudCollection[BuildDecisionBookPersonOccupantsResponse]
):
    """Typed CRUD collection over BuildDecisionBookPersonOccupantsResponse records."""
+
+
class BuildDecisionBookItemsResponse(BaseResponse):
    """Response model for a single decision-book agenda item."""

    item_order: int
    item_comment: str
    # Optional fields in pydantic v2 need an explicit None default.
    item_objection: Optional[str] = None
    info_is_completed: bool = False
    is_payment_created: bool = False
    info_type_id: Optional[int] = None
    info_type_uu_id: Optional[str] = None
    build_decision_book_id: int
    build_decision_book_uu_id: Optional[str] = None
    item_short_comment: Optional[str] = None
+
+
class BuildDecisionBookItemsCollection(CrudCollection[BuildDecisionBookItemsResponse]):
    """Typed CRUD collection over BuildDecisionBookItemsResponse records."""
+
+
class BuildDecisionBookItemsUnapprovedResponse(BaseResponse):
    """Response model for an objection against a decision-book item."""

    item_objection: str
    item_order: int
    decision_book_item_id: int
    # Optional fields in pydantic v2 need an explicit None default.
    decision_book_item_uu_id: Optional[str] = None
    person_id: int
    person_uu_id: Optional[str] = None
    build_decision_book_item: int
    build_decision_book_item_uu_id: Optional[str] = None
+
+
class BuildDecisionBookItemsUnapprovedCollection(
    CrudCollection[BuildDecisionBookItemsUnapprovedResponse]
):
    """Typed CRUD collection over BuildDecisionBookItemsUnapprovedResponse records."""
+
+
class BuildDecisionBookPaymentsResponse(BaseResponse):
    """Response model for a payment generated from a decision-book item."""

    payment_plan_time_periods: str
    process_date: datetime
    payment_amount: Decimal
    currency: str = "TRY"
    # Optional fields in pydantic v2 need an explicit None default.
    payment_types_id: Optional[int] = None
    payment_types_uu_id: Optional[str] = None
    period_time: str
    process_date_y: int
    process_date_m: int
    build_decision_book_item_id: int
    build_decision_book_item_uu_id: str
    build_parts_id: int
    build_parts_uu_id: str
    decision_book_project_id: Optional[int] = None
    decision_book_project_uu_id: Optional[str] = None
    account_records_id: Optional[int] = None
    account_records_uu_id: Optional[str] = None
+
+
class BuildDecisionBookPaymentsCollection(
    CrudCollection[BuildDecisionBookPaymentsResponse]
):
    """Typed CRUD collection over BuildDecisionBookPaymentsResponse records."""
+
+
class BuildDecisionBookLegalResponse(BaseResponse):
    """Response model for legal proceedings tied to a decision book."""

    period_start_date: datetime
    lawsuits_decision_number: str
    lawsuits_decision_date: datetime
    period_stop_date: datetime
    decision_book_pdf_path: Optional[str] = ""
    resp_company_total_wage: Optional[Decimal] = Decimal("0")
    contact_agreement_path: Optional[str] = ""
    # Optional fields in pydantic v2 need an explicit None default.
    contact_agreement_date: Optional[datetime] = None
    meeting_date: str
    lawsuits_type: str = "C"
    lawsuits_name: str
    lawsuits_note: str
    lawyer_cost: Decimal
    mediator_lawyer_cost: Decimal
    other_cost: Decimal
    legal_cost: Decimal
    approved_cost: Decimal
    total_price: Decimal
    build_db_item_id: int
    build_db_item_uu_id: Optional[str] = None
    resp_attorney_id: int
    resp_attorney_uu_id: Optional[str] = None
    resp_attorney_company_id: int
    resp_attorney_company_uu_id: Optional[str] = None
    mediator_lawyer_person_id: int
    mediator_lawyer_person_uu_id: Optional[str] = None
+
+
class BuildDecisionBookLegalCollection(CrudCollection[BuildDecisionBookLegalResponse]):
    """Typed CRUD collection over BuildDecisionBookLegalResponse records."""
diff --git a/ApiLayers/ApiValidations/Response/default_response.py b/ApiLayers/ApiValidations/Response/default_response.py
new file mode 100644
index 0000000..1795142
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/default_response.py
@@ -0,0 +1,160 @@
+from typing import Optional
+from fastapi import status
+from fastapi.responses import JSONResponse
+
+from ApiLayers.LanguageModels.set_defaults.static_validation_retriever import (
+ StaticValidationRetriever,
+)
+
+
class BaseEndpointResponse(StaticValidationRetriever):
    """Abstract base for all endpoint response wrappers."""

    __abstract__ = True
+
+
class EndpointSuccessResponse(BaseEndpointResponse):  # 200 OK
    """Wraps a successful result in a 200 JSON response."""

    def as_dict(self, data: Optional[dict] = None):
        body = {"completed": True, **self.response, "lang": self.lang, "data": data}
        return JSONResponse(status_code=status.HTTP_200_OK, content=body)
+
+
class EndpointSuccessListResponse(BaseEndpointResponse):  # 200 OK
    """Wraps a successful list result plus pagination in a 200 JSON response."""

    def as_dict(self, data: Optional[dict] = None, pagination: Optional[dict] = None):
        body = {
            "completed": True,
            **self.response,
            "lang": self.lang,
            "pagination": pagination,
            "data": data,
        }
        return JSONResponse(status_code=status.HTTP_200_OK, content=body)
+ )
+
+
class EndpointCreatedResponse(BaseEndpointResponse):  # 201 Created
    """Wraps a successful creation in a 201 JSON response."""

    def as_dict(self, data: Optional[dict] = None):
        body = {"completed": True, **self.response, "lang": self.lang, "data": data}
        return JSONResponse(status_code=status.HTTP_201_CREATED, content=body)
+
+
class EndpointAcceptedResponse(BaseEndpointResponse):  # 202 Accepted
    """Wraps an accepted-for-processing result in a 202 JSON response."""

    def as_dict(self, data: Optional[dict] = None):
        body = {"completed": True, **self.response, "lang": self.lang, "data": data}
        return JSONResponse(status_code=status.HTTP_202_ACCEPTED, content=body)
+
+
class EndpointNotModifiedResponse(BaseEndpointResponse):  # 304 Not Modified
    """Wraps an unchanged-resource result in a 304 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(status_code=status.HTTP_304_NOT_MODIFIED, content=body)
+
+
class EndpointBadRequestResponse(BaseEndpointResponse):  # 400 Bad Request
    """Wraps a client-error result in a 400 JSON response."""

    def as_dict(self, data: Optional[dict] = None):
        body = {"completed": False, **self.response, "lang": self.lang, "data": data}
        return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=body)
+
+
class EndpointUnauthorizedResponse(BaseEndpointResponse):  # 401 Unauthorized
    """Wraps a missing/invalid-credentials result in a 401 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(status_code=status.HTTP_401_UNAUTHORIZED, content=body)
+
+
class EndpointForbiddenResponse(BaseEndpointResponse):  # 403 Forbidden
    """Wraps an insufficient-permissions result in a 403 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(status_code=status.HTTP_403_FORBIDDEN, content=body)
+
+
class EndpointNotFoundResponse(BaseEndpointResponse):  # 404 Not Found
    """Wraps a missing-resource result in a 404 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(status_code=status.HTTP_404_NOT_FOUND, content=body)
+
+
class EndpointMethodNotAllowedResponse(BaseEndpointResponse):  # 405 Method Not Allowed
    """Wraps an unsupported-method result in a 405 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(
            status_code=status.HTTP_405_METHOD_NOT_ALLOWED, content=body
        )
+
+
class EndpointNotAcceptableResponse(BaseEndpointResponse):  # 406 Not Acceptable
    """Wraps a not-acceptable result in a 406 JSON response.

    Note: unlike the other wrappers, "data" is placed before the shared
    response fields — preserved from the original key order.
    """

    def as_dict(self, data: Optional[dict] = None):
        body = {"completed": False, "data": data, **self.response, "lang": self.lang}
        return JSONResponse(status_code=status.HTTP_406_NOT_ACCEPTABLE, content=body)
+
+
class EndpointConflictResponse(BaseEndpointResponse):  # 409 Conflict
    """Wraps a state-conflict result in a 409 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(status_code=status.HTTP_409_CONFLICT, content=body)
+
+
class EndpointUnprocessableEntityResponse(
    BaseEndpointResponse
):  # 422 Unprocessable Entity
    """Wraps a validation-failure result in a 422 JSON response."""

    def as_dict(self, data: Optional[dict] = None):
        body = {"completed": False, **self.response, "lang": self.lang, "data": data}
        return JSONResponse(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, content=body
        )
+
+
class EndpointTooManyRequestsResponse(BaseEndpointResponse):  # 429 Too Many Requests
    """Wraps a rate-limit result in a 429 JSON response with Retry-After."""

    def __init__(self, retry_after: int, lang: str, code: str):
        super().__init__(lang=lang, code=code)
        # Seconds the client should wait before retrying.
        self.retry_after = retry_after

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(
            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
            headers={"Retry-After": str(self.retry_after)},
            content=body,
        )
+
+
class EndpointInternalErrorResponse(BaseEndpointResponse):  # 500 Internal Server Error
    """Wraps an unexpected server failure in a 500 JSON response."""

    def as_dict(self):
        body = {"completed": False, **self.response, "lang": self.lang}
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=body
        )
diff --git a/ApiLayers/ApiValidations/Response/living_space_responses.py b/ApiLayers/ApiValidations/Response/living_space_responses.py
new file mode 100644
index 0000000..4b4c902
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/living_space_responses.py
@@ -0,0 +1,52 @@
+from typing import Optional
+from api_validations.core_validations import BaseModelRegular
+from api_validations.validations_request import (
+ CrudRecordValidation,
+ CrudRecords,
+)
+
+
class LivingSpaceListValidation:
    """tr/en display labels for LivingSpaceListResponse fields.

    Keys mirror the response-model field names; values are the
    human-readable labels. Both tables extend the shared CRUD labels.
    """

    tr = {
        **CrudRecordValidation.tr,
        "fix_value": "Sabit Değer",
        "fix_percent": "Sabit Yüzde",
        "agreement_no": "Anlaşma No",
        "marketing_process": "Pazarlama Süreci",
        "marketing_layer": "Pazarlama Katmanı",
        "build_parts_id": "Bölüm ID",
        "build_parts_uu_id": "Bölüm UUID",
        "person_id": "Sorumlu Kişi ID",
        "person_uu_id": "Sorumlu Kişi UUID",
        "occupant_type": "Kiracı Tipi",
        "occupant_type_uu_id": "Kiracı Tipi UUID",
    }
    en = {
        **CrudRecordValidation.en,
        "fix_value": "Fixed Value",
        "fix_percent": "Fixed Percent",
        "agreement_no": "Agreement No",
        "marketing_process": "Marketing Process",
        "marketing_layer": "Marketing Layer",
        "build_parts_id": "Part ID",
        "build_parts_uu_id": "Part UUID",
        "person_id": "Responsible Person ID",
        "person_uu_id": "Responsible Person UUID",
        "occupant_type": "Occupant Type",
        "occupant_type_uu_id": "Occupant Type UUID",
    }
+
+
class LivingSpaceListResponse(BaseModelRegular, CrudRecords, LivingSpaceListValidation):
    """List-endpoint response model for living-space records.

    Every field is optional so partially-populated rows validate;
    field labels come from LivingSpaceListValidation.
    """

    fix_value: Optional[float] = None
    fix_percent: Optional[float] = None
    agreement_no: Optional[str] = None
    marketing_process: Optional[str] = None
    marketing_layer: Optional[str] = None
    build_parts_id: Optional[int] = None
    build_parts_uu_id: Optional[str] = None
    person_id: Optional[int] = None
    person_uu_id: Optional[str] = None
    occupant_type: Optional[str] = None
    occupant_type_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Response/parts_responses.py b/ApiLayers/ApiValidations/Response/parts_responses.py
new file mode 100644
index 0000000..bf518ea
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/parts_responses.py
@@ -0,0 +1,54 @@
+from typing import Optional
+from api_validations.core_validations import BaseModelRegular
+from api_validations.validations_request import (
+ CrudRecordValidation,
+ CrudRecords,
+)
+
+
class BuildPartsListValidation:
    """tr/en display labels for BuildPartsListResponse fields.

    Keys mirror the response-model field names; both tables extend the
    shared CRUD labels.
    """

    tr = {
        **CrudRecordValidation.tr,
        "address_gov_code": "Adres Kapı Kodu",
        "part_no": "Bölüm No",
        "part_level": "Bölüm Seviyesi",
        "part_code": "Bölüm Kodu",
        "part_gross": "Bölüm Brüt",
        "part_net": "Bölüm Net",
        "default_accessory": "Varsayılan Aksesuar",
        "human_livable": "İnsan Yaşamı",
        "due_part_key": "Sabit Ödeme Grubu",
        "build_uu_id": "Bina UUID",
        "part_direction_uu_id": "Bölüm Yönü UUID",
        "part_type_uu_id": "Bölüm Tipi UUID",
    }
    en = {
        **CrudRecordValidation.en,
        "address_gov_code": "Address Government Code",
        "part_no": "Part Number",
        "part_level": "Part Level",
        "part_code": "Part Code",
        "part_gross": "Part Gross",
        "part_net": "Part Net",
        "default_accessory": "Default Accessory",
        "human_livable": "Human Livable",
        "due_part_key": "Constant Payment Group",
        "build_uu_id": "Building UUID",
        "part_direction_uu_id": "Part Direction UUID",
        "part_type_uu_id": "Part Type UUID",
    }
+
+
class BuildPartsListResponse(BaseModelRegular, CrudRecords, BuildPartsListValidation):
    """List-endpoint response model for building parts.

    Every field is optional so partially-populated rows validate;
    field labels come from BuildPartsListValidation.
    """

    address_gov_code: Optional[str] = None
    part_no: Optional[int] = None
    part_level: Optional[int] = None
    part_code: Optional[str] = None
    part_gross: Optional[int] = None
    part_net: Optional[int] = None
    default_accessory: Optional[str] = None
    human_livable: Optional[bool] = None
    due_part_key: Optional[str] = None
    build_uu_id: Optional[str] = None
    part_direction_uu_id: Optional[str] = None
    part_type_uu_id: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/Response/people_responses.py b/ApiLayers/ApiValidations/Response/people_responses.py
new file mode 100644
index 0000000..fcf6da4
--- /dev/null
+++ b/ApiLayers/ApiValidations/Response/people_responses.py
@@ -0,0 +1,57 @@
+from typing import Optional
+from api_validations.core_validations import BaseModelRegular
+from api_validations.validations_request import (
+ CrudRecordValidation,
+ CrudRecords,
+)
+
+
class PeopleListValidation:
    """tr/en display labels for PeopleListResponse fields.

    Keys mirror the response-model field names; both tables extend the
    shared CRUD labels.
    """

    tr = {
        **CrudRecordValidation.tr,
        "firstname": "Ad",
        "surname": "Soyad",
        "middle_name": "Orta İsim",
        "sex_code": "Cinsiyet Kodu",
        "person_ref": "Kişi Referansı",
        "person_tag": "Kişi Etiketi",
        "father_name": "Baba Adı",
        "mother_name": "Anne Adı",
        "country_code": "Ülke Kodu",
        "national_identity_id": "Kimlik Numarası",
        "birth_place": "Doğum Yeri",
        "birth_date": "Doğum Tarihi",
        "tax_no": "Vergi Numarası",
    }
    en = {
        **CrudRecordValidation.en,
        "firstname": "First Name",
        "surname": "Surname",
        "middle_name": "Middle Name",
        "sex_code": "Sex Code",
        "person_ref": "Person Reference",
        "person_tag": "Person Tag",
        "father_name": "Father's Name",
        "mother_name": "Mother's Name",
        "country_code": "Country Code",
        "national_identity_id": "National Identity ID",
        "birth_place": "Birth Place",
        "birth_date": "Birth Date",
        "tax_no": "Tax Number",
    }
+
+
class PeopleListResponse(BaseModelRegular, CrudRecords, PeopleListValidation):
    """List-endpoint response model for people records.

    Every field is optional so partially-populated rows validate;
    field labels come from PeopleListValidation.
    """

    firstname: Optional[str] = None
    surname: Optional[str] = None
    middle_name: Optional[str] = None
    sex_code: Optional[str] = None
    person_ref: Optional[str] = None
    person_tag: Optional[str] = None
    father_name: Optional[str] = None
    mother_name: Optional[str] = None
    country_code: Optional[str] = None
    national_identity_id: Optional[str] = None
    birth_place: Optional[str] = None
    birth_date: Optional[str] = None
    tax_no: Optional[str] = None
diff --git a/ApiLayers/ApiValidations/__init__.py b/ApiLayers/ApiValidations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/ApiValidations/handler.py b/ApiLayers/ApiValidations/handler.py
new file mode 100644
index 0000000..e79d4c5
--- /dev/null
+++ b/ApiLayers/ApiValidations/handler.py
@@ -0,0 +1,58 @@
+"""
+Base validation models and utilities.
+"""
+
+from pydantic import BaseModel, ConfigDict
+
+
def rewrite_input_data(data):
    """Return a copy of *data* without empty-string or None values.

    Other falsy values (0, False, [], {}) are deliberately kept —
    only "" and None are treated as "not provided".
    """
    return {
        key: value
        for key, value in data.items()
        if value != "" and value is not None
    }
+
+
class BaseModelRegular(BaseModel):
    """Base model for all validation models with proper schema handling."""

    model_config = ConfigDict(
        json_schema_extra={"example": {}}  # filled in by model_json_schema below
    )

    def __init__(self, **kwargs):
        # Strip ""/None inputs before pydantic validation runs.
        super().__init__(**rewrite_input_data(kwargs))

    def excluded_dump(self):
        """Dump only fields that were explicitly set and are not None."""
        return self.model_dump(exclude_unset=True, exclude_none=True)

    def dump(self):
        """Dump every field, including defaults."""
        return self.model_dump()

    @classmethod
    def model_json_schema(cls, *args, **kwargs):
        """Generate the JSON schema, injecting a synthetic example object."""
        schema = super().model_json_schema(*args, **kwargs)

        if "properties" in schema:
            # Placeholder example per JSON-schema primitive type; strings
            # are handled separately because they embed the field name.
            placeholders = {
                "integer": 0,
                "number": 0.0,
                "boolean": False,
                "array": [],
                "object": {},
            }
            example = {}
            for field_name, field_schema in schema["properties"].items():
                field_type = field_schema.get("type")
                if field_type == "string":
                    example[field_name] = f"example_{field_name}"
                elif field_type in placeholders:
                    example[field_name] = placeholders[field_type]
            schema["example"] = example

        return schema
diff --git a/ApiLayers/ErrorHandlers/ErrorHandlers/api_exc_handler.py b/ApiLayers/ErrorHandlers/ErrorHandlers/api_exc_handler.py
new file mode 100644
index 0000000..2dbb61d
--- /dev/null
+++ b/ApiLayers/ErrorHandlers/ErrorHandlers/api_exc_handler.py
@@ -0,0 +1,89 @@
+import json
+from typing import Any, Union, Awaitable
+from pydantic import ValidationError
+
+from fastapi import Request, WebSocket, status
+from fastapi.responses import Response, JSONResponse
+from ApiLayers.LanguageModels.Errors.merge_all_error_languages import (
+ MergedErrorLanguageModels,
+)
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from ApiLayers.ErrorHandlers.bases import BaseErrorModelClass
+
+
def validation_exception_handler(request, exc: ValidationError) -> JSONResponse:
    """Convert a pydantic ValidationError into a 422 JSON response.

    Example payload:
    {"message": [{
        "type": "missing", "location": ["company_uu_id"], "message": "Field required",
        "input": {"invalid_key_input": "e9869a25"}
    }], "request": "http://0.0.0.0:41575/authentication/select", "title": "EmployeeSelection"
    }
    Validation error on pydantic model of each event validation
    """
    # exc.errors() already yields dicts; build each entry once instead of
    # re-wrapping the error in dict() for every field lookup.
    validation_list = [
        {
            "type": err.get("type"),
            "location": err.get("loc"),
            "message": err.get("msg"),  # todo change message with language message
            "input": err.get("input"),
        }
        for err in (exc.errors() or [])
    ]
    error_response_dict = dict(
        message=validation_list, request=str(request.url.path), title=exc.title
    )
    return JSONResponse(
        content=error_response_dict, status_code=status.HTTP_422_UNPROCESSABLE_ENTITY
    )
+
+
class HTTPExceptionApiHandler:
    """Translates raised exceptions into API responses via RESPONSE_MODEL."""

    def __init__(self, response_model: Any):
        # Response class (e.g. JSONResponse) used to build every reply.
        self.RESPONSE_MODEL: Any = response_model

    @staticmethod
    def retrieve_error_status_code(exc: HTTPExceptionApi) -> int:
        """Map the exception's error code to an HTTP status (500 fallback)."""
        error_by_codes = BaseErrorModelClass.retrieve_error_by_codes()
        grab_status_code = error_by_codes.get(str(exc.error_code).upper(), 500)
        return int(grab_status_code)

    @staticmethod
    def retrieve_error_message(exc: HTTPExceptionApi, error_languages) -> str:
        """Look up the localized message for the error code (default fallback)."""
        from ApiLayers.ErrorHandlers import DEFAULT_ERROR

        return error_languages.get(str(exc.error_code).upper(), DEFAULT_ERROR)

    async def handle_exception(
        self, request: Union[Request, WebSocket], exc: Exception
    ) -> Union[Response, Awaitable[None]]:
        """Build an error response for *exc*, localized when it is an
        HTTPExceptionApi, otherwise a generic 500."""
        request_string = (
            str(request.url) if isinstance(request, Request) else request.url.path
        )
        if isinstance(exc, HTTPExceptionApi):
            error_languages = MergedErrorLanguageModels.get_language_models(
                language=exc.lang
            )
            status_code = self.retrieve_error_status_code(exc)
            error_message = self.retrieve_error_message(exc, error_languages)
            return self.RESPONSE_MODEL(
                status_code=int(status_code),
                content={
                    "message": error_message,
                    "lang": exc.lang,
                    "request": request_string,
                    "loc": exc.loc,
                },
            )
        # Handle other exceptions with a generic 500 error. Plain exceptions
        # have no `loc` attribute — getattr avoids an AttributeError here.
        return self.RESPONSE_MODEL(
            status_code=500,
            content={
                "message": "Internal Server Error",
                "lang": "def",
                "request": request_string,
                "loc": getattr(exc, "loc", ""),
            },
        )
diff --git a/ApiLayers/ErrorHandlers/Exceptions/api_exc.py b/ApiLayers/ErrorHandlers/Exceptions/api_exc.py
new file mode 100644
index 0000000..4ff26dc
--- /dev/null
+++ b/ApiLayers/ErrorHandlers/Exceptions/api_exc.py
@@ -0,0 +1,32 @@
+from Services.Redis.Actions.actions import RedisActions
+from ApiLayers.AllConfigs.Redis.configs import RedisValidationKeys
+
+
class HTTPExceptionApi(Exception):
    """API exception carrying an error code plus language/location context."""

    def __init__(self, error_code: str, lang: str, loc: str = "", sys_msg: str = ""):
        """
        Initialize the HTTPExceptionApi class.
        :param error_code: The error code. To retrieve the error message.
        :param lang: The language. Catch error msg from redis.
        :param loc: The location. To log where error occurred.
        :param sys_msg: The system message. To log the error message.
        """
        self.error_code = error_code
        self.lang = lang
        self.loc = loc
        self.sys_msg = sys_msg

    def retrieve_error_message_by_code_at_redis(self):
        """Look up the localized message in Redis; fall back to sys_msg."""
        error_redis_key = (
            f"{RedisValidationKeys.LANGUAGE_MODELS}:{RedisValidationKeys.ERRORCODES}"
        )
        cached = RedisActions.get_json(list_keys=[error_redis_key, self.lang])
        if cached.status:
            localized = cached.first.as_dict.get(self.error_code, None)
            if localized:
                return localized
        return f"System Message -> {self.sys_msg}"
diff --git a/ApiLayers/ErrorHandlers/__init__.py b/ApiLayers/ErrorHandlers/__init__.py
new file mode 100644
index 0000000..c83ecf8
--- /dev/null
+++ b/ApiLayers/ErrorHandlers/__init__.py
@@ -0,0 +1,9 @@
from ApiLayers.ErrorHandlers.ErrorHandlers.api_exc_handler import (
    HTTPExceptionApiHandler,
)
from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi


# Fallback error code used when a specific localized message cannot be found.
DEFAULT_ERROR = "UNKNOWN_ERROR"

__all__ = ["HTTPExceptionApiHandler", "HTTPExceptionApi", "DEFAULT_ERROR"]
diff --git a/ApiLayers/ErrorHandlers/base.py b/ApiLayers/ErrorHandlers/base.py
new file mode 100644
index 0000000..f6b32ec
--- /dev/null
+++ b/ApiLayers/ErrorHandlers/base.py
@@ -0,0 +1,13 @@
class BaseError:
    """Maps domain error-code names to the HTTP status returned for them."""

    NOT_CREATED: int = 405
    NOT_DELETED: int = 405
    NOT_UPDATED: int = 405
    NOT_LISTED: int = 404
    NOT_FOUND: int = 404
    ALREADY_EXISTS: int = 400
    IS_NOT_CONFIRMED: int = 405
    NOT_AUTHORIZED: int = 401
    NOT_VALID: int = 406
    NOT_ACCEPTABLE: int = 406
    INVALID_DATA: int = 422
    UNKNOWN_ERROR: int = 502
diff --git a/ApiLayers/ErrorHandlers/bases.py b/ApiLayers/ErrorHandlers/bases.py
new file mode 100644
index 0000000..c045b80
--- /dev/null
+++ b/ApiLayers/ErrorHandlers/bases.py
@@ -0,0 +1,18 @@
+from ApiLayers.ErrorHandlers.base import BaseError
+from ApiLayers.ErrorHandlers.statuses import Statuses
+
+
class BaseErrorModelClass:
    """Aggregates error-code → status tables from the registered classes."""

    list_of_statuses = [Statuses, BaseError]

    @classmethod
    def retrieve_error_by_codes(cls):
        """Merge every registered class's non-dunder attributes into one map."""
        merged = {}
        for source in cls.list_of_statuses:
            merged.update(
                {
                    name: code
                    for name, code in source.__dict__.items()
                    # same filter as before: skip names starting with dunder-ish prefix
                    if "__" not in str(name)[0:3]
                }
            )
        return merged
diff --git a/ApiLayers/ErrorHandlers/statuses.py b/ApiLayers/ErrorHandlers/statuses.py
new file mode 100644
index 0000000..df2f83b
--- /dev/null
+++ b/ApiLayers/ErrorHandlers/statuses.py
@@ -0,0 +1,58 @@
class Statuses:
    """HTTP status codes keyed by their canonical constant names."""

    HTTP_100_CONTINUE = 100
    HTTP_101_SWITCHING_PROTOCOLS = 101
    HTTP_102_PROCESSING = 102
    HTTP_103_EARLY_HINTS = 103
    HTTP_200_OK = 200
    HTTP_201_CREATED = 201
    HTTP_202_ACCEPTED = 202
    HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203
    HTTP_204_NO_CONTENT = 204
    HTTP_205_RESET_CONTENT = 205
    HTTP_206_PARTIAL_CONTENT = 206
    HTTP_207_MULTI_STATUS = 207
    HTTP_208_ALREADY_REPORTED = 208
    HTTP_226_IM_USED = 226
    HTTP_300_MULTIPLE_CHOICES = 300
    HTTP_301_MOVED_PERMANENTLY = 301
    HTTP_302_FOUND = 302
    HTTP_303_SEE_OTHER = 303
    HTTP_304_NOT_MODIFIED = 304
    HTTP_305_USE_PROXY = 305
    HTTP_306_RESERVED = 306
    HTTP_307_TEMPORARY_REDIRECT = 307
    HTTP_308_PERMANENT_REDIRECT = 308
    HTTP_400_BAD_REQUEST = 400
    HTTP_401_UNAUTHORIZED = 401
    HTTP_402_PAYMENT_REQUIRED = 402
    HTTP_403_FORBIDDEN = 403
    HTTP_404_NOT_FOUND = 404
    HTTP_405_METHOD_NOT_ALLOWED = 405
    HTTP_406_NOT_ACCEPTABLE = 406
    HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407
    HTTP_408_REQUEST_TIMEOUT = 408
    HTTP_409_CONFLICT = 409
    HTTP_410_GONE = 410
    HTTP_411_LENGTH_REQUIRED = 411
    HTTP_412_PRECONDITION_FAILED = 412
    HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413
    HTTP_414_REQUEST_URI_TOO_LONG = 414
    HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415
    HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416
    HTTP_417_EXPECTATION_FAILED = 417
    HTTP_418_IM_A_TEAPOT = 418
    HTTP_421_MISDIRECTED_REQUEST = 421
    HTTP_422_UNPROCESSABLE_ENTITY = 422
    HTTP_423_LOCKED = 423
    HTTP_424_FAILED_DEPENDENCY = 424
    HTTP_426_UPGRADE_REQUIRED = 426
    HTTP_428_PRECONDITION_REQUIRED = 428
    HTTP_429_TOO_MANY_REQUESTS = 429
    HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 431
    HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS = 451
    HTTP_500_INTERNAL_SERVER_ERROR = 500
    HTTP_502_BAD_GATEWAY = 502

    @classmethod
    def retrieve_error_by_code(cls, error_code: str):
        """Return the status for *error_code*, or 502 when unknown."""
        return getattr(cls, error_code, 502)
diff --git a/ApiLayers/LanguageModels/Database/Mixins/crud_mixin.py b/ApiLayers/LanguageModels/Database/Mixins/crud_mixin.py
new file mode 100644
index 0000000..ea435ed
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/Mixins/crud_mixin.py
@@ -0,0 +1,46 @@
# tr/en display labels for the audit/CRUD columns shared by every collection.
CrudCollectionLanguageModel = dict(
    tr={
        "id": "ID",
        "uu_id": "UUID",
        "ref_id": "Referans ID",
        "created_at": "Oluşturulma Tarihi",
        "updated_at": "Güncellenme Tarihi",
        "cryp_uu_id": "Şifreli ID",
        "created_by": "Oluşturan",
        "created_by_id": "Oluşturan ID",
        "updated_by": "Güncelleyen",
        "updated_by_id": "Güncelleyen ID",
        "confirmed_by": "Onaylayan",
        "confirmed_by_id": "Onaylayan ID",
        "is_confirmed": "Onay Durumu",
        "replication_id": "Replikasyon ID",
        "deleted": "Silindi",
        "active": "Aktif",
        "is_notification_send": "Bildirim Gönderildi",
        "is_email_send": "E-posta Gönderildi",
        "expiry_ends": "Bitiş Tarihi",
        "expiry_starts": "Başlangıç Tarihi",
    },
    en={
        "id": "Identity",
        "uu_id": "UUID",
        "ref_id": "Reference Identity",
        "created_at": "Created At",
        "updated_at": "Updated At",
        "cryp_uu_id": "Encrypted Identity",
        "created_by": "Created By",
        "created_by_id": "Created By Identity",
        "updated_by": "Updated By",
        "updated_by_id": "Updated By Identity",
        "confirmed_by": "Confirmed By",
        "confirmed_by_id": "Confirmed By Identity",
        "is_confirmed": "Confirmation Status",
        "replication_id": "Replication Identity",
        "deleted": "Deleted",
        "active": "Active",
        "is_notification_send": "Notification Sent",
        "is_email_send": "Email Sent",
        "expiry_ends": "Expiration End",
        "expiry_starts": "Expiration Start",
    },
)
diff --git a/ApiLayers/LanguageModels/Database/account/account.py b/ApiLayers/LanguageModels/Database/account/account.py
new file mode 100644
index 0000000..aa0e69c
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/account/account.py
@@ -0,0 +1,390 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+AccountBooksLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "country": "Ülke",
+ "branch_type": "Şube Türü",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UU ID",
+ "branch_id": "Şube ID",
+ "branch_uu_id": "Şube UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "country": "Country",
+ "branch_type": "Branch Type",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UU ID",
+ "branch_id": "Branch ID",
+ "branch_uu_id": "Branch UU ID",
+ },
+)
+
+AccountCodesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "account_code": "Hesap Kodu",
+ "comment_line": "Yorum Satırı",
+ "is_receive_or_debit": "Alacak veya Borç",
+ "product_id": "Ürün ID",
+ "nvi_id": "Nvi ID",
+ "status_id": "Durum ID",
+ "account_code_seperator": "Hesap Kodu Ayırıcı",
+ "system_id": "Sistem ID",
+ "locked": "Kilitli",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UU ID",
+ "customer_id": "Müşteri ID",
+ "customer_uu_id": "Müşteri UU ID",
+ "person_id": "Kişi ID",
+ "person_uu_id": "Kişi UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "account_code": "Account Code",
+ "comment_line": "Comment Line",
+ "is_receive_or_debit": "Is Receive or Debit",
+ "product_id": "Product ID",
+ "nvi_id": "Nvi ID",
+ "status_id": "Status ID",
+ "account_code_seperator": "Account Code Seperator",
+ "system_id": "System ID",
+ "locked": "Locked",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UU ID",
+ "customer_id": "Customer ID",
+ "customer_uu_id": "Customer UU ID",
+ "person_id": "Person ID",
+ "person_uu_id": "Person UU ID",
+ },
+)
+
+AccountCodeParserLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "account_code_1": "Hesap Kodu 1",
+ "account_code_2": "Hesap Kodu 2",
+ "account_code_3": "Hesap Kodu 3",
+ "account_code_4": "Hesap Kodu 4",
+ "account_code_5": "Hesap Kodu 5",
+ "account_code_6": "Hesap Kodu 6",
+ "account_code_id": "Hesap Kodu ID",
+ "account_code_uu_id": "Hesap Kodu UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "account_code_1": "Account Code 1",
+ "account_code_2": "Account Code 2",
+ "account_code_3": "Account Code 3",
+ "account_code_4": "Account Code 4",
+ "account_code_5": "Account Code 5",
+ "account_code_6": "Account Code 6",
+ "account_code_id": "Account Code ID",
+ "account_code_uu_id": "Account Code UU ID",
+ },
+)
+
+AccountMasterLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "doc_date": "Belge Tarihi",
+ "plug_type": "Fiş Türü",
+ "plug_number": "Fiş Numarası",
+ "special_code": "Özel Kod",
+ "authorization_code": "Yetki Kodu",
+ "doc_code": "Belge Kodu",
+ "doc_type": "Belge Türü",
+ "comment_line1": "Yorum Satırı 1",
+ "comment_line2": "Yorum Satırı 2",
+ "comment_line3": "Yorum Satırı 3",
+ "comment_line4": "Yorum Satırı 4",
+ "comment_line5": "Yorum Satırı 5",
+ "comment_line6": "Yorum Satırı 6",
+ "project_code": "Proje Kodu",
+ "module_no": "Modül No",
+ "journal_no": "Defter No",
+ "status_id": "Durum ID",
+ "canceled": "İptal Edildi",
+ "print_count": "Yazdırma Sayısı",
+ "total_active": "Toplam Aktif",
+ "total_passive": "Toplam Pasif",
+ "total_active_1": "Toplam Aktif 1",
+ "total_passive_1": "Toplam Pasif 1",
+ "total_active_2": "Toplam Aktif 2",
+ "total_passive_2": "Toplam Pasif 2",
+ "total_active_3": "Toplam Aktif 3",
+ "total_passive_3": "Toplam Pasif 3",
+ "total_active_4": "Toplam Aktif 4",
+ "total_passive_4": "Toplam Pasif 4",
+ "cross_ref": "Çapraz Referans",
+ "data_center_id": "Veri Merkezi ID",
+ "data_center_rec_num": "Veri Merkezi Kayıt Numarası",
+ "account_header_id": "Hesap Başlığı ID",
+ "account_header_uu_id": "Hesap Başlığı UU ID",
+ "project_item_id": "Proje Öğesi ID",
+ "project_item_uu_id": "Proje Öğesi UU ID",
+ "department_id": "Departman ID",
+ "department_uu_id": "Departman UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "doc_date": "Document Date",
+ "plug_type": "Plug Type",
+ "plug_number": "Plug Number",
+ "special_code": "Special Code",
+ "authorization_code": "Authorization Code",
+ "doc_code": "Document Code",
+ "doc_type": "Document Type",
+ "comment_line1": "Comment Line 1",
+ "comment_line2": "Comment Line 2",
+ "comment_line3": "Comment Line 3",
+ "comment_line4": "Comment Line 4",
+ "comment_line5": "Comment Line 5",
+ "comment_line6": "Comment Line 6",
+ "project_code": "Project Code",
+ "module_no": "Module No",
+ "journal_no": "Journal No",
+ "status_id": "Status ID",
+ "canceled": "Canceled",
+ "print_count": "Print Count",
+ "total_active": "Total Active",
+ "total_passive": "Total Passive",
+ "total_active_1": "Total Active 1",
+ "total_passive_1": "Total Passive 1",
+ "total_active_2": "Total Active 2",
+ "total_passive_2": "Total Passive 2",
+ "total_active_3": "Total Active 3",
+ "total_passive_3": "Total Passive 3",
+ "total_active_4": "Total Active 4",
+ "total_passive_4": "Total Passive 4",
+ "cross_ref": "Cross Reference",
+ "data_center_id": "Data Center ID",
+ "data_center_rec_num": "Data Center Record Number",
+ "account_header_id": "Account Header ID",
+ "account_header_uu_id": "Account Header UU ID",
+ "project_item_id": "Project Item ID",
+ "project_item_uu_id": "Project Item UU ID",
+ "department_id": "Department ID",
+ "department_uu_id": "Department UU ID",
+ },
+)
+
+AccountDetailLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "doc_date": "Belge Tarihi",
+ "plug_type": "Fiş Türü",
+ "plug_number": "Fiş Numarası",
+ "special_code": "Özel Kod",
+ "authorization_code": "Yetki Kodu",
+ "doc_code": "Belge Kodu",
+ "doc_type": "Belge Türü",
+ "comment_line1": "Yorum Satırı 1",
+ "comment_line2": "Yorum Satırı 2",
+ "comment_line3": "Yorum Satırı 3",
+ "comment_line4": "Yorum Satırı 4",
+ "comment_line5": "Yorum Satırı 5",
+ "comment_line6": "Yorum Satırı 6",
+ "project_code": "Proje Kodu",
+ "module_no": "Modül No",
+ "journal_no": "Defter No",
+ "status_id": "Durum ID",
+ "canceled": "İptal Edildi",
+ "print_count": "Yazdırma Sayısı",
+ "total_active": "Toplam Aktif",
+ "total_passive": "Toplam Pasif",
+ "total_active_1": "Toplam Aktif 1",
+ "total_passive_1": "Toplam Pasif 1",
+ "total_active_2": "Toplam Aktif 2",
+ "total_passive_2": "Toplam Pasif 2",
+ "total_active_3": "Toplam Aktif 3",
+ "total_passive_3": "Toplam Pasif 3",
+ "total_active_4": "Toplam Aktif 4",
+ "total_passive_4": "Toplam Pasif 4",
+ "cross_ref": "Çapraz Referans",
+ "data_center_id": "Veri Merkezi ID",
+ "data_center_rec_num": "Veri Merkezi Kayıt Numarası",
+ "account_header_id": "Hesap Başlığı ID",
+ "account_header_uu_id": "Hesap Başlığı UU ID",
+ "project_item_id": "Proje Öğesi ID",
+ "project_item_uu_id": "Proje Öğesi UU ID",
+ "department_id": "Departman ID",
+ "department_uu_id": "Departman UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "doc_date": "Document Date",
+ "plug_type": "Plug Type",
+ "plug_number": "Plug Number",
+ "special_code": "Special Code",
+ "authorization_code": "Authorization Code",
+ "doc_code": "Document Code",
+ "doc_type": "Document Type",
+ "comment_line1": "Comment Line 1",
+ "comment_line2": "Comment Line 2",
+ "comment_line3": "Comment Line 3",
+ "comment_line4": "Comment Line 4",
+ "comment_line5": "Comment Line 5",
+ "comment_line6": "Comment Line 6",
+ "project_code": "Project Code",
+ "module_no": "Module No",
+ "journal_no": "Journal No",
+ "status_id": "Status ID",
+ "canceled": "Canceled",
+ "print_count": "Print Count",
+ "total_active": "Total Active",
+ "total_passive": "Total Passive",
+ "total_active_1": "Total Active 1",
+ "total_passive_1": "Total Passive 1",
+ "total_active_2": "Total Active 2",
+ "total_passive_2": "Total Passive 2",
+ "total_active_3": "Total Active 3",
+ "total_passive_3": "Total Passive 3",
+ "total_active_4": "Total Active 4",
+ "total_passive_4": "Total Passive 4",
+ "cross_ref": "Cross Reference",
+ "data_center_id": "Data Center ID",
+ "data_center_rec_num": "Data Center Record Number",
+ "account_header_id": "Account Header ID",
+ "account_header_uu_id": "Account Header UU ID",
+ "project_item_id": "Project Item ID",
+ "project_item_uu_id": "Project Item UU ID",
+ "department_id": "Department ID",
+ "department_uu_id": "Department UU ID",
+ },
+)
+
+AccountRecordsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "iban": "IBAN",
+ "bank_date": "Banka Tarihi",
+ "currency_value": "Döviz Değeri",
+ "bank_balance": "Banka Bakiyesi",
+ "currency": "Döviz",
+ "additional_balance": "Ek Bakiye",
+ "channel_branch": "Kanal Şubesi",
+ "process_name": "İşlem Türü Adı",
+ "process_type": "İşlem Türü",
+ "process_comment": "İşlem Kayıt Yorumu",
+ "process_garbage": "İşlem Kayıt Çöpü",
+ "bank_reference_code": "Banka Referans Kodu",
+ "add_comment_note": "Yorum Not Ekle",
+ "is_receipt_mail_send": "Alındı Mail Gönderildi",
+ "found_from": "Bulunduğu",
+ "similarity": "Benzerlik",
+ "remainder_balance": "Kalan Bakiye",
+ "bank_date_y": "Banka Tarihi Yıl",
+ "bank_date_m": "Banka Tarihi Ay",
+ "bank_date_w": "Banka Tarihi Hafta",
+ "bank_date_d": "Banka Tarihi Gün",
+ "approving_accounting_record": "Onaylayan Muhasebe Kaydı",
+ "accounting_receipt_date": "Muhasebe Alındı Tarihi",
+ "accounting_receipt_number": "Muhasebe Alındı Numarası",
+ "status_id": "Durum ID",
+ "approved_record": "Onaylanmış Kayıt",
+ "import_file_name": "İçe Aktarım Dosya Adı",
+ "receive_debit": "Alacak Borç",
+ "receive_debit_uu_id": "Alacak Borç UU ID",
+ "budget_type": "Bütçe Türü",
+ "budget_type_uu_id": "Bütçe Türü UU ID",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UU ID",
+ "send_company_id": "Gönderen Şirket ID",
+ "send_company_uu_id": "Gönderen Şirket UU ID",
+ "send_person_id": "Gönderen Kişi ID",
+ "send_person_uu_id": "Gönderen Kişi UU ID",
+ "approving_accounting_person": "Onaylayan Muhasebe Kişi",
+ "approving_accounting_person_uu_id": "Onaylayan Muhasebe Kişi UU ID",
+ "living_space_id": "Yaşam Alanı ID",
+ "living_space_uu_id": "Yaşam Alanı UU ID",
+ "customer_id": "Müşteri ID",
+ "customer_uu_id": "Müşteri UU ID",
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UU ID",
+ "build_parts_id": "Bina Parça ID",
+ "build_parts_uu_id": "Bina Parça UU ID",
+ "build_decision_book_id": "Bina Karar Defteri ID",
+ "build_decision_book_uu_id": "Bina Karar Defteri UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "iban": "IBAN",
+ "bank_date": "Bank Date",
+ "currency_value": "Currency Value",
+ "bank_balance": "Bank Balance",
+ "currency": "Currency",
+ "additional_balance": "Additional Balance",
+ "channel_branch": "Channel Branch",
+ "process_name": "Process Type Name",
+ "process_type": "Process Type",
+ "process_comment": "Process Record Comment",
+ "process_garbage": "Process Record Garbage",
+ "bank_reference_code": "Bank Reference Code",
+ "add_comment_note": "Add Comment Note",
+ "is_receipt_mail_send": "Is Receipt Mail Send",
+ "found_from": "Found From",
+ "similarity": "Similarity",
+ "remainder_balance": "Remainder Balance",
+ "bank_date_y": "Bank Date Year",
+ "bank_date_m": "Bank Date Month",
+ "bank_date_w": "Bank Date Week",
+ "bank_date_d": "Bank Date Day",
+ "approving_accounting_record": "Approving Accounting Record",
+ "accounting_receipt_date": "Accounting Receipt Date",
+ "accounting_receipt_number": "Accounting Receipt Number",
+ "status_id": "Status ID",
+ "approved_record": "Approved Record",
+ "import_file_name": "Import File Name",
+ "receive_debit": "Receive Debit",
+ "receive_debit_uu_id": "Receive Debit UU ID",
+ "budget_type": "Budget Type",
+ "budget_type_uu_id": "Budget Type UU ID",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UU ID",
+ "send_company_id": "Send Company ID",
+ "send_company_uu_id": "Send Company UU ID",
+ "send_person_id": "Send Person ID",
+ "send_person_uu_id": "Send Person UU ID",
+ "approving_accounting_person": "Approving Accounting Person",
+ "approving_accounting_person_uu_id": "Approving Accounting Person UU ID",
+ "living_space_id": "Living Space ID",
+ "living_space_uu_id": "Living Space UU ID",
+ "customer_id": "Customer ID",
+ "customer_uu_id": "Customer UU ID",
+ "build_id": "Build ID",
+ "build_uu_id": "Build UU ID",
+ "build_parts_id": "Build Parts ID",
+ "build_parts_uu_id": "Build Parts UU ID",
+ "build_decision_book_id": "Build Decision Book ID",
+ "build_decision_book_uu_id": "Build Decision Book UU ID",
+ },
+)
+
+AccountRecordExchangesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "are_currency": "Para Birimi",
+ "are_exchange_rate": "Döviz Kuru",
+ "usd_exchange_rate_value": "USD Döviz Kuru Değeri",
+ "eur_exchange_rate_value": "EUR Döviz Kuru Değeri",
+ "gbp_exchange_rate_value": "GBP Döviz Kuru Değeri",
+ "cny_exchange_rate_value": "CNY Döviz Kuru Değeri",
+ "account_records_id": "Hesap Kayıt ID",
+ "account_records_uu_id": "Hesap Kayıt UU ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "are_currency": "Currency",
+ "are_exchange_rate": "Exchange Rate",
+ "usd_exchange_rate_value": "USD Exchange Rate Value",
+ "eur_exchange_rate_value": "EUR Exchange Rate Value",
+ "gbp_exchange_rate_value": "GBP Exchange Rate Value",
+ "cny_exchange_rate_value": "CNY Exchange Rate Value",
+ "account_records_id": "Account Record ID",
+ "account_records_uu_id": "Account Record UU ID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/account/iban.py b/ApiLayers/LanguageModels/Database/account/iban.py
new file mode 100644
index 0000000..33e31dc
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/account/iban.py
@@ -0,0 +1,54 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+BuildIbansLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "iban": "IBAN Numarası",
+ "start_date": "Banka İşlem Başlangıç Tarihi",
+ "stop_date": "Banka İşlem Bitiş Tarihi",
+ "bank_code": "Banka Kodu",
+ "xcomment": "Yorum",
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "iban": "IBAN Number",
+ "start_date": "Bank Transaction Start Date",
+ "stop_date": "Bank Transaction End Date",
+ "bank_code": "Bank Code",
+ "xcomment": "Comment",
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ },
+)
+
+
+BuildIbanDescriptionLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "iban": "IBAN Numarası",
+ "group_id": "Grup ID",
+ "search_word": "Arama Kelimesi",
+ "customer_id": "Müşteri ID",
+ "customer_uu_id": "Müşteri UUID",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ "build_parts_id": "Bina Parça ID",
+ "build_parts_uu_id": "Bina Parça UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "iban": "IBAN Number",
+ "group_id": "Group ID",
+ "search_word": "Search Word",
+ "customer_id": "Customer ID",
+ "customer_uu_id": "Customer UUID",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ "build_parts_id": "Build Parts ID",
+ "build_parts_uu_id": "Build Parts UUID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/building/budget.py b/ApiLayers/LanguageModels/Database/building/budget.py
new file mode 100644
index 0000000..1c70090
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/building/budget.py
@@ -0,0 +1,103 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+DecisionBookBudgetBooksLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "country": "Ülke",
+ "branch_type": "Şube Tipi",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ "branch_id": "Şube ID",
+ "branch_uu_id": "Şube UUID",
+ "build_decision_book_id": "Karar Defteri ID",
+ "build_decision_book_uu_id": "Karar Defteri UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "country": "Country",
+ "branch_type": "Branch Type",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ "branch_id": "Branch ID",
+ "branch_uu_id": "Branch UUID",
+ "build_decision_book_id": "Build Decision Book ID",
+ "build_decision_book_uu_id": "Build Decision Book UUID",
+ },
+)
+
+DecisionBookBudgetCodesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "budget_code": "Bütçe Kodu",
+ "comment_line": "Yorum Satırı",
+ "build_decision_book_id": "Karar Defteri ID",
+ "build_decision_book_uu_id": "Karar Defteri UUID",
+ "build_parts_id": "Bina Parça ID",
+ "build_parts_uu_id": "Bina Parça UUID",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "budget_code": "Budget Code",
+ "comment_line": "Comment Line",
+ "build_decision_book_id": "Build Decision Book ID",
+ "build_decision_book_uu_id": "Build Decision Book UUID",
+ "build_parts_id": "Build Parts ID",
+ "build_parts_uu_id": "Build Parts UUID",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ },
+)
+
+DecisionBookBudgetMasterLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "budget_type": "Bütçe Tipi",
+ "currency": "Para Birimi",
+ "total_budget": "Toplam Bütçe",
+ "tracking_period_id": "Takip Dönemi ID",
+ "tracking_period_uu_id": "Takip Dönemi UUID",
+ "budget_books_id": "Bütçe Kitapları ID",
+ "budget_books_uu_id": "Bütçe Kitapları UUID",
+ "department_id": "Departman ID",
+ "department_uu_id": "Departman UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "budget_type": "Budget Type",
+ "currency": "Currency",
+ "total_budget": "Total Budget",
+ "tracking_period_id": "Tracking Period ID",
+ "tracking_period_uu_id": "Tracking Period UUID",
+ "budget_books_id": "Budget Books ID",
+ "budget_books_uu_id": "Budget Books UUID",
+ "department_id": "Department ID",
+ "department_uu_id": "Department UUID",
+ },
+)
+
+DecisionBookBudgetsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "process_date": "İşlem Tarihi",
+ "budget_codes_id": "Bütçe Kodları ID",
+ "total_budget": "Toplam Bütçe",
+ "used_budget": "Kullanılan Bütçe",
+ "remaining_budget": "Kalan Bütçe",
+ "decision_book_budget_master_id": "Karar Defteri Bütçesi ID",
+ "decision_book_budget_master_uu_id": "Karar Defteri Bütçesi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "process_date": "Process Date",
+ "budget_codes_id": "Budget Codes ID",
+ "total_budget": "Total Budget",
+ "used_budget": "Used Budget",
+ "remaining_budget": "Remaining Budget",
+ "decision_book_budget_master_id": "Decision Book Budget Master ID",
+ "decision_book_budget_master_uu_id": "Decision Book Budget Master UUID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/building/build.py b/ApiLayers/LanguageModels/Database/building/build.py
new file mode 100644
index 0000000..74973b0
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/building/build.py
@@ -0,0 +1,303 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+BuildTypesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "function_code": "Fonksiyon Kodu",
+ "type_code": "Yapı Tipi Kodu",
+ "lang": "Dil",
+ "type_name": "Tip Adı",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "function_code": "Function Code",
+ "type_code": "Type Code",
+ "lang": "Language",
+ "type_name": "Type Name",
+ },
+)
+
+Part2EmployeeLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_id": "Bina ID",
+ "part_id": "Bina Parça ID",
+ "employee_id": "Çalışan ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_id": "Build ID",
+ "part_id": "Build Part ID",
+ "employee_id": "Employee ID",
+ },
+)
+
+RelationshipEmployee2BuildLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "company_id": "Şirket ID",
+ "employee_id": "Çalışan ID",
+ "member_id": "Üye ID",
+ "relationship_type": "İlişki Tipi",
+ "show_only": "Sadece Göster",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "company_id": "Company ID",
+ "employee_id": "Employee ID",
+ "member_id": "Member ID",
+ "relationship_type": "Relationship Type",
+ "show_only": "Show Only",
+ },
+)
+
+BuildLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "gov_address_code": "Adres Kodu",
+ "build_name": "Bina Adı",
+ "build_no": "Bina No",
+ "max_floor": "Max Kat",
+ "underground_floor": "Zemin Kat",
+ "build_date": "Bina Tarihi",
+ "decision_period_date": "Karar Dönemi Tarihi",
+ "tax_no": "Vergi No",
+ "lift_count": "Asansör Sayısı",
+ "heating_system": "Isıtma Sistemi",
+ "cooling_system": "Soğutma Sistemi",
+ "hot_water_system": "Sıcak Su Sistemi",
+ "block_service_man_count": "Blok Hizmet Görevlisi Sayısı",
+ "security_service_man_count": "Güvenlik Görevlisi Sayısı",
+ "garage_count": "Garaj Sayısı",
+ "management_room_id": "Yönetim Odası ID",
+ "site_id": "Site ID",
+ "site_uu_id": "Site UUID",
+ "address_id": "Adres ID",
+ "address_uu_id": "Adres UUID",
+ "build_types_id": "Bina Tipi ID",
+ "build_types_uu_id": "Bina Tipi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "gov_address_code": "Address Code",
+ "build_name": "Building Name",
+ "build_no": "Building Number",
+ "max_floor": "Max Floor",
+ "underground_floor": "Underground Floor",
+ "build_date": "Building Date",
+ "decision_period_date": "Decision Period Date",
+ "tax_no": "Tax No",
+ "lift_count": "Lift Count",
+ "heating_system": "Heating System",
+ "cooling_system": "Cooling System",
+ "hot_water_system": "Hot Water System",
+ "block_service_man_count": "Block Service Man Count",
+ "security_service_man_count": "Security Service Man Count",
+ "garage_count": "Garage Count",
+ "management_room_id": "Management Room ID",
+ "site_id": "Site ID",
+ "site_uu_id": "Site UUID",
+ "address_id": "Address ID",
+ "address_uu_id": "Address UUID",
+ "build_types_id": "Build Types ID",
+ "build_types_uu_id": "Build Types UUID",
+ },
+)
+
+BuildPartsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "address_gov_code": "Adres Kodu",
+ "part_no": "Bina Parça No",
+ "part_level": "Bina Parça Katı",
+ "part_code": "Bina Parça Kodu",
+ "part_gross_size": "Bina Parça Brüt Alanı",
+ "part_net_size": "Bina Parça Net Alanı",
+ "default_accessory": "Varsayılan Aksesuar",
+ "human_livable": "İnsan Yaşam Alanı",
+ "due_part_key": "Ödeme Grubu",
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "part_direction_id": "Bina Parça Yönü ID",
+ "part_direction_uu_id": "Bina Parça Yönü UUID",
+ "part_type_id": "Bina Parça Tipi ID",
+ "part_type_uu_id": "Bina Parça Tipi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "address_gov_code": "Address Code",
+ "part_no": "Part Number",
+ "part_level": "Part Level",
+ "part_code": "Part Code",
+ "part_gross_size": "Part Gross Size",
+ "part_net_size": "Part Net Size",
+ "default_accessory": "Default Accessory",
+ "human_livable": "Human Livable",
+ "due_part_key": "Due Part Key",
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "part_direction_id": "Part Direction ID",
+ "part_direction_uu_id": "Part Direction UUID",
+ "part_type_id": "Part Type ID",
+ "part_type_uu_id": "Part Type UUID",
+ },
+)
+
+
+BuildLivingSpaceLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "fix_value": "Düz Değer",
+ "fix_percent": "Düz Yüzde",
+ "agreement_no": "Anlaşma No",
+ "marketing_process": "Pazarlama İşlemi",
+ "marketing_layer": "Pazarlama Katmanı",
+ "build_parts_id": "Bina Parça ID",
+ "build_parts_uu_id": "Bina Parça UUID",
+ "person_id": "Kişi ID",
+ "person_uu_id": "Kişi UUID",
+ "occupant_type": "Sakin Tipi",
+ "occupant_type_uu_id": "Sakin Tipi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "fix_value": "Fixed Value",
+ "fix_percent": "Fixed Percent",
+ "agreement_no": "Agreement No",
+ "marketing_process": "Marketing Process",
+ "marketing_layer": "Marketing Layer",
+ "build_parts_id": "Build Part ID",
+ "build_parts_uu_id": "Build Part UUID",
+ "person_id": "Person ID",
+ "person_uu_id": "Person UUID",
+ "occupant_type": "Occupant Type",
+ "occupant_type_uu_id": "Occupant Type UUID",
+ },
+)
+
+BuildManagementLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "discounted_percentage": "İndirim Yüzdesi",
+ "discounted_price": "İndirimli Fiyat",
+ "calculated_price": "Hesaplanan Fiyat",
+ "occupant_type": "Sakin Tipi",
+ "occupant_type_uu_id": "Sakin Tipi UUID",
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "build_parts_id": "Bina Parça ID",
+ "build_parts_uu_id": "Bina Parça UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "discounted_percentage": "Discounted Percentage",
+ "discounted_price": "Discounted Price",
+ "calculated_price": "Calculated Price",
+ "occupant_type": "Occupant Type",
+ "occupant_type_uu_id": "Occupant Type UUID",
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "build_parts_id": "Build Part ID",
+ "build_parts_uu_id": "Build Part UUID",
+ },
+)
+
+BuildAreaLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "area_name": "Alan Adı",
+ "area_code": "Alan Kodu",
+ "area_type": "Alan Tipi",
+ "area_direction": "Alan Yönü",
+ "area_gross_size": "Alan Brüt Alanı",
+ "area_net_size": "Alan Net Alanı",
+ "width": "En",
+ "size": "Boyut",
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "part_type_id": "Bina Parça Tipi ID",
+ "part_type_uu_id": "Bina Parça Tipi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "area_name": "Area Name",
+ "area_code": "Area Code",
+ "area_type": "Area Type",
+ "area_direction": "Area Direction",
+ "area_gross_size": "Area Gross Size",
+ "area_net_size": "Area Net Size",
+ "width": "Width",
+ "size": "Size",
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "part_type_id": "Part Type ID",
+ "part_type_uu_id": "Part Type UUID",
+ },
+)
+
+
+BuildSitesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "site_name": "Site Adı",
+ "site_no": "Site No",
+ "address_id": "Adres ID",
+ "address_uu_id": "Adres UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "site_name": "Site Name",
+ "site_no": "Site No",
+ "address_id": "Address ID",
+ "address_uu_id": "Address UUID",
+ },
+)
+
+BuildCompaniesProvidingLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ "provide_id": "Sağlayıcı ID",
+ "provide_uu_id": "Sağlayıcı UUID",
+ "contract_id": "Sözleşme ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ "provide_id": "Provide ID",
+ "provide_uu_id": "Provide UUID",
+ "contract_id": "Contract ID",
+ },
+)
+
+
+BuildPersonProvidingLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "people_id": "Kişi ID",
+ "people_uu_id": "Kişi UUID",
+ "provide_id": "Sağlayıcı ID",
+ "provide_uu_id": "Sağlayıcı UUID",
+ "contract_id": "Sözleşme ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "people_id": "People ID",
+ "people_uu_id": "People UUID",
+ "provide_id": "Provide ID",
+ "provide_uu_id": "Provide UUID",
+ "contract_id": "Contract ID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/building/decision_book.py b/ApiLayers/LanguageModels/Database/building/decision_book.py
new file mode 100644
index 0000000..e259c38
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/building/decision_book.py
@@ -0,0 +1,400 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+BuildDecisionBookLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "decision_book_pdf_path": "Karar Defteri PDF Yolu",
+ "resp_company_fix_wage": "Firma Sabit Ücreti",
+ "is_out_sourced": "Dış Kaynak Kullanımı",
+ "meeting_date": "Toplantı Tarihi",
+ "decision_type": "Karar Türü",
+ "meeting_is_completed": "Toplantı Tamamlandı",
+ "meeting_completed_date": "Toplantı Tamamlanma Tarihi",
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "resp_company_id": "Sorumlu Firma ID",
+ "resp_company_uu_id": "Sorumlu Firma UUID",
+ "contact_id": "İletişim ID",
+ "contact_uu_id": "İletişim UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "decision_book_pdf_path": "Decision Book PDF Path",
+ "resp_company_fix_wage": "Resp Company Fix Wage",
+ "is_out_sourced": "Is Out Sourced",
+ "meeting_date": "Meeting Date",
+ "decision_type": "Decision Type",
+ "meeting_is_completed": "Meeting Is Completed",
+ "meeting_completed_date": "Meeting Completed Date",
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "resp_company_id": "Resp Company ID",
+ "resp_company_uu_id": "Resp Company UUID",
+ "contact_id": "Contact ID",
+ "contact_uu_id": "Contact UUID",
+ },
+)
+
+BuildDecisionBookInvitationsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_id": "Bina ID",
+ "build_uu_id": "Bina UUID",
+ "decision_book_id": "Karar Defteri ID",
+ "decision_book_uu_id": "Karar Defteri UUID",
+ "invitation_type": "Davet Türü",
+ "invitation_attempt": "Davet Denemesi",
+ "living_part_count": "Yaşam Bölüm Sayısı",
+ "living_part_percentage": "Yaşam Bölüm Yüzdesi",
+ "message": "Davet Mesajı",
+ "planned_date": "Planlanan Tarih",
+ "planned_date_expires": "Planlanan Tarih Bitiş",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_id": "Build ID",
+ "build_uu_id": "Build UUID",
+ "decision_book_id": "Decision Book ID",
+ "decision_book_uu_id": "Decision Book UUID",
+ "invitation_type": "Invitation Type",
+ "invitation_attempt": "Invitation Attempt",
+ "living_part_count": "Living Part Count",
+ "living_part_percentage": "Living Part Percentage",
+ "message": "Message",
+ "planned_date": "Planned Date",
+ "planned_date_expires": "Planned Date Expires",
+ },
+)
+
+BuildDecisionBookPersonLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "dues_percent_discount": "Aidat İndirim Oranı",
+ "dues_fix_discount": "Aidat Sabit İndirim",
+ "dues_discount_approval_date": "İndirim Onay Tarihi",
+ "send_date": "Gönderme Tarihi",
+ "is_attending": "Katılıyor",
+ "confirmed_date": "Onay Tarihi",
+ "token": "Token",
+ "vicarious_person_id": "Vekil Kişi ID",
+ "vicarious_person_uu_id": "Vekil Kişi UUID",
+ "invite_id": "Davet ID",
+ "invite_uu_id": "Davet UUID",
+ "build_decision_book_id": "Karar Defteri ID",
+ "build_decision_book_uu_id": "Karar Defteri UUID",
+ "build_living_space_id": "Yaşam Alanı ID",
+ "build_living_space_uu_id": "Yaşam Alanı UUID",
+ "person_id": "Kişi ID",
+ "person_uu_id": "Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "dues_percent_discount": "Dues Percent Discount",
+ "dues_fix_discount": "Dues Fix Discount",
+ "dues_discount_approval_date": "Dues Discount Approval Date",
+ "send_date": "Send Date",
+ "is_attending": "Is Attending",
+ "confirmed_date": "Confirmed Date",
+ "token": "Token",
+ "vicarious_person_id": "Vicarious Person ID",
+ "vicarious_person_uu_id": "Vicarious Person UUID",
+ "invite_id": "Invite ID",
+ "invite_uu_id": "Invite UUID",
+ "build_decision_book_id": "Decision Book ID",
+ "build_decision_book_uu_id": "Decision Book UUID",
+ "build_living_space_id": "Living Space ID",
+ "build_living_space_uu_id": "Living Space UUID",
+ "person_id": "Person ID",
+ "person_uu_id": "Person UUID",
+ },
+)
+
+BuildDecisionBookPersonOccupantsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_decision_book_person_id": "Karar Defteri Kişi ID",
+ "build_decision_book_person_uu_id": "Karar Defter Kişi UUID",
+ "invite_id": "Davet ID",
+ "invite_uu_id": "Davet UUID",
+ "occupant_type_id": "Kişi Tür ID",
+ "occupant_type_uu_id": "Kişi Tür UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_decision_book_person_id": "Build Decision Book Person ID",
+ "build_decision_book_person_uu_id": "Build Decision Book Person UUID",
+ "invite_id": "Invite ID",
+ "invite_uu_id": "Invite UUID",
+ "occupant_type_id": "Occupant Type ID",
+ "occupant_type_uu_id": "Occupant Type UUID",
+ },
+)
+
+BuildDecisionBookItemsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "item_order": "Madde Sırası",
+ "item_comment": "Madde Yorumu",
+ "item_objection": "Madde İtirazı",
+ "info_is_completed": "Bilgi Tamamlandı",
+ "is_payment_created": "Ödeme Yapıldı",
+ "info_type_id": "Bilgi Türü ID",
+ "info_type_uu_id": "Bilgi Türü UUID",
+ "build_decision_book_id": "Karar Defteri ID",
+ "build_decision_book_uu_id": "Karar Defteri UUID",
+ "item_short_comment": "Kısa Yorum",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "item_order": "Item Order",
+ "item_comment": "Item Comment",
+ "item_objection": "Item Objection",
+ "info_is_completed": "Info Is Completed",
+ "is_payment_created": "Is Payment Created",
+ "info_type_id": "Info Type ID",
+ "info_type_uu_id": "Info Type UUID",
+ "build_decision_book_id": "Build Decision Book ID",
+ "build_decision_book_uu_id": "Build Decision Book UUID",
+ "item_short_comment": "Item Short Comment",
+ },
+)
+
+BuildDecisionBookItemsUnapprovedLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "item_objection": "Madde İtirazı",
+ "item_order": "Madde Sırası",
+ "decision_book_item_id": "Karar Defteri Madde ID",
+ "decision_book_item_uu_id": "Karar Defteri Madde UUID",
+ "person_id": "Kişi ID",
+ "person_uu_id": "Kişi UUID",
+ "build_decision_book_item": "Karar Defteri Madde ID",
+ "build_decision_book_item_uu_id": "Karar Defteri Madde UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "item_objection": "Item Objection",
+ "item_order": "Item Order",
+ "decision_book_item_id": "Decision Book Item ID",
+ "decision_book_item_uu_id": "Decision Book Item UUID",
+ "person_id": "Person ID",
+ "person_uu_id": "Person UUID",
+ "build_decision_book_item": "Build Decision Book Item ID",
+ "build_decision_book_item_uu_id": "Build Decision Book Item UUID",
+ },
+)
+
+BuildDecisionBookPaymentsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "payment_plan_time_periods": "Ödeme Planı Zaman Periyodu",
+ "process_date": "Ödeme Tarihi",
+ "payment_amount": "Ödeme Miktarı",
+ "currency": "Para Birimi",
+ "payment_types_id": "Ödeme Türü ID",
+ "payment_types_uu_id": "Ödeme Türü UUID",
+ "period_time": "Dönem Zamanı",
+ "process_date_y": "Tarih Yılı",
+ "process_date_m": "Tarih Ayı",
+ "build_decision_book_item_id": "Karar Defteri Madde ID",
+ "build_decision_book_item_uu_id": "Karar Defteri Madde UUID",
+ "build_parts_id": "Bina Parça ID",
+ "build_parts_uu_id": "Bina Parça UUID",
+ "decision_book_project_id": "Karar Defteri Proje ID",
+ "decision_book_project_uu_id": "Karar Defteri Proje UUID",
+ "account_records_id": "Hesap Kayıtları ID",
+ "account_records_uu_id": "Hesap Kayıtları UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "payment_plan_time_periods": "Payment Plan Time Periods",
+ "process_date": "Process Date",
+ "payment_amount": "Payment Amount",
+ "currency": "Currency",
+ "payment_types_id": "Payment Types ID",
+ "payment_types_uu_id": "Payment Types UUID",
+ "period_time": "Period Time",
+ "process_date_y": "Process Date Year",
+ "process_date_m": "Process Date Month",
+ "build_decision_book_item_id": "Build Decision Book Item ID",
+ "build_decision_book_item_uu_id": "Build Decision Book Item UUID",
+ "build_parts_id": "Build Parts ID",
+ "build_parts_uu_id": "Build Parts UUID",
+ "decision_book_project_id": "Decision Book Project ID",
+ "decision_book_project_uu_id": "Decision Book Project UUID",
+ "account_records_id": "Account Records ID",
+ "account_records_uu_id": "Account Records UUID",
+ },
+)
+
+BuildDecisionBookLegalLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "period_start_date": "Dönem Başlangıç Tarihi",
+ "lawsuits_decision_number": "Dava Karar Numarası",
+ "lawsuits_decision_date": "Dava Karar Tarihi",
+ "period_stop_date": "Dönem Bitiş Tarihi",
+ "decision_book_pdf_path": "Karar Defteri PDF Yolu",
+ "resp_company_total_wage": "Firma Toplam Ücreti",
+ "contact_agreement_path": "İletişim Anlaşma Yolu",
+ "contact_agreement_date": "İletişim Anlaşma Tarihi",
+ "meeting_date": "Toplantı Tarihi",
+ "lawsuits_type": "Dava Türü",
+ "lawsuits_name": "Dava Adı",
+ "lawsuits_note": "Dava Notu",
+ "lawyer_cost": "Avukat Ücreti",
+ "mediator_lawyer_cost": "Arabulucu Avukat Ücreti",
+ "other_cost": "Diğer Ücret",
+ "legal_cost": "Yasal Ücret",
+ "approved_cost": "Onaylanan Ücret",
+ "total_price": "Toplam Ücret",
+ "build_db_item_id": "Karar Defteri Madde ID",
+ "build_db_item_uu_id": "Karar Defteri Madde UUID",
+ "resp_attorney_id": "Sorumlu Avukat ID",
+ "resp_attorney_uu_id": "Sorumlu Avukat UUID",
+ "resp_attorney_company_id": "Sorumlu Avukat Firma ID",
+ "resp_attorney_company_uu_id": "Sorumlu Avukat Firma UUID",
+ "mediator_lawyer_person_id": "Arabulucu Avukat Kişi ID",
+ "mediator_lawyer_person_uu_id": "Arabulucu Avukat Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "period_start_date": "Period Start Date",
+ "lawsuits_decision_number": "Lawsuits Decision Number",
+ "lawsuits_decision_date": "Lawsuits Decision Date",
+ "period_stop_date": "Period Stop Date",
+ "decision_book_pdf_path": "Decision Book PDF Path",
+ "resp_company_total_wage": "Resp Company Total Wage",
+ "contact_agreement_path": "Contact Agreement Path",
+ "contact_agreement_date": "Contact Agreement Date",
+ "meeting_date": "Meeting Date",
+ "lawsuits_type": "Lawsuits Type",
+ "lawsuits_name": "Lawsuits Name",
+ "lawsuits_note": "Lawsuits Note",
+ "lawyer_cost": "Lawyer Cost",
+ "mediator_lawyer_cost": "Mediator Lawyer Cost",
+ "other_cost": "Other Cost",
+ "legal_cost": "Legal Cost",
+ "approved_cost": "Approved Cost",
+ "total_price": "Total Price",
+ "build_db_item_id": "Build Decision Book Item ID",
+ "build_db_item_uu_id": "Build Decision Book Item UUID",
+ "resp_attorney_id": "Resp Attorney ID",
+ "resp_attorney_uu_id": "Resp Attorney UUID",
+ "resp_attorney_company_id": "Resp Attorney Company ID",
+ "resp_attorney_company_uu_id": "Resp Attorney Company UUID",
+ "mediator_lawyer_person_id": "Mediator Lawyer Person ID",
+ "mediator_lawyer_person_uu_id": "Mediator Lawyer Person UUID",
+ },
+)
+
+BuildDecisionBookProjectsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "project_no": "Proje No",
+ "project_name": "Proje Adı",
+ "project_start_date": "Proje Başlangıç Tarihi",
+ "project_stop_date": "Proje Bitiş Tarihi",
+ "project_type": "Proje Türü",
+ "project_note": "Proje Notu",
+ "decision_book_pdf_path": "Karar Defteri PDF Yolu",
+ "is_completed": "Proje Tamamlandı",
+ "status_code": "Durum Kodu",
+ "resp_company_fix_wage": "Firma Sabit Ücreti",
+ "is_out_sourced": "Dış Kaynak Kullanımı",
+ "meeting_date": "Toplantı Tarihi",
+ "currency": "Para Birimi",
+ "bid_price": "Teklif Fiyatı",
+ "approved_price": "Onaylanan Fiyat",
+ "final_price": "Son Fiyat",
+ "contact_id": "İletişim ID",
+ "contact_uu_id": "İletişim UUID",
+ "build_decision_book_id": "Karar Defteri ID",
+ "build_decision_book_uu_id": "Karar Defteri UUID",
+ "build_decision_book_item_id": "Karar Defteri Madde ID",
+ "build_decision_book_item_uu_id": "Karar Defteri Madde UUID",
+ "project_response_living_space_id": "Proje Yanıt Yaşam Alanı ID",
+ "project_response_living_space_uu_id": "Proje Yanıt Yaşam Alanı UUID",
+ "resp_company_id": "Sorumlu Firma ID",
+ "resp_company_uu_id": "Sorumlu Firma UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "project_no": "Project No",
+ "project_name": "Project Name",
+ "project_start_date": "Project Start Date",
+ "project_stop_date": "Project Stop Date",
+ "project_type": "Project Type",
+ "project_note": "Project Note",
+ "decision_book_pdf_path": "Decision Book PDF Path",
+ "is_completed": "Is Completed",
+ "status_code": "Status Code",
+ "resp_company_fix_wage": "Resp Company Fix Wage",
+ "is_out_sourced": "Is Out Sourced",
+ "meeting_date": "Meeting Date",
+ "currency": "Currency",
+ "bid_price": "Bid Price",
+ "approved_price": "Approved Price",
+ "final_price": "Final Price",
+ "contact_id": "Contact ID",
+ "contact_uu_id": "Contact UUID",
+ "build_decision_book_id": "Build Decision Book ID",
+ "build_decision_book_uu_id": "Build Decision Book UUID",
+ "build_decision_book_item_id": "Build Decision Book Item ID",
+ "build_decision_book_item_uu_id": "Build Decision Book Item UUID",
+ "project_response_living_space_id": "Project Response Living Space ID",
+ "project_response_living_space_uu_id": "Project Response Living Space UUID",
+ "resp_company_id": "Resp Company ID",
+ "resp_company_uu_id": "Resp Company UUID",
+ },
+)
+
+BuildDecisionBookProjectPersonLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "dues_percent_discount": "Aidat İndirim Oranı",
+ "job_fix_wage": "İş Sabit Ücreti",
+ "bid_price": "Teklif Fiyatı",
+ "decision_price": "Karar Fiyatı",
+ "build_decision_book_project_id": "Karar Defteri Proje ID",
+ "build_decision_book_project_uu_id": "Karar Defteri Proje UUID",
+ "living_space_id": "Yaşam Alanı ID",
+ "living_space_uu_id": "Yaşam Alanı UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "dues_percent_discount": "Dues Percent Discount",
+ "job_fix_wage": "Job Fix Wage",
+ "bid_price": "Bid Price",
+ "decision_price": "Decision Price",
+ "build_decision_book_project_id": "Build Decision Book Project ID",
+ "build_decision_book_project_uu_id": "Build Decision Book Project UUID",
+ "living_space_id": "Living Space ID",
+ "living_space_uu_id": "Living Space UUID",
+ },
+)
+
+BuildDecisionBookProjectItemsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "item_header": "Madde Başlığı",
+ "item_comment": "Madde Yorumu",
+ "attachment_pdf_path": "Ek PDF Yolu",
+ "item_estimated_cost": "Tahmini Maliyet",
+ "item_short_comment": "Kısa Yorum",
+ "build_decision_book_project_id": "Karar Defteri Proje ID",
+ "build_decision_book_project_uu_id": "Karar Defteri Proje UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "item_header": "Item Header",
+ "item_comment": "Item Comment",
+ "attachment_pdf_path": "Attachment PDF Path",
+ "item_estimated_cost": "Estimated Cost",
+ "item_short_comment": "Item Short Comment",
+ "build_decision_book_project_id": "Build Decision Book Project ID",
+ "build_decision_book_project_uu_id": "Build Decision Book Project UUID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/company/company.py b/ApiLayers/LanguageModels/Database/company/company.py
new file mode 100644
index 0000000..a3e35dd
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/company/company.py
@@ -0,0 +1,67 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+RelationshipDutyCompanyLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "owner_id": "Sahip ID",
+ "duties_id": "Görev ID",
+ "member_id": "Üye ID",
+ "parent_id": "Üst ID",
+ "relationship_type": "İlişki Tipi",
+ "child_count": "Çocuk Sayısı",
+ "show_only": "Sadece Göster",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "owner_id": "Owner ID",
+ "duties_id": "Duties ID",
+ "member_id": "Member ID",
+ "parent_id": "Parent ID",
+ "relationship_type": "Relationship Type",
+ "child_count": "Child Count",
+ "show_only": "Show Only",
+ },
+)
+
+CompaniesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "formal_name": "Resmi Ad",
+ "company_type": "Şirket Tipi",
+ "commercial_type": "Ticari Tip",
+ "tax_no": "Vergi No",
+ "public_name": "Kamu Adı",
+ "company_tag": "Şirket Etiketi",
+ "default_lang_type": "Varsayılan Dil Tipi",
+ "default_money_type": "Varsayılan Para Tipi",
+ "is_commercial": "Ticari",
+ "is_blacklist": "Kara Liste",
+ "parent_id": "Üst ID",
+ "workplace_no": "İşyeri No",
+ "official_address_id": "Resmi Adres ID",
+ "official_address_uu_id": "Resmi Adres UUID",
+ "top_responsible_company_id": "Üst Sorumlu Şirket ID",
+ "top_responsible_company_uu_id": "Üst Sorumlu Şirket UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "formal_name": "Formal Name",
+ "company_type": "Company Type",
+ "commercial_type": "Commercial Type",
+ "tax_no": "Tax No",
+ "public_name": "Public Name",
+ "company_tag": "Company Tag",
+ "default_lang_type": "Default Language Type",
+ "default_money_type": "Default Money Type",
+ "is_commercial": "Commercial",
+ "is_blacklist": "Blacklist",
+ "parent_id": "Parent ID",
+ "workplace_no": "Workplace No",
+ "official_address_id": "Official Address ID",
+ "official_address_uu_id": "Official Address UUID",
+ "top_responsible_company_id": "Top Responsible Company ID",
+ "top_responsible_company_uu_id": "Top Responsible Company UUID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/company/department.py b/ApiLayers/LanguageModels/Database/company/department.py
new file mode 100644
index 0000000..fb84c02
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/company/department.py
@@ -0,0 +1,64 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+DepartmentsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "parent_department_id": "Üst Departman ID",
+ "department_code": "Departman Kodu",
+ "department_name": "Departman Adı",
+ "department_description": "Departman Açıklaması",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "parent_department_id": "Parent Department ID",
+ "department_code": "Department Code",
+ "department_name": "Department Name",
+ "department_description": "Department Description",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ },
+)
+
+DutiesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "users_default_duty": "Kullanıcılar için Varsayılan Görev",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ "duties_id": "Görev ID",
+ "duties_uu_id": "Görev UUID",
+ "department_id": "Departman ID",
+ "department_uu_id": "Departman UUID",
+ "management_duty": "Yönetim Görevi",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "users_default_duty": "Default Duty for Users",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ "duties_id": "Duty ID",
+ "duties_uu_id": "Duty UUID",
+ "department_id": "Department ID",
+ "department_uu_id": "Department UUID",
+ "management_duty": "Management Duty",
+ },
+)
+
+DutyLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "duty_name": "Görev Adı",
+ "duty_code": "Görev Kodu",
+ "duty_description": "Görev Açıklaması",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "duty_name": "Duty Name",
+ "duty_code": "Duty Code",
+ "duty_description": "Duty Description",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/company/employee.py b/ApiLayers/LanguageModels/Database/company/employee.py
new file mode 100644
index 0000000..bea1965
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/company/employee.py
@@ -0,0 +1,73 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+StaffLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "staff_description": "Personel Açıklaması",
+ "staff_name": "Personel Adı",
+ "staff_code": "Personel Kodu",
+ "duties_id": "Görev ID",
+ "duties_uu_id": "Görev UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "staff_description": "Staff Description",
+ "staff_name": "Staff Name",
+ "staff_code": "Staff Code",
+ "duties_id": "Duty ID",
+ "duties_uu_id": "Duty UUID",
+ },
+)
+
+EmployeesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "staff_id": "Personel ID",
+ "staff_uu_id": "Personel UUID",
+ "people_id": "Kişi ID",
+ "people_uu_id": "Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "staff_id": "Staff ID",
+ "staff_uu_id": "Staff UUID",
+ "people_id": "People ID",
+ "people_uu_id": "People UUID",
+ },
+)
+
+EmployeeHistoryLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "staff_id": "Personel ID",
+ "staff_uu_id": "Personel UUID",
+ "people_id": "Kişi ID",
+ "people_uu_id": "Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "staff_id": "Staff ID",
+ "staff_uu_id": "Staff UUID",
+ "people_id": "People ID",
+ "people_uu_id": "People UUID",
+ },
+)
+
+EmployeesSalariesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "gross_salary": "Brüt Maaş",
+ "net_salary": "Net Maaş",
+ "people_id": "Kişi ID",
+ "people_uu_id": "Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "gross_salary": "Gross Salary",
+ "net_salary": "Net Salary",
+ "people_id": "People ID",
+ "people_uu_id": "People UUID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/event/event.py b/ApiLayers/LanguageModels/Database/event/event.py
new file mode 100644
index 0000000..587c0fb
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/event/event.py
@@ -0,0 +1,187 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+
+EventsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "event_type": "Etkinlik Türü",
+ "function_code": "Fonksiyon Kodu",
+ "function_class": "Fonksiyon Sınıfı",
+ "description": "Açıklama",
+ "property_description": "Özellik Açıklaması",
+ "marketing_layer": "Pazarlama Katmanı",
+ "cost": "Maliyet",
+ "unit_price": "Birim Fiyat",
+ "endpoint_id": "Endpoint ID",
+ "endpoint_uu_id": "Endpoint UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "event_type": "Event Type",
+ "function_code": "Function Code",
+ "function_class": "Function Class",
+ "description": "Description",
+ "property_description": "Property Description",
+ "marketing_layer": "Marketing Layer",
+ "cost": "Cost",
+ "unit_price": "Unit Price",
+ "endpoint_id": "Endpoint ID",
+ "endpoint_uu_id": "Endpoint UUID",
+ },
+)
+
+ModulesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "module_name": "Modül Adı",
+ "module_description": "Modül Açıklaması",
+ "module_code": "Modül Kodu",
+ "module_layer": "Modül Katmanı",
+ "is_default_module": "Varsayılan Modül",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "module_name": "Module Name",
+ "module_description": "Module Description",
+ "module_code": "Module Code",
+ "module_layer": "Module Layer",
+ "is_default_module": "Default Module",
+ },
+)
+
+ServicesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "module_id": "Modül ID",
+ "module_uu_id": "Modül UUID",
+ "service_name": "Servis Adı",
+ "service_description": "Servis Açıklaması",
+ "service_code": "Servis Kodu",
+ "related_responsibility": "İlgili Sorumluluk",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "module_id": "Module ID",
+ "module_uu_id": "Module UUID",
+ "service_name": "Service Name",
+ "service_description": "Service Description",
+ "service_code": "Service Code",
+ "related_responsibility": "Related Responsibility",
+ },
+)
+
+Service2EventsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "service_id": "Servis ID",
+ "service_uu_id": "Servis UUID",
+ "event_id": "Etkinlik ID",
+ "event_uu_id": "Etkinlik UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "service_id": "Service ID",
+ "service_uu_id": "Service UUID",
+ "event_id": "Event ID",
+ "event_uu_id": "Event UUID",
+ },
+)
+
+Event2OccupantExtraLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_living_space_id": "Bina Yaşam Alanı ID",
+ "build_living_space_uu_id": "Bina Yaşam Alanı UUID",
+ "event_id": "Etkinlik ID",
+ "event_uu_id": "Etkinlik UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_living_space_id": "Build Living Space ID",
+ "build_living_space_uu_id": "Build Living Space UUID",
+ "event_id": "Event ID",
+ "event_uu_id": "Event UUID",
+ },
+)
+
+Event2EmployeeExtraLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "employee_id": "Çalışan ID",
+ "employee_uu_id": "Çalışan UUID",
+ "event_id": "Etkinlik ID",
+ "event_uu_id": "Etkinlik UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "employee_id": "Employee ID",
+ "employee_uu_id": "Employee UUID",
+ "event_id": "Event ID",
+ "event_uu_id": "Event UUID",
+ },
+)
+
+Event2EmployeeLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "employee_id": "Çalışan ID",
+ "employee_uu_id": "Çalışan UUID",
+ "event_service_id": "Etkinlik Servis ID",
+ "event_service_uu_id": "Etkinlik Servis UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "employee_id": "Employee ID",
+ "employee_uu_id": "Employee UUID",
+ "event_service_id": "Event Service ID",
+ "event_service_uu_id": "Event Service UUID",
+ },
+)
+
+Event2OccupantLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_living_space_id": "Bina Yaşam Alanı ID",
+ "build_living_space_uu_id": "Bina Yaşam Alanı UUID",
+ "event_service_id": "Etkinlik Servis ID",
+ "event_service_uu_id": "Etkinlik Servis UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_living_space_id": "Build Living Space ID",
+ "build_living_space_uu_id": "Build Living Space UUID",
+ "event_service_id": "Event Service ID",
+ "event_service_uu_id": "Event Service UUID",
+ },
+)
+
+ModulePriceLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "campaign_code": "Kampanya Kodu",
+ "module_id": "Modül ID",
+ "module_uu_id": "Modül UUID",
+ "service_id": "Servis ID",
+ "service_uu_id": "Servis UUID",
+ "event_id": "Etkinlik ID",
+ "event_uu_id": "Etkinlik UUID",
+ "is_counted_percentage": "İndirim Oranı",
+ "discounted_price": "İndirimli Fiyat",
+ "calculated_price": "Hesaplanan Fiyat",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "campaign_code": "Campaign Code",
+ "module_id": "Module ID",
+ "module_uu_id": "Module UUID",
+ "service_id": "Service ID",
+ "service_uu_id": "Service UUID",
+ "event_id": "Event ID",
+ "event_uu_id": "Event UUID",
+ "is_counted_percentage": "Discount Rate",
+ "discounted_price": "Discounted Price",
+ "calculated_price": "Calculated Price",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/identity/identity.py b/ApiLayers/LanguageModels/Database/identity/identity.py
new file mode 100644
index 0000000..112aed0
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/identity/identity.py
@@ -0,0 +1,426 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+UsersTokensLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "user_id": "Kullanıcı ID",
+ "token_type": "Token Türü",
+ "token": "Token",
+ "domain": "Domain",
+ "expires_at": "Bitiş Tarihi",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "user_id": "User ID",
+ "token_type": "Token Type",
+ "token": "Token",
+ "domain": "Domain",
+ "expires_at": "Expires At",
+ },
+)
+
+UsersLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "user_tag": "Kullanıcı Etiketi",
+ "email": "E-posta",
+ "phone_number": "Telefon Numarası",
+ "via": "Via",
+ "avatar": "Avatar",
+ "hash_password": "Şifre",
+ "password_token": "Şifre Token",
+ "remember_me": "Beni Hatırla",
+ "password_expires_day": "Şifre Son Kullanma Günü",
+ "password_expiry_begins": "Şifre Son Kullanma Başlangıç",
+ "related_company": "İlgili Şirket",
+ "person_id": "Kişi ID",
+ "person_uu_id": "Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "user_tag": "User Tag",
+ "email": "Email",
+ "phone_number": "Phone Number",
+ "via": "Via",
+ "avatar": "Avatar",
+ "hash_password": "Password",
+ "password_token": "Password Token",
+ "remember_me": "Remember Me",
+ "password_expires_day": "Password Expires Day",
+ "password_expiry_begins": "Password Expiry Begins",
+ "related_company": "Related Company",
+ "person_id": "Person ID",
+ "person_uu_id": "Person UUID",
+ },
+)
+
+RelationshipDutyPeopleLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "company_id": "Şirket ID",
+ "duties_id": "Görev ID",
+ "member_id": "Üye ID",
+ "relationship_type": "İlişki Türü",
+ "show_only": "Sadece Göster",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "company_id": "Company ID",
+ "duties_id": "Duty ID",
+ "member_id": "Member ID",
+ "relationship_type": "Relationship Type",
+ "show_only": "Show Only",
+ },
+)
+
+PeopleLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "firstname": "Ad",
+ "surname": "Soyad",
+ "middle_name": "Orta Ad",
+ "sex_code": "Cinsiyet Kodu",
+ "person_ref": "Kişi Referansı",
+ "person_tag": "Kişi Etiketi",
+ "father_name": "Baba Adı",
+ "mother_name": "Anne Adı",
+ "country_code": "Ülke Kodu",
+ "national_identity_id": "Kimlik Numarası",
+ "birth_place": "Doğum Yeri",
+ "birth_date": "Doğum Tarihi",
+ "tax_no": "Vergi Numarası",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "firstname": "First Name",
+ "surname": "Last Name",
+ "middle_name": "Middle Name",
+ "sex_code": "Gender",
+ "person_ref": "Person Reference",
+ "person_tag": "Person Tag",
+ "father_name": "Father Name",
+ "mother_name": "Mother Name",
+ "country_code": "Country Code",
+ "national_identity_id": "National Identity ID",
+ "birth_place": "Birth Place",
+ "birth_date": "Birth Date",
+ "tax_no": "Tax No",
+ },
+)
+
+RelationshipEmployee2PostCodeLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "company_id": "Şirket ID",
+ "employee_id": "Çalışan ID",
+ "member_id": "Üye ID",
+ "relationship_type": "İlişki Türü",
+ "show_only": "Sadece Göster",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "company_id": "Company ID",
+ "employee_id": "Employee ID",
+ "member_id": "Member ID",
+ "relationship_type": "Relationship Type",
+ "show_only": "Show Only",
+ },
+)
+
+AddressPostcodeLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "street_id": "Sokak ID",
+ "street_uu_id": "Sokak UUID",
+ "postcode": "Posta Kodu",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "street_id": "Street ID",
+ "street_uu_id": "Street UUID",
+ "postcode": "Postcode",
+ },
+)
+
+AddressesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "build_number": "Bina Numarası",
+ "door_number": "Kapı Numarası",
+ "floor_number": "Kat Numarası",
+ "comment_address": "Adres",
+ "letter_address": "Adres",
+ "short_letter_address": "Adres",
+ "latitude": "Enlem",
+ "longitude": "Boylam",
+ "street_id": "Sokak ID",
+ "street_uu_id": "Sokak UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "build_number": "Build Number",
+ "door_number": "Door Number",
+ "floor_number": "Floor Number",
+ "comment_address": "Address",
+ "letter_address": "Address",
+ "short_letter_address": "Address",
+ "latitude": "Latitude",
+ "longitude": "Longitude",
+ "street_id": "Street ID",
+ "street_uu_id": "Street UUID",
+ },
+)
+
+AddressGeographicLocationsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "geo_table": "Tablo Adı",
+ "geo_id": "ID",
+ "geo_name": "Ad",
+ "geo_latitude": "Enlem",
+ "geo_longitude": "Boylam",
+ "geo_altitude": "Yükseklik",
+ "geo_description": "Açıklama",
+ "geo_area_size": "Alan",
+ "geo_population": "Nüfus",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "geo_table": "Table Name",
+ "geo_id": "ID",
+ "geo_name": "Name",
+ "geo_latitude": "Latitude",
+ "geo_longitude": "Longitude",
+ "geo_altitude": "Altitude",
+ "geo_description": "Description",
+ "geo_area_size": "Area",
+ "geo_population": "Population",
+ },
+)
+
+AddressCountryLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "country_code": "Ülke Kodu",
+ "country_name": "Ülke Adı",
+ "money_code": "Para Kodu",
+ "language": "Dil Kodu",
+ "address_geographic_id": "Adres Coğrafi ID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "country_code": "Country Code",
+ "country_name": "Country Name",
+ "money_code": "Money Code",
+ "language": "Language Code",
+ "address_geographic_id": "Address Geographic ID",
+ },
+)
+
+AddressStateLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "state_code": "Eyalet Kodu",
+ "state_name": "Eyalet Adı",
+ "licence_plate": "Plaka Kodu",
+ "phone_code": "Telefon Kodu",
+ "gov_code": "Hükümet Kodu",
+ "address_geographic_id": "Adres Coğrafi ID",
+ "country_id": "Ülke ID",
+ "country_uu_id": "Ülke UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "state_code": "State Code",
+ "state_name": "State Name",
+ "licence_plate": "Licence Plate",
+ "phone_code": "Phone Code",
+ "gov_code": "Government Code",
+ "address_geographic_id": "Address Geographic ID",
+ "country_id": "Country ID",
+ "country_uu_id": "Country UUID",
+ },
+)
+
+AddressCityLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "city_code": "Şehir Kodu",
+ "city_name": "Şehir Adı",
+ "licence_plate": "Plaka Kodu",
+ "phone_code": "Telefon Kodu",
+ "gov_code": "Hükümet Kodu",
+ "address_geographic_id": "Adres Coğrafi ID",
+ "state_id": "Eyalet ID",
+ "state_uu_id": "Eyalet UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "city_code": "City Code",
+ "city_name": "City Name",
+ "licence_plate": "Licence Plate",
+ "phone_code": "Phone Code",
+ "gov_code": "Government Code",
+ "address_geographic_id": "Address Geographic ID",
+ "state_id": "State ID",
+ "state_uu_id": "State UUID",
+ },
+)
+
+AddressDistrictLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "district_code": "İlçe Kodu",
+ "district_name": "İlçe Adı",
+ "phone_code": "Telefon Kodu",
+ "gov_code": "Hükümet Kodu",
+ "address_geographic_id": "Adres Coğrafi ID",
+ "city_id": "Şehir ID",
+ "city_uu_id": "Şehir UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "district_code": "District Code",
+ "district_name": "District Name",
+ "phone_code": "Phone Code",
+ "gov_code": "Government Code",
+ "address_geographic_id": "Address Geographic ID",
+ "city_id": "City ID",
+ "city_uu_id": "City UUID",
+ },
+)
+
+AddressLocalityLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "locality_code": "Mahalle Kodu",
+ "locality_name": "Mahalle Adı",
+ "type_code": "Tip Kodu",
+ "type_description": "Tip Açıklaması",
+ "gov_code": "Hükümet Kodu",
+ "address_show": "Adres Göster",
+ "address_geographic_id": "Adres Coğrafi ID",
+ "district_id": "İlçe ID",
+ "district_uu_id": "İlçe UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "locality_code": "Locality Code",
+ "locality_name": "Locality Name",
+ "type_code": "Type Code",
+ "type_description": "Type Description",
+ "gov_code": "Government Code",
+ "address_show": "Address Show",
+ "address_geographic_id": "Address Geographic ID",
+ "district_id": "District ID",
+ "district_uu_id": "District UUID",
+ },
+)
+
+AddressNeighborhoodLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "neighborhood_code": "Mahalle Kodu",
+ "neighborhood_name": "Mahalle Adı",
+ "type_code": "Tip Kodu",
+ "type_description": "Tip Açıklaması",
+ "gov_code": "Hükümet Kodu",
+ "address_show": "Adres Göster",
+ "address_geographic_id": "Adres Coğrafi ID",
+ "district_id": "İlçe ID",
+ "district_uu_id": "İlçe UUID",
+ "locality_id": "Mahalle ID",
+ "locality_uu_id": "Mahalle UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "neighborhood_code": "Neighborhood Code",
+ "neighborhood_name": "Neighborhood Name",
+ "type_code": "Type Code",
+ "type_description": "Type Description",
+ "gov_code": "Government Code",
+ "address_show": "Address Show",
+ "address_geographic_id": "Address Geographic ID",
+ "district_id": "District ID",
+ "district_uu_id": "District UUID",
+ "locality_id": "Locality ID",
+ "locality_uu_id": "Locality UUID",
+ },
+)
+
+AddressStreetLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "street_code": "Sokak Kodu",
+ "street_name": "Sokak Adı",
+ "type_code": "Tip Kodu",
+ "type_description": "Tip Açıklaması",
+ "gov_code": "Hükümet Kodu",
+ "address_geographic_id": "Adres Coğrafi ID",
+ "neighborhood_id": "Mahalle ID",
+ "neighborhood_uu_id": "Mahalle UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "street_code": "Street Code",
+ "street_name": "Street Name",
+ "type_code": "Type Code",
+ "type_description": "Type Description",
+ "gov_code": "Government Code",
+ "address_geographic_id": "Address Geographic ID",
+ "neighborhood_id": "Neighborhood ID",
+ "neighborhood_uu_id": "Neighborhood UUID",
+ },
+)
+
+OccupantTypesLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "occupant_type": "Kişi Türü",
+ "occupant_description": "Kişi Açıklaması",
+ "occupant_code": "Kişi Kodu",
+ "occupant_category": "Kişi Kategori",
+ "occupant_category_type": "Kişi Kategori Türü",
+ "occupant_is_unique": "Kişi Benzersiz",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "occupant_type": "Occupant Type",
+ "occupant_description": "Occupant Description",
+ "occupant_code": "Occupant Code",
+ "occupant_category": "Occupant Category",
+ "occupant_category_type": "Occupant Category Type",
+ "occupant_is_unique": "Occupant Unique",
+ },
+)
+
+ContractsLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "contract_type": "Sözleşme Türü",
+ "contract_title": "Sözleşme Başlığı",
+ "contract_details": "Sözleşme Detayları",
+ "contract_terms": "Sözleşme Şartları",
+ "contract_code": "Sözleşme Kodu",
+ "contract_date": "Sözleşme Tarihi",
+ "company_id": "Şirket ID",
+ "company_uu_id": "Şirket UUID",
+ "person_id": "Kişi ID",
+ "person_uu_id": "Kişi UUID",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "contract_type": "Contract Type",
+ "contract_title": "Contract Title",
+ "contract_details": "Contract Details",
+ "contract_terms": "Contract Terms",
+ "contract_code": "Contract Code",
+ "contract_date": "Contract Date",
+ "company_id": "Company ID",
+ "company_uu_id": "Company UUID",
+ "person_id": "Person ID",
+ "person_uu_id": "Person UUID",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Database/rules/rules.py b/ApiLayers/LanguageModels/Database/rules/rules.py
new file mode 100644
index 0000000..2e0496d
--- /dev/null
+++ b/ApiLayers/LanguageModels/Database/rules/rules.py
@@ -0,0 +1,22 @@
+from ApiLayers.LanguageModels.Database.Mixins.crud_mixin import (
+ CrudCollectionLanguageModel,
+)
+
+EndpointRestrictionLanguageModel = dict(
+ tr={
+ **CrudCollectionLanguageModel["tr"],
+ "endpoint_function": "API Fonksiyonu",
+ "endpoint_name": "API Adı",
+ "endpoint_method": "API Metodu",
+ "endpoint_desc": "API Açıklaması",
+ "endpoint_code": "API Kodu",
+ },
+ en={
+ **CrudCollectionLanguageModel["en"],
+ "endpoint_function": "API Function",
+ "endpoint_name": "API Name",
+ "endpoint_method": "API Method",
+ "endpoint_desc": "API Description",
+ "endpoint_code": "API Code",
+ },
+)
diff --git a/ApiLayers/LanguageModels/Errors/all_errors.py b/ApiLayers/LanguageModels/Errors/all_errors.py
new file mode 100644
index 0000000..1f9e19d
--- /dev/null
+++ b/ApiLayers/LanguageModels/Errors/all_errors.py
@@ -0,0 +1,4 @@
+from .defualt_error import default_errors
+
+
+all_errors_list = [default_errors]
diff --git a/ApiLayers/LanguageModels/Errors/base_languages.py b/ApiLayers/LanguageModels/Errors/base_languages.py
new file mode 100644
index 0000000..c741faf
--- /dev/null
+++ b/ApiLayers/LanguageModels/Errors/base_languages.py
@@ -0,0 +1,35 @@
+class BaseErrorLanguageModelTurkish:
+
+ NOT_CREATED: str = "Kayıt oluşturulamadı."
+ NOT_DELETED: str = "Kayıt silinemedi."
+ NOT_UPDATED: str = "Kayıt güncellenemedi."
+ NOT_LISTED: str = "Kayıt listelenemedi."
+ NOT_FOUND: str = "Kayıt bulunamadı."
+ ALREADY_EXISTS: str = "Kayıt zaten mevcut."
+ IS_NOT_CONFIRMED: str = "Kayıt onaylanmadı."
+ NOT_AUTHORIZED: str = "Yetkisiz kullanıcı."
+ NOT_VALID: str = "Geçersiz veri."
+ NOT_ACCEPTABLE: str = "Geçersiz veri."
+ INVALID_DATA: str = "Geçersiz veri."
+ UNKNOWN_ERROR: str = "Bilinmeyen bir hata oluştu."
+
+
+class BaseErrorLanguageModelEnglish:
+
+ NOT_CREATED: str = "Not Created."
+ NOT_DELETED: str = "Not Deleted."
+ NOT_UPDATED: str = "Not Updated."
+ NOT_LISTED: str = "Not Listed."
+ NOT_FOUND: str = "Not Found."
+ ALREADY_EXISTS: str = "Already Exists."
+ IS_NOT_CONFIRMED: str = "Not Confirmed."
+ NOT_AUTHORIZED: str = "Not Authorized."
+ NOT_VALID: str = "Not Valid."
+ NOT_ACCEPTABLE: str = "Not Acceptable."
+ INVALID_DATA: str = "Invalid Data."
+ UNKNOWN_ERROR: str = "Unknown Error occurred."
+
+
+class BaseErrorLanguageModels:
+ tr: BaseErrorLanguageModelTurkish = BaseErrorLanguageModelTurkish
+ en: BaseErrorLanguageModelEnglish = BaseErrorLanguageModelEnglish
diff --git a/ApiLayers/LanguageModels/Errors/defualt_error.py b/ApiLayers/LanguageModels/Errors/defualt_error.py
new file mode 100644
index 0000000..d79f47f
--- /dev/null
+++ b/ApiLayers/LanguageModels/Errors/defualt_error.py
@@ -0,0 +1,98 @@
+default_errors = {
+ "NOT_CREATED": {
+ "tr": {
+ "message": "Kayıt oluşturulamadı. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record could not be created. Please try again.",
+ },
+ },
+ "NOT_DELETED": {
+ "tr": {
+ "message": "Kayıt silinemedi. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record could not be deleted. Please try again.",
+ },
+ },
+ "NOT_UPDATED": {
+ "tr": {
+ "message": "Kayıt güncellenemedi. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record could not be updated. Please try again.",
+ },
+ },
+ "NOT_LISTED": {
+ "tr": {
+ "message": "Kayıt listelenemedi. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record could not be listed. Please try again.",
+ },
+ },
+ "NOT_FOUND": {
+ "tr": {
+ "message": "Kayıt bulunamadı. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record could not be found. Please try again.",
+ },
+ },
+ "ALREADY_EXISTS": {
+ "tr": {
+ "message": "Kayıt zaten mevcut. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record already exists. Please try again.",
+ },
+ },
+ "IS_NOT_CONFIRMED": {
+ "tr": {
+ "message": "Kayıt onaylanmadı. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Record is not confirmed. Please try again.",
+ },
+ },
+ "NOT_AUTHORIZED": {
+ "tr": {
+ "message": "Yetkisiz kullanıcı. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Unauthorized user. Please try again.",
+ },
+ },
+ "NOT_VALID": {
+ "tr": {
+ "message": "Geçersiz veri. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Invalid data. Please try again.",
+ },
+ },
+ "NOT_ACCEPTABLE": {
+ "tr": {
+ "message": "Geçersiz veri. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Invalid data. Please try again.",
+ },
+ },
+ "INVALID_DATA": {
+ "tr": {
+ "message": "Geçersiz veri. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "Invalid data. Please try again.",
+ },
+ },
+ "UNKNOWN_ERROR": {
+ "tr": {
+ "message": "Bilinmeyen bir hata oluştu. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "An unknown error occurred. Please try again.",
+ },
+ },
+}
diff --git a/ApiLayers/LanguageModels/Errors/merge_all_error_languages.py b/ApiLayers/LanguageModels/Errors/merge_all_error_languages.py
new file mode 100644
index 0000000..17c6c2d
--- /dev/null
+++ b/ApiLayers/LanguageModels/Errors/merge_all_error_languages.py
@@ -0,0 +1,18 @@
+from ApiLayers.LanguageModels.Errors.base_languages import BaseErrorLanguageModels
+
+
+class MergedErrorLanguageModels:
+ list_of_languages = [BaseErrorLanguageModels]
+
+ @classmethod
+ def get_language_models(cls, language: str):
+ language_model_keys = {}
+ for list_of_language in cls.list_of_languages:
+ language_model_class = getattr(list_of_language, language, None)
+ clean_dict = {
+ key: value
+ for key, value in language_model_class.__dict__.items()
+ if "__" not in str(key)[0:3]
+ }
+ language_model_keys.update(clean_dict)
+ return language_model_keys
diff --git a/ApiLayers/LanguageModels/Request/Auth/login.py b/ApiLayers/LanguageModels/Request/Auth/login.py
new file mode 100644
index 0000000..639e7c3
--- /dev/null
+++ b/ApiLayers/LanguageModels/Request/Auth/login.py
@@ -0,0 +1,29 @@
+from typing import Dict
+
+
+LoginRequestLanguageModel: Dict[str, Dict[str, str]] = {
+ "tr": {
+ "domain": "Domain",
+ "access_key": "Erişim Anahtarı",
+ "password": "Parola",
+ "remember_me": "Beni Hatırla",
+ },
+ "en": {
+ "domain": "Domain",
+ "access_key": "Access Key",
+ "password": "Password",
+ "remember_me": "Remember Me",
+ },
+}
+
+
+SelectRequestLanguageModel: Dict[str, Dict[str, str]] = {
+ "tr": {
+ "company_uu_id": "Şirket UU ID",
+ "build_living_space_uu_id": "Bina Konut UU ID",
+ },
+ "en": {
+ "company_uu_id": "Company UU ID",
+ "build_living_space_uu_id": "Build Living Space UU ID",
+ },
+}
diff --git a/ApiLayers/LanguageModels/Request/__init__.py b/ApiLayers/LanguageModels/Request/__init__.py
new file mode 100644
index 0000000..51a4fc1
--- /dev/null
+++ b/ApiLayers/LanguageModels/Request/__init__.py
@@ -0,0 +1,9 @@
+from .Auth.login import (
+ LoginRequestLanguageModel,
+ SelectRequestLanguageModel,
+)
+
+__all__ = [
+ "LoginRequestLanguageModel",
+ "SelectRequestLanguageModel",
+]
diff --git a/ApiLayers/LanguageModels/Response/accounts/accounts.py b/ApiLayers/LanguageModels/Response/accounts/accounts.py
new file mode 100644
index 0000000..da7c61e
--- /dev/null
+++ b/ApiLayers/LanguageModels/Response/accounts/accounts.py
@@ -0,0 +1,26 @@
+accountResponses = {
+ "ACCOUNTS_LIST": {
+ "tr": {
+ "message": "Hesap Bilgileri gönderilen sorgu ve filtreleme seçeneklerine göre başarılı bir şekilde listelendi.",
+ },
+ "en": {
+ "message": "Account Information listed successfully regarding to the sent query and filtering options.",
+ },
+ },
+ "ACCOUNT_CREATED": {
+ "tr": {
+ "message": "Hesap bilgileri başarılı bir şekilde eklendi.",
+ },
+ "en": {
+ "message": "Account information added successfully.",
+ },
+ },
+ "ACCOUNT_UPDATED": {
+ "tr": {
+ "message": "Hesap bilgileri başarılı bir şekilde güncellendi.",
+ },
+ "en": {
+ "message": "Account information updated successfully.",
+ },
+ },
+}
diff --git a/ApiLayers/LanguageModels/Response/all_responses.py b/ApiLayers/LanguageModels/Response/all_responses.py
new file mode 100644
index 0000000..15b794e
--- /dev/null
+++ b/ApiLayers/LanguageModels/Response/all_responses.py
@@ -0,0 +1,5 @@
+from .authentication.auth import authResponses
+from .accounts.accounts import accountResponses
+
+
+all_response_list = [authResponses, accountResponses]
diff --git a/ApiLayers/LanguageModels/Response/authentication/auth.py b/ApiLayers/LanguageModels/Response/authentication/auth.py
new file mode 100644
index 0000000..be19940
--- /dev/null
+++ b/ApiLayers/LanguageModels/Response/authentication/auth.py
@@ -0,0 +1,98 @@
+authResponses = {
+ "LOGIN_SELECT": {
+ "tr": {
+ "message": "Şirket/Görev başarılı bir şekilde seçildi.",
+ },
+ "en": {
+ "message": "Company/Duty selected successfully.",
+ },
+ },
+ "LOGIN_SUCCESS": {
+ "tr": {
+ "message": "Giriş başarı ile tamamlandı. Devam etmek için bir şirket/görev seçiniz.",
+ },
+ "en": {
+ "message": "Login successful. Please select a company/duty to continue.",
+ },
+ },
+ "TOKEN_VALID": {
+ "tr": {
+ "message": "Header'da belirtilen token geçerli.",
+ },
+ "en": {
+ "message": "The token specified in the header is valid.",
+ },
+ },
+ "USER_INFO_REFRESHED": {
+ "tr": {
+ "message": "Token aracılığıyla kullanıcı bilgileri başarılı bir şekilde güncellendi.",
+ },
+ "en": {
+ "message": "User information updated successfully via token.",
+ },
+ },
+ "CREATED_PASSWORD": {
+ "tr": {
+ "message": "Şifre başarılı bir şekilde oluşturuldu.",
+ },
+ "en": {
+ "message": "Password created successfully.",
+ },
+ },
+ "PASSWORD_CHANGED": {
+ "tr": {
+ "message": "Şifre başarılı bir şekilde değiştirildi.",
+ },
+ "en": {
+ "message": "Password changed successfully.",
+ },
+ },
+ "LOGOUT_USER": {
+ "tr": {
+ "message": "Kullanıcı başarılı bir şekilde çıkış yaptı.",
+ },
+ "en": {
+ "message": "User successfully logged out.",
+ },
+ },
+ "DISCONNECTED_USER": {
+ "tr": {
+ "message": "Kullanıcı tüm cihazlardan başarılı bir şekilde çıkış yaptı.",
+ },
+ "en": {
+ "message": "User successfully logged out of all devices.",
+ },
+ },
+ "USER_NOT_FOUND": {
+ "tr": {
+ "message": "Kullanıcı bulunamadı. Lütfen tekrar deneyiniz.",
+ },
+ "en": {
+ "message": "User not found. Please try again.",
+ },
+ },
+ "FORGOT_PASSWORD": {
+ "tr": {
+ "message": "Şifre sıfırlama talebi başarılı bir şekilde oluşturuldu.",
+ },
+ "en": {
+ "message": "Password reset request created successfully.",
+ },
+ },
+ "USER_AVATAR": {
+ "tr": {
+ "message": "Kullanıcı avatarı data bloğunda belirtildiği şekildedir.",
+ },
+ "en": {
+ "message": "User avatar is as specified in the data block.",
+ },
+ },
+ "TOKEN_REFRESH": {
+ "tr": {
+ "message": "Token başarılı bir şekilde yenilendi.",
+ },
+ "en": {
+ "message": "Token successfully refreshed.",
+ },
+ },
+}
diff --git a/ApiLayers/LanguageModels/Response/how_to.py b/ApiLayers/LanguageModels/Response/how_to.py
new file mode 100644
index 0000000..a07a48d
--- /dev/null
+++ b/ApiLayers/LanguageModels/Response/how_to.py
@@ -0,0 +1,227 @@
+from Events.Engine import MethodToEvent
+from Events.Engine.abstract_class import PageInfo
+
+# from .account_records import (
+# AccountRecordsUpdateEventMethods,
+# AccountRecordsCreateEventMethods,
+# AccountRecordsListEventMethods,
+# )
+
+cluster_name = "AccountCluster"
+prefix = "/accounts"
+
+
+class AccountRecordsUpdateEventMethods(MethodToEvent):
+ pass
+
+
+class AccountRecordsCreateEventMethods(MethodToEvent):
+ pass
+
+
+class AccountRecordsListEventMethods(MethodToEvent):
+ pass
+
+
+class LanguageModels:
+ SITE_URL: str
+ COMPONENT: str = "Table"
+ PREFIX_URL: str = ""
+ PAGE_INFO: dict
+
+ def as_dict(self):
+ return {
+ "SITE_URL": self.SITE_URL,
+ "COMPONENT": self.COMPONENT,
+ "PREFIX_URL": self.PREFIX_URL,
+ "PAGE_INFO": self.PAGE_INFO,
+ }
+
+
+account_language_update_models = LanguageModels()
+account_language_update_models.COMPONENT = "Link"
+account_language_update_models.SITE_URL = f"/update?site={cluster_name}"
+account_language_update_models.PREFIX_URL = (
+ f"{prefix}{AccountRecordsUpdateEventMethods.URL}"
+)
+account_language_update_models.PAGE_INFO = {
+ "en": {
+ "page": "Update Account Records",
+ },
+ "tr": {
+ "page": "Hesap Kaydı Güncelle",
+ },
+}
+account_language_model_as_dict = account_language_update_models.as_dict()
+
+
+account_language_created_models = LanguageModels()
+account_language_created_models.COMPONENT = "Link"
+account_language_created_models.SITE_URL = f"/create?site={cluster_name}"
+account_language_created_models.PREFIX_URL = (
+ f"{prefix}{AccountRecordsCreateEventMethods.URL}"
+)
+account_language_created_models.PAGE_INFO = {
+ "en": {
+ "page": "Create Account Records",
+ },
+ "tr": {"page": "Hesap Kaydı Oluştur"},
+}
+account_language_created_models_as_dict = account_language_created_models.as_dict()
+
+account_language_list_models = LanguageModels()
+account_language_list_models.COMPONENT = "Table"
+account_language_list_models.SITE_URL = f"/list?site={cluster_name}"
+account_language_list_models.PREFIX_URL = (
+ f"{prefix}{AccountRecordsListEventMethods.URL}"
+)
+account_language_list_models.PAGE_INFO = {
+ "en": {
+ "page": "List Account Records",
+ },
+ "tr": {
+ "page": "Hesap Kayıtlarını Listele",
+ },
+}
+
+account_language_list_models_as_dict = account_language_list_models.as_dict()
+
+account_language_create_models = LanguageModels()
+account_language_create_models.COMPONENT = "Form"
+account_language_create_models.SITE_URL = f"/create?site={cluster_name}"
+account_language_create_models.PREFIX_URL = (
+ f"{prefix}{AccountRecordsListEventMethods.URL}"
+)
+account_language_create_models.PAGE_INFO = {
+ "en": {"page": "List Account Records", "button:": "Create"},
+ "tr": {"page": "Hesap Kayıtlarını Listele", "button:": "Oluştur"},
+}
+
+account_language_create_models_as_dict = account_language_create_models.as_dict()
+
+account_language_update_form_models = LanguageModels()
+account_language_update_form_models.COMPONENT = "Form"
+account_language_update_form_models.SITE_URL = f"/update?site={cluster_name}"
+account_language_update_form_models.PREFIX_URL = (
+ f"{prefix}{AccountRecordsUpdateEventMethods.URL}"
+)
+account_language_update_form_models.PAGE_INFO = {
+ "en": {"page": "Update Account Records", "button:": "Update"},
+ "tr": {"page": "Hesap Kaydı Güncelle", "button:": "Güncelle"},
+}
+account_language_update_form_models_as_dict = (
+ account_language_update_form_models.as_dict()
+)
+
+
+create_key = f"{prefix}{AccountRecordsCreateEventMethods.URL}"
+update_key = f"{prefix}{AccountRecordsUpdateEventMethods.URL}"
+list_key = f"{prefix}{AccountRecordsListEventMethods.URL}"
+
+
+dashboard_page_info = PageInfo(
+ name=f"{cluster_name}",
+ url=f"/dashboard?site={cluster_name}",
+ icon="Building",
+ instructions={
+ str(list_key): {
+ "headers": {
+ "store": True,
+ "url": "/validations/header",
+ "data": {"event_code": f"{prefix}/list", "asked_field": "headers"},
+ },
+ "data": {
+ "store": True,
+ "url": f"{prefix}/list",
+ "data": dict(page=1, limit=1),
+ },
+ },
+ },
+ page_info={
+ "page": {
+ "en": "Account Records for reaching user all types account information",
+ "tr": "Kullanıcı tüm hesap bilgilerine ulaşmak için Hesap Kayıtları",
+ },
+ },
+ endpoints={
+ str(update_key): AccountRecordsUpdateEventMethods.retrieve_all_event_keys(),
+ str(create_key): AccountRecordsCreateEventMethods.retrieve_all_event_keys(),
+ str(list_key): AccountRecordsListEventMethods.retrieve_all_event_keys(),
+ },
+ language_models={
+ account_language_update_models.PREFIX_URL: account_language_model_as_dict,
+ account_language_created_models.PREFIX_URL: account_language_created_models_as_dict,
+ account_language_list_models.PREFIX_URL: account_language_list_models_as_dict,
+ },
+)
+
+create_page_info = PageInfo(
+ name=f"{cluster_name}",
+ url=f"/create?site={cluster_name}",
+ icon="Building",
+ instructions={
+ str(create_key): {
+ "validation": {
+ "store": True,
+ "url": "/validations/validation",
+ "data": {"event_code": f"{prefix}/create", "asked_field": "validation"},
+ },
+ "headers": {
+ "store": True,
+ "url": "/validations/header",
+ "data": {"event_code": f"{prefix}/create", "asked_field": "headers"},
+ },
+ },
+ },
+ page_info={
+ "page": {
+ "en": "Account Records creating for user all types account information",
+ "tr": "Kullanıcı tüm hesap bilgilerine ulaşmak için Hesap Kayıtları oluştur",
+ },
+ },
+ endpoints={
+ str(create_key): AccountRecordsCreateEventMethods.retrieve_all_event_keys(),
+ },
+ language_models={
+ account_language_create_models.PREFIX_URL: account_language_create_models_as_dict,
+ },
+)
+
+update_page_info = PageInfo(
+ name=f"{cluster_name}",
+ url=f"/update?site={cluster_name}",
+ icon="Building",
+ instructions={
+ str(update_key): {
+ "validation": {
+ "store": True,
+ "url": "/validations/validation",
+ "data": {"event_code": f"{prefix}/update", "asked_field": "validation"},
+ },
+ "headers": {
+ "store": True,
+ "url": "/validations/header",
+ "data": {"event_code": f"{prefix}/update", "asked_field": "headers"},
+ },
+ },
+ },
+ page_info={
+ "page": {
+ "en": "Account Records updating for user all types account information",
+ "tr": "Kullanıcı tüm hesap bilgilerine ulaşmak için Hesap Kayıtları güncelle",
+ },
+ },
+ endpoints={
+ str(update_key): AccountRecordsUpdateEventMethods.retrieve_all_event_keys(),
+ },
+ language_models={
+ account_language_update_form_models.PREFIX_URL: account_language_update_form_models_as_dict,
+ },
+)
+
+
+account_page_info = {
+ f"/dashboard?site={cluster_name}": dashboard_page_info,
+ f"/create?site={cluster_name}": create_page_info,
+ f"/update?site={cluster_name}": update_page_info,
+}
diff --git a/ApiLayers/LanguageModels/__init__.py b/ApiLayers/LanguageModels/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/LanguageModels/default_template.py b/ApiLayers/LanguageModels/default_template.py
new file mode 100644
index 0000000..905a8df
--- /dev/null
+++ b/ApiLayers/LanguageModels/default_template.py
@@ -0,0 +1,10 @@
+responses = {
+ "LOGIN_SELECT": {
+ "tr": {
+ "": "",
+ },
+ "en": {
+ "": "",
+ },
+ },
+}
diff --git a/ApiLayers/LanguageModels/set_defaults/language_setters.py b/ApiLayers/LanguageModels/set_defaults/language_setters.py
new file mode 100644
index 0000000..b6c226c
--- /dev/null
+++ b/ApiLayers/LanguageModels/set_defaults/language_setters.py
@@ -0,0 +1,177 @@
+from ApiLayers.AllConfigs.Redis.configs import (
+ RedisValidationKeysAction,
+ RedisValidationKeys,
+)
+from ApiLayers.AllConfigs.main import LanguageConfig
+from Events.Engine.set_defaults.category_cluster_models import CategoryClusterController
+from Services.Redis.Actions.actions import RedisActions
+
+
+class SetDefaultLanguageModelsRedis:
+
+ std_out: str = ""
+
+ def __init__(
+ self,
+ set_response_languages_list: list[dict],
+ set_errors_languages_list: list[dict],
+ ):
+ self.responses_list: list[dict] = set_response_languages_list
+ self.errors_list: list[dict] = set_errors_languages_list
+
+ def __str__(self):
+ return f"\nPrepareLanguageModels:\n\n{self.std_out}"
+
+ def set_all(self):
+
+ # RedisActions.delete(list_keys=["*"])
+ RedisActions.delete(list_keys=[f"{RedisValidationKeys.LANGUAGE_MODELS}:*"])
+
+ for response in self.responses_list:
+ for lang in list(LanguageConfig.SUPPORTED_LANGUAGES):
+ for code, dict_to_set in response.items():
+ # [SAVE]REDIS => LANGUAGE_MODELS:STATIC:RESPONSES:{ResponseCode}:tr = {...}
+ set_key = (
+ f"{RedisValidationKeysAction.static_response_key}:{code}:{lang}"
+ )
+ RedisActions.set_json(list_keys=[set_key], value=dict_to_set[lang])
+
+ self.std_out += f"Language Response Models are set to Redis\n"
+ for response in self.errors_list:
+ for lang in list(LanguageConfig.SUPPORTED_LANGUAGES):
+ for code, dict_to_set in response.items():
+ # [SAVE]REDIS => LANGUAGE_MODELS:STATIC:ERRORCODES:{ErrorCode}:en = {...}
+ set_key = f"{RedisValidationKeysAction.static_error_code_key}:{code}:{lang}"
+ RedisActions.set_json(list_keys=[set_key], value=dict_to_set[lang])
+
+ self.std_out += f"Language Error Models are set to Redis\n"
+
+
+class SetClusterLanguageModelsRedis:
+
+ std_out: str = ""
+ events_lm_dict: dict[str, dict[str, dict]] = {}
+ events_rq_dict: dict[str, dict[str, dict]] = {}
+ events_rs_dict: dict[str, dict[str, dict]] = {}
+
+ def __init__(self, cluster_controller_group: CategoryClusterController):
+ self.cluster_controller_group = cluster_controller_group
+
+ def __str__(self):
+ return f"\nPrepareLanguageModels:\n\n{self.std_out}"
+
+ @staticmethod
+ def merge_language_dicts(list_of_lang_models: list[dict]):
+ """
+ Merges the language models of the events to a single dictionary.
+ """
+ merged_lang_models: dict[str, dict] = {}
+ for lang_model in list_of_lang_models:
+ for lang in list(LanguageConfig.SUPPORTED_LANGUAGES):
+ if not lang_model.get(lang, None):
+ raise ValueError(
+ f"Language model for {lang} not found in {lang_model}"
+ )
+ if lang not in merged_lang_models:
+ merged_lang_models[lang] = lang_model[lang]
+ else:
+ merged_lang_models[lang].update(lang_model[lang])
+ return merged_lang_models
+
+ def set_models_from_cluster(self):
+ """
+ iterate(ClusterToMethod) to set all models by pairing function codes
+ """
+ for cluster_control in self.cluster_controller_group.imports:
+ self.std_out += f"Setting models from cluster : {cluster_control.name}\n"
+ for endpoint in cluster_control.category_cluster.ENDPOINTS.values():
+ for key_event, event in endpoint.EVENTS.items():
+ merged_language_dict = self.merge_language_dicts(
+ event.LANGUAGE_MODELS
+ )
+ request_validation = getattr(
+ event.REQUEST_VALIDATOR, "model_fields", None
+ )
+ response_validation = getattr(
+ event.RESPONSE_VALIDATOR, "model_fields", None
+ )
+ objects_missing = bool(request_validation) and bool(
+ merged_language_dict
+ )
+ if not objects_missing:
+ continue
+ if merged_language_dict:
+ self.events_lm_dict[key_event] = merged_language_dict
+ if request_validation:
+ self.events_rq_dict[key_event] = request_validation
+ if response_validation:
+ self.events_rs_dict[key_event] = response_validation
+ self.std_out += f"Request/Response/Language validation model is set {key_event}\n"
+
+ def set_all(self):
+ """
+ Set all language models from cluster list by pairing event code and models
+ """
+ self.set_models_from_cluster()
+ if self.events_lm_dict and self.events_rq_dict:
+ """
+ [SAVE]REDIS => LANGUAGE_MODELS:DYNAMIC:HEADERS:REQUEST:{FunctionCode}:tr = {...}
+ Get Request BaseModel pydantic model_fields of each event and set headers which are included in model_fields
+ """
+ for lang in list(
+ LanguageConfig.SUPPORTED_LANGUAGES
+ ): # Iterate(languages ["tr", "en"])
+ for key_field in self.events_rq_dict.keys(): # Iterate(function_code)
+ request_model = self.events_rq_dict[key_field]
+ if not request_model:
+ self.std_out += (
+ f"Request validation model not found for {key_field}\n"
+ )
+ continue
+ if (
+ key_field not in self.events_rq_dict
+ or key_field not in self.events_lm_dict
+ ):
+ self.std_out += (
+ f"Request language model are missing {key_field}\n"
+ )
+ continue
+
+ value_to_set = {}
+ set_key = f"{RedisValidationKeysAction.dynamic_header_request_key}:{key_field}:{lang}"
+ for key in request_model.keys():
+ value_to_set[key] = self.events_lm_dict[key_field][lang][key]
+ RedisActions.set_json(list_keys=[set_key], value=value_to_set)
+
+ self.std_out += f"Language Request Headers are set to Redis\n"
+ if self.events_lm_dict and self.events_rs_dict:
+ """
+ [SAVE]REDIS => LANGUAGE_MODELS:DYNAMIC:HEADERS:RESPONSE:{FunctionCode}:en = {...}
+ Get Response BaseModel pydantic model_fields of each event and set headers which are included in model_fields
+ """
+ for lang in list(
+ LanguageConfig.SUPPORTED_LANGUAGES
+ ): # Iterate(languages ["tr", "en"])
+ for key_field in self.events_rs_dict.keys(): # Iterate(function_code)
+ response_model = self.events_rs_dict[key_field]
+ if not response_model:
+ self.std_out += (
+ f"Response validation model not found for {key_field}\n"
+ )
+ continue
+ if (
+ key_field not in self.events_rs_dict
+ or key_field not in self.events_lm_dict
+ ):
+ self.std_out += (
+ f"Response language model are missing {key_field}\n"
+ )
+ continue
+
+ value_to_set = {}
+ set_key = f"{RedisValidationKeysAction.dynamic_header_response_key}:{key_field}:{lang}"
+ for key in response_model.keys():
+ value_to_set[key] = self.events_lm_dict[key_field][lang][key]
+ RedisActions.set_json(list_keys=[set_key], value=value_to_set)
+
+ self.std_out += f"Language Response Headers are set to Redis\n"
diff --git a/ApiLayers/LanguageModels/set_defaults/static_validation_retriever.py b/ApiLayers/LanguageModels/set_defaults/static_validation_retriever.py
new file mode 100644
index 0000000..764b4eb
--- /dev/null
+++ b/ApiLayers/LanguageModels/set_defaults/static_validation_retriever.py
@@ -0,0 +1,26 @@
+from typing import Optional
+from Services.Redis import RedisActions
+from ApiLayers.AllConfigs.Redis.configs import RedisValidationKeysAction
+
+
+class StaticValidationRetriever:
+
+ lang: str = "tr"
+ code: str = ""
+
+ def __init__(self, lang: str, code: str):
+ self.lang = lang
+ self.code = code
+
+ @property
+ def response(self) -> Optional[dict]:
+ language_model = RedisActions.get_json(
+ list_keys=[
+ RedisValidationKeysAction.static_response_key,
+ self.code,
+ self.lang,
+ ]
+ )
+ if language_model.status:
+ return language_model.first
+ return {"message": f"{self.code} -> Language model not found"}
diff --git a/ApiLayers/Middleware/__init__.py b/ApiLayers/Middleware/__init__.py
new file mode 100644
index 0000000..74f70f1
--- /dev/null
+++ b/ApiLayers/Middleware/__init__.py
@@ -0,0 +1,14 @@
+from .token_event_middleware import TokenEventMiddleware
+from .auth_middleware import (
+ LoggerTimingMiddleware,
+ RequestTimingMiddleware,
+ MiddlewareModule,
+)
+
+
+# Public middleware API re-exported by this package.
+__all__ = [
+    "TokenEventMiddleware",
+    "RequestTimingMiddleware",
+    "MiddlewareModule",
+    "LoggerTimingMiddleware",
+]
diff --git a/ApiLayers/Middleware/auth_middleware.py b/ApiLayers/Middleware/auth_middleware.py
new file mode 100644
index 0000000..c42cba5
--- /dev/null
+++ b/ApiLayers/Middleware/auth_middleware.py
@@ -0,0 +1,175 @@
+"""
+Authentication and Authorization middleware for FastAPI applications.
+
+This module provides authentication decorator for protecting endpoints
+and a middleware for request timing measurements.
+"""
+
+import inspect
+
+from time import perf_counter
+from typing import Callable
+from functools import wraps
+
+from fastapi import Request, Response
+from starlette.middleware.base import BaseHTTPMiddleware
+
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ApiValidations.Custom.wrapper_contexts import AuthContext
+from ApiLayers.ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi
+from ApiLayers.AllConfigs.Token.config import Auth
+from ApiLayers.ApiServices.Token.token_handler import TokenService
+
+
+class MiddlewareModule:
+ """
+ Middleware module for handling authentication and request timing.
+ """
+
+ @staticmethod
+ def get_user_from_request(request: Request) -> dict:
+ """
+ Get authenticated token context from request.
+
+ Args:
+ request: FastAPI request object
+
+ Returns:
+ AuthContext: Context containing the authenticated token data
+
+ Raises:
+ HTTPExceptionApi: If token is missing, invalid, or user not found
+ """
+
+ # Get token and validate - will raise HTTPExceptionApi if invalid
+ redis_token = TokenService.get_access_token_from_request(request=request)
+ # Get token context - will validate token and raise appropriate errors
+ token_context = TokenService.get_object_via_access_key(access_token=redis_token)
+ if not token_context:
+ raise HTTPExceptionApi(
+ error_code="USER_NOT_FOUND",
+ lang="tr",
+ loc=get_line_number_for_error(),
+ sys_msg="TokenService: Token Context couldnt retrieved from redis",
+ )
+ return token_context
+
+ @classmethod
+ def auth_required(cls, func: Callable) -> Callable:
+ """
+ Decorator for protecting FastAPI endpoints with authentication.
+
+ Usage:
+ @router.get("/protected")
+ @MiddlewareModule.auth_required
+ async def protected_endpoint(request: Request):
+ auth = protected_endpoint.auth # Access auth context
+ if auth.is_employee:
+ # Handle employee logic
+ employee_id = auth.token_context.employee_id
+ else:
+ # Handle occupant logic
+ occupant_id = auth.token_context.occupant_id
+ return {"user_id": auth.user_id}
+
+ Args:
+ func: The FastAPI route handler function to protect
+
+ Returns:
+ Callable: Wrapped function that checks authentication before execution
+
+ Raises:
+ HTTPExceptionApi: If authentication fails
+ """
+
+ @wraps(func)
+ async def wrapper(request: Request, *args, **kwargs):
+ # Get and validate token context from request
+ endpoint_url = str(request.url.path)
+ token_context = cls.get_user_from_request(request=request)
+ auth_context = AuthContext(
+ auth=token_context, url=endpoint_url, request=request
+ )
+
+ # Set auth context on the wrapper function itself
+ setattr(func, "auth_context", auth_context)
+ setattr(wrapper, "auth_context", auth_context)
+
+ # Call the original endpoint function
+ if inspect.iscoroutinefunction(func):
+ result = await func(request, *args, **kwargs)
+ else:
+ result = func(request, *args, **kwargs)
+
+ # Set auth context on the wrapper function itself
+ setattr(func, "auth_context", auth_context)
+ setattr(wrapper, "auth_context", auth_context)
+
+ return result
+
+ return wrapper
+
+
+class RequestTimingMiddleware(BaseHTTPMiddleware):
+ """
+ Middleware for measuring and logging request timing.
+ Only handles timing, no authentication.
+ """
+
+ async def dispatch(self, request: Request, call_next: Callable) -> Response:
+ """
+ Process each request through the middleware.
+
+ Args:
+ request: FastAPI request object
+ call_next: Next middleware in the chain
+
+ Returns:
+ Response: Processed response with timing headers
+ """
+ start_time = perf_counter()
+ # Process the request
+ response = await call_next(request)
+
+ # Add timing information to response headers
+ end_time = perf_counter()
+ elapsed = (end_time - start_time) * 1000 # Convert to milliseconds
+
+ response.headers.update(
+ {
+ "request-start": f"{start_time:.6f}",
+ "request-end": f"{end_time:.6f}",
+ "request-duration": f"{elapsed:.2f}ms",
+ }
+ )
+
+ return response
+
+
+class LoggerTimingMiddleware(BaseHTTPMiddleware):
+ """
+ Middleware for measuring and logging request timing.
+ Only handles timing, no authentication.
+ """
+
+ async def dispatch(self, request: Request, call_next: Callable) -> Response:
+ # Log the request
+ import arrow
+
+ headers = dict(request.headers)
+ response = await call_next(request)
+ # Log the response
+ print(
+ "Loggers :",
+ {
+ "url": request.url,
+ "method": request.method,
+ "access_token": headers.get(Auth.ACCESS_TOKEN_TAG, ""),
+ "referer": headers.get("referer", ""),
+ "origin": headers.get("origin", ""),
+ "user-agent": headers.get("user-agent", ""),
+ "datetime": arrow.now().format("YYYY-MM-DD HH:mm:ss ZZ"),
+ "status_code": response.status_code,
+ },
+ )
+ return response
diff --git a/ApiLayers/Middleware/function_wrappers.py b/ApiLayers/Middleware/function_wrappers.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiLayers/Middleware/token_event_middleware.py b/ApiLayers/Middleware/token_event_middleware.py
new file mode 100644
index 0000000..f24cd56
--- /dev/null
+++ b/ApiLayers/Middleware/token_event_middleware.py
@@ -0,0 +1,198 @@
+"""
+Token event middleware for handling authentication and event tracking.
+"""
+
+import inspect
+
+from functools import wraps
+from typing import Callable, Dict, Any, Optional, Tuple, Union
+from fastapi import Request
+from pydantic import BaseModel
+
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ApiServices.Token.token_handler import TokenService
+from ApiLayers.ApiValidations.Custom.wrapper_contexts import EventContext
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from ApiLayers.Schemas import Events, EndpointRestriction
+from ApiLayers.AllConfigs.Redis.configs import RedisCategoryKeys
+
+from Services.Redis.Actions.actions import RedisActions
+
+from .auth_middleware import MiddlewareModule
+
+
+class TokenEventMiddleware:
+ """
+ Module containing token and event handling functionality.
+
+ This class provides:
+ - Token and event context management
+ - Event validation decorator for endpoints
+ """
+
+ @staticmethod
+ def retrieve_access_content(request_from_scope: Request) -> Tuple[str, list[str]]:
+ """
+ Retrieves the access token and validates it.
+
+ Args:
+ request_from_scope: The FastAPI request object
+
+ Returns:
+ Tuple[str, list[str]]: The access token and a list of reachable event codes
+ """
+ # Get token context from request
+ access_token = TokenService.get_access_token_from_request(request_from_scope)
+ if not access_token:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Token not found",
+ )
+
+ # Get token context from Redis by access token and collect reachable event codes
+ token_context = TokenService.get_object_via_access_key(
+ access_token=access_token
+ )
+ if token_context.is_employee:
+ reachable_event_codes: list[str] = (
+ token_context.selected_company.reachable_event_codes
+ )
+ elif token_context.is_occupant:
+ reachable_event_codes: list[str] = (
+ token_context.selected_occupant.reachable_event_codes
+ )
+ else:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Token not found",
+ )
+ return token_context, reachable_event_codes
+
+ @staticmethod
+ def retrieve_intersected_event_code(
+ request: Request, reachable_event_codes: list[str]
+ ) -> Tuple[str, str]:
+ """
+ Match an endpoint with accessible events.
+
+ Args:
+ request: The endpoint to match
+ reachable_event_codes: The list of event codes accessible to the user
+
+ Returns:
+ Dict containing the endpoint registration data
+ None if endpoint is not found in database
+ """
+ endpoint_url = str(request.url.path)
+ # Get the endpoint URL for matching with events
+ function_codes_of_endpoint = RedisActions.get_json(
+ list_keys=[RedisCategoryKeys.METHOD_FUNCTION_CODES, "*", endpoint_url]
+ )
+ function_code_list_of_event = function_codes_of_endpoint.first
+ if not function_codes_of_endpoint.status:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Function code not found",
+ )
+
+ # Intersect function codes with user accers objects available event codes
+ # reachable_event_codes = ["36a165fe-a2f3-437b-80ee-1ee44670fe70"]
+ intersected_code = list(
+ set(function_code_list_of_event) & set(reachable_event_codes)
+ )
+ if not len(intersected_code) == 1:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="No event is registered for this user.",
+ )
+ return endpoint_url, intersected_code[0]
+
+ @classmethod
+ def event_required(cls, func: Callable) -> Callable:
+ """
+ Decorator for endpoints with token and event requirements.
+ This decorator:
+ 1. First validates authentication using MiddlewareModule.auth_required
+ 2. Then adds event tracking context
+
+ Args:
+ func: The function to be decorated
+
+ Returns:
+ Callable: The wrapped function with both auth and event handling
+ """
+
+ @wraps(func)
+ async def wrapper(request: Request, *args, **kwargs) -> Dict[str, Any]:
+
+ # Get and validate token context from request
+ token_context, reachable_event_codes = cls.retrieve_access_content(request)
+ endpoint_url, reachable_event_code = cls.retrieve_intersected_event_code(
+ request, reachable_event_codes
+ )
+ event_context = EventContext(
+ auth=token_context,
+ code=reachable_event_code,
+ url=endpoint_url,
+ request=request,
+ )
+
+ # Get auth context from the authenticated function's wrapper
+ if token_context is not None:
+ setattr(wrapper, "event_context", event_context)
+ setattr(func, "event_context", event_context)
+
+ # Execute the authenticated function and get its result
+ if inspect.iscoroutinefunction(func):
+ result = await func(request, *args, **kwargs)
+ else:
+ result = func(request, *args, **kwargs)
+ return result
+
+ return wrapper
+
+ # event_required is already sets function_code state to wrapper
+ # @classmethod
+ # def validation_required(cls, func: Callable[..., Dict[str, Any]]) -> Callable[..., Dict[str, Any]]:
+ # """
+ # Decorator for endpoints with token and event requirements.
+ # This decorator:
+ # 1. First validates authentication using MiddlewareModule.auth_required
+ # 2. Then adds event tracking context
+
+ # Args:
+ # func: The function to be decorated
+
+ # Returns:
+ # Callable: The wrapped function with both auth and event handling
+ # """
+
+ # @wraps(func)
+ # async def wrapper(request: Request, *args: Any, **kwargs: Any) -> Union[Dict[str, Any], BaseModel]:
+
+ # # Get and validate token context from request
+ # token_context, reachable_event_codes = cls.retrieve_access_content(request)
+ # endpoint_url, reachable_event_code = cls.retrieve_intersected_event_code(request, reachable_event_codes)
+
+ # # Get auth context from the authenticated function's wrapper
+ # if token_context is not None:
+ # setattr(wrapper, 'auth', token_context)
+ # setattr(wrapper, 'url', endpoint_url)
+ # setattr(wrapper, 'func_code', reachable_event_code)
+
+ # # Execute the authenticated function and get its result
+ # if inspect.iscoroutinefunction(func):
+ # result = await func(request, *args, **kwargs)
+ # else:
+ # result = func(request, *args, **kwargs)
+ # return result
+
+ # return wrapper
diff --git a/ApiLayers/Schemas/__init__.py b/ApiLayers/Schemas/__init__.py
new file mode 100644
index 0000000..1e28a98
--- /dev/null
+++ b/ApiLayers/Schemas/__init__.py
@@ -0,0 +1,179 @@
+# SQL Models
+from .account.account import (
+ AccountBooks,
+ AccountCodeParser,
+ AccountRecords,
+ AccountCodes,
+ AccountDetail,
+ AccountMaster,
+ AccountRecordExchanges,
+)
+from .building.budget import (
+ DecisionBookBudgetBooks,
+ DecisionBookBudgetCodes,
+ DecisionBookBudgetMaster,
+ DecisionBookBudgets,
+)
+from .account.iban import (
+ BuildIbans,
+ BuildIbanDescription,
+)
+from .api.encrypter import CrypterEngine
+from .building.build import (
+ Build,
+ BuildTypes,
+ BuildParts,
+ BuildArea,
+ BuildSites,
+ BuildLivingSpace,
+ BuildPersonProviding,
+ BuildCompaniesProviding,
+ RelationshipEmployee2Build,
+)
+from .building.decision_book import (
+ BuildDecisionBook,
+ BuildDecisionBookItems,
+ BuildDecisionBookPerson,
+ BuildDecisionBookLegal,
+ BuildDecisionBookItemsUnapproved,
+ BuildDecisionBookInvitations,
+ BuildDecisionBookPayments,
+ BuildDecisionBookProjects,
+ BuildDecisionBookProjectPerson,
+ BuildDecisionBookPersonOccupants,
+ BuildDecisionBookProjectItems,
+)
+from .company.company import (
+ Companies,
+ RelationshipDutyCompany,
+)
+from .company.employee import (
+ Employees,
+ EmployeesSalaries,
+ EmployeeHistory,
+ Staff,
+)
+from .company.department import (
+ Duty,
+ Duties,
+ Departments,
+)
+from .event.event import (
+ Modules,
+ Services,
+ Service2Events,
+ Events,
+ Event2Occupant,
+ Event2Employee,
+ Event2OccupantExtra,
+ Event2EmployeeExtra,
+)
+from .identity.identity import (
+ Addresses,
+ AddressCity,
+ AddressStreet,
+ AddressLocality,
+ AddressDistrict,
+ AddressNeighborhood,
+ AddressState,
+ AddressCountry,
+ AddressPostcode,
+ AddressGeographicLocations,
+ UsersTokens,
+ OccupantTypes,
+ People,
+ Users,
+ RelationshipDutyPeople,
+ RelationshipEmployee2PostCode,
+ Contracts,
+)
+from .others.enums import (
+ ApiEnumDropdown,
+)
+from .rules.rules import (
+ EndpointRestriction,
+)
+
+# # NO-SQL Models
+# from Schemas.no_sql_models.mongo_database import (
+# MongoQuery,
+# )
+# from Schemas.no_sql_models.identity import (
+# MongoQueryIdentity,
+# )
+
+__all__ = [
+ "AccountBooks",
+ "AccountCodeParser",
+ "AccountRecords",
+ "AccountCodes",
+ "AccountDetail",
+ "AccountMaster",
+ "AccountRecordExchanges",
+ "BuildIbans",
+ "BuildIbanDescription",
+ "CrypterEngine",
+ "Build",
+ "BuildTypes",
+ "BuildParts",
+ "BuildArea",
+ "BuildSites",
+ "BuildLivingSpace",
+ "BuildPersonProviding",
+ "BuildCompaniesProviding",
+ "BuildDecisionBook",
+ "BuildDecisionBookItems",
+ "BuildDecisionBookPerson",
+ "BuildDecisionBookLegal",
+ "BuildDecisionBookItemsUnapproved",
+ "BuildDecisionBookInvitations",
+ "BuildDecisionBookPayments",
+ "BuildDecisionBookProjects",
+ "BuildDecisionBookProjectPerson",
+ "BuildDecisionBookPersonOccupants",
+ "BuildDecisionBookProjectItems",
+ "DecisionBookBudgetBooks",
+ "DecisionBookBudgetCodes",
+ "DecisionBookBudgetMaster",
+ "DecisionBookBudgets",
+ "Companies",
+ "RelationshipDutyCompany",
+ "Employees",
+ "EmployeesSalaries",
+ "EmployeeHistory",
+ "Staff",
+ "Duty",
+ "Duties",
+ "Departments",
+ "Modules",
+ "Services",
+ "Service2Events",
+ "Events",
+ "Event2Occupant",
+ "Event2Employee",
+ "Event2OccupantExtra",
+ "Event2EmployeeExtra",
+ "Addresses",
+ "AddressCity",
+ "AddressStreet",
+ "AddressLocality",
+ "AddressDistrict",
+ "AddressNeighborhood",
+ "AddressState",
+ "AddressCountry",
+ "AddressPostcode",
+ "AddressGeographicLocations",
+ "UsersTokens",
+ "OccupantTypes",
+ "People",
+ "Users",
+ "RelationshipDutyPeople",
+ "RelationshipEmployee2PostCode",
+ "Contracts",
+ "ApiEnumDropdown",
+ "EndpointRestriction",
+ "RelationshipEmployee2Build",
+ # ------------------------------------------------
+ "MongoQuery",
+ "MongoQueryIdentity",
+]
diff --git a/ApiLayers/Schemas/account/account.py b/ApiLayers/Schemas/account/account.py
new file mode 100644
index 0000000..2a30db6
--- /dev/null
+++ b/ApiLayers/Schemas/account/account.py
@@ -0,0 +1,641 @@
+from sqlalchemy.orm import mapped_column, Mapped
+from sqlalchemy import (
+ String,
+ Integer,
+ Boolean,
+ ForeignKey,
+ Index,
+ TIMESTAMP,
+ Numeric,
+ SmallInteger,
+)
+
+from Services.PostgresDb import CrudCollection
+from ApiLayers.LanguageModels.Database.account.account import (
+ AccountBooksLanguageModel,
+ AccountCodesLanguageModel,
+ AccountRecordsLanguageModel,
+ AccountRecordExchangesLanguageModel,
+ AccountDetailLanguageModel,
+ AccountCodeParserLanguageModel,
+ AccountMasterLanguageModel,
+)
+
+
class AccountBooks(CrudCollection):
    """Accounting book header owned by a company (optionally a branch).

    Identity/audit columns (``id``, ``uu_id``, ``created_at``,
    ``expiry_starts``, ...) are presumably provided by ``CrudCollection``;
    the index below references ``expiry_starts`` by name, so the base class
    must define it -- TODO confirm against the base mixin.
    """

    __tablename__ = "account_books"
    __exclude__fields__ = []  # no fields excluded from serialization
    __language_model__ = AccountBooksLanguageModel

    # Country the book is kept for (free-form string, length unconstrained here).
    country: Mapped[str] = mapped_column(String, nullable=False)
    # Branch discriminator; 0 = default (semantics not visible here -- TODO confirm).
    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # Owning company: integer FK plus a denormalized UUID copy, a pattern
    # repeated throughout this module.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
    # Optional branch; a branch is itself a ``companies`` row.
    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    branch_uu_id: Mapped[str] = mapped_column(String, comment="Branch UU ID")

    __table_args__ = (
        # "expiry_starts" is referenced by string name (column expected on base).
        Index("account_companies_book_ndx_00", company_id, "expiry_starts"),
        {"comment": "Account Book Information"},
    )
+
+
class AccountCodes(CrudCollection):
    """Chart-of-accounts entry.

    May be linked to a company, a customer (also a ``companies`` row) and/or
    a person.  NOTE(review): the FK columns are nullable while their
    ``*_uu_id`` mirrors are ``nullable=False`` -- confirm this asymmetry is
    intended.
    """

    __tablename__ = "account_codes"
    __exclude__fields__ = []
    __language_model__ = AccountCodesLanguageModel

    account_code: Mapped[str] = mapped_column(
        String(48), nullable=False, comment="Account Code"
    )
    comment_line: Mapped[str] = mapped_column(
        String(128), nullable=False, comment="Comment Line"
    )

    # Receivable vs. debit flag; which value maps to which -- TODO confirm.
    is_receive_or_debit: Mapped[bool] = mapped_column(Boolean)
    product_id: Mapped[int] = mapped_column(Integer, server_default="0")
    nvi_id: Mapped[str] = mapped_column(String(48), server_default="")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    # Single character used by AccountCodeParser.get_account_code to join segments.
    account_code_seperator: Mapped[str] = mapped_column(String(1), server_default=".")

    system_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    locked: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Company UU ID"
    )
    customer_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    customer_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Customer UU ID"
    )
    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
    person_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Person UU ID"
    )

    __table_args__ = ({"comment": "Account Code Information"},)
+
+
class AccountCodeParser(CrudCollection):
    """Decomposition of an account code into up to six dotted segments.

    ``get_account_code`` re-assembles the non-empty segments using the
    separator configured on the parent ``AccountCodes`` row, reached via the
    ``account_codes`` relationship (defined elsewhere -- TODO confirm the
    relationship name).
    """

    __tablename__ = "account_code_parser"
    __exclude__fields__ = []
    # FIX: was AccountCodesLanguageModel (copy/paste from the class above);
    # AccountCodeParserLanguageModel is imported at the top of this file and
    # was never used otherwise.
    __language_model__ = AccountCodeParserLanguageModel

    # First three segments are mandatory, the last three optional.
    account_code_1: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
    account_code_2: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
    account_code_3: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
    account_code_4: Mapped[str] = mapped_column(String, server_default="")
    account_code_5: Mapped[str] = mapped_column(String, server_default="")
    account_code_6: Mapped[str] = mapped_column(String, server_default="")

    account_code_id: Mapped[int] = mapped_column(
        ForeignKey("account_codes.id"), nullable=False
    )
    account_code_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Code UU ID"
    )

    __table_args__ = (
        Index("_account_code_parser_ndx_00", account_code_id),
        {"comment": "Account Code Parser Information"},
    )

    @property
    def get_account_code(self):
        """Join the non-empty segments with the parent row's separator."""
        return f"{self.account_codes.account_code_seperator}".join(
            [
                getattr(self, f"account_code_{i}")
                for i in range(1, 7)
                if getattr(self, f"account_code_{i}")
            ]
        )
+
+
class AccountMaster(CrudCollection):
    """Accounting voucher (receipt) header; detail lines live in AccountDetail.

    Carries document identity (date, plug type/number), free comment lines,
    and running active/passive totals.  The ``*_1..4`` total pairs look like
    per-currency or per-period buckets -- TODO confirm their meaning.
    """

    __tablename__ = "account_master"
    __exclude__fields__ = []
    __language_model__ = AccountMasterLanguageModel

    doc_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
    )
    plug_type: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type")
    plug_number: Mapped[int] = mapped_column(
        Integer, nullable=False, comment="Plug Number"
    )

    special_code: Mapped[str] = mapped_column(String(12), server_default="")
    authorization_code: Mapped[str] = mapped_column(String(12), server_default="")

    doc_code: Mapped[str] = mapped_column(String(12), server_default="")
    doc_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    comment_line1: Mapped[str] = mapped_column(String, server_default="")
    comment_line2: Mapped[str] = mapped_column(String, server_default="")
    comment_line3: Mapped[str] = mapped_column(String, server_default="")
    comment_line4: Mapped[str] = mapped_column(String, server_default="")
    comment_line5: Mapped[str] = mapped_column(String, server_default="")
    comment_line6: Mapped[str] = mapped_column(String, server_default="")
    project_code: Mapped[str] = mapped_column(String(12), server_default="")
    module_no: Mapped[str] = mapped_column(String, server_default="")
    journal_no: Mapped[int] = mapped_column(Integer, server_default="0")

    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    canceled: Mapped[bool] = mapped_column(Boolean, server_default="0")
    print_count: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    # Debit/credit running totals; precision 20, scale 6 throughout the module.
    total_active: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    # Origin data-center bookkeeping (replication/import metadata -- TODO confirm).
    data_center_id: Mapped[str] = mapped_column(String, server_default="")
    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")

    # Owning account book header.
    account_header_id: Mapped[int] = mapped_column(
        ForeignKey("account_books.id"), nullable=False
    )
    account_header_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Header UU ID"
    )
    project_item_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_projects.id")
    )
    project_item_uu_id: Mapped[str] = mapped_column(
        String, comment="Project Item UU ID"
    )
    department_id: Mapped[int] = mapped_column(ForeignKey("departments.id"))
    department_uu_id: Mapped[str] = mapped_column(String, comment="Department UU ID")

    __table_args__ = (
        Index("_account_master_ndx_00", doc_date, account_header_id),
        {"comment": "Account Master Information"},
    )
+
+
class AccountDetail(CrudCollection):
    """Voucher detail line belonging to an AccountMaster record.

    Unique per (master, doc_date, line_no, account book header) -- see index.
    ``plug_type_id`` is enum-backed via ``__enum_list__``
    ("AccountingReceiptTypes", default key "M").
    """

    __tablename__ = "account_detail"
    __exclude__fields__ = []
    __enum_list__ = [("plug_type", "AccountingReceiptTypes", "M")]
    __language_model__ = AccountDetailLanguageModel

    doc_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
    )
    line_no: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Line Number"
    )
    # Single-character debit/receive flag; value set not visible here -- TODO confirm.
    receive_debit: Mapped[str] = mapped_column(
        String(1), nullable=False, comment="Receive Debit"
    )
    debit: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Debit"
    )

    department: Mapped[str] = mapped_column(String(24), server_default="")
    special_code: Mapped[str] = mapped_column(String(12), server_default="")
    account_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    account_fiche_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    center_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    general_code: Mapped[str] = mapped_column(String(32), server_default="")
    credit: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    currency_type: Mapped[str] = mapped_column(String(4), server_default="TL")
    exchange_rate: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    # ``*_cur`` columns hold the amounts expressed in ``currency_type``.
    debit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    credit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    discount_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    amount: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    # Annotation corrected: column type is String(32), so this maps to str
    # (was wrongly annotated Mapped[float]).
    cross_account_code: Mapped[str] = mapped_column(String(32), server_default="")
    inf_index: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    not_inflated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    not_calculated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    comment_line1: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line2: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line3: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line4: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line5: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line6: Mapped[str] = mapped_column(String(64), server_default="")
    owner_acc_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    from_where: Mapped[int] = mapped_column(Integer, server_default="0")
    orj_eid: Mapped[int] = mapped_column(Integer, server_default="0")
    canceled: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    data_center_id: Mapped[str] = mapped_column(String, server_default="")
    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    plug_type_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    # Annotation added (was a bare assignment, unlike every sibling column).
    plug_type_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type UU ID")
    account_header_id: Mapped[int] = mapped_column(
        ForeignKey("account_books.id"), nullable=False
    )
    account_header_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Header UU ID"
    )
    account_code_id: Mapped[int] = mapped_column(
        ForeignKey("account_codes.id"), nullable=False
    )
    account_code_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Code UU ID"
    )
    account_master_id: Mapped[int] = mapped_column(
        ForeignKey("account_master.id"), nullable=False
    )
    account_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Master UU ID"
    )
    project_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_projects.id")
    )
    project_uu_id: Mapped[str] = mapped_column(String, comment="Project UU ID")

    __table_args__ = (
        Index(
            "_account_detail_ndx_00",
            account_master_id,
            doc_date,
            line_no,
            account_header_id,
            unique=True,
        ),
        {"comment": "Account Detail Information"},
    )
+
+
class AccountRecords(CrudCollection):
    """Imported bank transaction record tied to buildings and decision books.

    Field roles (translated from the original Turkish note):
    ``build_decision_book_id`` = the decision book responsible for the record;
    ``send_company_id`` = sending company, ``send_person_id`` = sending person;
    ``customer_id`` = responsible user, ``company_id`` = responsible company.

    ``receive_debit`` and ``budget_type`` are enum-backed FKs into
    ``api_enum_dropdown`` (see ``__enum_list__``).  Uniqueness of a bank row
    is (iban, bank_date, bank_reference_code, bank_balance) -- see ndx_01.
    """

    __tablename__ = "account_records"
    __exclude__fields__ = []
    __enum_list__ = [
        ("receive_debit", "DebitTypes", "D"),
        ("budget_type", "BudgetType", "B"),
    ]
    __language_model__ = AccountRecordsLanguageModel

    iban: Mapped[str] = mapped_column(
        String(64), nullable=False, comment="IBAN Number of Bank"
    )
    bank_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Date"
    )

    currency_value: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Currency Value"
    )
    bank_balance: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Bank Balance"
    )
    currency: Mapped[str] = mapped_column(
        String(5), nullable=False, comment="Unit of Currency"
    )
    additional_balance: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Additional Balance"
    )
    channel_branch: Mapped[str] = mapped_column(
        String(120), nullable=False, comment="Branch Bank"
    )
    process_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Process Type Name"
    )
    process_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Process Type"
    )
    process_comment: Mapped[str] = mapped_column(
        String, nullable=False, comment="Transaction Record Comment"
    )
    # Raw/unparsed remainder of the imported statement line.
    process_garbage: Mapped[str] = mapped_column(
        String, nullable=True, comment="Transaction Record Garbage"
    )
    bank_reference_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Reference Code"
    )

    add_comment_note: Mapped[str] = mapped_column(String, server_default="")
    is_receipt_mail_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
    # Annotation added (was a bare assignment, unlike sibling columns).
    found_from: Mapped[str] = mapped_column(String, server_default="")
    # Match confidence of the record-to-payer heuristic -- TODO confirm range.
    similarity: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    remainder_balance: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")

    # Denormalized parts of bank_date (year/month/week/day) for reporting.
    bank_date_y: Mapped[int] = mapped_column(Integer)
    bank_date_m: Mapped[int] = mapped_column(SmallInteger)
    bank_date_w: Mapped[int] = mapped_column(SmallInteger)
    bank_date_d: Mapped[int] = mapped_column(SmallInteger)

    approving_accounting_record: Mapped[bool] = mapped_column(
        Boolean, server_default="0"
    )
    # Sentinel 1900-01-01 means "not yet receipted".
    accounting_receipt_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
    )
    accounting_receipt_number: Mapped[int] = mapped_column(Integer, server_default="0")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    approved_record: Mapped[bool] = mapped_column(Boolean, server_default="0")
    # Source spreadsheet name for imported rows.
    import_file_name: Mapped[str] = mapped_column(
        String, nullable=True, comment="XLS Key"
    )

    receive_debit: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    receive_debit_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Debit UU ID"
    )
    budget_type: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    budget_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Type UU ID"
    )
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )
    send_company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=True
    )
    # Annotation added (was a bare assignment).
    send_company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Send Company UU ID"
    )

    send_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    send_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Send Person UU ID"
    )
    approving_accounting_person: Mapped[int] = mapped_column(
        ForeignKey("people.id"), nullable=True
    )
    approving_accounting_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Approving Accounting Person UU ID"
    )

    living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"), nullable=True
    )
    living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Living Space UU ID"
    )
    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    # Annotation added (was a bare assignment).
    customer_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Customer UU ID")

    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True)
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build UU ID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    __table_args__ = (
        Index("_budget_records_ndx_00", is_receipt_mail_send, bank_date),
        # Natural key of an imported bank statement line.
        Index(
            "_budget_records_ndx_01",
            iban,
            bank_date,
            bank_reference_code,
            bank_balance,
            unique=True,
        ),
        Index("_budget_records_ndx_02", status_id, bank_date),
        {
            "comment": "Bank Records that are related to building and financial transactions"
        },
    )
+
+ # def payment_budget_record_close(self):
+ # from database_sql_models import (
+ # DecisionBookProjectPaymentsMaster,
+ # ApiEnumDropdown,
+ # BuildDecisionBook,
+ # BuildDecisionBookPaymentsMaster,
+ # )
+ #
+ # budget_record = self
+ # if self.receive_debit == ApiEnumDropdown.uuid_of_enum(
+ # enum_class="DebitTypes", key="R"
+ # ):
+ # print(
+ # "This record is not debit. Debit:",
+ # self.receive_debit,
+ # "DebitTypes.R.name",
+ # # str(DebitTypes.R.name),
+ # )
+ # return
+ # if abs(budget_record.currency_value + budget_record.remainder_balance) > 0:
+ # payment_dict = {
+ # "budget_records_id": self.id,
+ # "build_decision_book_id": budget_record.build_decision_book_id,
+ # "build_parts_id": budget_record.build_parts_id,
+ # "start_date": budget_record.bank_date,
+ # "paid_value": budget_record.currency_value
+ # - budget_record.remainder_balance,
+ # "is_all": False,
+ # }
+ # (paid_value, start_paid_value, balance) = (
+ # float(budget_record.currency_value - budget_record.remainder_balance),
+ # float(budget_record.currency_value - budget_record.remainder_balance),
+ # float(budget_record.remainder_balance),
+ # )
+ # print(
+ # "self.id",
+ # self.id,
+ # "paid_value",
+ # paid_value,
+ # "start_paid_value",
+ # start_paid_value,
+ # "balance",
+ # balance,
+ # self.receive_debit,
+ # )
+ #
+ # if not BuildDecisionBook.find_one(
+ # id=payment_dict["build_decision_book_id"]
+ # ):
+ # return paid_value
+ #
+ # if budget_record.replication_id == 55:
+ # if paid_value > 0:
+ # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+ # enum_class="BuildDuesTypes", key="L"
+ # )
+ # paid_value = (
+ # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+ # **payment_dict
+ # )
+ # )
+ # print("dues_type", payment_dict["dues_type"], paid_value)
+ # if paid_value > 0:
+ # payment_dict.pop("dues_type", None)
+ # paid_value = BuildDecisionBookPaymentsMaster.pay_dues_of_build_part(
+ # **payment_dict
+ # )
+ # print("dues_type", None, paid_value)
+ # if paid_value > 0:
+ # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+ # enum_class="BuildDuesTypes", key="R"
+ # )
+ # paid_value = (
+ # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+ # **payment_dict
+ # )
+ # )
+ # print("dues_type", payment_dict["dues_type"], paid_value)
+ # payment_dict["is_all"] = True
+ # if paid_value > 0:
+ # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+ # enum_class="BuildDuesTypes", key="L"
+ # )
+ # paid_value = (
+ # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+ # **payment_dict
+ # )
+ # )
+ # print("is all dues_type", payment_dict["dues_type"], paid_value)
+ # if paid_value > 0:
+ # payment_dict.pop("dues_type", None)
+ # paid_value = BuildDecisionBookPaymentsMaster.pay_dues_of_build_part(
+ # **payment_dict
+ # )
+ # print("is all dues_type", None, paid_value)
+ # if paid_value > 0:
+ # payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+ # enum_class="BuildDuesTypes", key="R"
+ # )
+ # paid_value = (
+ # DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+ # **payment_dict
+ # )
+ # )
+ # print("is all dues_type", payment_dict["dues_type"], paid_value)
+
+
+# class AccountRecordDecisionPaymentClosed(CrudCollection):
+#
+# __tablename__ = "account_record_decision_payment_closed"
+# __exclude__fields__ = []
+#
+# arc_currency: Mapped[str] = mapped_column(
+# String(5), nullable=False, comment="Unit of Currency"
+# )
+# arc_processing_time: Mapped[TIMESTAMP] = mapped_column(
+# TIMESTAMP(timezone=True), nullable=False, comment="Processing Time"
+# )
+# arc_currency_value: Mapped[float] = mapped_column(
+# Numeric(20, 6), nullable=False, comment="Currency Value"
+# )
+#
+# decision_book_budgets_id: Mapped[int] = mapped_column(
+# ForeignKey("decision_book_budgets.id"), nullable=True
+# )
+# decision_book_budgets_uu_id: Mapped[str] = mapped_column(
+# String, nullable=True, comment="Budget UUID"
+# )
+#
+# build_decision_book_payment_id: Mapped[int] = mapped_column(
+# ForeignKey("build_decision_book_payments.id")
+# )
+# build_decision_book_payment_uu_id: Mapped[str] = mapped_column(
+# String, nullable=True, comment="Build Decision Book Payment UU ID"
+# )
+# account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
+# account_records_uu_id: Mapped[str] = mapped_column(
+# String, nullable=True, comment="Account Record UU ID"
+# )
+#
+# __table_args__ = (
+# Index(
+# "_account_record_decision_payment_closed_ndx_00",
+# account_records_id,
+# build_decision_book_payment_id,
+# arc_processing_time,
+# ),
+# Index(
+# "_account_record_decision_payment_closed_ndx_01",
+# build_decision_book_payment_id,
+# account_records_id,
+# arc_processing_time,
+# ),
+# {"comment": "Account Record Decision Payment Closed Information"},
+# )
+
+
class AccountRecordExchanges(CrudCollection):
    """Exchange-rate snapshot for a bank record (AccountRecords).

    Stores the record's value converted into USD/EUR/GBP/CNY at the rate in
    effect for the transaction, one row per account record.
    """

    __tablename__ = "account_record_exchanges"
    __exclude__fields__ = []
    __language_model__ = AccountRecordExchangesLanguageModel

    are_currency: Mapped[str] = mapped_column(
        String(5), nullable=False, comment="Unit of Currency"
    )
    # Rate applied to the source value; defaults to 1 (no conversion).
    are_exchange_rate: Mapped[float] = mapped_column(
        Numeric(18, 6), nullable=False, server_default="1"
    )
    usd_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the usd exchange rate with the current value result.",
    )
    eur_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the eur exchange rate with the current value result.",
    )
    gbp_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the gpd exchange rate with the current value result.",
    )
    cny_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the cny exchange rate with the current value result.",
    )

    account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
    account_records_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Account Record UU ID"
    )

    __table_args__ = (
        Index("_account_record_exchanges_ndx_00", account_records_id),
        {"comment": "Account Record Exchanges Information"},
    )
diff --git a/ApiLayers/Schemas/account/iban.py b/ApiLayers/Schemas/account/iban.py
new file mode 100644
index 0000000..5010eb8
--- /dev/null
+++ b/ApiLayers/Schemas/account/iban.py
@@ -0,0 +1,103 @@
+from sqlalchemy import String, ForeignKey, Index, TIMESTAMP, SmallInteger, Identity
+from sqlalchemy.orm import mapped_column, Mapped
+
+from Services.PostgresDb import CrudCollection
+
+
class BuildIbans(CrudCollection):
    """IBAN registered for a building, valid from start_date until stop_date.

    (iban, start_date) is unique; a far-future stop_date means the IBAN is
    still active.
    """

    __tablename__ = "build_ibans"
    __exclude__fields__ = []

    iban: Mapped[str] = mapped_column(
        String(40), server_default="", nullable=False, comment="IBAN number"
    )
    start_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Start Date"
    )

    # Sentinel "2900-01-01" = no end date / currently active.
    stop_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="2900-01-01 00:00:00"
    )
    bank_code: Mapped[str] = mapped_column(String(24), server_default="TR0000000000000")
    # Free-form note; default is a placeholder.
    xcomment: Mapped[str] = mapped_column(String(64), server_default="????")

    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=True, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Building UUID", index=True
    )
    # building: Mapped["Build"] = relationship(
    #     "Build", back_populates="build_ibans", foreign_keys=[build_id]
    # )

    __table_args__ = (
        Index("_build_ibans_ndx_01", iban, start_date, unique=True),
        {"comment": "IBANs related to money transactions due to building objects"},
    )
+
+
class BuildIbanDescription(CrudCollection):
    """Search keyword attached to an IBAN for matching bank statement lines.

    (iban, search_word, group_id) is unique; the optional FKs identify who
    the matched payment should be attributed to (customer, company or
    building part).
    """

    __tablename__ = "build_iban_description"
    __exclude__fields__ = []

    iban: Mapped[str] = mapped_column(String, nullable=False, comment="IBAN Number")
    group_id: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Group ID"
    )
    search_word: Mapped[str] = mapped_column(
        String, nullable=False, comment="Search Word", index=True
    )

    # decision_book_project_id: Mapped[int] = mapped_column(
    #     ForeignKey("build_decision_book_projects.id")
    # )
    # decision_book_project_uu_id: Mapped[str] = mapped_column(
    #     String, nullable=False, comment="Decision Book Project UUID"
    # )
    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    customer_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Customer UUID"
    )
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UUID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UUID"
    )

    # decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
    #     "BuildDecisionBookProjects",
    #     back_populates="search_iban_description",
    #     foreign_keys=[decision_book_project_id],
    # )
    # customer: Mapped["People"] = relationship(
    #     "People", back_populates="search_iban_description", foreign_keys=[customer_id]
    # )
    # company: Mapped["Companies"] = relationship(
    #     "Company", back_populates="search_iban_description", foreign_keys=[company_id]
    # )
    # parts: Mapped["BuildParts"] = relationship(
    #     "BuildParts",
    #     back_populates="search_iban_description",
    #     foreign_keys=[build_parts_id],
    # )

    __table_args__ = (
        Index(
            "_search_iban_description_ndx_00", iban, search_word, group_id, unique=True
        ),
        {"comment": "Search Iban Description Information"},
    )
diff --git a/ApiLayers/Schemas/api/encrypter.py b/ApiLayers/Schemas/api/encrypter.py
new file mode 100644
index 0000000..84bc8d2
--- /dev/null
+++ b/ApiLayers/Schemas/api/encrypter.py
@@ -0,0 +1,124 @@
+import random
+
+from datetime import datetime, timedelta
+
+from sqlalchemy import String
+from sqlalchemy.orm import mapped_column, Mapped
+
+from cryptography.fernet import Fernet, MultiFernet
+from Services.PostgresDb import CrudCollection
+
+
class CrypterEngine(CrudCollection):
    """Store of Fernet key pairs used to encrypt/decrypt ORM row fields.

    Each row holds two base64 Fernet keys persisted as ``str``.  Keys younger
    than ~29 days are considered valid; encryption picks a random valid pair
    and stamps the target row with the pair's ``uu_id`` (``cryp_uu_id``) so
    decryption can locate the same pair again.  Fields to protect are listed
    in the target object's ``__encrypt_list__`` (defined on the target class).
    """

    __tablename__ = "crypter_engine"
    __table_args__ = ()
    # Mirrors of the last list operation, kept for backward compatibility.
    encrypt_list = []
    decrypt_list = []
    keys_error = "Unable to retrieve encrypt keys"
    alchemy_error = "Alchemy object is empty"

    key_first: Mapped[str] = mapped_column(String, nullable=False)
    key_second: Mapped[str] = mapped_column(String, nullable=False)

    @classmethod
    def get_valid_keys(cls, row=None):
        """Return ``(key_first, key_second, uu_id)`` suitable for Fernet.

        With ``row`` carrying a ``cryp_uu_id``, the exact pair that encrypted
        that row is returned (decryption path); otherwise a random
        still-valid pair is returned (encryption path).  Keys come back as
        ``bytes``; ``(None, None, None)`` when nothing usable exists.
        """
        cls.encrypt_list, cls.decrypt_list = [], []
        # Ensure at least one non-expired key pair exists before choosing.
        if not cls.filter_all(cls.created_at > datetime.now() - timedelta(days=29)).get(
            1
        ):
            cls.create_encrypt_keys(count=100)
        if decrypt_identifier := getattr(row, "cryp_uu_id", None):
            if decrypt_row := cls.find_one(uu_id=str(decrypt_identifier)):
                # Keys are persisted as str -> encode for Fernet.
                # FIX: original called .decode() on str (AttributeError).
                return (
                    decrypt_row.key_first.encode(),
                    decrypt_row.key_second.encode(),
                    decrypt_row.uu_id,
                )
        if encrypt_rows := cls.filter_all(
            cls.created_at > datetime.now() - timedelta(days=29)
        ).data:
            encrypt_row = random.choice(encrypt_rows)
            # FIX: original returned encrypt_rows.uu_id (the list, not the row).
            return (
                encrypt_row.key_first.encode(),
                encrypt_row.key_second.encode(),
                encrypt_row.uu_id,
            )
        return None, None, None

    @classmethod
    def create_encrypt_keys(cls, count: int):
        """Generate and persist *count* fresh Fernet key pairs (stored as str)."""
        for _ in range(count):
            key_first = Fernet.generate_key()
            key_second = Fernet.generate_key()
            cls.find_or_create(
                key_first=key_first.decode(), key_second=key_second.decode()
            )

    @classmethod
    def raise_exception(cls, message=None):
        """Raise a generic error; defaults to the key-retrieval message."""
        raise Exception(message if message else cls.keys_error)

    @classmethod
    def _encrypt_one(cls, alchemy_object):
        """Encrypt ``__encrypt_list__`` fields of one object; return the updated object."""
        key_first, key_second, cryp_uu_id = cls.get_valid_keys()
        # FIX: validate keys BEFORE building MultiFernet; the original built
        # Fernet(None) first, crashing before the intended exception.
        if not key_first or not key_second:
            cls.raise_exception()
        fernet_keys = MultiFernet([Fernet(key_first), Fernet(key_second)])
        alchemy_dict = alchemy_object.get_dict() if alchemy_object else None
        if not alchemy_dict:
            cls.raise_exception(cls.alchemy_error)
        for key, plain_row in alchemy_dict.items():
            if key in alchemy_object.__encrypt_list__:
                # Fernet.encrypt requires bytes; DB field values are usually str.
                token = plain_row.encode() if isinstance(plain_row, str) else plain_row
                alchemy_dict[key] = fernet_keys.encrypt(token).decode()
        alchemy_dict["cryp_uu_id"] = cryp_uu_id
        return alchemy_object.update(**alchemy_dict)

    @classmethod
    def _decrypt_one(cls, alchemy_object):
        """Decrypt ``__encrypt_list__`` fields of one object; return a plain dict."""
        key_first, key_second, _cryp_uu_id = cls.get_valid_keys(row=alchemy_object)
        if not key_first or not key_second:
            cls.raise_exception()
        fernet_keys = MultiFernet([Fernet(key_first), Fernet(key_second)])
        alchemy_dict = alchemy_object.get_dict() if alchemy_object else None
        if not alchemy_dict:
            cls.raise_exception(cls.alchemy_error)
        for key, cipher_row in alchemy_dict.items():
            if key in alchemy_object.__encrypt_list__:
                alchemy_dict[key] = fernet_keys.decrypt(cipher_row).decode()
        return alchemy_dict

    @classmethod
    def encrypt_given_alchemy_list(cls, alchemy_object_list: list):
        """Encrypt every object in the list; return the updated objects.

        FIX: the original accumulated into ``cls.encrypt_list``, which
        ``get_valid_keys`` reset on every iteration -- only the last object
        survived in the returned list.
        """
        results = [cls._encrypt_one(obj) for obj in alchemy_object_list]
        cls.encrypt_list = results  # keep the class attribute in sync
        return results

    @classmethod
    def encrypt_given_alchemy_object(cls, alchemy_object_object):
        """Encrypt a single object; return the updated object."""
        return cls._encrypt_one(alchemy_object_object)

    @classmethod
    def decrypt_given_alchemy(cls, alchemy_object_list: list):
        """Decrypt every object in the list; return a list of plain dicts.

        FIX: same last-item-only accumulator bug as the encrypt list method.
        """
        results = [cls._decrypt_one(obj) for obj in alchemy_object_list]
        cls.decrypt_list = results  # keep the class attribute in sync
        return results

    @classmethod
    def decrypt_given_alchemy_object(cls, alchemy_object):
        """Decrypt a single object; return a plain dict."""
        return cls._decrypt_one(alchemy_object)
diff --git a/ApiLayers/Schemas/building/budget.py b/ApiLayers/Schemas/building/budget.py
new file mode 100644
index 0000000..37af08c
--- /dev/null
+++ b/ApiLayers/Schemas/building/budget.py
@@ -0,0 +1,156 @@
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Index,
+ SmallInteger,
+ TIMESTAMP,
+ Text,
+ Numeric,
+ Integer,
+)
+from sqlalchemy.orm import mapped_column, Mapped
+from Services.PostgresDb import CrudCollection
+
+
class DecisionBookBudgetBooks(CrudCollection):
    """Budget book header bound to a building's decision book.

    Ties one budget book to the owning company (optionally a branch) and
    to the decision book it was approved in.
    """

    __tablename__ = "decision_book_budget_books"
    __exclude__fields__ = []

    # Country the budget book applies to; required.
    country: Mapped[str] = mapped_column(String, nullable=False)
    # Branch classification flag; 0 by default — semantics of the values
    # are not visible here, TODO confirm against the enum that feeds it.
    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # Owning company (id + denormalized uu_id, as elsewhere in the schema).
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
    # Optional branch; note it also points at companies.id (a branch is a company).
    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    branch_uu_id: Mapped[str] = mapped_column(
        String, comment="Branch UU ID", nullable=True
    )
    # Decision book this budget book belongs to.
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    __table_args__ = (
        # Lookup path: budget books of a company ordered by creation time.
        Index(
            "_decision_book_budget_companies_book_ndx_00",
            company_id,
            "created_at",
        ),
        {"comment": "budget Book Information"},
    )
+
+
class DecisionBookBudgetCodes(CrudCollection):
    """Budget line code (account-like code) used inside a decision book budget.

    A code may be scoped to a decision book, a building part and/or a
    company — all three links are optional.
    """

    __tablename__ = "decision_book_budget_codes"
    __exclude__fields__ = []

    # Short identifier of the budget line, e.g. an account code.
    budget_code: Mapped[str] = mapped_column(
        String(48), nullable=False, comment="budget Code"
    )
    # Free-text description of what the code covers.
    comment_line: Mapped[str] = mapped_column(
        Text, nullable=False, comment="Comment Line"
    )

    # Optional link to the decision book the code was defined in.
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    # Optional link to a specific building part.
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )

    # Optional owning company.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )

    __table_args__ = (
        Index("_decision_book_budget_codes_ndx_00", budget_code, "created_at"),
        Index("_decision_book_budget_codes_ndx_01", company_id, "created_at"),
        {"comment": "budget Book Information"},
    )
+
+
class DecisionBookBudgetMaster(CrudCollection):
    """Master record of one budget: type, currency, total and tracking period."""

    __tablename__ = "decision_book_budget_master"
    __exclude__fields__ = []

    budget_type: Mapped[str] = mapped_column(
        String(50), nullable=False
    )  # Budget type (e.g. Operational, Investment)
    currency: Mapped[str] = mapped_column(
        String(8), server_default="TRY"
    )  # Budget currency code
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )  # Total budget amount

    # Dropdown enum entry that defines the tracking cadence of the budget.
    tracking_period_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    # NOTE(review): DB comment below says "Part Direction UUID" — looks
    # copy-pasted from another model; confirm before relying on it.
    tracking_period_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Part Direction UUID"
    )
    # Budget book this master row belongs to.
    budget_books_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_books.id"), nullable=False
    )
    budget_books_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Books UU ID"
    )
    department_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("departments.id"), nullable=False
    )  # Relation to the owning department
    department_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Department UU ID"
    )

    __table_args__ = ({"comment": "budget Book Information"},)
+
+
class DecisionBookBudgets(CrudCollection):
    """Periodic budget movement row: totals, usage and remainder per budget code."""

    __tablename__ = "decision_book_budgets"
    __exclude__fields__ = []

    process_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False
    )  # Period start date of this budget row
    # Budget line code this row is booked under.
    # NOTE(review): unlike sibling FKs there is no budget_codes_uu_id
    # companion column — confirm that is intentional.
    budget_codes_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_codes.id"), nullable=False
    )
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )  # Total budget for the period
    used_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )  # Amount of the budget already consumed
    remaining_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )  # Remaining budget (original comment said "used" — copy-paste)

    # Master budget record this row details.
    decision_book_budget_master_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_master.id"), nullable=False
    )
    decision_book_budget_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Budget Master UU ID"
    )

    __table_args__ = (
        # Lookup path: rows of one master budget ordered by period date.
        Index(
            "_decision_book_budgets_ndx_00",
            decision_book_budget_master_uu_id,
            process_date,
        ),
        {"comment": "budget Book Information"},
    )
diff --git a/ApiLayers/Schemas/building/build.py b/ApiLayers/Schemas/building/build.py
new file mode 100644
index 0000000..ef7bef8
--- /dev/null
+++ b/ApiLayers/Schemas/building/build.py
@@ -0,0 +1,806 @@
+from datetime import timedelta
+from typing import List, Union
+
+from fastapi import HTTPException, status
+from sqlalchemy.orm import mapped_column, relationship, Mapped
+from sqlalchemy import (
+ String,
+ Integer,
+ Boolean,
+ ForeignKey,
+ Index,
+ TIMESTAMP,
+ Text,
+ Numeric,
+ or_,
+)
+
+from ApiLayers.ApiLibrary import system_arrow, SelectActionWithEmployee
+from Services.PostgresDb import CrudCollection
+from ApiLayers.ApiValidations.Request import (
+ InsertBuild,
+ InsertBuildParts,
+ InsertBuildLivingSpace,
+ UpdateBuild,
+)
+
+from ApiLayers.ApiValidations.Custom.token_objects import (
+ EmployeeTokenObject,
+ OccupantTokenObject,
+)
+from ApiLayers.LanguageModels.Database.building.build import (
+ BuildTypesLanguageModel,
+ Part2EmployeeLanguageModel,
+ BuildPartsLanguageModel,
+ BuildSitesLanguageModel,
+ RelationshipEmployee2BuildLanguageModel,
+ BuildLanguageModel,
+ BuildPartsLanguageModel,
+ BuildLivingSpaceLanguageModel,
+ BuildManagementLanguageModel,
+ BuildAreaLanguageModel,
+ BuildCompaniesProvidingLanguageModel,
+ BuildPersonProvidingLanguageModel,
+)
+
+
class BuildTypes(CrudCollection):
    """
    BuildTypes class based on declarative_base and BaseMixin via session

    Localized catalogue of building/part types: one row per
    (type_code, function_code, lang) combination.
    """

    __tablename__ = "build_types"
    __exclude__fields__ = []
    __language_model__ = BuildTypesLanguageModel
    __include__fields__ = []

    # Functional grouping of the type (e.g. residential vs commercial —
    # exact code set not visible here, TODO confirm).
    function_code: Mapped[str] = mapped_column(
        String(12), server_default="", nullable=False, comment="Function Code"
    )
    # Short code of the structure type; combined with part_no to build
    # BuildParts.part_code elsewhere in this file.
    type_code: Mapped[str] = mapped_column(
        String(12), server_default="", nullable=False, comment="Structure Type Code"
    )
    # Language of type_name; defaults to Turkish.
    lang: Mapped[str] = mapped_column(
        String(4), server_default="TR", nullable=False, comment="Language"
    )
    # Human-readable, localized name of the type.
    type_name: Mapped[str] = mapped_column(
        String(48), server_default="", nullable=False, comment="Type Name"
    )

    __table_args__ = (
        # One name per (type, function, language).
        Index("_build_types_ndx_00", type_code, function_code, lang, unique=True),
        {"comment": "Function group of building types with their language information"},
    )
+
+
class Part2Employee(CrudCollection):
    """
    Employee2Parts class based on declarative_base and BaseMixin via session
    In between start and end date, a part can be assigned to only one employee

    Association table linking a building part to the employee responsible
    for it.
    """

    __tablename__ = "part2employee"
    __exclude__fields__ = []
    __language_model__ = Part2EmployeeLanguageModel
    __include__fields__ = []

    # Denormalized building id; NOTE(review): plain Integer, no FK to
    # build.id unlike the other columns — confirm that is intentional.
    build_id: Mapped[int] = mapped_column(Integer, comment="Building ID")
    part_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=False, comment="Part ID"
    )
    employee_id: Mapped[int] = mapped_column(
        ForeignKey("employees.id"), nullable=False, comment="Employee ID"
    )

    __table_args__ = (
        # Each (employee, part) pair may exist only once.
        Index("_part2employee_ndx_00", employee_id, part_id, unique=True),
        {"comment": "Employee2Parts Information"},
    )
+
+
class RelationshipEmployee2Build(CrudCollection):
    """
    CompanyRelationship class based on declarative_base and CrudCollection via session
    Company -> Sub Company -> Sub-Sub Company

    Grants an employee of a company access to a building (``member_id``).
    Used by ``Build`` as its ``__many__table__`` access-control table.
    """

    __tablename__ = "relationship_employee2build"
    __exclude__fields__ = []
    __language_model__ = RelationshipEmployee2BuildLanguageModel

    company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=False
    )  # 1, 2, 3
    employee_id: Mapped[int] = mapped_column(
        ForeignKey("employees.id"), nullable=False
    )  # employee -> (n)person Evyos LTD
    # The building the relationship grants access to.
    member_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False
    )  # 2, 3, 4

    # Kind of relationship; defaults to plain employment.
    relationship_type: Mapped[str] = mapped_column(
        String, nullable=True, server_default="Employee"
    )  # Commercial
    # When true the relation is visible but grants no actions —
    # presumably read-only access; TODO confirm against usage.
    show_only: Mapped[bool] = mapped_column(Boolean, server_default="False")

    __table_args__ = (
        # One relation per (company, employee, building, type).
        Index(
            "relationship_build_employee_ndx_00",
            company_id,
            employee_id,
            member_id,
            relationship_type,
            unique=True,
        ),
        {"comment": "Build & Employee Relationship Information"},
    )
+
+
class Build(CrudCollection, SelectActionWithEmployee):
    """
    Builds class based on declarative_base and BaseMixin via session

    One physical building: address, structural facts (floors, lifts,
    heating), its parts and its decision books. Access is mediated through
    RelationshipEmployee2Build (``__many__table__``).
    """

    __tablename__ = "build"
    __exclude__fields__ = []
    __language_model__ = BuildLanguageModel
    __include__fields__ = []
    __access_by__ = []
    __many__table__ = RelationshipEmployee2Build
    # __explain__ = AbstractBuild()

    # Government address registry code; unique per building.
    gov_address_code: Mapped[str] = mapped_column(
        String, server_default="", unique=True
    )
    build_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building Name"
    )
    build_no: Mapped[str] = mapped_column(
        String(8), nullable=False, comment="Building Number"
    )

    max_floor: Mapped[int] = mapped_column(
        Integer, server_default="1", nullable=False, comment="Max Floor"
    )
    underground_floor: Mapped[int] = mapped_column(
        Integer, server_default="0", nullable=False, comment="Underground Floor"
    )
    build_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01"
    )
    decision_period_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default="1900-01-01",
        comment="Building annual ordinary meeting period",
    )
    tax_no: Mapped[str] = mapped_column(String(24), server_default="")
    lift_count: Mapped[int] = mapped_column(Integer, server_default="0")
    heating_system: Mapped[bool] = mapped_column(Boolean, server_default="True")
    cooling_system: Mapped[bool] = mapped_column(Boolean, server_default="False")
    hot_water_system: Mapped[bool] = mapped_column(Boolean, server_default="False")
    block_service_man_count: Mapped[int] = mapped_column(Integer, server_default="0")
    security_service_man_count: Mapped[int] = mapped_column(Integer, server_default="0")
    garage_count: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Garage Count"
    )
    # BuildParts.id of the management office; resolved by the
    # management_room property below (no FK on purpose, it is scoped by build_id).
    management_room_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Management Room ID"
    )

    site_id: Mapped[int] = mapped_column(ForeignKey("build_sites.id"), nullable=True)
    site_uu_id: Mapped[str] = mapped_column(String, comment="Site UUID", nullable=True)
    address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"), nullable=False)
    address_uu_id: Mapped[str] = mapped_column(
        String, comment="Address UUID", nullable=False
    )
    build_types_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=False, comment="Building Type"
    )
    build_types_uu_id: Mapped[str] = mapped_column(String, comment="Building Type UUID")

    parts: Mapped[List["BuildParts"]] = relationship(
        "BuildParts", back_populates="buildings", foreign_keys="BuildParts.build_id"
    )
    decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
        "BuildDecisionBook",
        back_populates="buildings",
        foreign_keys="BuildDecisionBook.build_id",
    )

    # build_ibans: Mapped["BuildIbans"] = relationship(
    #     "BuildIbans", back_populates="building", foreign_keys="BuildIbans.build_id"
    # )
    # areas: Mapped["BuildArea"] = relationship(
    #     "BuildArea", back_populates="buildings", foreign_keys="BuildArea.build_id"
    # )
    # response_companies: Mapped["Companies"] = relationship(
    #     "Companies",
    #     back_populates="response_buildings",
    #     foreign_keys=[response_company_id],
    # )
    # addresses: Mapped[List["Address"]] = relationship(
    #     "Address", back_populates="buildings", foreign_keys=[address_id]
    # )
    # peoples: Mapped["People"] = relationship(
    #     "People", back_populates="buildings", foreign_keys=[people_id]
    # )
    # sites: Mapped["BuildSites"] = relationship(
    #     "BuildSites", back_populates="buildings", foreign_keys=[site_id]
    # )

    __table_args__ = (
        Index("_builds_ndx_00", gov_address_code),
        Index("_builds_ndx_01", build_name, build_no),
        {
            "comment": "Build objects are building that are created for living and store purposes"
        },
    )

    @property
    def management_room(self):
        """Return the management-office part of this building, or None."""
        if management_room := BuildParts.filter_by_one(
            system=True, id=self.management_room_id, build_id=self.id
        ).data:
            return management_room
        return None

    @classmethod
    def create_action(cls, data: InsertBuild, token):
        """Create a building from *data*, bind it to the caller's company
        and employee via the access table, confirm and return it.

        Raises:
            HTTPException 404: when the referenced address does not exist.
        """
        from Schemas import Addresses

        data_dict = data.excluded_dump()
        data_dict["address_id"] = None
        if data.address_uu_id:
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.address_uu_id,
            ).data
            data_dict["address_id"] = official_address.id
            # Building number always mirrors the official address record.
            data_dict["build_no"] = str(official_address.build_number)
        if not data_dict["address_id"]:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Address is not found in database. Re-enter address record then try again.",
            )
        build_type = BuildTypes.filter_by_one(
            system=True, uu_id=str(data.build_types_uu_id)
        ).data
        data_dict["build_types_id"] = build_type.id
        build_created = cls.find_or_create(**data_dict)
        # Grant the creating employee access to the new building.
        created_build_relation = cls.__many__table__.find_or_create(
            company_id=token.selected_company.company_id,
            employee_id=token.selected_company.employee_id,
            member_id=build_created.id,
        )
        build_created.save()
        build_created.update(is_confirmed=True)
        build_created.save()
        created_build_relation.update(is_confirmed=True)
        created_build_relation.save()
        return build_created

    @classmethod
    def update_action(cls, data: UpdateBuild, build_uu_id: str, token):
        """Update the building identified by *build_uu_id* with *data*.

        Returns the updated building, or None when no building matches.
        """
        from Schemas import Addresses

        data_dict = data.excluded_dump()
        db = Addresses.new_session()
        if data.address_uu_id:
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.address_uu_id, db=db
            ).first
            data_dict["address_id"] = official_address.id if official_address else None
        if build_to_update := cls.filter_one(cls.uu_id == build_uu_id, db=db).first:
            updated_build = build_to_update.update(**data_dict)
            updated_build.save()
            return updated_build

    @property
    def top_flat(self):
        """Highest part number among this building's parts (0 when empty)."""
        # BUG FIX: the original compared part_no against self.max_floor,
        # returning the LAST part above max_floor instead of the maximum.
        max_flat_no = 0
        for part in self.parts:
            if part.part_no > max_flat_no:
                max_flat_no = part.part_no
        return max_flat_no

    @property
    def bottom_flat(self):
        """Lowest part number among this building's parts (0 when empty)."""
        # BUG FIX: the original compared against self.max_floor and kept
        # overwriting, so it returned the last part below max_floor rather
        # than the true minimum.
        part_numbers = [part.part_no for part in self.parts]
        return min(part_numbers) if part_numbers else 0

    @property
    def human_livable_parts(self) -> tuple:
        """Return (list of livable parts, their count)."""
        parts = list(part for part in self.parts if part.human_livable)
        return parts, len(parts)

    @property
    def livable_part_count(self):
        """Count of livable parts; raises HTTP 404 when there are none."""
        livable_parts = BuildParts.filter_all(
            BuildParts.build_id == self.id,
            BuildParts.human_livable == True,
        )
        if not livable_parts.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="There is no livable part in this building.",
            )
        return livable_parts.count

    @property
    def part_type_count(self):
        """Group part numbers by their BuildTypes.type_code.

        Returns ``{type_code: {"list": [part_no, ...]}}`` or None when the
        building has no parts (preserves the original empty-case result).
        """
        if not self.parts:
            return None
        # BUG FIX: the dict was re-created on every loop iteration, so only
        # the last part survived; it is now built once before the loop.
        building_types = {}
        for part in self.parts:
            # BUG FIX: BuildParts has no build_part_type_id column — the
            # type FK is part_type_id.
            build_type = BuildTypes.filter_by_one(
                system=True, id=part.part_type_id
            ).data
            if build_type.type_code in building_types:
                building_types[build_type.type_code]["list"].append(part.part_no)
            else:
                building_types[build_type.type_code] = {"list": [part.part_no]}

        # for key, val in building_types.items():
        #     list_parts = val["list"]
        #     building_types[key] = {
        #         "list": list_parts,
        #         "min": min(list_parts),
        #         "max": max(list_parts),
        #         "count": len(list_parts),
        #     }
        return building_types
+
+
class BuildParts(CrudCollection):
    """
    BuildParts class based on declarative_base and BaseMixin via session
    Attentions: Part_no is unique for each building and Every building must have a management section.!!! default no 0
    """

    __tablename__ = "build_parts"
    __exclude__fields__ = []
    __language_model__ = BuildPartsLanguageModel
    __include__fields__ = []
    __enum_list__ = [("part_direction", "Directions", "NN")]

    # https://adres.nvi.gov.tr/VatandasIslemleri/AdresSorgu
    address_gov_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Goverment Door Code"
    )
    # part_name: Mapped[str] = mapped_column(String(24), server_default="", nullable=False, comment="Part Name")
    part_no: Mapped[int] = mapped_column(
        Integer, server_default="0", nullable=False, comment="Part Number"
    )
    part_level: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Building Part Level"
    )
    # Derived in create_action as "<type_code>:<zero-padded part_no>".
    part_code: Mapped[str] = mapped_column(
        String, server_default="", nullable=False, comment="Part Code"
    )
    part_gross_size: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Part Gross Size"
    )
    part_net_size: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Part Net Size"
    )
    default_accessory: Mapped[str] = mapped_column(
        Text, server_default="0", comment="Default Accessory"
    )
    human_livable: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Human Livable"
    )
    # Dues group key, derived from net size rounded up to a multiple of 5 m².
    due_part_key: Mapped[str] = mapped_column(
        String, server_default="", nullable=False, comment="Constant Payment Group"
    )

    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building UUID"
    )
    part_direction_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    part_direction_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Part Direction UUID"
    )
    part_type_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=False, comment="Building Part Type"
    )
    part_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building Part Type UUID"
    )

    buildings: Mapped["Build"] = relationship(
        "Build", back_populates="parts", foreign_keys=[build_id]
    )

    __table_args__ = (
        # A part number may occur only once per building.
        Index("build_parts_ndx_01", build_id, part_no, unique=True),
        {"comment": "Part objects that are belong to building objects"},
    )

    @classmethod
    def create_action(cls, data: InsertBuildParts, token):
        """Create a building part after verifying the caller's employee can
        access the target building.

        Raises:
            HTTPException 406: unreachable building, or empty gross/net size.
            HTTPException 418: unknown build part type.
        """
        from Schemas import ApiEnumDropdown

        data_dict = data.dump()
        build_from_duty = Build.select_action(
            employee_id=token.selected_company.employee_id,
            filter_expr=[Build.uu_id == data.build_uu_id],
        )
        building = build_from_duty.first()
        if not building:
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail="This Employee can not reach this building or building uu-id not found in database. "
                "Check with your supervisor.",
            )

        if build_types := BuildTypes.filter_one(
            BuildTypes.uu_id == data.build_part_type_uu_id,
        ).data:
            part_direction = ApiEnumDropdown.get_by_uuid(
                uuid=str(data.part_direction_uu_id)
            )

            data_dict["part_gross_size"] = data.part_gross_size
            data_dict["part_net_size"] = data.part_net_size
            data_dict["part_type_id"] = build_types.id
            data_dict["part_level"] = data.part_level
            data_dict["build_id"] = building.id
            data_dict["part_no"] = data.part_no
            # part_code pattern: "<type_code>:<part_no zero-padded to 2>".
            data_dict["part_code"] = (
                f"{build_types.type_code}:{str(data_dict['part_no']).zfill(2)}"
            )
            data_dict["address_gov_code"] = data.address_gov_code
            data_dict["default_accessory"] = data.default_accessory
            data_dict["human_livable"] = bool(data.human_livable)

            data_dict["build_uu_id"] = str(data.build_uu_id)
            data_dict["part_type_id"] = build_types.id
            data_dict["part_type_uu_id"] = str(build_types.uu_id)
            data_dict["part_direction_id"] = part_direction.id
            data_dict["part_direction_uu_id"] = str(part_direction.uu_id)
            # data_dict["part_direction"] = str(data.part_direction_uu_id)

            if not data_dict["part_gross_size"]:
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="Part Gross Size can not be empty.",
                )

            if not data_dict["part_net_size"]:
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="Part Net Size can not be empty.",
                )
            # Round the net size up to the next multiple of 5 for the dues group.
            pt = int(data_dict["part_net_size"])
            data_dict["due_part_key"] = str(pt + (5 - (pt % 5))) + "M2"
            del data_dict["build_part_type_uu_id"]
            return cls.find_or_create(**data_dict)

        raise HTTPException(
            status_code=status.HTTP_418_IM_A_TEAPOT,
            detail="Build Part can not be created.",
        )

    @property
    def part_name(self):
        """Display name composed from the part type and part number."""
        build_type = BuildTypes.filter_by_one(system=True, id=self.part_type_id).data
        if build_type:
            return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
        # BUG FIX: the original fallback dereferenced build_type.type_name
        # exactly when build_type was None, raising AttributeError; fall
        # back to the part number instead.
        return f"Undefined:{str(self.part_no).upper()}"
+
+
class BuildLivingSpace(CrudCollection):
    """
    LivingSpace class based on declarative_base and BaseMixin via session
    Owner or live person = Occupant of the build part
    + Query OR(owner_person_id == person_id, life_person_id == person_id) AND (now(date))
    """

    __tablename__ = "build_living_space"
    __exclude__fields__ = []
    __language_model__ = BuildLivingSpaceLanguageModel
    __include__fields__ = []

    # Flat amount deducted from the occupant's debit.
    fix_value: Mapped[float] = mapped_column(
        Numeric(20, 6),
        server_default="0",
        comment="Fixed value is deducted from debit.",
    )
    # Percentage deducted from the occupant's debit.
    fix_percent: Mapped[float] = mapped_column(
        Numeric(6, 2),
        server_default="0",
        comment="Fixed percent is deducted from debit.",
    )

    agreement_no: Mapped[str] = mapped_column(
        String, server_default="", comment="Agreement No"
    )
    # Marketing flags — semantics not visible here; TODO confirm.
    marketing_process: Mapped[bool] = mapped_column(Boolean, server_default="False")
    marketing_layer: Mapped[int] = mapped_column(Integer, server_default="0")

    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"),
        nullable=False,
        index=True,
        comment="Build Part ID",
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )
    person_id: Mapped[int] = mapped_column(
        ForeignKey("people.id"),
        nullable=False,
        index=True,
        comment="Responsible People ID",
    )
    person_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Responsible People UUID"
    )
    occupant_type: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"),
        nullable=False,
        comment="Occupant Type",
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Occupant Type UUID"
    )

    __table_args__ = (
        {"comment": "Living Space inside building parts that are related to people"},
    )

    @classmethod
    def create_action(
        cls,
        data: dict,
        token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """Create a living-space record, bind the occupant's default modules
        and confirm the record.

        Raises HTTP 418 when no service matches the occupant type.
        """
        from Schemas import Services, OccupantTypes
        from api_events.events.events.events_bind_modules import (
            ModulesBindOccupantEventMethods,
        )

        # Normalize incoming expiry bounds to the system timezone/format.
        if data.get("expiry_starts"):
            data["expiry_starts"] = str(system_arrow.get(data["expiry_starts"]))
        if data.get("expiry_ends"):
            data["expiry_ends"] = str(system_arrow.get(data["expiry_ends"]))
        created_living_space = BuildLivingSpace.find_or_create(**data)
        occupant_type = OccupantTypes.filter_by_one(
            system=True, uu_id=created_living_space.occupant_type_uu_id
        ).data
        related_service = Services.filter_by_one(
            related_responsibility=occupant_type.occupant_code,
        ).data
        if not related_service:
            raise HTTPException(
                status_code=status.HTTP_418_IM_A_TEAPOT,
                detail="Service is not found in database. Re-enter service record then try again.",
            )
        ModulesBindOccupantEventMethods.bind_default_module_for_first_init_occupant(
            build_living_space_id=created_living_space.id,
        )
        created_living_space.save_and_confirm()
        return created_living_space

    @classmethod
    def find_living_from_customer_id(
        cls, customer_id, process_date, add_days: int = 32
    ):
        """Return (rows, count) of living spaces tied to *customer_id* whose
        validity window covers *process_date* with an *add_days* margin.

        NOTE(review): this filters on cls.owner_person_id / life_person_id /
        start_date / stop_date, none of which are columns visible on this
        model (it defines person_id and, per create_action, expiry_starts /
        expiry_ends). Confirm these attributes exist on CrudCollection,
        otherwise this query raises AttributeError.
        """
        from ApiLibrary.date_time_actions.date_functions import system_arrow

        formatted_date = system_arrow.get(str(process_date))
        living_spaces = cls.filter_all(
            or_(
                cls.owner_person_id == customer_id,
                cls.life_person_id == customer_id,
            ),
            cls.start_date < formatted_date - timedelta(days=add_days),
            cls.stop_date > formatted_date + timedelta(days=add_days),
        )
        return living_spaces.data, living_spaces.count
+
+
class BuildManagement(CrudCollection):
    """Per-part management pricing for an occupant type of a building."""

    __tablename__ = "build_management"
    __exclude__fields__ = []
    __language_model__ = BuildManagementLanguageModel

    discounted_percentage: Mapped[float] = mapped_column(
        Numeric(6, 2), server_default="0.00"
    )  # e.g. 22 (%)
    discounted_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )  # e.g. list price 78.00 TL
    calculated_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )  # e.g. rounded final price 75.00 TL

    occupant_type: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"),
        nullable=False,
        comment="Occupant Type",
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Occupant Type UUID"
    )
    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building UUID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"),
        nullable=False,
        index=True,
        comment="Build Part ID",
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )

    __table_args__ = (
        # One price row per (part, occupant type, validity start).
        Index(
            "build_management_ndx_00",
            build_parts_id,
            occupant_type,
            "expiry_starts",
            unique=True,
        ),
        {"comment": "Management of the building parts that are related to people"},
    )
+
+
class BuildArea(CrudCollection):
    """
    Builds class based on declarative_base and BaseMixin via session

    Common/outdoor area of a building (e.g. green area), with sizes and
    an optional part type.
    """

    __tablename__ = "build_area"
    __exclude__fields__ = []
    __language_model__ = BuildAreaLanguageModel

    area_name: Mapped[str] = mapped_column(String, server_default="")
    area_code: Mapped[str] = mapped_column(String, server_default="")
    # Kind of area; defaults to a green space.
    area_type: Mapped[str] = mapped_column(String, server_default="GREEN")
    # Two-letter compass direction, "NN" meaning not set (see __enum_list__
    # convention on BuildParts).
    area_direction: Mapped[str] = mapped_column(String(2), server_default="NN")
    area_gross_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    area_net_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    width = mapped_column(Integer, server_default="0")
    size = mapped_column(Integer, server_default="0")

    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"))
    build_uu_id: Mapped[str] = mapped_column(String, comment="Building UUID")
    part_type_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=True, comment="Building Part Type"
    )
    part_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Building Part Type UUID"
    )

    # buildings: Mapped["Build"] = relationship(
    #     "Build", back_populates="areas", foreign_keys=[build_id]
    # )

    # BUG FIX: was misspelled `_table_args_` (single underscores), so
    # SQLAlchemy silently ignored it and the unique index was never created.
    __table_args__ = (
        Index("_edm_build_parts_area_ndx_00", build_id, area_code, unique=True),
    )
+
+
class BuildSites(CrudCollection):
    """
    Builds class based on declarative_base and BaseMixin via session

    Site (complex) that groups several buildings; Build.site_id points here.
    """

    __tablename__ = "build_sites"
    __exclude__fields__ = []
    __language_model__ = BuildSitesLanguageModel
    __include__fields__ = []

    site_name: Mapped[str] = mapped_column(String(24), nullable=False)
    site_no: Mapped[str] = mapped_column(String(8), nullable=False)

    # Official address of the site itself.
    address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"))
    address_uu_id: Mapped[str] = mapped_column(String, comment="Address UUID")

    # addresses: Mapped["Address"] = relationship(
    #     "Address", back_populates="site", foreign_keys=[address_id]
    # )
    # buildings: Mapped["Build"] = relationship(
    #     "Build", back_populates="sites", foreign_keys="Build.site_id"
    # )

    __table_args__ = (
        Index("_sites_ndx_01", site_no, site_name),
        {"comment": "Sites that groups building objets"},
    )
+
+
class BuildCompaniesProviding(CrudCollection):
    """Company that provides a service (enum ``provide_id``) to a building."""

    __tablename__ = "build_companies_providing"
    __exclude__fields__ = []
    __language_model__ = BuildCompaniesProvidingLanguageModel
    __include__fields__ = []

    build_id = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # Providing company.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # Dropdown enum describing the provided service.
    provide_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    provide_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # NOTE(review): contract_id references companies.id — looks like it
    # should reference a contracts table; confirm.
    contract_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("companies.id"), nullable=True
    )

    __table_args__ = (
        # One row per (building, company, service).
        Index(
            "_build_companies_providing_ndx_00",
            build_id,
            company_id,
            provide_id,
            unique=True,
        ),
        {"comment": "Companies providing services for building"},
    )
+
+
class BuildPersonProviding(CrudCollection):
    """Person that provides a service (enum ``provide_id``) to a building."""

    __tablename__ = "build_person_providing"
    __exclude__fields__ = []
    __language_model__ = BuildPersonProvidingLanguageModel
    __include__fields__ = []

    build_id = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # Providing person.
    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
    people_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="People UUID"
    )
    # Dropdown enum describing the provided service.
    provide_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    provide_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # NOTE(review): contract_id references companies.id — same oddity as
    # BuildCompaniesProviding; confirm the intended target table.
    contract_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("companies.id"), nullable=True
    )

    __table_args__ = (
        # One row per (building, person, service).
        Index(
            "_build_person_providing_ndx_00",
            build_id,
            people_id,
            provide_id,
            unique=True,
        ),
        {"comment": "People providing services for building"},
    )
diff --git a/ApiLayers/Schemas/building/decision_book.py b/ApiLayers/Schemas/building/decision_book.py
new file mode 100644
index 0000000..de8e53c
--- /dev/null
+++ b/ApiLayers/Schemas/building/decision_book.py
@@ -0,0 +1,1793 @@
+import math
+from datetime import datetime, timedelta
+from decimal import Decimal
+from typing import List
+from fastapi import HTTPException, status
+
+from ApiLayers.ApiLibrary.date_time_actions.date_functions import system_arrow
+
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Index,
+ SmallInteger,
+ Boolean,
+ TIMESTAMP,
+ Text,
+ Numeric,
+ Integer,
+)
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from ApiLayers.ApiValidations.Request import (
+ InsertDecisionBook,
+ InsertBuildDecisionBookItems,
+ InsertBuildDecisionBookItemDebits,
+ InsertBuildDecisionBookProjects,
+)
+from Services.PostgresDb import CrudCollection
+from ApiLayers.LanguageModels.Database.building.decision_book import (
+ BuildDecisionBookLanguageModel,
+ BuildDecisionBookInvitationsLanguageModel,
+ BuildDecisionBookPersonLanguageModel,
+ BuildDecisionBookPersonOccupantsLanguageModel,
+ BuildDecisionBookItemsLanguageModel,
+ BuildDecisionBookItemsUnapprovedLanguageModel,
+ BuildDecisionBookPaymentsLanguageModel,
+ BuildDecisionBookLegalLanguageModel,
+ BuildDecisionBookProjectsLanguageModel,
+ BuildDecisionBookProjectPersonLanguageModel,
+ BuildDecisionBookProjectItemsLanguageModel,
+)
+
+
+class BuildDecisionBook(CrudCollection):
+ """
+ Builds class based on declarative_base and BaseMixin via session
+ The start dates of the decision log periods are determined from the 'decision_period_date' field in the decision log table within the building information.
+ decision_period_date = Her yıl yapılan karar toplantısı + 365 gün her yıl tekrar eden
+ decision_book_pdf_path: Karar defteri pdf dosyasının yolu
+ resp_company_fix_wage: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık ücreti
+ is_out_sourced: Karar defterinin dışardan alınan hizmetle oluşturulup oluşturulmadığı
+ contact_agreement_path: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık anlaşması dosyasının yolu
+ contact_agreement_date: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık anlaşma tarihi
+ meeting_date: Karar defterinin oluşmasını sağlayan toplantı tarihi
+ decision_type: Karar defterinin tipi (Bina Yönetim Toplantısı (BYT), Yıllık Acil Toplantı (YAT)
+ """
+
+ __tablename__ = "build_decision_book"
+ __exclude__fields__ = []
+ __language_model__ = BuildDecisionBookLanguageModel
+
+ decision_book_pdf_path: Mapped[str] = mapped_column(
+ String, server_default="", nullable=True
+ )
+ resp_company_fix_wage: Mapped[float] = mapped_column(
+ Numeric(10, 2), server_default="0"
+ ) #
+ is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")
+ meeting_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), server_default="1900-01-01"
+ )
+ decision_type: Mapped[str] = mapped_column(String(3), server_default="RBM")
+ meeting_is_completed: Mapped[bool] = mapped_column(Boolean, server_default="0")
+ meeting_completed_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), nullable=True, comment="Meeting Completed Date"
+ )
+
+ build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=False)
+ build_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Build UUID"
+ )
+ resp_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+ resp_company_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Company UUID"
+ )
+ contact_id: Mapped[int] = mapped_column(
+ ForeignKey("contracts.id"), nullable=True, comment="Contract id"
+ )
+ contact_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Contract UUID"
+ )
+
+ buildings: Mapped["Build"] = relationship(
+ "Build",
+ back_populates="decision_books",
+ foreign_keys=build_id,
+ )
+ decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship(
+ "BuildDecisionBookItems",
+ back_populates="decision_books",
+ foreign_keys="BuildDecisionBookItems.build_decision_book_id",
+ )
+
+ __table_args__ = (
+ Index("build_decision_book_ndx_011", meeting_date, build_id),
+ Index("build_decision_book_ndx_011", build_id, "expiry_starts", "expiry_ends"),
+ {
+ "comment": "Decision Book objects that are related to decision taken at building meetings"
+ },
+ )
+
+ @classmethod
+ def retrieve_active_rbm(cls):
+ from Schemas.building.build import Build
+
+ related_build = Build.find_one(id=cls.build_id)
+ related_date = system_arrow.get(related_build.build_date)
+ date_processed = related_date.replace(
+ year=system_arrow.now().date().year, month=related_date.month, day=1
+ )
+ if system_arrow.now().date() <= date_processed:
+ book = cls.filter_one(
+ cls.expiry_ends <= date_processed,
+ cls.decision_type == "RBM",
+ cls.build_id == related_build.id,
+ ).data
+ if not book:
+ cls.raise_http_exception(
+ status_code="HTTP_404_NOT_FOUND",
+ error_case="NOTFOUND",
+ message=f"Decision Book is not found for {related_build.build_name}-RBM",
+ data=dict(
+ build_id=str(related_build.uu_id),
+ build_name=related_build.build_name,
+ decision_type="RBM",
+ ),
+ )
+ return book
+ return
+
+ @classmethod
+ def select_action(cls, duty_id, token=None):
+ from Schemas import Build, Companies
+
+ related_companies = Companies.select_action(duty_id_list=[int(duty_id)])
+ related_companies_ids = list(
+ related_.id for related_ in related_companies.all()
+ )
+ related_building = Build.filter_all(Build.company_id.in_(related_companies_ids))
+ related_building_ids = list(related_.id for related_ in related_building.data)
+ return cls.filter_all(cls.build_id.in_(related_building_ids)).query
+
+ @classmethod
+ def create_action(cls, data: InsertDecisionBook, token=None):
+ from Schemas import (
+ Build,
+ Companies,
+ )
+
+ data_dict = data.model_dump()
+ if building := Build.find_one(uu_id=data.build_uu_id):
+ data_dict["build_id"] = building.id
+ if response_company := Companies.find_one(
+ uu_id=data_dict["resp_company_uu_id"]
+ ):
+ data_dict["resp_company_id"] = response_company.id
+ if not building:
+ raise HTTPException(
+ status_code=status.HTTP_406_NOT_ACCEPTABLE,
+ detail="Building must be given to create decision book.",
+ )
+ expiry_starts = system_arrow.get(str(data_dict.get("expiry_starts"))).format(
+ "%Y-%m-%d"
+ )
+ data_dict["expiry_starts"] = str(expiry_starts)
+ expiry_ends = system_arrow.get(str(data_dict.get("expiry_ends"))).format(
+ "%Y-%m-%d"
+ )
+ data_dict["expiry_ends"] = str(
+ expiry_ends.replace(month=expiry_ends.month + 1, day=1) - timedelta(days=1)
+ )
+
+ if decision_book := BuildDecisionBook.filter_one(
+ BuildDecisionBook.build_id == building.id,
+ BuildDecisionBook.expiry_ends > data_dict["expiry_starts"],
+ BuildDecisionBook.decision_type == data_dict.get("decision_type"),
+ ).data: # Decision book is already exist:
+ cls.raise_http_exception(
+ status_code=status.HTTP_409_CONFLICT,
+ error_case="RECORDEXITS",
+ message="Decision Book is already exist.",
+ data=decision_book.get_dict(),
+ )
+
+ data_dict["expiry_starts"] = str(expiry_starts.replace(day=1))
+ data_dict["expiry_ends"] = str(
+ expiry_ends.replace(month=expiry_ends.month + 1, day=1) - timedelta(days=1)
+ )
+ del data_dict["build_uu_id"], data_dict["resp_company_uu_id"]
+ return cls.find_or_create(**data_dict)
+
+ @property
+ def semester(self):
+ start_format = "".join(
+ [str(self.expiry_starts.year), "-", str(self.expiry_starts.month)]
+ )
+ end_format = "".join(
+ [str(self.expiry_ends.year), "-", str(self.expiry_ends.month)]
+ )
+ return "".join([start_format, " ", end_format])
+
+ def check_book_is_valid(self, bank_date: str):
+ if all(
+ [True if letter in str(bank_date) else False for letter in ["-", " ", ":"]]
+ ):
+ bank_date = datetime.strptime(str(bank_date), "%Y-%m-%d %H:%M:%S")
+ date_valid = (
+ system_arrow.get(self.expiry_starts)
+ < system_arrow.get(bank_date)
+ < system_arrow.get(self.expiry_ends)
+ )
+ return date_valid and self.active and not self.deleted
+
+ @classmethod
+ def retrieve_valid_book(cls, bank_date, iban):
+ from Schemas import (
+ BuildIbans,
+ )
+
+ if all(
+ [True if letter in str(bank_date) else False for letter in ["-", " ", ":"]]
+ ):
+ bank_date = datetime.strptime(str(bank_date), "%Y-%m-%d %H:%M:%S")
+ build_iban = BuildIbans.find_one(iban=iban)
+ decision_book: cls = cls.filter_one(
+ cls.build_id == build_iban.build_id,
+ cls.expiry_starts < bank_date,
+ cls.expiry_ends > bank_date,
+ cls.active == True,
+ cls.deleted == False,
+ ).data
+ decision_book.check_book_is_valid(bank_date.__str__())
+ return decision_book
+ return
+
+
+class BuildDecisionBookInvitations(CrudCollection):
+    """Meeting invitations sent for a decision book.
+
+    ``invitation_attempt`` distinguishes the first call (quorum required)
+    from the second call; ``living_part_count`` and
+    ``living_part_percentage`` together define the attendance quorum.
+    """
+
+    __tablename__ = "build_decision_book_invitations"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookInvitationsLanguageModel
+
+    # NOTE(review): plain Integer, not a ForeignKey("build.id") like the
+    # sibling tables -- confirm whether that is intentional.
+    build_id: Mapped[int] = mapped_column(Integer, nullable=False)
+    build_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Build UUID"
+    )
+    decision_book_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book.id"), nullable=False
+    )
+    decision_book_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book UUID"
+    )
+
+    invitation_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Invite Type"
+    )
+    # 1 = first call, 2 = second call (no quorum requirement).
+    invitation_attempt: Mapped[int] = mapped_column(SmallInteger, server_default="1")
+    living_part_count: Mapped[int] = mapped_column(SmallInteger, server_default="1")
+    # Fraction of living parts that must attend for the first call (default 51%).
+    living_part_percentage: Mapped[float] = mapped_column(
+        Numeric(10, 2), server_default="0.51"
+    )
+
+    message: Mapped[str] = mapped_column(
+        Text, nullable=True, comment="Invitation Message"
+    )
+    planned_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date"
+    )
+    planned_date_expires: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date Expires"
+    )
+
+    __table_args__ = (
+        Index(
+            "_build_decision_book_invitations_ndx_01",
+            invitation_type,
+            planned_date,
+            invitation_attempt,
+            unique=True,
+        ),
+        {"comment": "People that are invited to building meetings."},
+    )
+
+    @classmethod
+    def check_invites_are_ready_for_meeting(cls, selected_decision_book, token_dict):
+        """Return the invitation under which the meeting may be held.
+
+        Raises 404 when no first-call invitation exists, and 400 when the
+        first call lacks quorum and no second call has been issued.
+        Returns the first-call invitation when present, otherwise the
+        second-call one.
+        """
+        first_book_invitation = BuildDecisionBookInvitations.filter_one(
+            BuildDecisionBookInvitations.build_id
+            == token_dict.selected_occupant.build_id,
+            BuildDecisionBookInvitations.decision_book_id == selected_decision_book.id,
+            BuildDecisionBookInvitations.invitation_attempt == 1,
+        ).data
+        if not first_book_invitation:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND,
+                detail=f"First Meeting Invitation is not found for Decision Book UUID : {selected_decision_book.uu_id}",
+            )
+        # Quorum = living parts * required percentage (Decimal to avoid
+        # float rounding on the stored Numeric).
+        need_attend_count = int(first_book_invitation.living_part_count) * Decimal(
+            first_book_invitation.living_part_percentage
+        )
+        # Distinct people who confirmed attendance under the first call.
+        valid_invite_count = (
+            BuildDecisionBookPerson.filter_all(
+                BuildDecisionBookPerson.invite_id == first_book_invitation.id,
+                BuildDecisionBookPerson.build_decision_book_id
+                == selected_decision_book.id,
+                BuildDecisionBookPerson.is_attending == True,
+                system=True,
+            )
+            .query.distinct(BuildDecisionBookPerson.person_id)
+            .count()
+        )
+
+        second_book_invitation = BuildDecisionBookInvitations.filter_one(
+            BuildDecisionBookInvitations.build_id
+            == token_dict.selected_occupant.build_id,
+            BuildDecisionBookInvitations.decision_book_id == selected_decision_book.id,
+            BuildDecisionBookInvitations.invitation_attempt == 2,
+            system=True,
+        ).data
+        # A second-call invitation waives the quorum requirement.
+        if not valid_invite_count >= need_attend_count and not second_book_invitation:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=f"In order meeting to be held, {math.ceil(need_attend_count)} people must attend "
+                f"to the meeting. Only {valid_invite_count} people are attending to the meeting.",
+            )
+        return first_book_invitation or second_book_invitation
+
+
+class BuildDecisionBookPerson(CrudCollection):
+ """
+ Builds class based on declarative_base and BaseMixin via session
+ Karar Defteri toplantılarına katılan kişiler veya yetkililer
+ dues_percent_discount: Katılımcının aidat indirim oranı Aidatdan yüzde indirim alır
+ dues_fix_discount: Katılımcının aidat sabit miktarı Aidatdan sabit bir miktar indirim alır
+ dues_discount_approval_date: Bu kişinin indiriminin onayladığı tarih
+ management_typecode: Kişinin toplantı görevi
+ """
+
+ __tablename__ = "build_decision_book_person"
+ __exclude__fields__ = []
+ __enum_list__ = [("management_typecode", "BuildManagementType", "bm")]
+ __language_model__ = BuildDecisionBookPersonLanguageModel
+
+ dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+ dues_fix_discount: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+ dues_discount_approval_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
+ )
+ send_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), nullable=False, comment="Confirmation Date"
+ )
+ is_attending: Mapped[bool] = mapped_column(
+ Boolean, server_default="0", comment="Occupant is Attending to invitation"
+ )
+ confirmed_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), nullable=True, comment="Confirmation Date"
+ )
+ token: Mapped[str] = mapped_column(
+ String, server_default="", comment="Invitation Token"
+ )
+
+ vicarious_person_id: Mapped[int] = mapped_column(
+ ForeignKey("people.id"), nullable=True, comment="Vicarious Person ID"
+ )
+ vicarious_person_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Vicarious Person UUID"
+ )
+
+ invite_id: Mapped[int] = mapped_column(
+ ForeignKey("build_decision_book_invitations.id"), nullable=False
+ )
+ invite_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Invite UUID"
+ )
+
+ build_decision_book_id: Mapped[int] = mapped_column(
+ ForeignKey("build_decision_book.id"), nullable=False
+ )
+ build_decision_book_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Decision Book UUID"
+ )
+ build_living_space_id: Mapped[int] = mapped_column(
+ ForeignKey("build_living_space.id"), nullable=False
+ )
+ build_living_space_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Living Space UUID"
+ )
+ person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
+ # person_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Person UUID")
+
+ __table_args__ = (
+ Index(
+ "_build_decision_book_person_ndx_01",
+ build_decision_book_id,
+ invite_id,
+ build_living_space_id,
+ unique=True,
+ ),
+ {"comment": "People that are attended to building meetings."},
+ )
+
+ def retrieve_all_occupant_types(self):
+ all_decision_book_people = self.filter_all(
+ BuildDecisionBookPersonOccupants.invite_id == self.invite_id,
+ system=True,
+ )
+ BuildDecisionBookPersonOccupants.pre_query = all_decision_book_people.query
+ return BuildDecisionBookPersonOccupants.filter_all(system=True).data
+
+ def add_occupant_type(self, occupant_type, build_living_space_id: int = None):
+ from Schemas import (
+ Build,
+ BuildLivingSpace,
+ Services,
+ )
+
+ # from api_events.events.events.events_bind_services import (
+ # ServiceBindOccupantEventMethods,
+ # )
+
+ book_dict = dict(
+ build_decision_book_person_id=self.id,
+ build_decision_book_person_uu_id=str(self.uu_id),
+ invite_id=self.invite_id,
+ invite_uu_id=str(self.invite_uu_id),
+ occupant_type_id=occupant_type.id,
+ occupant_type_uu_id=str(occupant_type.uu_id),
+ )
+ if person_occupants := BuildDecisionBookPersonOccupants.find_or_create(
+ **book_dict
+ ):
+ person_occupants.save_and_confirm()
+
+ decision_book = BuildDecisionBook.filter_one(
+ BuildDecisionBook.id == self.build_decision_book_id,
+ ).data
+ person_occupants.update(
+ expiry_starts=decision_book.expiry_starts,
+ expiry_ends=decision_book.expiry_ends,
+ )
+ if build_living_space_id:
+ related_service = Services.filter_by_one(
+ related_responsibility=str(occupant_type.occupant_code),
+ **Services.valid_record_dict,
+ ).data
+ if not related_service:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Service is not found for {occupant_type.occupant_code}",
+ )
+
+ decision_build = Build.filter_one(
+ Build.id == decision_book.build_id,
+ ).data
+ management_room = decision_build.management_room
+ if not management_room:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Management Room is not found in {decision_build.build_name}",
+ )
+
+ living_space = BuildLivingSpace.filter_one(
+ BuildLivingSpace.id == build_living_space_id,
+ ).data
+ expiry_ends = str(
+ system_arrow.get(decision_book.meeting_date).shift(hours=23)
+ )
+ expiry_starts = str(system_arrow.get(decision_book.meeting_date))
+ related_living_space = BuildLivingSpace.find_or_create(
+ build_parts_id=management_room.id,
+ build_parts_uu_id=str(management_room.uu_id),
+ occupant_type=occupant_type.id,
+ occupant_type_uu_id=str(occupant_type.uu_id),
+ person_id=living_space.person_id,
+ person_uu_id=str(living_space.person_uu_id),
+ expiry_starts=expiry_starts,
+ expiry_ends=expiry_ends,
+ )
+ expires_at = str(
+ system_arrow.get(decision_book.meeting_date).shift(days=15)
+ )
+ related_living_space.save_and_confirm()
+ ServiceBindOccupantEventMethods.bind_services_occupant_system(
+ build_living_space_id=related_living_space.id,
+ service_id=related_service.id,
+ expires_at=expires_at,
+ )
+ return person_occupants
+ return
+
+ def get_occupant_types(self):
+ if occupants := BuildDecisionBookPersonOccupants.filter_all(
+ BuildDecisionBookPersonOccupants.build_decision_book_person_id == self.id,
+ ).data:
+ return occupants
+ return
+
+ def check_occupant_type(self, occupant_type):
+ book_person_occupant_type = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.build_decision_book_person_id == self.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type.id,
+ BuildDecisionBookPersonOccupants.active == True,
+ BuildDecisionBookPersonOccupants.is_confirmed == True,
+ ).data
+ if not book_person_occupant_type:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Occupant Type : {occupant_type.occupant_code} is not found in "
+ f"Decision Book Person UUID {self.uu_id}",
+ )
+
+
+class BuildDecisionBookPersonOccupants(CrudCollection):
+    """Occupant types (roles) held by a meeting attendee.
+
+    Links a ``build_decision_book_person`` row to an ``occupant_types``
+    row, optionally recording the invitation under which the role was
+    assigned.
+    """
+
+    __tablename__ = "build_decision_book_person_occupants"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookPersonOccupantsLanguageModel
+
+    build_decision_book_person_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_person.id"), nullable=False
+    )
+    build_decision_book_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Person UUID"
+    )
+    invite_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_invitations.id"), nullable=True
+    )
+    invite_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Invite UUID"
+    )
+
+    occupant_type_id: Mapped[int] = mapped_column(
+        ForeignKey("occupant_types.id"), nullable=False
+    )
+    occupant_type_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Occupant UUID"
+    )
+
+    # A person holds each occupant type at most once per attendance record.
+    __table_args__ = (
+        Index(
+            "_build_decision_book_person_occupants_ndx_01",
+            build_decision_book_person_id,
+            occupant_type_id,
+            unique=True,
+        ),
+        {"comment": "Occupant Types of People that are attended to building meetings."},
+    )
+
+
+class BuildDecisionBookItems(CrudCollection):
+ """
+ Builds class based on declarative_base and BaseMixin via session
+ item_commentary = metine itiraz şerh maddesi için
+ item_order = maddelerin sıralanma numarası
+ item_objection = maddelerin itiraz şerhi Text şeklinde
+ """
+
+ __tablename__ = "build_decision_book_items"
+ __exclude__fields__ = []
+ __language_model__ = BuildDecisionBookItemsLanguageModel
+
+ item_order: Mapped[int] = mapped_column(
+ SmallInteger, nullable=False, comment="Order Number of Item"
+ )
+ item_comment: Mapped[str] = mapped_column(
+ Text, nullable=False, comment="Comment Content"
+ )
+ item_objection: Mapped[str] = mapped_column(
+ Text, nullable=True, comment="Objection Content"
+ )
+ info_is_completed: Mapped[bool] = mapped_column(
+ Boolean, server_default="0", comment="Info process is Completed"
+ )
+ is_payment_created: Mapped[bool] = mapped_column(
+ Boolean, server_default="0", comment="Are payment Records Created"
+ )
+
+ info_type_id: Mapped[int] = mapped_column(
+ ForeignKey("api_enum_dropdown.id"), nullable=True
+ )
+ info_type_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Info Type UUID"
+ )
+
+ build_decision_book_id: Mapped[int] = mapped_column(
+ ForeignKey("build_decision_book.id"), nullable=False
+ )
+ build_decision_book_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Decision Book UUID"
+ )
+ item_short_comment: Mapped[str] = mapped_column(
+ String(24),
+ nullable=True,
+ comment="This field is reserved for use in grouping data or in the pivot heading.",
+ )
+
+ decision_books: Mapped["BuildDecisionBook"] = relationship(
+ "BuildDecisionBook",
+ back_populates="decision_book_items",
+ foreign_keys=[build_decision_book_id],
+ )
+ decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
+ "BuildDecisionBookProjects",
+ back_populates="build_decision_book_item",
+ foreign_keys="BuildDecisionBookProjects.build_decision_book_item_id",
+ )
+
+ __table_args__ = (
+ Index("_build_decision_book_item_ndx_01", build_decision_book_id),
+ Index(
+ "_build_decision_book_item_ndx_02",
+ build_decision_book_id,
+ item_order,
+ unique=True,
+ ),
+ {
+ "comment": "Decision Book Items that are related to decision taken at building meetings"
+ },
+ )
+
+ @classmethod
+ def select_action(cls, duty_id, token=None):
+ from Schemas import (
+ Build,
+ Companies,
+ )
+
+ related_companies = Companies.select_action(duty_id_list=[duty_id])
+ related_companies_ids = list(
+ related_.id for related_ in related_companies.all()
+ )
+ related_building = Build.query.filter(
+ Build.company_id.in_(related_companies_ids)
+ )
+ related_building_ids = list(related_.id for related_ in related_building.all())
+ related_decision_books = BuildDecisionBook.query.filter(
+ BuildDecisionBook.build_id.in_(related_building_ids)
+ )
+ related_decision_books_ids = list(
+ related_.id for related_ in related_decision_books.all()
+ )
+ return cls.query.filter(
+ cls.build_decision_book_id.in_(related_decision_books_ids)
+ )
+
+ @classmethod
+ def create_action(cls, data: InsertBuildDecisionBookItems, token):
+ data_dict = data.dump()
+ BuildDecisionBook.pre_query = BuildDecisionBook.select_action(
+ duty_id=token.duty_list["duty_id"]
+ )
+ cls.pre_query = cls.select_action(duty_id=token.duty_list["duty_id"])
+ if decision_book := BuildDecisionBook.filter_one(
+ BuildDecisionBook.uu_id == data.build_decision_book_uu_id
+ ).data:
+ found_dict = dict(
+ item_order=data.item_order, build_decision_book_id=decision_book.id
+ )
+ if decision_book_is_already := cls.find_one(**found_dict):
+ decision_book_is_already.is_found = True
+ return decision_book_is_already.get_dict()
+ data_dict["build_decision_book_id"] = decision_book.id
+ data_dict["is_confirmed"] = True
+ del data_dict["build_decision_book_uu_id"]
+ return BuildDecisionBookItems.find_or_create(**data_dict)
+
+ @classmethod
+ def check_meeting_is_valid_to_start_add_attendance(cls, decision_book, token_dict):
+ from Schemas import (
+ People,
+ OccupantTypes,
+ )
+
+ active_invite = (
+ BuildDecisionBookInvitations.check_invites_are_ready_for_meeting(
+ selected_decision_book=decision_book,
+ token_dict=token_dict,
+ )
+ )
+ occupant_type_required_list = ("MT-PRS", "MT-WRT", "BU-MNG", "BU-SPV")
+ occupant_type_list = OccupantTypes.filter_all(
+ OccupantTypes.occupant_code.in_(occupant_type_required_list),
+ system=True,
+ ).data
+ # active_invite = invitations[1] if invitations[1] else invitations[0]
+ invitation = BuildDecisionBookInvitations.filter_one(
+ BuildDecisionBookInvitations.id == active_invite.id
+ ).data
+ people_book_attend_count = None
+ if int(invitation.invitation_attempt) == 1:
+ people_book_attend_is_attending = BuildDecisionBookPerson.filter_all(
+ BuildDecisionBookPerson.invite_id == invitation.id,
+ BuildDecisionBookPerson.is_attending == True,
+ )
+ people_book_attend = BuildDecisionBookPersonOccupants.filter_all(
+ BuildDecisionBookPersonOccupants.build_decision_book_person_id.in_(
+ [person.id for person in people_book_attend_is_attending.data]
+ ),
+ BuildDecisionBookPersonOccupants.occupant_type_id.in_(
+ [occupant_type.id for occupant_type in occupant_type_list]
+ ),
+ )
+ people_book_attend_count = people_book_attend.count
+ if not people_book_attend_count == len(occupant_type_required_list) - 1:
+ error_detail = " - ".join(occupant_type_required_list)
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=f"{error_detail} occupant types must be attend to meeting. "
+ f"Check attendants and try again",
+ )
+
+ comment = (
+ lambda id_, occ_type, full_name: f"{full_name} is nomindated for {occ_type} at Meeting Invite Code : {id_}"
+ )
+ book_items_dict = dict(
+ build_decision_book_id=decision_book.id,
+ build_decision_book_uu_id=str(decision_book.uu_id),
+ is_confirmed=True,
+ active=True,
+ is_payment_created=True,
+ )
+ occupant_type_pre = OccupantTypes.filter_by_one(
+ system=True, occupant_code="MT-PRS", occupant_category_type="MT"
+ ).data
+ occupant_type_wrt = OccupantTypes.filter_by_one(
+ system=True, occupant_code="MT-WRT", occupant_category_type="MT"
+ ).data
+ occupant_type_mng = OccupantTypes.filter_by_one(
+ system=True, occupant_code="BU-MNG", occupant_category_type="BU"
+ ).data
+
+ person_occupants_pre = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.invite_id == invitation.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type_pre.id,
+ ).data
+ person_invite_pret = BuildDecisionBookPerson.filter_one(
+ BuildDecisionBookPerson.id
+ == person_occupants_pre.build_decision_book_person_id
+ ).data
+ person = People.filter_one(People.id == person_invite_pret.person_id).data
+ created_attendance = BuildDecisionBookItems.find_or_create(
+ **book_items_dict,
+ item_order=1,
+ item_comment=comment(
+ id_=person_invite_pret.invite_uu_id,
+ occ_type=occupant_type_pre.occupant_type,
+ full_name=person.full_name,
+ ),
+ )
+ created_attendance.save_and_confirm()
+
+ person_occupants_wrt = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.invite_id == invitation.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type_wrt.id,
+ ).data
+ person_invite_wrt = BuildDecisionBookPerson.filter_one(
+ BuildDecisionBookPerson.id
+ == person_occupants_wrt.build_decision_book_person_id
+ ).data
+ person = People.filter_one(People.id == person_invite_pret.person_id).data
+ created_attendance = BuildDecisionBookItems.find_or_create(
+ **book_items_dict,
+ item_order=2,
+ item_comment=comment(
+ id_=person_invite_wrt.invite_uu_id,
+ occ_type=occupant_type_wrt.occupant_type,
+ full_name=person.full_name,
+ ),
+ )
+ created_attendance.save_and_confirm()
+
+ person_occupants_mng = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.invite_id == invitation.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type_mng.id,
+ ).data
+ person_invite_mng = BuildDecisionBookPerson.filter_one(
+ BuildDecisionBookPerson.id
+ == person_occupants_mng.build_decision_book_person_id
+ ).data
+ person = People.filter_one(People.id == person_invite_pret.person_id).data
+ created_attendance = BuildDecisionBookItems.find_or_create(
+ **book_items_dict,
+ item_order=3,
+ item_comment=comment(
+ id_=person_invite_mng.invite_uu_id,
+ occ_type=occupant_type_mng.occupant_type,
+ full_name=person.full_name,
+ ),
+ )
+ created_attendance.save_and_confirm()
+ return people_book_attend_count
+
+
+class BuildDecisionBookItemsUnapproved(CrudCollection):
+    """People who objected (partially or fully) to a decision-book item."""
+
+    __tablename__ = "build_decision_book_items_unapproved"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookItemsUnapprovedLanguageModel
+
+    item_objection: Mapped[str] = mapped_column(
+        Text, nullable=False, comment="Objection Content"
+    )
+    item_order: Mapped[int] = mapped_column(
+        SmallInteger, nullable=False, comment="Order Number"
+    )
+
+    decision_book_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item"
+    )
+    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
+    person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Person UUID"
+    )
+    # NOTE(review): duplicates decision_book_item_id above (same FK
+    # target); confirm whether both columns are really needed.
+    build_decision_book_item: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item UUID"
+    )
+
+    __table_args__ = (
+        Index("_build_decision_book_item_unapproved_ndx_01", build_decision_book_item),
+        {
+            "comment": "People that are unapproved partially or completely in decision book items"
+        },
+    )
+
+
+class BuildDecisionBookPayments(CrudCollection):
+    """Payment schedule entries derived from decision-book items.
+
+    ``period_time`` mirrors the DB trigger:
+    ``period_time = to_char(NEW.process_date, 'YYYY-MM')``.
+    """
+
+    __tablename__ = "build_decision_book_payments"
+    __exclude__fields__ = []
+    __enum_list__ = [("receive_debit", "DebitTypes", "D")]
+    __language_model__ = BuildDecisionBookPaymentsLanguageModel
+
+    payment_plan_time_periods: Mapped[str] = mapped_column(
+        String(10), nullable=False, comment="Payment Plan Time Periods"
+    )
+    process_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Payment Due Date"
+    )
+    payment_amount: Mapped[float] = mapped_column(
+        Numeric(16, 2), nullable=False, comment="Payment Amount"
+    )
+    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
+
+    payment_types_id: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    payment_types_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Dues Type UUID"
+    )
+
+    # Denormalised period fields ("YYYY-MM", year, month) kept in sync
+    # with process_date (see class docstring).
+    period_time: Mapped[str] = mapped_column(String(12))
+    process_date_y: Mapped[int] = mapped_column(SmallInteger)
+    process_date_m: Mapped[int] = mapped_column(SmallInteger)
+
+    build_decision_book_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"),
+        nullable=False,
+        comment="Build Decision Book Item ID",
+    )
+    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Decision Book Item UUID"
+    )
+    # build_decision_book_id: Mapped[int] = mapped_column(
+    #     ForeignKey("build_decision_book.id"), nullable=True
+    # )
+    # build_decision_book_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=True, comment="Decision Book UUID"
+    # )
+    build_parts_id: Mapped[int] = mapped_column(
+        ForeignKey("build_parts.id"), nullable=False
+    )
+    build_parts_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Build Part UUID"
+    )
+    decision_book_project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id"),
+        nullable=True,
+        comment="Decision Book Project ID",
+    )
+    decision_book_project_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Project UUID"
+    )
+    account_records_id: Mapped[int] = mapped_column(
+        ForeignKey("account_records.id"), nullable=True
+    )
+    account_records_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Account Record UU ID"
+    )
+
+    # budget_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True)
+    # budget_records_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=True, comment="Budget UUID"
+    # )
+    # accounting_id: Mapped[int] = mapped_column(ForeignKey("account_detail.id"), nullable=True)
+    # accounting_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=True, comment="Accounting UUID"
+    # )
+    # receive_debit_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
+    # receive_debit_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Debit UUID")
+
+    # accounting: Mapped["AccountDetail"] = relationship(
+    #     "AccountDetail",
+    #     back_populates="decision_book_payment_detail",
+    #     foreign_keys=[accounting_id],
+    # )
+    #
+    # decision_book_master: Mapped["BuildDecisionBookPaymentsMaster"] = relationship(
+    #     "BuildDecisionBookPaymentsMaster",
+    #     back_populates="decision_book_payment_detail",
+    #     foreign_keys=[build_decision_book_payments_master_id],
+    # )
+    # budget_records: Mapped["CompanyBudgetRecords"] = relationship(
+    #     "CompanyBudgetRecords",
+    #     back_populates="decision_book_payment_detail",
+    #     foreign_keys=[budget_records_id],
+    # )
+
+    # One payment row per (item, part, plan period, due date, type, account record).
+    __table_args__ = (
+        Index(
+            "build_decision_book_payments_detail_ndx_00",
+            build_decision_book_item_id,
+            build_parts_id,
+            payment_plan_time_periods,
+            process_date,
+            payment_types_id,
+            account_records_id,
+            unique=True,
+        ),
+        Index("build_decision_book_payments_detail_ndx_01", account_records_id),
+        {"comment": "Payment Details of Decision Book Payments"},
+    )
+
+
+class BuildDecisionBookLegal(CrudCollection):
+    """
+    Declarative ORM model for legal cases attached to decision-book items.
+
+    ``lawsuits_type`` is a one-character discriminator:
+    "C" = court case ("Mahkeme"), "M" = mediator ("Arabulucu").
+    """
+
+    __tablename__ = "build_decision_book_legal"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookLegalLanguageModel
+
+    # Core case identification: period start, decision number and date.
+    period_start_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Start Date of Legal Period"
+    )
+    lawsuits_decision_number: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Lawsuits Decision Number"
+    )
+    lawsuits_decision_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Lawsuits Decision Date"
+    )
+
+    # Open-ended periods use a far-future sentinel as the default stop date.
+    period_stop_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
+    )
+    decision_book_pdf_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True
+    )
+    resp_company_total_wage: Mapped[float] = mapped_column(
+        Numeric(10, 2), server_default="0", nullable=True
+    )
+    contact_agreement_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True
+    )
+    # "1900-01-01 00:00:00" acts as a "not set" sentinel for optional dates.
+    contact_agreement_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", nullable=True
+    )
+    meeting_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
+    )
+    # "C" = court, "M" = mediator (see class docstring).
+    lawsuits_type: Mapped[str] = mapped_column(String(1), server_default="C")
+    lawsuits_name: Mapped[str] = mapped_column(String(128))
+    lawsuits_note: Mapped[str] = mapped_column(String(512))
+    # Cost breakdown; all monetary columns share Numeric(20, 2).
+    lawyer_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    mediator_lawyer_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    other_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    legal_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    approved_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    total_price: Mapped[float] = mapped_column(Numeric(20, 2))
+
+    # FK id / denormalized UUID pairs pointing at the related rows.
+    build_db_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    build_db_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item UUID"
+    )
+    resp_attorney_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=False
+    )
+    resp_attorney_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Attorney UUID"
+    )
+    resp_attorney_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+    resp_attorney_company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Company UUID"
+    )
+    mediator_lawyer_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
+    mediator_lawyer_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Mediator Lawyer UUID"
+    )
+
+    __table_args__ = (
+        Index("_build_decision_book_legal_ndx_00", meeting_date),
+        {
+            "comment": "Legal items related to decision book items recoreded at building meetings"
+        },
+    )
+
+
+class BuildDecisionBookProjects(CrudCollection):
+    """
+    Declarative ORM model for projects decided at building meetings.
+
+    ``project_type`` is a one-character discriminator:
+    "C" = court ("Mahkeme"), "M" = mediator ("Arabulucu").
+    """
+
+    __tablename__ = "build_decision_book_projects"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookProjectsLanguageModel
+
+    project_no: Mapped[str] = mapped_column(
+        String(12), nullable=True, comment="Project Number of Decision Book"
+    )
+    project_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Project Name"
+    )
+    project_start_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Project Start Date"
+    )
+    # Far-future sentinel marks a project with no scheduled end.
+    project_stop_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
+    )
+    # "C" = court, "M" = mediator (see class docstring).
+    project_type: Mapped[str] = mapped_column(String, server_default="C")
+    project_note: Mapped[str] = mapped_column(Text)
+
+    decision_book_pdf_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True
+    )
+    is_completed: Mapped[bool] = mapped_column(
+        Boolean, server_default="0", comment="Project is Completed"
+    )
+    status_code: Mapped[int] = mapped_column(SmallInteger, nullable=True)
+    resp_company_fix_wage: Mapped[float] = mapped_column(
+        Numeric(10, 2), server_default="0"
+    )
+    is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+    # "1900-01-01 00:00:00" acts as a "not set" sentinel; indexed for lookups.
+    meeting_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", index=True
+    )
+    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
+    # Price lifecycle: bid -> approved -> final.
+    bid_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
+    approved_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
+    final_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
+
+    # FK id / denormalized UUID pairs pointing at the related rows.
+    contact_id: Mapped[int] = mapped_column(
+        ForeignKey("contracts.id"), nullable=True, comment="Contract id"
+    )
+    contact_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Contract UUID"
+    )
+    build_decision_book_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book.id"), nullable=False
+    )
+    build_decision_book_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book UUID"
+    )
+    build_decision_book_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item UUID"
+    )
+    project_response_living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"),
+        nullable=True,
+        comment="Project Response Person ID",
+    )
+    project_response_living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Project Response Person UUID"
+    )
+    resp_company_id: Mapped[int] = mapped_column(
+        ForeignKey("companies.id"), nullable=True
+    )
+    resp_company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Company UUID"
+    )
+
+    build_decision_book_item: Mapped["BuildDecisionBookItems"] = relationship(
+        "BuildDecisionBookItems",
+        back_populates="decision_book_project",
+        foreign_keys=[build_decision_book_item_id],
+    )
+
+    @classmethod
+    def select_action(cls, duty_id, token=None):
+        """Return a query of projects visible to *duty_id*.
+
+        Walks the ownership chain duty -> companies -> buildings ->
+        decision books -> decision-book items, then filters projects to
+        those whose item falls inside that chain.  ``token`` is accepted
+        for interface symmetry with sibling models but is unused here.
+        """
+        from Schemas import (
+            Build,
+            Companies,
+        )
+
+        related_companies = Companies.select_action(duty_id_list=[duty_id])
+        related_companies_ids = list(
+            related_.id for related_ in related_companies.all()
+        )
+        related_building = Build.filter_all(Build.company_id.in_(related_companies_ids))
+        related_building_ids = list(related_.id for related_ in related_building.data)
+        related_decision_books = BuildDecisionBook.filter_all(
+            BuildDecisionBook.build_id.in_(related_building_ids),
+        ).data
+        related_decision_books_ids = list(
+            related_.id for related_ in related_decision_books
+        )
+        related_decision_books_items = BuildDecisionBookItems.filter_all(
+            BuildDecisionBookItems.build_decision_book_id.in_(
+                related_decision_books_ids
+            ),
+        ).data
+        related_decision_books_items_ids = list(
+            related_.id for related_ in related_decision_books_items
+        )
+        return cls.filter_all(
+            cls.build_decision_book_item_id.in_(related_decision_books_items_ids),
+        ).query
+
+    @classmethod
+    def create_action(cls, data: InsertBuildDecisionBookProjects, token=None):
+        """Create (or find) a project from the validated insert payload.
+
+        Resolves the UUIDs in *data* to integer FK ids (scoped to the
+        caller's duty via ``pre_query``), strips the UUID-only keys, marks
+        the row confirmed, and delegates to ``find_or_create``.
+        """
+        from Schemas import (
+            People,
+            Companies,
+        )
+
+        data_dict = data.dump()
+        # Scope subsequent lookups to rows visible to the caller's duty.
+        BuildDecisionBookItems.pre_query = BuildDecisionBookItems.select_action(
+            duty_id=token.duty_list["duty_id"]
+        )
+        People.pre_query = People.select_action(
+            duty_id_list=[token.duty_list["duty_id"]]
+        )
+        decision_book_project_item = BuildDecisionBookItems.find_one_or_abort(
+            uu_id=data_dict.get("build_decision_book_item_uu_id")
+        )
+        project_response_person = People.find_one_or_abort(
+            uu_id=data_dict.get("project_response_person_uu_id")
+        )
+        data_dict["build_decision_book_item_id"] = decision_book_project_item.id
+        # NOTE(review): "project_response_person_id" does not match any mapped
+        # column visible on this model (column is project_response_living_space_id)
+        # -- confirm the key is consumed downstream.
+        data_dict["project_response_person_id"] = project_response_person.id
+        if data.resp_company_uu_id:
+            # NOTE(review): find_one (unlike find_one_or_abort above) may return
+            # None, which would raise AttributeError on .id -- confirm intended.
+            resp_company = Companies.find_one(uu_id=data.resp_company_uu_id)
+            data_dict["resp_company_id"] = resp_company.id
+        # Drop UUID-only keys so only mapped columns reach find_or_create.
+        del (
+            data_dict["build_decision_book_item_uu_id"],
+            data_dict["project_response_person_uu_id"],
+        )
+        del data_dict["resp_company_uu_id"]
+        data_dict["is_confirmed"] = True
+        return cls.find_or_create(**data_dict)
+
+    __table_args__ = (
+        Index(
+            "_build_decision_book_project_ndx_00",
+            project_no,
+            project_start_date,
+            unique=True,
+        ),
+        {
+            "comment": "Project related to decision taken at building meetings on book items"
+        },
+    )
+
+    @property
+    def get_project_year(self):
+        # Year of the owning decision book's period start.
+        # NOTE(review): relies on a `decision_book_items` relationship that is
+        # not defined in the visible model (only `build_decision_book_item` is)
+        # -- confirm this attribute exists at runtime.
+        return self.decision_book_items.decision_books.period_start_date.year
+
+    @property
+    def get_project_no(self):
+        # Display number: "<year>-<last 4 digits of id, zero-padded>".
+        return f"{self.get_project_year}-{str(self.id)[-4:].zfill(4)}"
+
+
+class BuildDecisionBookProjectPerson(CrudCollection):
+    """
+    Declarative ORM model linking living spaces (attendees) to a
+    decision-book project, with per-person pricing and discount figures.
+    """
+
+    __tablename__ = "build_decision_book_project_person"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookProjectPersonLanguageModel
+    # __enum_list__ = [("management_typecode", "ProjectTeamTypes", "PTT-EMP")]
+
+    # Discount applied to dues, in whole percent -- TODO confirm 0-100 range.
+    dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    job_fix_wage: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+    bid_price: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+    decision_price: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+
+    # FK id / denormalized UUID pairs pointing at the related rows.
+    build_decision_book_project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id"), nullable=False
+    )
+    build_decision_book_project_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Project UUID"
+    )
+    living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=False
+    )
+    living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Living Space UUID"
+    )
+
+    __table_args__ = (
+        {"comment": "People that are attended to building project meetings."},
+    )
+
+
+class BuildDecisionBookProjectItems(CrudCollection):
+    """
+    Declarative ORM model for the line items of a decision-book project
+    (header, free-text comment, optional attachment and estimated cost).
+    """
+
+    __tablename__ = "build_decision_book_project_items"
+    __exclude__fields__ = []
+    __language_model__ = BuildDecisionBookProjectItemsLanguageModel
+
+    item_header: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Item Header"
+    )
+    item_comment: Mapped[str] = mapped_column(
+        Text, nullable=False, comment="Item Comment"
+    )
+    attachment_pdf_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True, comment="Attachment PDF Path"
+    )
+    item_estimated_cost: Mapped[float] = mapped_column(
+        Numeric(16, 2), server_default="0", comment="Estimated Cost"
+    )
+    # Short label for grouping / pivot headings (see column comment).
+    item_short_comment: Mapped[str] = mapped_column(
+        String(24),
+        nullable=True,
+        comment="This field is reserved for use in grouping data or in the pivot heading.",
+    )
+
+    # FK id / denormalized UUID pair pointing at the owning project.
+    build_decision_book_project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id"), nullable=False
+    )
+    build_decision_book_project_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Project UUID"
+    )
+
+    __table_args__ = (
+        {"comment": "Project Items related to decision taken at building meetings"},
+    )
+
+
+#
+# class BuildDecisionBookPaymentsMaster(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_payments_master"
+# __exclude__fields__ = []
+# __enum_list__ = [("dues_types", "BuildDuesTypes", "D")]
+#
+# payment_plan_time_periods = mapped_column(
+# String(8), nullable=False, comment="Payment Plan Time Periods"
+# )
+# default_payment_amount = mapped_column(
+# Numeric(20, 2), nullable=False, comment="Default Payment Amount"
+# )
+#
+# dues_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
+# dues_types_uu_id = mapped_column(String, nullable=True, comment="Dues Type UUID")
+# build_decision_book_item_debits_id = mapped_column(
+# ForeignKey("build_decision_book_item_debits.id"), nullable=False
+# )
+# build_decision_book_item_debits_uu_id = mapped_column(
+# String, nullable=True, comment="Decision Book Item Debit UUID"
+# )
+# build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"), nullable=False)
+# build_parts_uu_id = mapped_column(String, nullable=True, comment="Build Part UUID")
+#
+# # decision_books_item_debits: Mapped["BuildDecisionBookItemDebits"] = relationship(
+# # "BuildDecisionBookItemDebits",
+# # back_populates="decision_book_payment_masters",
+# # foreign_keys=[build_decision_book_item_debits_id],
+# # )
+# # parts: Mapped["BuildParts"] = relationship(
+# # "BuildParts",
+# # back_populates="decision_book_payment_master",
+# # foreign_keys=[build_parts_id],
+# # )
+# # decision_book_payment_detail: Mapped[List["BuildDecisionBookPaymentsDetail"]] = (
+# # relationship(
+# # "BuildDecisionBookPaymentsDetail",
+# # back_populates="decision_book_master",
+# # foreign_keys="BuildDecisionBookPaymentsDetail.build_decision_book_payments_master_id",
+# # )
+# # )
+#
+# __table_args__ = (
+# Index(
+# "_build_decision_book_payments_master_ndx_00",
+# build_decision_book_item_debits_id,
+# build_parts_id,
+# dues_types_id,
+# unique=True,
+# ),
+# {
+# "comment": "Master Payment Items related to decision taken at building meetings"
+# },
+# )
+#
+# # @classmethod
+# # def pay_dues_of_build_part(
+# # cls,
+# # budget_records_id,
+# # build_decision_book_id,
+# # build_parts_id,
+# # start_date,
+# # paid_value,
+# # is_all=False,
+# # is_limited=False,
+# # ):
+# #
+# # book_payment_master = cls.find_one(
+# # build_decision_book_id=build_decision_book_id,
+# # build_parts_id=build_parts_id,
+# # dues_types=BuildDuesTypes.D.name,
+# # )
+# # paid_amount = 0
+# # if book_payment_master:
+# # month_start_date = (
+# # find_first_day_of_month(start_date)
+# # if not is_all
+# # else datetime(1900, 1, 1)
+# # )
+# # last_date = (
+# # find_last_day_of_month(start_date) if not is_limited else start_date
+# # )
+# # payment_dues, count = BuildDecisionBookPaymentsDetail.filter(
+# # and_(
+# # BuildDecisionBookPaymentsDetail.build_decision_book_payments_master_id
+# # == book_payment_master.id,
+# # BuildDecisionBookPaymentsDetail.process_date >= month_start_date,
+# # BuildDecisionBookPaymentsDetail.process_date <= last_date,
+# # )
+# # )
+# # period_amount = {}
+# # for payment_due in payment_dues:
+# # if payment_due.period_time not in period_amount:
+# # period_amount[payment_due.period_time] = 0
+# # period_amount[payment_due.period_time] += float(
+# # payment_due.payment_amount
+# # )
+# # paid_amount += payment_due.payment_amount
+# # print(
+# # "period_amount",
+# # period_amount,
+# # "paid_amount",
+# # paid_amount,
+# # "paid_value",
+# # paid_value,
+# # )
+# # if paid_amount > 0:
+# # return float(paid_value)
+# # period_amounts = sorted(
+# # period_amount.items(), key=lambda x: x[0], reverse=False
+# # )
+# # for period_amount in period_amounts:
+# # if period_amount[1] >= 0:
+# # continue
+# # if not paid_value > 0:
+# # break
+# # if budget_record := CompanyBudgetRecords.find_one(id=budget_records_id):
+# # debit_to_pay = abs(float(period_amount[1]))
+# # debit_to_pay = (
+# # paid_value if debit_to_pay > paid_value else debit_to_pay
+# # )
+# # budget_record.remainder_balance = float(debit_to_pay) + float(
+# # budget_record.remainder_balance
+# # )
+# # budget_record.save()
+# # BuildDecisionBookPaymentsDetail.find_or_create(
+# # build_decision_book_payments_master_id=book_payment_master.id,
+# # budget_records_id=budget_records_id,
+# # process_date=str(start_date),
+# # receive_debit=DebitTypes.R.name,
+# # period_time=str(period_amount[0]),
+# # process_date_y=str(period_amount[0]).split("-")[0],
+# # process_date_m=str(period_amount[0]).split("-")[1],
+# # payment_amount=abs(debit_to_pay),
+# # )
+# # paid_value = float(paid_value) - float(debit_to_pay)
+# # return float(paid_value)
+# #
+
+#
+# class BuildDecisionBookItemDebits(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# dues_values = due_key, due_value
+# """
+#
+# __tablename__ = "build_decision_book_item_debits"
+# __exclude__fields__ = []
+# __enum_list__ = [("dues_types", "BuildDuesTypes", "D")]
+#
+# dues_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
+# dues_types_uu_id = mapped_column(String, nullable=True, comment="Dues Type UUID")
+# # dues_values = mapped_column(
+# # MutableDict.as_mutable(JSONB()),
+# # nullable=False,
+# # comment="Due Part Key Description of inner parts",
+# # )
+# flat_type = mapped_column(
+# String, nullable=True, comment="Flat Type of Building Part"
+# )
+# flat_payment = mapped_column(
+# Numeric(20, 2), nullable=True, comment="Flat Payment Amount"
+# )
+# decision_taken: Mapped[bool] = mapped_column(Boolean, server_default="0")
+#
+# build_decision_book_item_id = mapped_column(
+# ForeignKey("build_decision_book_items.id"), nullable=False
+# )
+# build_decision_book_item_uu_id = mapped_column(
+# String, nullable=True, comment="Decision Book Item UUID"
+# )
+#
+# @classmethod
+# def select_action(cls, duty_id, token=None):
+# from database_sql_models import Companies
+#
+# related_companies = Companies.select_action(duty_id=duty_id)
+# related_companies_ids = list(
+# related_.id for related_ in related_companies.all()
+# )
+# related_building = Build.query.filter(
+# Build.company_id.in_(related_companies_ids)
+# )
+# related_building_ids = list(related_.id for related_ in related_building.all())
+# related_decision_books = BuildDecisionBook.query.filter(
+# BuildDecisionBook.build_id.in_(related_building_ids)
+# )
+# related_decision_books_ids = list(
+# related_.id for related_ in related_decision_books.all()
+# )
+# related_decision_books_items = BuildDecisionBookItems.query.filter(
+# BuildDecisionBookItems.build_decision_book_id.in_(
+# related_decision_books_ids
+# )
+# )
+# related_decision_books_items_ids = list(
+# related_.id for related_ in related_decision_books_items.all()
+# )
+# return cls.query.filter(
+# cls.build_decision_book_item_id.in_(related_decision_books_items_ids)
+# )
+#
+# @classmethod
+# def create_action(cls, data: InsertBuildDecisionBookItemDebits, token):
+# from database_sql_models import ApiEnumDropdown
+# from application.shared_functions import find_last_day_of_month
+#
+# data_dict = data.dump()
+# BuildDecisionBookItems.pre_query = BuildDecisionBookItems.select_action(
+# duty_id=token.duty_list["duty_id"]
+# )
+# cls.pre_query = cls.select_action(duty_id=token.duty_list["duty_id"])
+# if decision_book_item := BuildDecisionBookItems.find_one_or_abort(
+# uu_id=data.build_decision_book_item_uu_id
+# ):
+# data_dict["build_decision_book_item_id"] = decision_book_item.id
+# dues_values, payment_master_list = data_dict["dues_values"], []
+# data_dict["is_confirmed"] = True
+# del data_dict["build_decision_book_item_uu_id"]
+# item_debits = cls.find_or_create(**data_dict)
+# debit_dropdown = ApiEnumDropdown.find_one(
+# enum_class="DebitTypes", value="Debit"
+# )
+# for dues_key, dues_value in dues_values.items():
+# building_parts = decision_book_item.decision_books.buildings.parts
+# decision_book = decision_book_item.decision_books
+# for building_part in building_parts:
+# detail_list = []
+# if str(building_part.due_part_key) == str(dues_key):
+# book_master = BuildDecisionBookPaymentsMaster.create(
+# build_decision_book_item_debits_id=item_debits.id,
+# build_parts_id=building_part.id,
+# dues_types=debit_dropdown.uu_id,
+# payment_plan_time_periods="M",
+# default_payment_amount=dues_value,
+# is_confirmed=True,
+# )
+# if book_master:
+# start_date = decision_book.expiry_starts
+# while start_date <= decision_book.expiry_ends:
+# start_date = find_last_day_of_month(start_date)
+# data_detail = BuildDecisionBookPaymentsDetail.find_or_create(
+# build_decision_book_payments_master_id=book_master.id,
+# budget_records_id=None,
+# process_date=start_date,
+# receive_debit=debit_dropdown.uu_id,
+# period_time=start_date.strftime("%Y-%m"),
+# process_date_y=start_date.year,
+# process_date_m=start_date.month,
+# accounting_id=None,
+# payment_amount=float(dues_value) * -1,
+# is_confirmed=True,
+# )
+# start_date = start_date + timedelta(days=2)
+# detail_list.append(data_detail.get_dict())
+# payment_master_list.append(
+# {**book_master.get_dict(), "detail_list": detail_list}
+# )
+# return_dict = {
+# **item_debits.get_dict(),
+# "debit_lists": payment_master_list,
+# }
+# return return_dict
+#
+# __table_args__ = (
+# {
+# "comment": "Debits of Decision Book Items that are related to decision taken at building meetings"
+# },
+# )
+
+
+#
+# class BuildDecisionBookBudget(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_budget"
+#
+# item_order = mapped_column(SmallInteger, nullable=False, comment="Order Number")
+# budget_type = mapped_column(String, nullable=False, comment="Budget Type")
+# plan_value: Mapped[float] = mapped_column(Numeric(10, 2), nullable=False, comment="Plan Value")
+#
+# line_comment = mapped_column(String(32), server_default="")
+# process_date_y: Mapped[int] = mapped_column(SmallInteger)
+# process_date_m: Mapped[int] = mapped_column(SmallInteger)
+# process_date_w: Mapped[int] = mapped_column(SmallInteger)
+# period_time = mapped_column(String(12), server_default="")
+#
+# build_decision_book_id: Mapped[int] = mapped_column(ForeignKey("build_decision_book.id"))
+# accounting_id = mapped_column(ForeignKey("account_detail.id"))
+#
+# __table_args__ = (
+# Index("_build_decision_book_budget_ndx_01", accounting_id),
+# {"comment": "Budget Items related to decision taken at building meetings"},
+# )
+#
+#
+# class BuildDecisionBookBudgetItem(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_budget_item"
+# __exclude__fields__ = []
+#
+# paid_date = mapped_column(TIMESTAMP, nullable=False, comment="Payment Due Date")
+# period_time = mapped_column(String(12), server_default="")
+# paid_value: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+#
+# build_decision_book_budget_id = mapped_column(
+# ForeignKey("build_decision_book_budget.id"), nullable=False
+# )
+#
+# __table_args__ = (
+# Index(
+# "_build_decision_book_budget_item_ndx_01",
+# build_decision_book_budget_id,
+# paid_date,
+# ),
+# )
+#
+
+
+# buildings: Mapped["Build"] = relationship(
+# "Build", back_populates="decision_books", foreign_keys=[build_id]
+# )
+# companies: Mapped[List["Company"]] = relationship(
+# "Company", back_populates="decision_books", foreign_keys=[resp_company_id]
+# )
+# budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+# "CompanyBudgetRecords",
+# back_populates="decision_books",
+# foreign_keys="CompanyBudgetRecords.build_decision_book_id",
+# )
+# decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="decision_books",
+# foreign_keys="BuildDecisionBookItems.build_decision_book_id",
+# )
+#
+# decision_book_management: Mapped["BuildDecisionBookManagement"] = relationship(
+# "BuildDecisionBookManagement",
+# back_populates="decision_book",
+# foreign_keys="BuildDecisionBookManagement.build_decision_book_id",
+# )
+#
+# decision_book_people: Mapped[List["BuildDecisionBookPerson"]] = relationship(
+# "BuildDecisionBookPerson",
+# back_populates="decision_books",
+# foreign_keys="BuildDecisionBookPerson.build_decision_book_id",
+# )
+#
+# # decision_book_projects: Mapped[List["DecisionBookProjects"]] = relationship(
+# # "DecisionBookProjects",
+# # back_populates="decision_books",
+# # foreign_keys="DecisionBookProjects.build_decision_book_id",
+# # )
+# # decision_book_project_people: Mapped[List["BuildDecisionBookProjectPerson"]] = (
+# # relationship(
+# # "BuildDecisionBookProjectPerson",
+# # back_populates="decision_books",
+# # foreign_keys="BuildDecisionBookProjectPerson.build_decision_book_id",
+# # )
+# # )
+# decision_book_legal_people: Mapped["BuildDecisionBookProjectsLegal"] = relationship(
+# "BuildDecisionBookProjectsLegal",
+# back_populates="decision_books",
+# foreign_keys="BuildDecisionBookProjectsLegal.build_decision_book_id",
+# )
+#
+# decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship(
+# "BuildDecisionBookBudget",
+# back_populates="decision_book",
+# foreign_keys="BuildDecisionBookBudget.build_decision_book_id",
+# )
+
+# decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="decision_book_item_debits",
+# foreign_keys=[build_decision_book_item_id],
+# )
+# decision_book_payment_masters: Mapped[List["BuildDecisionBookPaymentsMaster"]] = relationship(
+# "BuildDecisionBookPaymentsMaster",
+# back_populates="decision_books_item_debits",
+# foreign_keys="BuildDecisionBookPaymentsMaster.build_decision_book_item_debits_id",
+# )
+#
+# decision_books: Mapped["BuildDecisionBook"] = relationship(
+# "BuildDecisionBook",
+# back_populates="decision_book_items",
+# foreign_keys=[build_decision_book_id],
+# )
+# decision_book_item_debits: Mapped[List["BuildDecisionBookItemDebits"]] = (
+# relationship(
+# "BuildDecisionBookItemDebits",
+# back_populates="decision_book_items",
+# foreign_keys="BuildDecisionBookItemDebits.build_decision_book_item_id",
+# )
+# )
+# decision_book_projects: Mapped["DecisionBookProjects"] = relationship(
+# "DecisionBookProjects",
+# back_populates="decision_book_items",
+# foreign_keys="DecisionBookProjects.build_decision_book_item_id",
+# )
+# decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship(
+# "BuildDecisionBookLegal",
+# back_populates="decision_books_items",
+# foreign_keys="BuildDecisionBookLegal.build_db_item_id",
+# )
+#
+# build_decision_book_item_unapproved: Mapped[
+# List["BuildDecisionBookItemsUnapproved"]
+# ] = relationship(
+# "BuildDecisionBookItemsUnapproved",
+# back_populates="decision_book_items",
+# foreign_keys="BuildDecisionBookItemsUnapproved.build_decision_book_item",
+# )
+
+# decision_books_items: Mapped["BuildDecisionBookItems"] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="decision_book_legal",
+# foreign_keys=[build_db_item_id],
+# )
+# attorney_companies: Mapped["Companies"] = relationship(
+# "Company",
+# back_populates="decision_book_legal",
+# foreign_keys=[resp_attorney_company],
+# )
+# attorney_persons: Mapped["People"] = relationship(
+# "People",
+# back_populates="attorney_decision_book_legal",
+# foreign_keys=[resp_attorney_id],
+# )
+# lawyer_persons: Mapped["People"] = relationship(
+# "People",
+# back_populates="lawyer_decision_book_legal",
+# foreign_keys=[mediator_lawyer_person_id],
+# )
+
+# decision_books: Mapped["BuildDecisionBook"] = relationship(
+# "BuildDecisionBook",
+# back_populates="decision_book_people",
+# foreign_keys=[build_decision_book_id],
+# )
+# people: Mapped["People"] = relationship(
+# "People", back_populates="decision_book_people", foreign_keys=[person_id]
+# )
+
+# decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship(
+# "BuildDecisionBookBudget",
+# back_populates="decision_book_budget_item",
+# foreign_keys=[build_decision_book_budget_id],
+# )
+
+# accounting: Mapped["AccountDetail"] = relationship(
+# "AccountDetail",
+# back_populates="decision_book_budget",
+# foreign_keys=[accounting_id],
+# )
+# decision_book: Mapped["BuildDecisionBook"] = relationship(
+# "BuildDecisionBook",
+# back_populates="decision_book_budget",
+# foreign_keys=[build_decision_book_id],
+# )
+# decision_book_budget_item: Mapped["BuildDecisionBookBudgetItem"] = relationship(
+# "BuildDecisionBookBudgetItem",
+# back_populates="decision_book_budget",
+# foreign_keys="BuildDecisionBookBudgetItem.build_decision_book_budget_id",
+# )
+
+# decision_book_items: Mapped["BuildDecisionBookItems"] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="build_decision_book_item_unapproved",
+# foreign_keys=[build_decision_book_item],
+# )
+#
+# peoples: Mapped["People"] = relationship(
+# "People",
+# back_populates="build_decision_book_item_unapproved",
+# foreign_keys=[person_id],
+# )
+#
+# class BuildDecisionBookInvitationsPerson(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_invitations_person"
+# __exclude__fields__ = []
+#
+# invite_id = mapped_column(ForeignKey("build_decision_book_invitations.id"), nullable=False)
+# invite_uu_id = mapped_column(String, nullable=True, comment="Invite UUID")
+# person_id = mapped_column(ForeignKey("people.id"), nullable=False)
+# person_uu_id = mapped_column(String, nullable=False, comment="Person UUID")
+#
+# send_date = mapped_column(TIMESTAMP, nullable=False, comment="Confirmation Date")
+# is_confirmed: Mapped[bool] = mapped_column(Boolean, server_default="0", comment="Message is Confirmed")
+# confirmed_date = mapped_column(TIMESTAMP, nullable=True, comment="Confirmation Date")
+# token = mapped_column(String, server_default="", comment="Invitation Token")
+#
+# __table_args__ = (
+# Index(
+# "decision_book_invitations_person_ndx_01",
+# invite_id,
+# person_id,
+# unique=True,
+# ),
+# {"comment": "People that are invited to building meetings."},
+# )
diff --git a/ApiLayers/Schemas/company/company.py b/ApiLayers/Schemas/company/company.py
new file mode 100644
index 0000000..73752a2
--- /dev/null
+++ b/ApiLayers/Schemas/company/company.py
@@ -0,0 +1,581 @@
+from fastapi.exceptions import HTTPException
+from sqlalchemy import (
+ String,
+ Integer,
+ Boolean,
+ ForeignKey,
+ Index,
+ Identity,
+ TIMESTAMP,
+ func,
+)
+from sqlalchemy.orm import mapped_column, relationship, Mapped
+
+from ApiLayers.ApiLibrary.extensions.select import SelectAction
+from ApiLayers.ApiValidations.Custom.token_objects import EmployeeTokenObject
+from ApiLayers.ApiValidations.Request import (
+ InsertCompany,
+ UpdateCompany,
+ MatchCompany2Company,
+)
+from ApiLayers.LanguageModels.Database.company.company import (
+ RelationshipDutyCompanyLanguageModel,
+ CompaniesLanguageModel,
+ # CompanyDutiesLanguageModel,
+)
+from Services.PostgresDb import CrudCollection
+
+
+class RelationshipDutyCompany(CrudCollection):
+ """
+ CompanyRelationship class based on declarative_base and CrudCollection via session
+ Company -> Sub Company -> Sub-Sub Company
+
+ if owner_id == parent_id: can manipulate data of any record
+ else: Read-Only
+ duty_id = if relationship_type == base An organization / not operational / no responsible person
+
+ relationship = company_id filter -> Action filter(company_id) relationship_type = Organization
+ relationship = company_id filter -> Action filter(company_id) relationship_type = Commercial
+ """
+
+ __tablename__ = "relationship_duty_company"
+ __exclude__fields__ = []
+ __language_model__ = RelationshipDutyCompanyLanguageModel
+
+ owner_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=False
+ ) # 1
+ duties_id: Mapped[int] = mapped_column(
+ ForeignKey("duties.id"), nullable=False
+ ) # duty -> (n)employee Evyos LTD
+
+ member_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=False
+ ) # 2, 3, 4
+ parent_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=True
+ ) # None
+
+ relationship_type: Mapped[str] = mapped_column(
+ String, nullable=True, server_default="Commercial"
+ ) # Commercial, Organization # Bulk
+ child_count: Mapped[int] = mapped_column(Integer) # 0
+ show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+ # related_company: Mapped[List["Companies"]] = relationship(
+ # "Companies",
+ # back_populates="related_companies",
+ # foreign_keys=[related_company_id],
+ # )
+
+ @classmethod
+ def match_company_to_company_commercial(cls, data: MatchCompany2Company, token):
+ from Schemas import (
+ Duties,
+ )
+
+ token_duties_id, token_company_id = token.get("duty_id"), token.get(
+ "company_id"
+ )
+ list_match_company_id = []
+ send_duties = Duties.filter_one(
+ Duties.uu_id == data.duty_uu_id,
+ )
+ send_user_duties = Duties.filter_one(
+ Duties.duties_id == send_duties.id,
+ Duties.company_id == token_duties_id,
+ )
+ if not send_user_duties:
+ raise Exception(
+ "Send Duty is not found in company. Please check duty uuid and try again."
+ )
+
+ for company_uu_id in list(data.match_company_uu_id):
+ company = Companies.filter_one(
+ Companies.uu_id == company_uu_id,
+ )
+ bulk_company = RelationshipDutyCompany.filter_one(
+ RelationshipDutyCompany.owner_id == token_company_id,
+ RelationshipDutyCompany.relationship_type == "Bulk",
+ RelationshipDutyCompany.member_id == company.id,
+ )
+ if not bulk_company:
+ raise Exception(
+ f"Bulk Company is not found in company. "
+ f"Please check company uuid {bulk_company.uu_id} and try again."
+ )
+ list_match_company_id.append(bulk_company)
+
+ for match_company_id in list_match_company_id:
+ RelationshipDutyCompany.find_or_create(
+ owner_id=token_company_id,
+ duties_id=send_user_duties.id,
+ member_id=match_company_id.id,
+ parent_id=match_company_id.parent_id,
+ relationship_type="Commercial",
+ show_only=False,
+ )
+
+ @classmethod
+ def match_company_to_company_organization(cls, data: MatchCompany2Company, token):
+ from Schemas import (
+ Duties,
+ )
+
+ token_duties_id, token_company_id = token.get("duty_id"), token.get(
+ "company_id"
+ )
+ list_match_company_id = []
+ send_duties = Duties.filter_one(
+ Duties.uu_id == data.duty_uu_id,
+ )
+ send_user_duties = Duties.filter_one(
+ Duties.duties_id == send_duties.id,
+ Duties.company_id == token_duties_id,
+ )
+ if not send_user_duties:
+ raise Exception(
+ "Send Duty is not found in company. Please check duty uuid and try again."
+ )
+
+ for company_uu_id in list(data.match_company_uu_id):
+ company = Companies.filter_one(
+ Companies.uu_id == company_uu_id,
+ )
+ bulk_company = RelationshipDutyCompany.filter_one(
+ RelationshipDutyCompany.owner_id == token_company_id,
+ RelationshipDutyCompany.relationship_type == "Bulk",
+ RelationshipDutyCompany.member_id == company.id,
+ )
+ if not bulk_company:
+ raise Exception(
+ f"Bulk Company is not found in company. "
+ f"Please check company uuid {bulk_company.uu_id} and try again."
+ )
+ list_match_company_id.append(bulk_company)
+
+ for match_company_id in list_match_company_id:
+ Duties.init_a_company_default_duties(
+ company_id=match_company_id.id,
+ company_uu_id=str(match_company_id.uu_id),
+ )
+ RelationshipDutyCompany.find_or_create(
+ owner_id=token_company_id,
+ duties_id=send_user_duties.id,
+ member_id=match_company_id.id,
+ parent_id=match_company_id.parent_id,
+ relationship_type="Organization",
+ show_only=False,
+ )
+
+ __table_args__ = (
+ Index(
+ "_company_relationship_ndx_01",
+ duties_id,
+ owner_id,
+ member_id,
+ relationship_type,
+ unique=True,
+ ),
+ {"comment": "Company Relationship Information"},
+ )
+
+
+class Companies(CrudCollection, SelectAction):
+ """
+ Company class based on declarative_base and CrudCollection via session
+ formal_name = Government register name by offical
+ public_name = Public registered name by User
+ nick_name = Search by nickname, commercial_type = Tüzel veya birey
+ """
+
+ __tablename__ = "companies"
+
+ __exclude__fields__ = ["is_blacklist", "is_commercial"]
+ __access_by__ = []
+ __many__table__ = RelationshipDutyCompany
+ __language_model__ = CompaniesLanguageModel
+ # __explain__ = AbstractCompany()
+
+ formal_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Formal Name"
+ )
+ company_type: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Company Type"
+ )
+ commercial_type: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Commercial Type"
+ )
+ tax_no: Mapped[str] = mapped_column(
+ String, index=True, unique=True, nullable=False, comment="Tax No"
+ )
+
+ public_name: Mapped[str] = mapped_column(String, comment="Public Name of a company")
+ company_tag: Mapped[str] = mapped_column(String, comment="Company Tag")
+ default_lang_type: Mapped[str] = mapped_column(String, server_default="TR")
+ default_money_type: Mapped[str] = mapped_column(String, server_default="TL")
+ is_commercial: Mapped[bool] = mapped_column(Boolean, server_default="False")
+ is_blacklist: Mapped[bool] = mapped_column(Boolean, server_default="False")
+ parent_id = mapped_column(Integer, nullable=True)
+ workplace_no: Mapped[str] = mapped_column(String, nullable=True)
+
+ official_address_id: Mapped[int] = mapped_column(
+ ForeignKey("addresses.id"), nullable=True
+ )
+ official_address_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Official Address UUID"
+ )
+ top_responsible_company_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=True
+ )
+ top_responsible_company_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Top Responsible Company UUID"
+ )
+
+ # buildings: Mapped[List["Build"]] = relationship(
+ # "Build",
+ # back_populates="companies",
+ # foreign_keys="Build.company_id",
+ # )
+
+ __table_args__ = (
+ Index("_company_ndx_01", tax_no, unique=True),
+ Index("_company_ndx_02", formal_name, public_name),
+ {"comment": "Company Information"},
+ )
+
+ @classmethod
+ def create_action(cls, data: InsertCompany, token: EmployeeTokenObject):
+ from Schemas import Addresses, Duties
+
+ data_dict = data.model_dump()
+ if cls.filter_one(cls.tax_no == str(data.tax_no).strip(), system=True).data:
+ raise HTTPException(
+ status_code=400,
+ detail="Company already exists. Please ask supervisor to make company visible for your duty.",
+ )
+
+ official_address = Addresses.filter_one(
+ Addresses.uu_id == data.official_address_uu_id,
+ ).data
+ # if not official_address:
+ # raise HTTPException(
+ # status_code=400,
+ # detail="Official address is not found. Please check address uuid and try again.",
+ # )
+
+ bulk_duties = Duties.get_bulk_duties_of_a_company(
+ company_id=token.selected_company.company_id
+ )
+
+ if official_address:
+ data_dict["official_address_id"] = official_address.id
+ data_dict["official_address_uu_id"] = str(official_address.uu_id)
+
+ data_dict["parent_id"] = token.selected_company.company_id
+ data_dict["top_responsible_company_id"] = token.selected_company.company_id
+ data_dict["top_responsible_company_uu_id"] = (
+ token.selected_company.company_uu_id
+ )
+ company_created = cls.find_or_create(**data_dict)
+ company_created.save_and_confirm()
+ company_relationship_created = RelationshipDutyCompany.find_or_create(
+ owner_id=token.selected_company.company_id,
+ duties_id=bulk_duties.id,
+ member_id=company_created.id,
+ parent_id=company_created.parent_id,
+ child_count=0,
+ relationship_type="Bulk",
+ show_only=False,
+ )
+ company_relationship_created.save_and_confirm()
+ return company_created
+
+ @classmethod
+ def update_action(cls, data: UpdateCompany, token):
+ from Schemas import (
+ Addresses,
+ )
+
+ data_dict = data.excluded_dump()
+ duty_id = token.get("duty_id")
+ company_id = token.get("company_id")
+ if data.official_address_uu_id:
+ official_address = Addresses.filter_one(
+ Addresses.uu_id == data.official_address_uu_id,
+ *Addresses.valid_record_args(Addresses),
+ ).data
+ data_dict["official_address_id"] = official_address.id
+ del data_dict["official_address_uu_id"], data_dict["company_uu_id"]
+ company_to_update = cls.select_action(
+ duty_id_list=[duty_id],
+ filter_expr=[
+ cls.uu_id == data.company_uu_id,
+ RelationshipDutyCompany.parent_id == company_id,
+ ],
+ )
+ return company_to_update.update(**data_dict)
+
+ # parent_id = mapped_column(ForeignKey("companies.id"))
+ # if data.parent_uu_id:
+ # company = Companies.find_one(uu_id=data.parent_uu_id)
+ # data_dict["parent_id"] = company.id
+ # def is_access_valid(self, endpoint_ext: str):
+ # try:
+ # if (
+ # not arrow.get(self.stop_date)
+ # > arrow.utcnow()
+ # > arrow.get(self.start_date)
+ # ):
+ # message = f"Kullanıcı yetkileri süresi dolmuştur. {self.endpoint_name} için supervisor ile görüşünüz."
+ # SystemLogs.create_log(
+ # log_type="ERROR",
+ # log_code="ACCESS_EXPIRED",
+ # log_action=self.__tablename__,
+ # log_message=message,
+ # )
+ # return False
+ # except Exception as e:
+ # SystemLogs.create_log(
+ # log_type="ERROR",
+ # log_code="ACCESS_EXPIRED",
+ # log_action=self.__tablename__,
+ # log_message=e,
+ # )
+ # return False
+ #
+ # access_dict = {
+ # "LIST": self.access_read,
+ # "INSERT": self.access_write,
+ # "UPDATE": self.access_update,
+ # "DELETE": self.access_delete,
+ # "ACTIVE": self.access_update,
+ # "PRINT": self.report_print,
+ # "EXPORT": self.report_export,
+ # }
+ # return access_dict.get(endpoint_ext.upper(), False)
+
+ # official_address: Mapped[List["Address"]] = relationship(
+ # "Address",
+ # back_populates="official_companies",
+ # foreign_keys=[official_address_id],
+ # )
+ #
+ # emails: Mapped[List["UsersEmails"]] = relationship(
+ # "UsersEmails", back_populates="companies", foreign_keys="UsersEmails.company_id"
+ # )
+ # phones: Mapped[List["UsersPhones"]] = relationship(
+ # "UsersPhones", back_populates="company", foreign_keys="UsersPhones.company_id"
+ # )
+ # buildings: Mapped[List["Build"]] = relationship(
+ # "Build",
+ # back_populates="companies",
+ # foreign_keys="Build.company_id",
+ # )
+ # response_buildings: Mapped[List["Build"]] = relationship(
+ # "Build",
+ # back_populates="response_companies",
+ # foreign_keys="Build.response_company_id",
+ # )
+ # departments: Mapped[List["CompanyDepartments"]] = relationship(
+ # "CompanyDepartments",
+ # back_populates="company",
+ # foreign_keys="CompanyDepartments.company_id",
+ # )
+ # budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+ # "CompanyBudgetRecords",
+ # back_populates="companies",
+ # foreign_keys="CompanyBudgetRecords.company_id",
+ # )
+ # send_budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+ # "CompanyBudgetRecords",
+ # back_populates="send_companies",
+ # foreign_keys="CompanyBudgetRecords.send_company_id",
+ # )
+ # decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
+ # "BuildDecisionBook",
+ # back_populates="companies",
+ # foreign_keys="BuildDecisionBook.resp_company_id",
+ # )
+ # decision_book_projects: Mapped[List["BuildDecisionBookProjects"]] = relationship(
+ # "BuildDecisionBookProjects",
+ # back_populates="companies",
+ # foreign_keys="BuildDecisionBookProjects.resp_company_id",
+ # )
+ # decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship(
+ # "BuildDecisionBookLegal",
+ # back_populates="attorney_companies",
+ # foreign_keys="BuildDecisionBookLegal.resp_attorney_company",
+ # )
+ #
+ # company_account_books: Mapped["AccountBooks"] = relationship(
+ # "AccountBooks",
+ # back_populates="company",
+ # foreign_keys="AccountBooks.company_id",
+ # )
+ # branch_account_books: Mapped["AccountBooks"] = relationship(
+ # "AccountBooks",
+ # back_populates="branch",
+ # foreign_keys="AccountBooks.branch_id",
+ # )
+ # account_codes: Mapped["AccountCodes"] = relationship(
+ # "AccountCodes", back_populates="company", foreign_keys="AccountCodes.company_id"
+ # )
+ # search_iban_description: Mapped["BuildIbanDescription"] = relationship(
+ # "BuildIbanDescription",
+ # back_populates="company",
+ # foreign_keys="BuildIbanDescription.company_id",
+ # )
+ # related_companies: Mapped[List["CompanyRelationship"]] = relationship(
+ # "CompanyRelationship",
+ # back_populates="related_company",
+ # foreign_keys="CompanyRelationship.related_company_id",
+ # )
+
+
+#
+# class AbstractCompany:
+# """
+# Abstract and explanation of Company class for end-user guide
+# """
+#
+# formal_name = Explanation(
+# explanation="Devletin resmi kayıtlarında bulunan şirket ünvanıdır.",
+# usage="Devletin resmi kayıtlarında bulunan şirket adı istendiğinde kullanılır.",
+# alias="Resmi Ünvan",
+# example=["X Şirketi LTD", "Y Şirketi A.Ş."],
+# )
+# company_type = Explanation(
+# explanation="Şirketin türüdür.",
+# usage="Şirketin türü istendiğinde kullanılır.",
+# alias="Şirket Türü",
+# example=[
+# "Şahıs",
+# "Limited",
+# "Anonim",
+# "Kolektif",
+# "Komandit",
+# "Kooperatif",
+# "Serbest Meslek",
+# "Adi Ortaklık",
+# ],
+# )
+# commercial_type = Explanation(
+# explanation="Şirketin ticari türüdür.",
+# usage="Şirketin ticari türü istendiğinde kullanılır.",
+# alias="Ticari Tür",
+# example=["Tüzel", "Birey"],
+# )
+# tax_no = Explanation(
+# explanation="Şirketin vergi numarasıdır.",
+# usage="Şirketin vergi numarası istendiğinde kullanılır.",
+# alias="Vergi No",
+# example=["1234567890"],
+# )
+# public_name = Explanation(
+# explanation="Şirketin kamuoyunda bilinen adıdır.",
+# usage="Şirketin kamuoyunda bilinen adı istendiğinde kullanılır.",
+# alias="Piyasada Bilinen Adı",
+# example=["X Şirketi", "Y Şirketi"],
+# )
+# company_tag = Explanation(
+# explanation="Şirketin takma adı veya etiketidir.",
+# usage="Şirketin yöneticisin karar verdiği takma adı veya etiketi istendiğinde kullanılır.",
+# alias="Şirket Etiketi veya Takma Adı",
+# example=["X", "Y"],
+# )
+# default_lang_type = Explanation(
+# explanation="Şirketin varsayılan dil türüdür.",
+# usage="Şirketin varsayılan dil türü istendiğinde kullanılır.",
+# alias="Şirketin Dil Türü",
+# example=["TR", "EN"],
+# )
+# default_money_type = Explanation(
+# explanation="Şirketin varsayılan para birimi türüdür.",
+# usage="Şirketin varsayılan para birimi türü istendiğinde kullanılır.",
+# alias="Şirketin Para Birimi Türü",
+# example=["TL", "USD", "EUR"],
+# )
+# is_commercial = Explanation(
+# explanation="Şirketin ticari olup olmadığını belirtir.",
+# usage="Şirketin ticari olup olmadığını applikasyonun anlaması için kullanılır.",
+# condition=lambda commercial_type: True if commercial_type == "Şahıs" else False,
+# alias="Şirket Ticari mi?",
+# )
+# is_blacklist = Explanation(
+# explanation="Şirketin kara listeye alınıp alınmadığını belirtir.",
+# usage="Şirketin kara listeye alınıp alınmadığını applikasyonun anlaması için kullanılır.",
+# alias="Kara Listeye alınsın mı?",
+# example=[True, False],
+# )
+# parent_id = Explanation(
+# explanation="Şirketin sorumlu olduğu şirketin ID'sidir.",
+# usage="Şirketin sorumlu olduğu şirketin ID'si istendiğinde kullanılır.",
+# alias="Sorumlu Şirket",
+# example=[
+# "Bir şirketin sorumlu şirketi hangisi olduğunu bulmak için kullanılır.",
+# ],
+# )
+# workplace_no = Explanation(
+# explanation="Şirketin iş yeri numarasıdır.",
+# usage="Şirketin iş yeri numarası istendiğinde kullanılır.",
+# alias="İş Yeri No",
+# example=["1234567890"],
+# )
+# official_address_id = Explanation(
+# explanation="Şirketin resmi adresidi.",
+# usage="Şirketin resmi adresinin ne olduğunu bulmak için kullanılır.",
+# alias="Resmi Adres",
+# example=[
+# "Bu şirketin adresi nedir sorusuna cevap vermek için kullanılır.",
+# ],
+# )
+# top_responsible_company_id = Explanation(
+# explanation="Şirketin en üst sorumlu şirketin ID'sidir.",
+# usage="Şirketin en üst sorumlu şirketin hangisi olduğunu bulmak için kullanılır.",
+# alias="Ana Yetkili Şirket",
+# example=[
+# "Bölge veya ülke genelinde en üst sorumlu şirketin hangisi olduğunu belirtmek için kullanılır.",
+# ],
+# )
+# buildings = Explanation(
+# explanation="Şirketin sahip olduğu binaların listesidir.",
+# usage="Şirketin sahip olduğu binaların listesini bulmak için kullanılır.",
+# alias="Sorumlu olduğu binalar Binalar",
+# example=[
+# "Şirketin sahip olduğu binaların listesini bulmak için kullanılır.",
+# ],
+# )
+#
+# def wag_create_company(self):
+# """
+# Er kişiye wag_create_company fonksiyonu = fieldları manipule edebilir?
+# 78 ile oluşturulan bir user için wag_create_company fonksiyonu = fieldları manipule edebilir?
+# """
+# return {
+# "commercial_type": self.commercial_type,
+# "formal_name": self.formal_name,
+# "public_name": self.public_name,
+# "company_type": self.company_type,
+# "tax_no": self.tax_no,
+# "workplace_no": self.workplace_no,
+# "company_tag": self.company_tag,
+# "default_lang_type": self.default_lang_type,
+# "default_money_type": self.default_money_type,
+# "official_address_id": self.official_address_id,
+# }
+#
+# def wag_update_company(self):
+# return {
+# "commercial_type": self.commercial_type,
+# "formal_name": self.formal_name,
+# "public_name": self.public_name,
+# "company_type": self.company_type,
+# "tax_no": self.tax_no,
+# "workplace_no": self.workplace_no,
+# "company_tag": self.company_tag,
+# "default_lang_type": self.default_lang_type,
+# "default_money_type": self.default_money_type,
+# "official_address_id": self.official_address_id,
+# }
diff --git a/ApiLayers/Schemas/company/department.py b/ApiLayers/Schemas/company/department.py
new file mode 100644
index 0000000..74da05c
--- /dev/null
+++ b/ApiLayers/Schemas/company/department.py
@@ -0,0 +1,232 @@
+from sqlalchemy import String, Integer, ForeignKey, Index, Boolean, Identity
+from sqlalchemy.orm import mapped_column, Mapped
+
+from Services.PostgresDb import CrudCollection
+
+
+class Departments(CrudCollection):
+    """Department catalogue: one row per department per company."""
+
+    __tablename__ = "departments"
+    __exclude__fields__ = []
+
+    # Plain Integer, not a self-referencing FK; "0" presumably means
+    # "no parent department" — confirm with callers.
+    parent_department_id = mapped_column(Integer, server_default="0")
+    department_code = mapped_column(
+        String(16), nullable=False, index=True, comment="Department Code"
+    )
+    department_name: Mapped[str] = mapped_column(
+        String(128), nullable=False, comment="Department Name"
+    )
+    department_description: Mapped[str] = mapped_column(String, server_default="")
+
+    # Owning company, stored both as FK id and denormalized UUID.
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
+    company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Company UUID"
+    )
+
+    # @classmethod
+    # def create_action(cls, data: DepartmentsPydantic, token):
+    #     data_dict = data.model_dump()
+    #     data_dict["company_id"] = token.selected_company.company_id
+    #     return cls.find_or_create(**data_dict)
+
+    __table_args__ = {"comment": "Departments Information"}
+
+
+class Duty(CrudCollection):
+    """Catalogue of duty titles; duty_name is globally unique."""
+
+    __tablename__ = "duty"
+    __exclude__fields__ = []
+
+    duty_name: Mapped[str] = mapped_column(
+        String, unique=True, nullable=False, comment="Duty Name"
+    )
+    duty_code: Mapped[str] = mapped_column(String, nullable=False, comment="Duty Code")
+    duty_description: Mapped[str] = mapped_column(String, comment="Duty Description")
+
+    # @classmethod
+    # def create_action(cls, data: InsertCompanyDuty, token):
+    #     # if not cls.__is_super__:
+    #     #     raise HTTPException(
+    #     #         status_code=401, detail="You are not authorized to create a duty."
+    #     #     )
+    #     data_dict = data.model_dump()
+    #
+    #     return cls.find_or_create(**data_dict)
+
+    __table_args__ = ({"comment": "Duty Information"},)
+
+
+class Duties(CrudCollection):
+
+ __tablename__ = "duties"
+ __exclude__fields__ = []
+
+ users_default_duty = mapped_column(
+ ForeignKey("duty.id"), nullable=True, comment="Default Duty for Users"
+ )
+ company_id: Mapped[int] = mapped_column(Integer)
+ company_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Company UUID"
+ )
+ duties_id: Mapped[int] = mapped_column(ForeignKey("duty.id"), nullable=False)
+ duties_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Duty UUID"
+ )
+ department_id = mapped_column(
+ ForeignKey("departments.id"), nullable=False, comment="Department ID"
+ )
+ department_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Department UUID"
+ )
+ # priority_id: Mapped[int] = mapped_column(ForeignKey("priority.id"), nullable=True)
+ management_duty = mapped_column(
+ Boolean, server_default="0"
+ ) # is this a prime Company Duty ???
+
+ @classmethod
+ def init_a_company_default_duties(cls, company_id, company_uu_id):
+ __default_init__ = ["Execution Office", "IT Department"]
+
+ active_row = dict(
+ is_confirmed=True, active=True, deleted=False, is_notification_send=True
+ )
+ list_of_created = []
+
+ execution = Departments.find_or_create(
+ department_name="Execution Office",
+ department_code="EO001",
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ **active_row,
+ )
+ list_of_created.append(execution)
+ it_dept = Departments.find_or_create(
+ department_name="IT Department",
+ department_code="ITD001",
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ **active_row,
+ )
+ list_of_created.append(it_dept)
+ bm_duty = Duty.find_or_create(
+ duty_name="Business Manager",
+ duty_code="BM0001",
+ duty_description="Business Manager",
+ **active_row,
+ )
+ list_of_created.append(bm_duty)
+ it_duty = Duty.find_or_create(
+ duty_name="IT Manager",
+ duty_code="IT0001",
+ duty_description="IT Manager",
+ **active_row,
+ )
+ list_of_created.append(it_duty)
+ bulk_duty = Duty.find_or_create(
+ duty_name="BULK",
+ duty_code="BULK",
+ duty_description="BULK RECORDS OF THE COMPANY",
+ **active_row,
+ )
+ list_of_created.append(bulk_duty)
+ occu_duty = Duty.find_or_create(
+ duty_name="OCCUPANT",
+ duty_code="OCCUPANT",
+ duty_description="OCCUPANT RECORDS OF THE COMPANY",
+ **active_row,
+ )
+ list_of_created.append(occu_duty)
+ duties_created_bm = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=bm_duty.id,
+ duties_uu_id=str(bm_duty.uu_id),
+ department_id=execution.id,
+ department_uu_id=str(execution.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created_bm)
+ duties_created_it = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=it_duty.id,
+ duties_uu_id=str(it_duty.uu_id),
+ department_id=it_dept.id,
+ department_uu_id=str(it_dept.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created_it)
+ duties_created__ex = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=bulk_duty.id,
+ duties_uu_id=str(bulk_duty.uu_id),
+ department_id=execution.id,
+ department_uu_id=str(execution.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created__ex)
+ duties_created_at = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=occu_duty.id,
+ duties_uu_id=str(occu_duty.uu_id),
+ department_id=execution.id,
+ department_uu_id=str(execution.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created_at)
+ return list_of_created
+
+ @classmethod
+ def get_bulk_duties_of_a_company(cls, company_id):
+ duties_id = Duty.filter_by_one(system=True, duty_code="BULK").data
+ if bulk_duties := Duties.filter_by_one(
+ duties_id=getattr(duties_id, "id", None),
+ company_id=company_id,
+ **Duties.valid_record_dict,
+ ).data:
+ return bulk_duties
+ raise Exception("Bulk Duty not found. Please contact with supervisor.")
+
+ # @classmethod
+ # def create_action(cls, data: InsertCompanyDuty):
+ # data_dict = data.model_dump()
+ # if department := Departments.find_one(uu_id=data.department_uu_id):
+ # data_dict["department_id"] = department.id
+ # del data_dict["department_uu_id"]
+ # return cls.find_or_create(**data_dict)
+
+ __table_args__ = (
+ Index("duty_ndx_00", company_id, duties_id, department_id, unique=True),
+ {"comment": "Duty & Company & Department Information"},
+ )
+
+ # department: Mapped[List["CompanyDepartments"]] = relationship(
+ # "CompanyDepartments", back_populates="duties", foreign_keys=[department_id]
+ # )
+ # employees: Mapped[List["CompanyEmployees"]] = relationship(
+ # "CompanyEmployees",
+ # back_populates="duty",
+ # foreign_keys="CompanyEmployees.duty_id",
+ # )
+ # duty_app: Mapped["CompanyDutyApp"] = relationship(
+ # "CompanyDutyApp", back_populates="duties", foreign_keys="CompanyDutyApp.company_duty_id"
+ # )
+
+ # def get_language_of_duty(self, lang):
+ # if erp_text := ErpText.find_one(lang=lang, text_code=self.duty_code):
+ # return erp_text.text_name, erp_text.text_description
+ # return None, None
+
+ # company: Mapped["Companies"] = relationship(
+ # "Company", back_populates="departments", foreign_keys=[company_id]
+ # )
+ # duties: Mapped[List["CompanyDuty"]] = relationship(
+ # "CompanyDuty",
+ # back_populates="department",
+ # foreign_keys="CompanyDuty.department_id",
+ # )
+ # app_item: Mapped["AppItems"] = relationship(
+ # "AppItems", back_populates="department", foreign_keys="AppItems.department_id"
+ # )
diff --git a/ApiLayers/Schemas/company/employee.py b/ApiLayers/Schemas/company/employee.py
new file mode 100644
index 0000000..748fc93
--- /dev/null
+++ b/ApiLayers/Schemas/company/employee.py
@@ -0,0 +1,153 @@
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Index,
+ Numeric,
+)
+from sqlalchemy.orm import mapped_column, Mapped
+
+from ApiLayers.LanguageModels.Database.company.employee import (
+ StaffLanguageModel,
+ EmployeesLanguageModel,
+ EmployeeHistoryLanguageModel,
+ EmployeesSalariesLanguageModel,
+)
+from Services.PostgresDb import CrudCollection
+
+from ApiLayers.ApiValidations.Request import InsertCompanyEmployees
+
+
+class Staff(CrudCollection):
+
+ __tablename__ = "staff"
+ __exclude__fields__ = []
+ __language_model__ = StaffLanguageModel
+
+ staff_description: Mapped[str] = mapped_column(
+ String, server_default="", comment="Staff Description"
+ )
+ staff_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Staff Name"
+ )
+ staff_code: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Staff Code"
+ )
+
+ duties_id: Mapped[int] = mapped_column(ForeignKey("duties.id"), nullable=False)
+ duties_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Duty UUID"
+ )
+
+ # people: Mapped["People"] = relationship(
+ # "People", back_populates="employees", foreign_keys=[people_id], uselist=True
+ # )
+ # duty: Mapped["CompanyDuty"] = relationship(
+ # "CompanyDuty", back_populates="employees", foreign_keys=[duty_id]
+ # )
+
+ @classmethod
+ def create_action(cls, data: InsertCompanyEmployees):
+ from Schemas import Duties
+
+ data_dict = data.model_dump()
+ if duty := Duties.find_one(uu_id=data.duty_uu_id):
+ data_dict["duty_id"] = duty.id
+ # if person := People.find_one(uu_id=data.person_uu_id):
+ # data_dict["people_id"] = person.id
+ if data.start_date:
+ data_dict["expiry_starts"] = data.start_date
+ if data.stop_date:
+ data_dict["expiry_ends"] = data.stop_date
+ # del data_dict["duty_uu_id"], data_dict["person_uu_id"]
+ del data_dict["start_date"], data_dict["stop_date"], data_dict["duty_uu_id"]
+ return cls.find_or_create(**data_dict)
+
+ __table_args__ = ({"comment": "Staff Information"},)
+
+
+class Employees(CrudCollection):
+    """Joins a person to a staff position; unique per (person, staff) pair."""
+
+    __tablename__ = "employees"
+    __exclude__fields__ = []
+    __language_model__ = EmployeesLanguageModel
+
+    staff_id: Mapped[int] = mapped_column(ForeignKey("staff.id"))
+    staff_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Staff UUID"
+    )
+    # people_id is nullable while staff_id is not — presumably a staff seat
+    # may exist before a person fills it; confirm with callers.
+    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="People UUID"
+    )
+
+    __table_args__ = (
+        Index("employees_ndx_00", people_id, staff_id, unique=True),
+        {"comment": "Employee Person Information"},
+    )
+
+
+class EmployeeHistory(CrudCollection):
+    """Historical (person, staff) assignments.
+
+    Unlike Employees, the index is non-unique, so repeated stints in the
+    same position can be recorded.
+    """
+
+    __tablename__ = "employee_history"
+    __exclude__fields__ = []
+    __language_model__ = EmployeeHistoryLanguageModel
+
+    staff_id: Mapped[int] = mapped_column(
+        ForeignKey("staff.id"), nullable=False, comment="Staff ID"
+    )
+    staff_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Staff UUID"
+    )
+    people_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=False, comment="People ID"
+    )
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="People UUID"
+    )
+
+    __table_args__ = (
+        Index("_employee_history_ndx_00", people_id, staff_id),
+        {"comment": "Employee History Information"},
+    )
+
+
+class EmployeesSalaries(CrudCollection):
+    """Per-person salary records (gross and net, Numeric(20, 6))."""
+
+    __tablename__ = "employee_salaries"
+    __exclude__fields__ = []
+    __language_model__ = EmployeesSalariesLanguageModel
+
+    gross_salary: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Gross Salary"
+    )
+    net_salary: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Net Salary"
+    )
+
+    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="People UUID"
+    )
+
+    # people: Mapped["People"] = relationship(
+    #     "People", back_populates="employee_salaries", foreign_keys=[people_id]
+    # )
+
+    __table_args__ = (
+        # "expiry_starts" is referenced by name — presumably a column inherited
+        # from CrudCollection; confirm it exists on the base mixin.
+        Index("_employee_salaries_ndx_00", people_id, "expiry_starts"),
+        {"comment": "Employee Salaries Information"},
+    )
+
+
+# class Events2Employees(CrudCollection):
+#
+# __tablename__ = "events2employees"
+# __exclude__fields__ = []
+#
+# event_id = mapped_column(ForeignKey("events.id"), nullable=False)
+# employees_id = mapped_column(ForeignKey("employees.id"), nullable=False)
+#
+# __table_args__ = (
+# Index("_events2employees_ndx_00", event_id, employees_id),
+# {"comment": "Events2Employees Information"},
+# )
diff --git a/ApiLayers/Schemas/event/event.py b/ApiLayers/Schemas/event/event.py
new file mode 100644
index 0000000..f8f1707
--- /dev/null
+++ b/ApiLayers/Schemas/event/event.py
@@ -0,0 +1,449 @@
+from Services.PostgresDb import CrudCollection
+from ApiLayers.LanguageModels.Database.event.event import (
+ EventsLanguageModel,
+ ModulesLanguageModel,
+ ServicesLanguageModel,
+ Service2EventsLanguageModel,
+ Event2OccupantExtraLanguageModel,
+ Event2EmployeeExtraLanguageModel,
+ Event2EmployeeLanguageModel,
+ Event2OccupantLanguageModel,
+ ModulePriceLanguageModel,
+)
+
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Numeric,
+ SmallInteger,
+ Boolean,
+ Integer,
+ Index,
+)
+from sqlalchemy.orm import mapped_column, Mapped
+
+
+class Events(CrudCollection):
+    """
+    Event (permission/function) catalogue entry.
+
+    An event maps a function code/class to an optional endpoint restriction.
+    If no Events2Occupants / Events2Employees grant matches a user's request,
+    the API responds 401 Unauthorized.
+    """
+
+    __tablename__ = "events"
+    __exclude__fields__ = []
+    __language_model__ = EventsLanguageModel
+
+    event_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event Type"
+    )
+    function_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="function code"
+    )
+    function_class: Mapped[str] = mapped_column(
+        String, nullable=False, comment="class name"
+    )
+
+    # name: Mapped[str] = mapped_column(String, nullable=True)  # form or page title
+    description: Mapped[str] = mapped_column(
+        String, server_default=""
+    )  # form or page description
+    property_description: Mapped[str] = mapped_column(String, server_default="")
+
+    marketing_layer = mapped_column(SmallInteger, server_default="3")
+    cost: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00")
+    unit_price: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00")
+
+    # Optional link to an endpoint restriction (id + denormalized UUID).
+    endpoint_id: Mapped[int] = mapped_column(
+        ForeignKey("endpoint_restriction.id"), nullable=True
+    )
+    endpoint_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Endpoint UUID"
+    )
+
+    __table_args__ = ({"comment": "Events Information"},)
+
+
+class Modules(CrudCollection):
+    """
+    Sellable application module; groups Services rows (see Services.module_id).
+    """
+
+    __tablename__ = "modules"
+    __exclude__fields__ = []
+    __language_model__ = ModulesLanguageModel
+
+    module_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module Name"
+    )
+    module_description: Mapped[str] = mapped_column(String, server_default="")
+    module_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module Code"
+    )
+    module_layer = mapped_column(Integer, nullable=False, comment="Module Layer")
+    is_default_module = mapped_column(Boolean, server_default="0")
+
+    def retrieve_services(self):
+        """Return all Services rows belonging to this module.
+
+        Raises an HTTP 404 (via CrudCollection.raise_http_exception) when the
+        module has no services.
+        """
+        services = Services.filter_all(Services.module_id == self.id).data
+        if not services:
+            self.raise_http_exception(
+                status_code="HTTP_404_NOT_FOUND",
+                error_case="RECORD_NOT_FOUND",
+                message=f"No services found for this module : {str(self.uu_id)}",
+                data={
+                    "module_uu_id": str(self.uu_id),
+                },
+            )
+        return services
+
+    __table_args__ = ({"comment": "Modules Information"},)
+
+
+class Services(CrudCollection):
+    """
+    Service offered inside a module; events attach to a service through
+    Service2Events.
+    """
+
+    __tablename__ = "services"
+    __exclude__fields__ = []
+    __language_model__ = ServicesLanguageModel
+
+    # Owning module: integer FK plus denormalized UUID.
+    module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
+    module_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module UUID"
+    )
+    service_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Service Name"
+    )
+    service_description: Mapped[str] = mapped_column(String, server_default="")
+    service_code: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Service Code"
+    )
+    # Occupant code this service is responsible for; matched in
+    # retrieve_service_via_occupant_code below.
+    related_responsibility: Mapped[str] = mapped_column(String, server_default="")
+
+    @classmethod
+    def retrieve_service_via_occupant_code(cls, occupant_code):
+        """Resolve the service whose related_responsibility equals the
+        occupant type identified by *occupant_code*.
+
+        Raises HTTP 404 (via raise_http_exception) when no OccupantTypes row
+        exists for the code; returns None-like data when no service matches.
+        """
+        from Schemas import OccupantTypes
+
+        occupant_type = OccupantTypes.filter_by_one(
+            system=True,
+            occupant_code=occupant_code,
+        ).data
+        if not occupant_type:
+            cls.raise_http_exception(
+                status_code="HTTP_404_NOT_FOUND",
+                error_case="RECORD_NOT_FOUND",
+                message=f"No occupant type found for this code : {occupant_code}",
+                data={
+                    "occupant_code": occupant_code,
+                },
+            )
+        return cls.filter_one(
+            cls.related_responsibility == occupant_type.occupant_code
+        ).data
+
+    __table_args__ = ({"comment": "Services Information"},)
+
+
+class Service2Events(CrudCollection):
+    """
+    Link table binding services to the events they grant (services <-> events).
+    """
+
+    __tablename__ = "services2events"
+    __exclude__fields__ = []
+    __language_model__ = Service2EventsLanguageModel
+
+    service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
+    # NOTE(review): the *_uu_id columns below lack Mapped[...] annotations,
+    # unlike sibling models -- confirm whether that is intentional.
+    service_uu_id = mapped_column(String, nullable=False, comment="Service UUID")
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
+
+    __table_args__ = ({"comment": "Service2Events Information"},)
+
+
+class Event2OccupantExtra(CrudCollection):
+    """Per-living-space event grants that extend the service-based grants
+    resolved in Event2Occupant.get_event_codes."""
+
+    __tablename__ = "event2occupant_extra"
+    __exclude__fields__ = []
+    __language_model__ = Event2OccupantExtraLanguageModel
+
+    build_living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=False
+    )
+    build_living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Build Living Space UUID"
+    )
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event UUID"
+    )
+
+    # Unique index: each event may be granted to a living space only once.
+    __table_args__ = (
+        Index(
+            "event2occupant_extra_bind_event_to_occupant",
+            build_living_space_id,
+            event_id,
+            unique=True,
+        ),
+        {"comment": "Occupant2Event Information"},
+    )
+
+
+class Event2EmployeeExtra(CrudCollection):
+    """
+    Per-employee event grants that extend the service-based grants resolved
+    in Event2Employee.get_event_codes.
+    """
+
+    __tablename__ = "event2employee_extra"
+    __exclude__fields__ = []
+    __language_model__ = Event2EmployeeExtraLanguageModel
+
+    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
+    employee_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Employee UUID"
+    )
+
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event UUID"
+    )
+
+    # Unique index: each event may be granted to an employee only once.
+    __table_args__ = (
+        Index(
+            "event2employee_extra_employee_to_event",
+            employee_id,
+            event_id,
+            unique=True,
+        ),
+        {"comment": "Employee to Event Information"},
+    )
+
+
+class Event2Employee(CrudCollection):
+    """
+    Grants an employee a service; the events reachable through that service
+    (via Service2Events) plus any Event2EmployeeExtra rows form the
+    employee's effective event set.
+    """
+
+    __tablename__ = "event2employee"
+    __exclude__fields__ = []
+    __language_model__ = Event2EmployeeLanguageModel
+
+    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
+    employee_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Employee UUID"
+    )
+    event_service_id: Mapped[int] = mapped_column(
+        ForeignKey("services.id"), nullable=False
+    )
+    event_service_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event Cluster UUID"
+    )
+
+    # Unique index: one grant per (employee, service) pair.
+    __table_args__ = (
+        Index(
+            "event2employee_employee_to_event",
+            employee_id,
+            event_service_id,
+            unique=True,
+        ),
+        {"comment": "Employee to Event Information"},
+    )
+
+    @classmethod
+    def get_event_codes(cls, employee_id: int) -> list:
+        """Return the function codes of every event the employee may use.
+
+        Resolution: employee -> granted services (this table) -> events via
+        Service2Events, then unioned with per-employee Event2EmployeeExtra
+        grants.  All queries share one session.
+        """
+        db = cls.new_session()
+        employee_events = cls.filter_all(
+            cls.employee_id == employee_id,
+            db=db,
+        ).data
+        # Service -> event links; filter_all_system presumably bypasses the
+        # per-company scoping -- TODO confirm.
+        active_event_ids = Service2Events.filter_all_system(
+            Service2Events.service_id.in_(
+                [event.event_service_id for event in employee_events]
+            ),
+            db=db,
+        ).data
+        active_events = Events.filter_all(
+            Events.id.in_([event.event_id for event in active_event_ids]),
+            db=db,
+        ).data
+        # Extra, individually granted events (outside any service).
+        if extra_events := Event2EmployeeExtra.filter_all(
+            Event2EmployeeExtra.employee_id == employee_id,
+            db=db,
+        ).data:
+            events_extra = Events.filter_all(
+                Events.id.in_([event.event_id for event in extra_events]),
+                db=db,
+            ).data
+            active_events.extend(events_extra)
+        return [event.function_code for event in active_events]
+
+    # @classmethod
+    # def get_event_endpoints(cls, employee_id: int) -> list:
+    #     from Schemas import EndpointRestriction
+    #
+    #     db = cls.new_session()
+    #     employee_events = cls.filter_all(
+    #         cls.employee_id == employee_id,
+    #         db=db,
+    #     ).data
+    #     active_event_ids = Service2Events.filter_all(
+    #         Service2Events.service_id.in_(
+    #             [event.event_service_id for event in employee_events]
+    #         ),
+    #         db=db,
+    #         system=True,
+    #     ).data
+    #     active_events = Events.filter_all(
+    #         Events.id.in_([event.event_id for event in active_event_ids]),
+    #         db=db,
+    #     ).data
+    #     if extra_events := Event2EmployeeExtra.filter_all(
+    #         Event2EmployeeExtra.employee_id == employee_id,
+    #         db=db,
+    #     ).data:
+    #         events_extra = Events.filter_all(
+    #             Events.id.in_([event.event_id for event in extra_events]),
+    #             db=db,
+    #         ).data
+    #         active_events.extend(events_extra)
+    #     endpoint_restrictions = EndpointRestriction.filter_all(
+    #         EndpointRestriction.id.in_([event.endpoint_id for event in active_events]),
+    #         db=db,
+    #     ).data
+    #     return [event.endpoint_name for event in endpoint_restrictions]
+    #
+
+
+class Event2Occupant(CrudCollection):
+    """
+    Grants a living space (occupant) a service; the events reachable through
+    that service (via Service2Events) plus any Event2OccupantExtra rows form
+    the occupant's effective event set.
+    """
+
+    __tablename__ = "event2occupant"
+    __exclude__fields__ = []
+    __language_model__ = Event2OccupantLanguageModel
+
+    # NOTE(review): annotated Mapped[str] although it is an FK to an integer
+    # id column; sibling models use Mapped[int] -- confirm intended type.
+    build_living_space_id: Mapped[str] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=False
+    )
+    build_living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Build Living Space UUID"
+    )
+    event_service_id: Mapped[int] = mapped_column(
+        ForeignKey("services.id"), nullable=False
+    )
+    event_service_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event Cluster UUID"
+    )
+    # event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    # event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
+
+    # Unique index: one grant per (living space, service) pair.
+    __table_args__ = (
+        Index(
+            "event2occupant_bind_event_to_occupant",
+            build_living_space_id,
+            event_service_id,
+            unique=True,
+        ),
+        {"comment": "Occupant2Event Information"},
+    )
+
+    @classmethod
+    def get_event_codes(cls, build_living_space_id) -> list:
+        """Return the function codes of every event the living space may use.
+
+        Resolution mirrors Event2Employee.get_event_codes: granted services
+        -> events via Service2Events, unioned with Event2OccupantExtra rows.
+        """
+        db = cls.new_session()
+        occupant_events = cls.filter_all(
+            cls.build_living_space_id == build_living_space_id,
+            db=db,
+        ).data
+        active_event_ids = Service2Events.filter_all_system(
+            Service2Events.service_id.in_(
+                [event.event_service_id for event in occupant_events]
+            ),
+            db=db,
+        ).data
+        active_events = Events.filter_all(
+            Events.id.in_([event.event_id for event in active_event_ids]),
+            db=db,
+        ).data
+        # Extra, individually granted events (outside any service).
+        if extra_events := Event2OccupantExtra.filter_all(
+            Event2OccupantExtra.build_living_space_id == build_living_space_id,
+            db=db,
+        ).data:
+            events_extra = Events.filter_all(
+                Events.id.in_([event.event_id for event in extra_events]),
+                db=db,
+            ).data
+            active_events.extend(events_extra)
+        return [event.function_code for event in active_events]
+
+    # @classmethod
+    # def get_event_endpoints(cls, build_living_space_id) -> list:
+    #     from Schemas import EndpointRestriction
+    #
+    #     db = cls.new_session()
+    #     occupant_events = cls.filter_all(
+    #         cls.build_living_space_id == build_living_space_id,
+    #         db=db,
+    #     ).data
+    #     active_event_ids = Service2Events.filter_all(
+    #         Service2Events.service_id.in_(
+    #             [event.event_service_id for event in occupant_events]
+    #         ),
+    #         db=db,
+    #         system=True,
+    #     ).data
+    #     active_events = Events.filter_all(
+    #         Events.id.in_([event.event_id for event in active_event_ids]),
+    #         db=db,
+    #     ).data
+    #     if extra_events := Event2OccupantExtra.filter_all(
+    #         Event2OccupantExtra.build_living_space_id == build_living_space_id,
+    #         db=db,
+    #     ).data:
+    #         events_extra = Events.filter_all(
+    #             Events.id.in_([event.event_id for event in extra_events]),
+    #             db=db,
+    #         ).data
+    #         active_events.extend(events_extra)
+    #     endpoint_restrictions = EndpointRestriction.filter_all(
+    #         EndpointRestriction.id.in_([event.endpoint_id for event in active_events]),
+    #         db=db,
+    #     ).data
+    #     return [event.endpoint_name for event in endpoint_restrictions]
+
+
+class ModulePrice(CrudCollection):
+    """
+    Campaign price for a (module, service, event) combination.
+
+    A campaign code ties a discount percentage and the resulting prices to a
+    specific module/service/event triple.
+    """
+
+    __tablename__ = "module_price"
+    __exclude__fields__ = []
+    __language_model__ = ModulePriceLanguageModel
+
+    campaign_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Campaign Code"
+    )
+    # Each reference keeps both the integer FK and the denormalized UUID.
+    module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
+    module_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module UUID"
+    )
+    service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
+    service_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Service UUID"
+    )
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event UUID"
+    )
+    is_counted_percentage: Mapped[float] = mapped_column(
+        Numeric(6, 2), server_default="0.00"
+    )  # discount percentage, e.g. 22 (%)
+    discounted_price: Mapped[float] = mapped_column(
+        Numeric(20, 2), server_default="0.00"
+    )  # e.g. list price after discount: 78.00 TL
+    calculated_price: Mapped[float] = mapped_column(
+        Numeric(20, 2), server_default="0.00"
+    )  # e.g. rounded to a flat 75.00 TL for the customer
+
+    __table_args__ = ({"comment": "ModulePrice Information"},)
diff --git a/ApiLayers/Schemas/identity/identity.py b/ApiLayers/Schemas/identity/identity.py
new file mode 100644
index 0000000..a0d0c0c
--- /dev/null
+++ b/ApiLayers/Schemas/identity/identity.py
@@ -0,0 +1,1076 @@
+from datetime import timedelta
+from fastapi import HTTPException
+from sqlalchemy import (
+ String,
+ Integer,
+ Boolean,
+ ForeignKey,
+ Index,
+ TIMESTAMP,
+ Text,
+ BigInteger,
+ Numeric,
+ func,
+ or_,
+)
+from sqlalchemy.orm import mapped_column, relationship, Mapped
+
+from Services.PostgresDb import CrudCollection
+from config import ApiStatic
+
+from ApiLayers.ApiLibrary.date_time_actions.date_functions import system_arrow
+from ApiLayers.ApiLibrary.extensions.select import (
+ SelectAction,
+ SelectActionWithEmployee,
+)
+from ApiLayers.AllConfigs.Token.config import Auth
+
+from ApiLayers.ApiValidations.Request import InsertUsers, InsertPerson
+from ApiLayers.LanguageModels.Database.identity.identity import (
+ UsersTokensLanguageModel,
+ UsersLanguageModel,
+ PeopleLanguageModel,
+ RelationshipDutyPeopleLanguageModel,
+ RelationshipEmployee2PostCodeLanguageModel,
+ AddressPostcodeLanguageModel,
+ AddressesLanguageModel,
+ AddressGeographicLocationsLanguageModel,
+ AddressCountryLanguageModel,
+ AddressStateLanguageModel,
+ AddressCityLanguageModel,
+ AddressDistrictLanguageModel,
+ AddressLocalityLanguageModel,
+ AddressNeighborhoodLanguageModel,
+ AddressStreetLanguageModel,
+ OccupantTypesLanguageModel,
+ ContractsLanguageModel,
+)
+
+
+class UsersTokens(CrudCollection):
+
+ __tablename__ = "users_tokens"
+ __exclude__fields__ = []
+ __language_model__ = UsersTokensLanguageModel
+
+ user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False)
+
+ token_type: Mapped[str] = mapped_column(String(16), server_default="RememberMe")
+ token: Mapped[str] = mapped_column(String, server_default="")
+ domain: Mapped[str] = mapped_column(String, server_default="")
+ expires_at: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True),
+ default=str(system_arrow.shift(date=system_arrow.now(), days=3)),
+ )
+
+ # users = relationship("Users", back_populates="tokens", foreign_keys=[user_id])
+
+
+class Users(CrudCollection, SelectAction):
+    """
+    Application user account used for token-based API authentication.
+
+    A user is linked to exactly one People row (person_id / person_uu_id).
+    Whether the account counts as "Employee" or "Occupant" is derived from
+    the email domain (see is_employee / is_occupant).
+    """
+
+    __tablename__ = "users"
+    __exclude__fields__ = [
+        "hash_password",
+        "password_token",
+        "expiry_begins",
+        "related_company",
+    ]
+    __language_model__ = UsersLanguageModel
+
+    user_tag: Mapped[str] = mapped_column(
+        String(64), server_default="", comment="Unique tag for the user", index=True
+    )
+    email: Mapped[str] = mapped_column(
+        String(128), server_default="", comment="Email address of the user", index=True
+    )
+    phone_number: Mapped[str] = mapped_column(
+        String, server_default="", comment="Phone number of the user", index=True
+    )
+    # Flag string: position 1 = email, 2 = phone, 3 = user tag ("111" = all).
+    via: Mapped[str] = mapped_column(
+        String,
+        server_default="111",
+        comment="Email 1/ Phone 2/ User Tag 3 All 111 Only 100",
+    )
+
+    avatar: Mapped[str] = mapped_column(
+        String, server_default="", comment="Avatar URL for the user"
+    )
+    hash_password: Mapped[str] = mapped_column(
+        String(256), server_default="", comment="Hashed password for security"
+    )
+    password_token: Mapped[str] = mapped_column(
+        String(256), server_default="", comment="Token for password reset"
+    )
+    remember_me: Mapped[bool] = mapped_column(
+        Boolean, server_default="0", comment="Flag to remember user login"
+    )
+
+    password_expires_day: Mapped[int] = mapped_column(
+        Integer,
+        server_default=str(Auth.PASSWORD_EXPIRE_DAY.days),
+        comment="Password expires in days",
+    )
+    password_expiry_begins: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True),
+        server_default=func.now(),
+        comment="Timestamp when password expiry begins",
+    )
+    related_company: Mapped[str] = mapped_column(String, comment="Related Company UUID")
+
+    person_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=False, comment="Foreign key to person table"
+    )
+    person_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Person UUID", index=True
+    )
+    local_timezone = mapped_column(
+        String, server_default="GMT+3", comment="Local timezone of user"
+    )
+    person = relationship("People", back_populates="user", foreign_keys=[person_id])
+
+    @property
+    def is_occupant(self):
+        # Occupant = email domain differs from the internal staff domain.
+        # NOTE(review): raises IndexError when email contains no "@" --
+        # confirm emails are always validated upstream.
+        return not str(self.email).split("@")[1] == Auth.ACCESS_EMAIL_EXT
+
+    @property
+    def is_employee(self):
+        # Employee = email on the internal staff domain (Auth.ACCESS_EMAIL_EXT).
+        return str(self.email).split("@")[1] == Auth.ACCESS_EMAIL_EXT
+
+    @property
+    def user_type(self):
+        # Human-readable form of the two properties above.
+        return "Occupant" if self.is_occupant else "Employee"
+
+    @classmethod
+    def credentials(cls):
+        """Return {"person_id", "person_uu_id"} for this user's person row.
+
+        NOTE(review): declared as a classmethod but reads cls.person_id,
+        which at class level is the SQLAlchemy column object rather than a
+        row value; this looks like it was meant to be an instance method --
+        confirm before relying on it.
+        """
+        db_session = cls.new_session()
+        person_object: People = People.filter_by_one(
+            db=db_session, system=True, id=cls.person_id
+        ).data
+        if person_object:
+            return {
+                "person_id": person_object.id,
+                "person_uu_id": str(person_object.uu_id),
+            }
+        return {
+            "person_id": None,
+            "person_uu_id": None,
+        }
+
+    @property
+    def password_expiry_ends(self):
+        """Expiry end = password_expiry_begins + password_expires_day.
+
+        The digits are extracted defensively: only the digits of the first
+        comma-separated field of password_expires_day are used, so string
+        values such as "30,..." still parse.
+        """
+        return self.password_expiry_begins + timedelta(
+            days=int(
+                "".join(
+                    [
+                        _
+                        for _ in str(self.password_expires_day).split(",")[0]
+                        if _.isdigit()
+                    ]
+                )
+            )
+        )
+
+    @classmethod
+    def create_action(cls, create_user: InsertUsers, token_dict):
+        """Create (or fetch) a user for an existing person, then trigger the
+        initial password-reset token.
+
+        Raises HTTP 400 when the person does not exist, or when neither a
+        plausible email nor a phone number of >= 10 characters is supplied.
+        """
+        db_session = cls.new_session()
+        found_person = People.filter_one(
+            People.uu_id == create_user.people_uu_id,
+            db=db_session,
+        ).data
+
+        if not found_person:
+            raise HTTPException(status_code=400, detail="Person not found.")
+        # NOTE(review): any() accepts an email containing "@" OR "." -- a
+        # stricter validation would require both; confirm intended.
+        if (
+            not any(i in str(create_user.email) for i in ["@", "."])
+            and not len(str(create_user.phone_number)) >= 10
+        ):
+            raise HTTPException(
+                status_code=400,
+                detail="Please enter at least one valid email or phone number.",
+            )
+        if not create_user.avatar:
+            create_user.avatar = ApiStatic.PLACEHOLDER
+        create_dict = create_user.model_dump()
+        # Replace the incoming person UUID with the resolved FK pair.
+        del create_dict["people_uu_id"]
+        create_dict["person_id"] = found_person.id
+        create_dict["person_uu_id"] = str(found_person.uu_id)
+        create_dict["related_company"] = token_dict.selected_company.company_uu_id
+        created_user = cls.find_or_create(**create_dict)
+        created_user.reset_password_token(found_user=created_user)
+        return created_user
+
+    def get_employee_and_duty_details(self):
+        """Return the confirmed duties of this user's confirmed employees.
+
+        Raises HTTP 401 when the person has no confirmed employee record.
+        """
+        from ApiLayers.Schemas import Employees, Duties
+
+        db_session = self.new_session()
+        # NOTE(review): other call sites unwrap `.data` from filter_one();
+        # here `found_person` is the result wrapper and `.id` is read from it
+        # directly -- confirm this resolves to the People row's id.
+        found_person = People.filter_one(
+            People.id == self.person_id,
+            db=db_session,
+        )
+        found_employees = Employees.filter_by_active(
+            people_id=found_person.id, is_confirmed=True, db=db_session
+        )
+        found_duties = Duties.filter_all(
+            Duties.is_confirmed == True,
+            Duties.id.in_(
+                list(found_employee.duty_id for found_employee in found_employees.data)
+            ),
+            db=db_session,
+        )
+        if not found_employees.count:
+            raise HTTPException(
+                status_code=401,
+                detail={
+                    "message": "Person has no confirmed duty. No employee match please register "
+                    "your super admin",
+                    "completed": False,
+                },
+            )
+        return {
+            "duty_list": [
+                {
+                    "duty_id": duty.id,
+                    "duty_uu_id": duty.uu_id.__str__(),
+                    "duty_code": duty.duty_code,
+                    "duty_name": duty.duty_name,
+                    "duty_description": duty.duty_description,
+                }
+                for duty in found_duties.data
+            ],
+        }
+
+    def get_main_domain_and_other_domains(self, get_main_domain: bool = True):
+        """Return the user's main domain (default) or its other domains, as
+        resolved from the Mongo identity store for the related company.
+
+        Raises HTTP 401 when no domain document exists for the user.
+        """
+        from ApiLayers.Schemas import MongoQueryIdentity
+
+        query_engine = MongoQueryIdentity(company_uuid=self.related_company)
+        domain_via_user = query_engine.get_domain_via_user(user_uu_id=str(self.uu_id))
+        if not domain_via_user:
+            raise HTTPException(
+                status_code=401,
+                detail="Domain not found. Please contact the admin.",
+            )
+        domain_via_user = domain_via_user[0]
+        if get_main_domain:
+            return domain_via_user.get("main_domain", None)
+        return domain_via_user.get("other_domains_list", None)
+
+
+class RelationshipDutyPeople(CrudCollection):
+    """Association table linking a person (member_id) to a duty within a
+    company; used as People.__many__table__."""
+
+    __tablename__ = "relationship_duty_people"
+    __exclude__fields__ = []
+    __language_model__ = RelationshipDutyPeopleLanguageModel
+
+    company_id: Mapped[int] = mapped_column(
+        ForeignKey("companies.id"), nullable=False
+    )  # 1, 2, 3
+    duties_id: Mapped[int] = mapped_column(
+        ForeignKey("duties.id"), nullable=False
+    )  # duty -> (n)person Evyos LTD
+    member_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=False
+    )  # 2, 3, 4
+
+    # e.g. "Employee" (default) or "Commercial".
+    relationship_type: Mapped[str] = mapped_column(
+        String, nullable=True, server_default="Employee"
+    )
+    show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+    # related_company: Mapped[List["Company"]] = relationship(
+    #     "Company",
+    #     back_populates="related_companies",
+    #     foreign_keys=[related_company_id],
+    # )
+
+    # Unique index: one row per (company, duty, person, relationship type).
+    __table_args__ = (
+        Index(
+            "person_relationship_ndx_01",
+            company_id,
+            duties_id,
+            member_id,
+            relationship_type,
+            unique=True,
+        ),
+        {"comment": "Person Relationship Information"},
+    )
+
+
+class People(CrudCollection, SelectAction):
+ """
+ People that are related to users in the application
+ """
+
+ __tablename__ = "people"
+ __exclude__fields__ = []
+ __many__table__ = RelationshipDutyPeople
+ __encrypt_list__ = [
+ "father_name",
+ "mother_name",
+ "country_code",
+ "national_identity_id",
+ "birth_place",
+ "birth_date",
+ "tax_no",
+ ]
+ __language_model__ = PeopleLanguageModel
+
+ firstname: Mapped[str] = mapped_column(
+ String, nullable=False, comment="First name of the person"
+ )
+ surname: Mapped[str] = mapped_column(
+ String(24), nullable=False, comment="Surname of the person"
+ )
+ middle_name: Mapped[str] = mapped_column(
+ String, server_default="", comment="Middle name of the person"
+ )
+ sex_code: Mapped[str] = mapped_column(
+ String(1), nullable=False, comment="Sex code of the person (e.g., M/F)"
+ )
+ person_ref: Mapped[str] = mapped_column(
+ String, server_default="", comment="Reference ID for the person"
+ )
+ person_tag: Mapped[str] = mapped_column(
+ String, server_default="", comment="Unique tag for the person"
+ )
+
+ # ENCRYPT DATA
+ father_name: Mapped[str] = mapped_column(
+ String, server_default="", comment="Father's name of the person"
+ )
+ mother_name: Mapped[str] = mapped_column(
+ String, server_default="", comment="Mother's name of the person"
+ )
+ country_code: Mapped[str] = mapped_column(
+ String(4), server_default="TR", comment="Country code of the person"
+ )
+ national_identity_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="National identity ID of the person"
+ )
+ birth_place: Mapped[str] = mapped_column(
+ String, server_default="", comment="Birth place of the person"
+ )
+ birth_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True),
+ server_default="1900-01-01",
+ comment="Birth date of the person",
+ )
+ tax_no: Mapped[str] = mapped_column(
+ String, server_default="", comment="Tax number of the person"
+ )
+ # Receive at Create person
+ # language = mapped_column(
+ # String, comment="Language code of the person"
+ # )
+ # currency = mapped_column(
+ # String, comment="Currency code of the person"
+ # )
+
+ # ENCRYPT DATA
+ user = relationship(
+ "Users", back_populates="person", foreign_keys="Users.person_id"
+ )
+
+ __table_args__ = (
+ Index(
+ "person_ndx_001",
+ national_identity_id,
+ unique=True,
+ ),
+ {"comment": "Person Information"},
+ )
+
+ @property
+ def full_name(self):
+ if self.middle_name:
+ return f"{self.firstname} {self.middle_name} {self.surname}"
+ return f"{self.firstname} {self.surname}"
+
+ @classmethod
+ def create_action(cls, data: InsertPerson, token):
+ from ApiLayers.Schemas import Duties
+
+ token_duties_id, token_company_id = (
+ token.selected_company.duty_id,
+ token.selected_company.company_id,
+ )
+ bulk_duty = Duties.get_bulk_duties_of_a_company(company_id=token_company_id)
+
+ if str(data.country_code) == "TR":
+ if not len(data.national_identity_id) == 11:
+ raise HTTPException(
+ status_code=400,
+ detail="Please enter a valid national identity number.",
+ )
+ if data.tax_no and not len(str(data.tax_no)) == 10:
+ raise HTTPException(
+ status_code=400,
+ detail="Please enter a valid tax number.",
+ )
+
+ create_dict = data.model_dump()
+ create_dict["firstname"] = str(create_dict["firstname"]).capitalize()
+ create_dict["middle_name"] = str(create_dict["middle_name"]).capitalize()
+ create_dict["surname"] = str(create_dict["surname"]).upper()
+ create_dict["birth_place"] = str(create_dict["birth_place"]).upper()
+ created_people = cls.find_or_create(**create_dict)
+ created_people.update(is_confirmed=True)
+ duty_people = RelationshipDutyPeople.find_or_create(
+ company_id=token.selected_company.company_id,
+ duties_id=bulk_duty.id,
+ member_id=created_people.id,
+ )
+ duty_people.update(is_confirmed=True)
+ return created_people
+
+
+class RelationshipEmployee2PostCode(CrudCollection):
+    """
+    Association table linking an employee to a postcode (member_id); used as
+    AddressPostcode.__many__table__ to scope address access per employee.
+    """
+
+    __tablename__ = "relationship_employee2postcode"
+    __exclude__fields__ = []
+    __include__fields__ = []
+    __language_model__ = RelationshipEmployee2PostCodeLanguageModel
+
+    company_id: Mapped[int] = mapped_column(
+        ForeignKey("companies.id"), nullable=True
+    )  # 1, 2, 3
+    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
+    # member_id points at the granted postcode row.
+    member_id: Mapped[int] = mapped_column(
+        ForeignKey("address_postcode.id"), nullable=False
+    )
+
+    # e.g. "Employee" (default) or "Commercial".
+    relationship_type: Mapped[str] = mapped_column(
+        String, nullable=True, server_default="Employee"
+    )
+    show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+    __table_args__ = ({"comment": "Build2Employee Relationship Information"},)
+
+
+class AddressPostcode(CrudCollection, SelectActionWithEmployee):
+    """
+    Postcode within a street; employee visibility is scoped through
+    RelationshipEmployee2PostCode (__many__table__).
+    """
+
+    __tablename__ = "address_postcode"
+    __exclude__fields__ = []
+    __access_by__ = []
+    __many__table__ = RelationshipEmployee2PostCode
+    __language_model__ = AddressPostcodeLanguageModel
+
+    street_id: Mapped[int] = mapped_column(ForeignKey("address_street.id"))
+    street_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Street UUID"
+    )
+    postcode: Mapped[str] = mapped_column(
+        String(32), nullable=False, comment="Postcode"
+    )
+
+    __table_args__ = ({"comment": "Postcode Information"},)
+
+
+class Addresses(CrudCollection):
+    """
+    Physical address (building/door/floor plus rendered address strings),
+    anchored to a street row.
+    """
+
+    __tablename__ = "addresses"
+    __exclude__fields__ = []
+    __language_model__ = AddressesLanguageModel
+
+    build_number: Mapped[str] = mapped_column(
+        String(24), nullable=False, comment="Build Number"
+    )
+    door_number: Mapped[str] = mapped_column(
+        String(24), nullable=True, comment="Door Number"
+    )
+    floor_number: Mapped[str] = mapped_column(
+        String(24), nullable=True, comment="Floor Number"
+    )
+
+    # Pre-rendered address strings for different display contexts.
+    comment_address: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Address"
+    )
+    letter_address: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Address"
+    )
+    short_letter_address: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Address"
+    )
+
+    latitude: Mapped[float] = mapped_column(Numeric(20, 12), server_default="0")
+    longitude: Mapped[float] = mapped_column(Numeric(20, 12), server_default="0")
+
+    street_id: Mapped[int] = mapped_column(
+        ForeignKey("address_street.id"), nullable=False
+    )
+    street_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Street UUID"
+    )
+
+    @classmethod
+    def list_via_employee(cls, token_dict, filter_expr=None):
+        """List addresses restricted to the postcodes granted to the
+        requesting employee (via RelationshipEmployee2PostCode).
+
+        Raises HTTP 404 when the employee has no postcode grants.
+        NOTE(review): `cls.post_code_id` is referenced below but no such
+        column is defined on this model in the visible code (only street_id)
+        -- this would raise AttributeError at runtime; confirm the column
+        exists elsewhere or should be street-based.
+        """
+        post_code_list = RelationshipEmployee2PostCode.filter_all(
+            RelationshipEmployee2PostCode.employee_id
+            == token_dict.selected_company.employee_id,
+        ).data
+        post_code_id_list = [post_code.member_id for post_code in post_code_list]
+        if not post_code_id_list:
+            raise HTTPException(
+                status_code=404,
+                detail="User has no post code registered. User can not list addresses.",
+            )
+        # pre_query narrows every subsequent filter_all to the granted set,
+        # then is reset so later queries are unaffected.
+        cls.pre_query = cls.filter_all(cls.post_code_id.in_(post_code_id_list)).query
+        filter_cls = cls.filter_all(*filter_expr or [])
+        cls.pre_query = None
+        return filter_cls.data
+
+    # buildings: Mapped["Build"] = relationship(
+    #     "Build", back_populates="addresses", foreign_keys="Build.address_id"
+    # )
+    # site: Mapped["BuildSites"] = relationship(
+    #     "BuildSites", back_populates="addresses", foreign_keys="BuildSites.address_id"
+    # )
+    # official_companies: Mapped["Companies"] = relationship(
+    #     "Company",
+    #     back_populates="official_address",
+    #     foreign_keys="Company.official_address_id",
+    # )
+
+    # @classmethod
+    # def create_action(cls, request, create_address: InsertAddress):
+    #     from services.redis.auth_actions.token import parse_token_object_to_dict
+    #
+    #     token_dict = parse_token_object_to_dict(request=request)
+    #     data_dict = create_address.model_dump()
+    #     post_code = AddressPostcode.find_one(uu_id=create_address.post_code_uu_id)
+    #     if not post_code:
+    #         raise HTTPException(
+    #             status_code=404,
+    #             detail="Post code not found.",
+    #         )
+    #     if Employee2AddressRelationship.post_code_id.find_one(
+    #         employee_id=token_dict.selected_company.employee_id,
+    #         post_code_id=post_code.id,
+    #     ):
+    #         data_dict["post_code_id"] = post_code.id
+    #         del data_dict["post_code_uu_id"]
+    #         return cls.find_or_create(**create_address.model_dump())
+    #     raise HTTPException(
+    #         status_code=401,
+    #         detail=f"User is not qualified to create address at this post code {post_code.postcode}",
+    #     )
+
+    # __table_args__ = (
+    #     Index("_address_ndx_00", country_code, b_state, city, district),
+    #     {"comment": "Address Information"},
+    # )
+
+
+class AddressGeographicLocations(CrudCollection):
+ """
+ Country class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_geographic_locations"
+ __exclude__fields__ = []
+ __language_model__ = AddressGeographicLocationsLanguageModel
+
+ geo_table: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Address Table Name"
+ )
+ geo_id: Mapped[int] = mapped_column(
+ Integer, nullable=False, comment="Address Table ID"
+ )
+ geo_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Geographic Location Name"
+ )
+ geo_latitude: Mapped[float] = mapped_column(
+ Numeric(20, 6), server_default="0", comment="Geographic Location Name"
+ )
+ geo_longitude: Mapped[float] = mapped_column(
+ Numeric(20, 6), server_default="0", comment="Geographic Location Latitude"
+ )
+ geo_altitude: Mapped[float] = mapped_column(
+ Numeric(20, 6), server_default="0", comment="Geographic Location Longitude"
+ )
+ geo_description: Mapped[str] = mapped_column(
+ Text, nullable=False, comment="Geographic Location Description"
+ )
+ geo_area_size: Mapped[float] = mapped_column(
+ Numeric(20, 2),
+ nullable=True,
+ server_default="0",
+ comment="Geographic Location Area Size",
+ )
+ geo_population: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Geographic Location Population"
+ )
+ # geo_geom_point = mapped_column(Geometry('POINT', srid=4326), nullable=True, comment="Geographic Location Points")
+ # geo_geom_polygon = mapped_column(Geometry('POLYGON', srid=4326), nullable=True,
+ # comment="Geographic Location Vector geographic information (polygon)")
+ # geo_centroid = mapped_column( GEOMETRY(POINT, 4326), nullable=True,
+ # comment="Geographic Location center of gravity of the region(points)")
+
+ __table_args__ = (
+ Index("_address_geographic_locations_ndx_00", geo_table, geo_id),
+ Index("_address_geographic_locations_ndx_01", geo_latitude, geo_longitude),
+ {"comment": "Geographic Location Information"},
+ )
+
+
+class AddressCountry(CrudCollection):
+    """
+    Country class based on declarative_base and BaseMixin via session
+
+    Root of the address hierarchy; country_code is globally unique.
+    """
+
+    __tablename__ = "address_country"
+    __exclude__fields__ = []
+    __language_model__ = AddressCountryLanguageModel
+
+    country_code: Mapped[str] = mapped_column(
+        String(16), nullable=False, comment="Country Code"
+    )
+    country_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Country Name"
+    )
+    # presumably a currency code (e.g. "TRY"/"USD") — TODO confirm
+    money_code: Mapped[str] = mapped_column(
+        String(12), nullable=True, comment="Money Code"
+    )
+    language: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Language Code"
+    )
+    # Loose (non-FK) reference to an AddressGeographicLocations row
+    address_geographic_id: Mapped[int] = mapped_column(
+        BigInteger, nullable=True, comment="Address Geographic Id"
+    )
+
+    __table_args__ = (
+        Index("_address_country_ndx_00", money_code),
+        # country codes must be unique across the table
+        Index("_address_country_ndx_01", country_code, unique=True),
+        {"comment": "Country Information"},
+    )
+
+
+class AddressState(CrudCollection):
+    """
+    State class based on declarative_base and BaseMixin via session
+
+    Child of AddressCountry (country_id FK); unique per (country_id, state_code).
+    """
+
+    __tablename__ = "address_state"
+    __exclude__fields__ = []
+    __language_model__ = AddressStateLanguageModel
+
+    state_code: Mapped[str] = mapped_column(
+        String(16), nullable=False, comment="State Code"
+    )
+    state_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="State Name"
+    )
+    # presumably the vehicle licence-plate prefix for the state — TODO confirm
+    licence_plate: Mapped[str] = mapped_column(
+        String(24), nullable=True, comment="Sign Code"
+    )
+    phone_code: Mapped[str] = mapped_column(
+        String(36), nullable=True, comment="Phone Code"
+    )
+    gov_code: Mapped[str] = mapped_column(
+        String(128), nullable=True, comment="Government Code"
+    )
+    # Loose (non-FK) reference to an AddressGeographicLocations row
+    address_geographic_id: Mapped[int] = mapped_column(
+        BigInteger, nullable=True, comment="Address Geographic Id"
+    )
+
+    country_id: Mapped[int] = mapped_column(ForeignKey("address_country.id"))
+    # Denormalized copy of the parent country's UUID
+    country_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Country UUID"
+    )
+
+    __table_args__ = (
+        Index(
+            "_address_state_ndx_01",
+            country_id,
+            state_code,
+            unique=True,
+        ),
+        {"comment": "State Information"},
+    )
+
+
+class AddressCity(CrudCollection):
+    """
+    City class based on declarative_base and BaseMixin via session
+
+    Child of AddressState (state_id FK); unique per (state_id, city_code).
+    """
+
+    __tablename__ = "address_city"
+    __exclude__fields__ = []
+    __language_model__ = AddressCityLanguageModel
+
+    city_code: Mapped[str] = mapped_column(
+        String(24), nullable=False, comment="City Code"
+    )
+    city_name: Mapped[str] = mapped_column(String, nullable=False, comment="City Name")
+    # presumably the vehicle licence-plate prefix for the city — TODO confirm
+    licence_plate: Mapped[str] = mapped_column(
+        String(24), nullable=True, comment="Sign Code"
+    )
+    phone_code: Mapped[str] = mapped_column(
+        String(36), nullable=True, comment="Phone Code"
+    )
+    gov_code: Mapped[str] = mapped_column(
+        String(128), nullable=True, comment="Government Code"
+    )
+    # Loose (non-FK) reference to an AddressGeographicLocations row
+    address_geographic_id: Mapped[int] = mapped_column(
+        BigInteger, nullable=True, comment="Address Geographic Id"
+    )
+
+    state_id: Mapped[int] = mapped_column(ForeignKey("address_state.id"))
+    # Denormalized copy of the parent state's UUID
+    state_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="State UUID"
+    )
+
+    __table_args__ = (
+        Index(
+            "_address_city_ndx_01",
+            state_id,
+            city_code,
+            unique=True,
+        ),
+        {"comment": "City Information"},
+    )
+
+
+class AddressDistrict(CrudCollection):
+    """
+    District class based on declarative_base and BaseMixin via session
+
+    Child of AddressCity (city_id FK); unique per (city_id, district_code).
+    """
+
+    __tablename__ = "address_district"
+    __exclude__fields__ = []
+    __language_model__ = AddressDistrictLanguageModel
+
+    district_code: Mapped[str] = mapped_column(
+        String(16), nullable=False, comment="District Code"
+    )
+    district_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="District Name"
+    )
+    phone_code: Mapped[str] = mapped_column(
+        String(36), nullable=True, comment="Phone Code"
+    )
+    gov_code: Mapped[str] = mapped_column(
+        String(128), nullable=True, comment="Government Code"
+    )
+    # Loose (non-FK) reference to an AddressGeographicLocations row
+    address_geographic_id: Mapped[int] = mapped_column(
+        BigInteger, nullable=True, comment="Address Geographic Id"
+    )
+
+    city_id: Mapped[int] = mapped_column(
+        ForeignKey("address_city.id"), nullable=False, comment="City ID"
+    )
+    # Denormalized copy of the parent city's UUID
+    city_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="City UUID"
+    )
+
+    __table_args__ = (
+        Index(
+            "_address_district_ndx_01",
+            city_id,
+            district_code,
+            unique=True,
+        ),
+        {"comment": "District Information"},
+    )
+
+
+class AddressLocality(CrudCollection):
+    """
+    Locality class based on declarative_base and BaseMixin via session
+
+    Child of AddressDistrict (district_id FK); unique per
+    (district_id, locality_code).
+    """
+
+    __tablename__ = "address_locality"
+    __exclude__fields__ = []
+    __language_model__ = AddressLocalityLanguageModel
+
+    locality_code: Mapped[str] = mapped_column(
+        String(16), nullable=False, comment="Locality Code"
+    )
+    locality_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Locality Name"
+    )
+    type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name")
+    type_description: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Type Name"
+    )
+    gov_code: Mapped[str] = mapped_column(
+        String(128), nullable=True, comment="Government Code"
+    )
+    # presumably toggles visibility in address pickers — TODO confirm
+    address_show: Mapped[bool] = mapped_column(Boolean, server_default="1")
+    # Loose (non-FK) reference to an AddressGeographicLocations row
+    address_geographic_id: Mapped[int] = mapped_column(
+        BigInteger, nullable=True, comment="Address Geographic Id"
+    )
+
+    district_id: Mapped[int] = mapped_column(
+        ForeignKey("address_district.id"), nullable=False, comment="District ID"
+    )
+    # Denormalized copy of the parent district's UUID
+    district_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="District UUID"
+    )
+
+    __table_args__ = (
+        Index(
+            "_address_locality_ndx_01",
+            district_id,
+            locality_code,
+            unique=True,
+        ),
+        {"comment": "Locality Information"},
+    )
+
+
+class AddressNeighborhood(CrudCollection):
+    """
+    Neighborhood class based on declarative_base and BaseMixin via session
+
+    Links to BOTH a district and a locality (each nullable FK); the unique
+    key is (locality_id, neighborhood_code).
+    """
+
+    __tablename__ = "address_neighborhood"
+    __exclude__fields__ = []
+    __language_model__ = AddressNeighborhoodLanguageModel
+
+    neighborhood_code: Mapped[str] = mapped_column(
+        String(16), nullable=False, comment="Neighborhood Code"
+    )
+    neighborhood_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Neighborhood Name"
+    )
+    type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name")
+    type_description: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Type Name"
+    )
+    gov_code: Mapped[str] = mapped_column(
+        String(128), nullable=True, comment="Government Code"
+    )
+    # presumably toggles visibility in address pickers — TODO confirm
+    address_show: Mapped[bool] = mapped_column(Boolean, server_default="1")
+    # Loose (non-FK) reference to an AddressGeographicLocations row
+    address_geographic_id: Mapped[int] = mapped_column(
+        BigInteger, nullable=True, comment="Address Geographic Id"
+    )
+
+    district_id: Mapped[int] = mapped_column(
+        ForeignKey("address_district.id"), nullable=True, comment="District ID"
+    )
+    district_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="District UUID"
+    )
+    locality_id: Mapped[int] = mapped_column(
+        ForeignKey("address_locality.id"), nullable=True, comment="Locality ID"
+    )
+    locality_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Locality UUID"
+    )
+
+    __table_args__ = (
+        Index(
+            "_address_neighborhood_ndx_01",
+            locality_id,
+            neighborhood_code,
+            unique=True,
+        ),
+        {"comment": "Neighborhood Information"},
+    )
+
+
+class AddressStreet(CrudCollection):
+ """
+ Street class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_street"
+ __exclude__fields__ = []
+ __language_model__ = AddressStreetLanguageModel
+
+ street_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="Street Code"
+ )
+ street_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Street Name"
+ )
+ type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name")
+ type_description: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Type Name"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+ neighborhood_id: Mapped[int] = mapped_column(
+ ForeignKey("address_neighborhood.id"), nullable=False, comment="Neighborhood ID"
+ )
+ neighborhood_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Neighborhood UUID"
+ )
+
+ __table_args__ = (
+ Index("_address_street_ndx_01", neighborhood_id, street_code, unique=True),
+ {"comment": "Street Information"},
+ )
+
+ @classmethod
+ def search_address_text(cls, search_text, token_dict=None):
+ field_dict = {
+ "AddressStreet.uu_id": cls.uu_id,
+ "AddressCountry.uu_id": AddressCountry.uu_id,
+ "AddressState.uu_id": AddressState.uu_id,
+ "AddressCity.uu_id": AddressCity.uu_id,
+ "AddressDistrict.uu_id": AddressDistrict.uu_id,
+ "AddressLocality.uu_id": AddressLocality.uu_id,
+ "AddressNeighborhood.uu_id": AddressNeighborhood.uu_id,
+ "AddressCountry.country_name": AddressCountry.country_name,
+ "AddressState.state_name": AddressState.state_name,
+ "AddressCity.city_name": AddressCity.city_name,
+ "AddressDistrict.district_name": AddressDistrict.district_name,
+ "AddressLocality.locality_name": AddressLocality.locality_name,
+ "AddressNeighborhood.neighborhood_name": AddressNeighborhood.neighborhood_name,
+ "AddressStreet.street_name": cls.street_name,
+ }
+ joined_data = (
+ cls.session.query(*list(field_dict.values()))
+ .select_from(cls)
+ .join(AddressNeighborhood, AddressNeighborhood.id == cls.neighborhood_id)
+ .join(
+ AddressLocality, AddressLocality.id == AddressNeighborhood.locality_id
+ )
+ .join(AddressDistrict, AddressDistrict.id == AddressLocality.district_id)
+ .join(AddressCity, AddressCity.id == AddressDistrict.city_id)
+ .join(AddressState, AddressState.id == AddressCity.state_id)
+ .join(AddressCountry, AddressCountry.id == AddressState.country_id)
+ .filter(
+ or_(
+ AddressNeighborhood.neighborhood_name.ilike(
+ f"%{str(search_text).upper()}%"
+ ),
+ AddressLocality.locality_name.ilike(
+ f"%{str(search_text).upper()}%"
+ ),
+ AddressDistrict.district_name.ilike(
+ f"%{str(search_text).upper()}%"
+ ),
+ # AddressCity.city_name.ilike(f"%{str(search_text).upper()}%"),
+ # AddressState.state_name.ilike(f"%{str(search_text).upper()}%"),
+ # AddressCountry.country_name.ilike(f"%{str(search_text).upper()}%"),
+ cls.street_name.ilike(f"%{str(search_text).upper()}%"),
+ ),
+ )
+ )
+ # select([mytable.c.id]).where(
+ # func.to_tsvector('english', mytable.c.title) \
+ # .match('somestring', postgresql_regconfig='english')
+ # )
+ joined_statement = joined_data
+ joined_data = joined_data.first()
+ if not joined_data:
+ raise HTTPException(
+ status_code=404,
+ detail="No address found with the given search text.",
+ )
+ return dict(
+ query=joined_statement,
+ schema=list(field_dict.keys()),
+ )
+
+
+class OccupantTypes(CrudCollection):
+ """
+ Occupant Types class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "occupant_types"
+ __exclude__fields__ = []
+ __language_model__ = OccupantTypesLanguageModel
+
+ occupant_type: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Occupant Type"
+ )
+ occupant_description: Mapped[str] = mapped_column(String, server_default="")
+ occupant_code: Mapped[str] = mapped_column(String, server_default="")
+ occupant_category: Mapped[str] = mapped_column(String, server_default="")
+ occupant_category_type: Mapped[str] = mapped_column(String, server_default="")
+ occupant_is_unique: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+ __table_args__ = ({"comment": "Occupant Types Information"},)
+
+ @classmethod
+ def get_manager_occupant_type(cls):
+ if occupant_types := cls.filter_all(
+ cls.occupant_is_unique == True, cls.occupant_category_type == "MT"
+ ).data:
+ return [occupant.uu_id.__str__() for occupant in occupant_types]
+ raise HTTPException(
+ status_code=404,
+ detail="No manager type found.",
+ )
+
+
+class Contracts(CrudCollection):
+    """
+    Contract class based on declarative_base and BaseMixin via session
+
+    A contract belongs to either a company or a person (both FKs nullable).
+    """
+
+    __tablename__ = "contracts"
+    __exclude__fields__ = []
+    __language_model__ = ContractsLanguageModel
+
+    contract_type: Mapped[str] = mapped_column(
+        String(5),
+        nullable=False,
+        comment="The code for personnel is P and the code for companies is C.",
+    )
+    contract_title: Mapped[str] = mapped_column(String(255))
+    contract_details: Mapped[str] = mapped_column(Text)
+    contract_terms: Mapped[str] = mapped_column(Text)
+
+    contract_code: Mapped[str] = mapped_column(
+        String(100),
+        nullable=False,
+        comment="contract_code is the unique code given by the system.",
+    )
+    contract_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True),
+        server_default="2099-12-31 23:59:59",
+        comment="contract date is the date the contract is made. "
+        "expire start is the start date of the contract, expire en is the end date of the contract.",
+    )
+
+    company_id: Mapped[int] = mapped_column(
+        Integer, ForeignKey("companies.id"), nullable=True
+    )
+    company_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Company UUID"
+    )
+
+    person_id: Mapped[int] = mapped_column(
+        Integer, ForeignKey("people.id"), nullable=True
+    )
+    person_uu_id: Mapped[str] = mapped_column(
+        String, server_default="", comment="Person UUID"
+    )
+
+    @classmethod
+    def retrieve_contact_no(cls):
+        """Build a candidate contract code: <year><type><6-digit counter>.
+
+        NOTE(review): ``counter`` is hard-coded to 1 (see the todo below),
+        and ``cls.contract_type`` is the class-level Column object, so the
+        f-string renders the column repr rather than a row value — confirm
+        this is only a placeholder until record creation fills the real code.
+        """
+        # from api_library.date_time_actions.date_functions import system_arrow
+
+        # todo When create record contract_code == below string
+        related_date, counter = Contracts.client_arrow.now(), 1
+        return (
+            f"{related_date.date().year}{str(cls.contract_type)}{str(counter).zfill(6)}"
+        )
+
+    __table_args__ = (
+        Index("_contract_ndx_01", contract_code, unique=True),
+        {"comment": "Contract Information"},
+    )
diff --git a/ApiLayers/Schemas/others/enums.py b/ApiLayers/Schemas/others/enums.py
new file mode 100644
index 0000000..72949a2
--- /dev/null
+++ b/ApiLayers/Schemas/others/enums.py
@@ -0,0 +1,103 @@
+from fastapi.exceptions import HTTPException
+
+from sqlalchemy import (
+ UUID,
+ String,
+ text,
+)
+from sqlalchemy.orm import (
+ Mapped,
+ mapped_column,
+)
+from Services.PostgresDb import CrudCollection
+
+
+class ApiEnumDropdown(CrudCollection):
+ __tablename__ = "api_enum_dropdown"
+ __exclude__fields__ = ["enum_class"]
+ __language_model__ = None
+
+ id: Mapped[int] = mapped_column(primary_key=True)
+ uu_id: Mapped[str] = mapped_column(
+ UUID, server_default=text("gen_random_uuid()"), index=True, unique=True
+ )
+ enum_class: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Enum Constant Name"
+ )
+ key: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Key")
+ value: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Value")
+ description: Mapped[str] = mapped_column(String, nullable=True)
+
+ __table_args__ = ({"comment": "Enum objets that are linked to tables"},)
+
+ @classmethod
+ def get_by_uuid(cls, uuid: str):
+ return cls.filter_by_one(system=True, uu_id=str(uuid)).data
+
+ @classmethod
+ def get_debit_search(cls, search_debit: str = None, search_uu_id: str = None):
+ if search_uu_id:
+ if search := cls.filter_one(
+ cls.enum_class.in_(["DebitTypes"]),
+ cls.uu_id == search_uu_id,
+ system=True,
+ ).data:
+ return search
+ elif search_debit:
+ if search := cls.filter_one(
+ cls.enum_class.in_(["DebitTypes"]), cls.key == search_debit, system=True
+ ).data:
+ return search
+ return cls.filter_all(cls.enum_class.in_(["DebitTypes"]), system=True).data
+
+ @classmethod
+ def get_due_types(cls):
+ if due_list := cls.filter_all(
+ cls.enum_class == "BuildDuesTypes",
+ cls.key.in_(["BDT-A", "BDT-D"]),
+ system=True,
+ ).data:
+ return [due.uu_id.__str__() for due in due_list]
+ raise HTTPException(
+ status_code=404,
+ detail="No dues types found",
+ )
+
+ @classmethod
+ def due_type_search(cls, search_management: str = None, search_uu_id: str = None):
+ if search_uu_id:
+ if search := cls.filter_one(
+ cls.enum_class.in_(["BuildDuesTypes"]),
+ cls.uu_id == search_uu_id,
+ system=True,
+ ).data:
+ return search
+ elif search_management:
+ if search := cls.filter_one(
+ cls.enum_class.in_(["BuildDuesTypes"]),
+ cls.key == search_management,
+ system=True,
+ ).data:
+ return search
+ return cls.filter_all(cls.enum_class.in_(["BuildDuesTypes"]), system=True).data
+
+ def get_enum_dict(self):
+ return {
+ "uu_id": str(self.uu_id),
+ "enum_class": self.enum_class,
+ "key": self.key,
+ "value": self.value,
+ "description": self.description,
+ }
+
+ @classmethod
+ def uuid_of_enum(cls, enum_class: str, key: str):
+ return str(
+ getattr(
+ cls.filter_one(
+ cls.enum_class == enum_class, cls.key == key, system=True
+ ).data,
+ "uu_id",
+ None,
+ )
+ )
diff --git a/ApiLayers/Schemas/rules/rules.py b/ApiLayers/Schemas/rules/rules.py
new file mode 100644
index 0000000..304e8ba
--- /dev/null
+++ b/ApiLayers/Schemas/rules/rules.py
@@ -0,0 +1,33 @@
+from sqlalchemy import String
+from sqlalchemy.orm import mapped_column, Mapped
+
+from ApiLayers.LanguageModels.Database.rules.rules import (
+ EndpointRestrictionLanguageModel,
+)
+from Services.PostgresDb import CrudCollection
+
+
+class EndpointRestriction(CrudCollection):
+    """
+    Initialize Endpoint Restriction with default values
+
+    Catalog of API endpoints; endpoint_code is the unique handle used to
+    reference an endpoint from restriction rules.
+    """
+
+    __tablename__ = "endpoint_restriction"
+    __exclude__fields__ = []
+    __language_model__ = EndpointRestrictionLanguageModel
+
+    endpoint_function: Mapped[str] = mapped_column(
+        String, server_default="", comment="Function name of the API endpoint"
+    )
+    endpoint_name: Mapped[str] = mapped_column(
+        String, server_default="", comment="Name of the API endpoint"
+    )
+    endpoint_method: Mapped[str] = mapped_column(
+        String, server_default="", comment="HTTP method used by the endpoint"
+    )
+    endpoint_desc: Mapped[str] = mapped_column(
+        String, server_default="", comment="Description of the endpoint"
+    )
+    endpoint_code: Mapped[str] = mapped_column(
+        String, server_default="", unique=True, comment="Unique code for the endpoint"
+    )
diff --git a/ApiLayers/__init__.py b/ApiLayers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/DockerApiServices/AuthServiceApi/Dockerfile b/DockerApiServices/AuthServiceApi/Dockerfile
new file mode 100644
index 0000000..e45dd65
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/Dockerfile
@@ -0,0 +1,40 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install system dependencies and Poetry
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ gcc \
+ && rm -rf /var/lib/apt/lists/* \
+ && pip install --no-cache-dir poetry
+
+# Copy Poetry configuration
+COPY DockerApiServices/pyproject.toml ./pyproject.toml
+
+# Configure Poetry and install dependencies with optimizations
+RUN poetry config virtualenvs.create false \
+ && poetry install --no-interaction --no-ansi --no-root --only main \
+ && pip cache purge \
+ && rm -rf ~/.cache/pypoetry
+
+# Copy application code
+COPY DockerApiServices/AuthServiceApi /app
+
+# Copy shared library layers
+COPY ApiLayers /app/ApiLayers
+COPY Services /app/Services
+
+# Events
+COPY Events/Engine /app/Events/Engine
+COPY Events/base_request_model.py /app/Events/base_request_model.py
+COPY Events/AllEvents/authentication /app/Events/AllEvents/authentication
+COPY DockerApiServices/AuthServiceApi/events_file.py /app/Events/AllEvents/events_file.py
+
+# Set Python path to include app directory
+ENV PYTHONPATH=/app \
+ PYTHONUNBUFFERED=1 \
+ PYTHONDONTWRITEBYTECODE=1
+
+# Run the application using the configured uvicorn server
+CMD ["poetry", "run", "python", "app.py"]
diff --git a/DockerApiServices/AuthServiceApi/app.py b/DockerApiServices/AuthServiceApi/app.py
new file mode 100644
index 0000000..68ed880
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/app.py
@@ -0,0 +1,27 @@
+"""
+FastAPI Application Entry Point
+
+This module initializes and configures the FastAPI application with:
+- CORS middleware for cross-origin requests
+- Request timing middleware for performance monitoring
+- Custom exception handlers for consistent error responses
+- Prometheus instrumentation for metrics
+- API routers for endpoint organization
+"""
+
+import uvicorn
+
+from prometheus_fastapi_instrumentator import Instrumentator
+from app_handler import setup_middleware
+from create_file import create_app
+from config import ApiConfig
+
+
+# Module-level "app" is the object the container CMD serves ("app:app").
+app = create_app()  # Initialize FastAPI application
+Instrumentator().instrument(app=app).expose(app=app)  # Setup Prometheus metrics
+setup_middleware(app)  # Configure middleware and exception handlers
+
+
+if __name__ == "__main__":
+    # Run the application with Uvicorn
+    uvicorn.Server(uvicorn.Config(**ApiConfig.as_dict())).run()
diff --git a/DockerApiServices/AuthServiceApi/app_handler.py b/DockerApiServices/AuthServiceApi/app_handler.py
new file mode 100644
index 0000000..e46c2c9
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/app_handler.py
@@ -0,0 +1,84 @@
+"""
+FastAPI Application Handler Module
+
+This module contains all the handler functions for configuring and setting up the FastAPI application:
+- CORS middleware configuration
+- Exception handlers setup
+- Uvicorn server configuration
+"""
+
+from fastapi import FastAPI, Request, status
+from fastapi.middleware.cors import CORSMiddleware
+from pydantic import ValidationError
+from fastapi.responses import JSONResponse
+
+from ApiLayers.ErrorHandlers.ErrorHandlers.api_exc_handler import (
+ HTTPExceptionApiHandler,
+ validation_exception_handler,
+)
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from ApiLayers.Middleware.auth_middleware import (
+ RequestTimingMiddleware,
+ LoggerTimingMiddleware,
+)
+
+
+def setup_cors_middleware(app: FastAPI) -> None:
+    """
+    Configure CORS middleware for the application.
+
+    NOTE(review): wildcard origins combined with allow_credentials=True is
+    disallowed by the CORS spec (browsers reject "*" with credentials);
+    Starlette works around it by echoing the request origin — confirm this
+    open policy is intended for the auth gateway.
+
+    Args:
+        app: FastAPI application instance
+    """
+    app.add_middleware(
+        CORSMiddleware,
+        allow_origins=["*"],
+        allow_credentials=True,
+        allow_methods=["*"],
+        allow_headers=["*"],
+    )
+
+
+async def generic_exception_handler(request: Request, exc: Exception) -> JSONResponse:
+ """
+ Handle generic exceptions and return formatted error responses.
+
+ Args:
+ request: FastAPI request object
+ exc: Exception instance
+
+ Returns:
+ JSONResponse: Formatted error response
+ """
+ return JSONResponse(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ content={"detail": "Internal server error", "error_code": "INTERNAL_ERROR"},
+ )
+
+
+def setup_exception_handlers(app: FastAPI) -> None:
+    """
+    Configure custom exception handlers for the application.
+
+    Registers, from most to least specific: pydantic ValidationError,
+    the project's HTTPExceptionApi, and a catch-all Exception handler.
+
+    Args:
+        app: FastAPI application instance
+    """
+    custom_exception_handler = HTTPExceptionApiHandler(response_model=JSONResponse)
+    app.add_exception_handler(ValidationError, validation_exception_handler)
+    app.add_exception_handler(
+        HTTPExceptionApi, custom_exception_handler.handle_exception
+    )
+    app.add_exception_handler(Exception, generic_exception_handler)
+
+
+def setup_middleware(app: FastAPI) -> None:
+    """
+    Configure all middleware for the application.
+
+    NOTE(review): Starlette executes middleware in reverse order of
+    addition, so LoggerTimingMiddleware wraps RequestTimingMiddleware,
+    which wraps CORS — confirm this ordering is intended.
+
+    Args:
+        app: FastAPI application instance
+    """
+    setup_cors_middleware(app)
+    app.add_middleware(RequestTimingMiddleware)
+    app.add_middleware(LoggerTimingMiddleware)
+    setup_exception_handlers(app)
diff --git a/DockerApiServices/AuthServiceApi/config.py b/DockerApiServices/AuthServiceApi/config.py
new file mode 100644
index 0000000..b8dd8f9
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/config.py
@@ -0,0 +1,71 @@
+class DefaultApiConfig:
+    """Base uvicorn server configuration; subclasses supply the values."""
+
+    app: str
+    host: str
+    port: int
+    log_level: str
+    reload: bool
+
+    @classmethod
+    def as_dict(cls):
+        # Keyword shape matches uvicorn.Config(**kwargs).
+        return {
+            "app": cls.app,
+            "host": cls.host,
+            "port": int(cls.port),
+            "log_level": cls.log_level,
+            "reload": bool(cls.reload),
+        }
+
+
+class ApiStatic:
+    """Static URLs and link builders used across the auth service."""
+
+    PLACEHOLDER = "https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg"
+    FORGOT_LINK = "https://www.evyos.com.tr/password/create?tokenUrl="
+    BLACKLIST_LINK = "https://www.evyos.com.tr/support/unknown-login-notice/"
+    # NOTE(review): hard-coded developer home path — confirm this is unused
+    # in deployed containers or move it to environment configuration.
+    APP_DIR = "/home/berkay/git-evyos/api-managment-backend/"
+
+    @classmethod
+    def forgot_link(cls, forgot_key):
+        # Password-reset URL carrying the one-time token.
+        return cls.FORGOT_LINK + forgot_key
+
+    @classmethod
+    def blacklist_login(cls, record_id):
+        # Suspicious-login notice URL for the given record.
+        return cls.BLACKLIST_LINK + record_id
+
+
+class HostConfig:
+    """Internal service host addresses (private network IPs)."""
+
+    MAIN_HOST = "10.10.2.36"  # http://10.10.2.36
+    EMAIL_HOST = "10.10.2.34"  # http://10.10.2.34
+
+
+class ApiConfig(DefaultApiConfig):
+ # Application Information
+ APP_NAME = "evyos-auth-api-gateway"
+ TITLE = "WAG API Auth Api Gateway"
+ DESCRIPTION = (
+ "This api is serves as web auth api gateway only to evyos web services."
+ )
+ APP_URL = "https://www.auth.eys.gen.tr"
+
+ # Server Configuration
+ app = "app:app"
+ host = "0.0.0.0"
+ port = 41575
+ log_level = "info"
+ reload = True
+
+
+class MainConfig:
+    """Date/time and timezone settings shared by the application."""
+
+    # Date and Time Configuration (arrow-style format strings)
+    DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss Z"
+    DATETIME_FORMAT_JS = "YYYY-MM-DD HH:mm:ss +0"
+
+    # Timezone Configuration
+    DEFAULT_TIMEZONE = "GMT+3"  # Default timezone for the application
+    SYSTEM_TIMEZONE = "GMT+0"  # System timezone (used for internal operations)
+    SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"]  # List of supported timezones
+
+
+class LanguageConfig:
+    """Supported UI/content languages; Turkish is the default."""
+
+    SUPPORTED_LANGUAGES = ["en", "tr"]
+    DEFAULT_LANGUAGE = "tr"
diff --git a/DockerApiServices/AuthServiceApi/create_file.py b/DockerApiServices/AuthServiceApi/create_file.py
new file mode 100644
index 0000000..68b3f88
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/create_file.py
@@ -0,0 +1,48 @@
+"""
+FastAPI Application Factory Module
+
+This module provides functionality to create and configure a FastAPI application with:
+- Custom OpenAPI schema configuration
+- Security scheme configuration for Bearer authentication
+- Automatic router registration
+- Response class configuration
+- Security requirements for protected endpoints
+"""
+
+from fastapi import FastAPI
+from fastapi.responses import JSONResponse, RedirectResponse
+
+from config import ApiConfig
+from create_routes import get_all_routers
+
+
+def create_app() -> FastAPI:
+    """
+    Create and configure a FastAPI application with dynamic route creation.
+
+    Returns:
+        FastAPI: Configured FastAPI application instance
+    """
+
+    # Imported here rather than at module top — presumably to avoid a
+    # circular import with this module; confirm before hoisting.
+    from open_api_creator import create_openapi_schema
+
+    app = FastAPI(
+        title=ApiConfig.TITLE,
+        description=ApiConfig.DESCRIPTION,
+        default_response_class=JSONResponse,
+    )  # Initialize FastAPI app
+
+    @app.get("/", include_in_schema=False, summary=str(ApiConfig.DESCRIPTION))
+    def home() -> RedirectResponse:
+        """Redirect root path to API documentation."""
+        return RedirectResponse(url="/docs")
+
+    # Get all routers and protected routes using the dynamic route creation
+    prepare_routing = get_all_routers()
+
+    # Include all routers
+    for router in prepare_routing.routers:
+        app.include_router(router)
+
+    # Replace the default schema builder with the custom one.
+    app.openapi = lambda app=app: create_openapi_schema(app)
+    return app
diff --git a/DockerApiServices/AuthServiceApi/create_routes.py b/DockerApiServices/AuthServiceApi/create_routes.py
new file mode 100644
index 0000000..1cb86b0
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/create_routes.py
@@ -0,0 +1,42 @@
+"""
+Route configuration and factory module.
+Handles dynamic route creation based on configurations.
+"""
+
+from typing import Optional
+
+from Events.Engine.set_defaults.run import get_cluster_controller_group
+from Events.Engine.set_defaults.setClusters import (
+ PrepareRouting,
+ SetItems2Redis,
+ PrepareEvents,
+)
+
+
+# Module-level cache so the routing table is built only once per process.
+routers: Optional[PrepareRouting] = None
+
+
+def get_all_routers() -> PrepareRouting:
+    """
+    Get all routers and protected routes from route configurations.
+
+    Returns:
+        tuple: PrepareRouting (cached after the first call; not guarded by
+        a lock — presumably only called during single-threaded startup,
+        confirm if invoked concurrently)
+    """
+    global routers
+    if routers:
+        return routers
+
+    cluster_list = get_cluster_controller_group()
+    routers = PrepareRouting(cluster_controller_group=cluster_list)
+    return routers
+
+
+# async def health_check(request: Request):
+# """Default health check endpoint."""
+# return {"status": "healthy", "message": "Service is running"}
+#
+#
+# async def ping_test(request: Request, service_name: str = "base-router"):
+# """Default ping test endpoint."""
+# return {"ping": "pong", "service": service_name}
diff --git a/DockerApiServices/AuthServiceApi/events_file.py b/DockerApiServices/AuthServiceApi/events_file.py
new file mode 100644
index 0000000..52d0bf4
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/events_file.py
@@ -0,0 +1,11 @@
+import Events.AllEvents.authentication as auths_events
+
+
+events_list = (auths_events,)
+
+
+def retrieve_cluster_by_name(cluster_name: str):
+ for module in events_list:
+ if hasattr(module, cluster_name):
+ return getattr(module, cluster_name, None)
+ return
diff --git a/DockerApiServices/AuthServiceApi/open_api_creator.py b/DockerApiServices/AuthServiceApi/open_api_creator.py
new file mode 100644
index 0000000..e8c1067
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/open_api_creator.py
@@ -0,0 +1,256 @@
+"""
+OpenAPI Schema Creator Module
+
+This module provides functionality to create and customize OpenAPI documentation:
+- Custom security schemes (Bearer Auth, API Key)
+- Response schemas and examples
+- Tag management and descriptions
+- Error responses and validation
+- Custom documentation extensions
+"""
+
+from typing import Any, Dict
+from fastapi import FastAPI
+from fastapi.routing import APIRoute
+from fastapi.openapi.utils import get_openapi
+
+from create_routes import get_all_routers
+from config import ApiConfig
+
+
+class OpenAPISchemaCreator:
+ """
+ OpenAPI schema creator and customizer for FastAPI applications.
+ """
+
+ def __init__(self, app: FastAPI):
+ """
+ Initialize the OpenAPI schema creator.
+
+ Args:
+ app: FastAPI application instance
+ """
+ self.app = app
+ self.cluster = get_all_routers()
+ self.safe_endpoint_list = (
+ self.cluster.safe_endpoints
+ if hasattr(self.cluster, "safe_endpoints")
+ else []
+ )
+
+ def _create_security_schemes(self) -> Dict[str, Any]:
+ """
+ Create security scheme definitions.
+
+ Returns:
+ Dict[str, Any]: Security scheme configurations
+ """
+ from ApiLayers.AllConfigs.Token.config import Auth
+
+ return {
+ "BearerAuth": {
+ "type": "apiKey",
+ "in": "header",
+ "name": Auth.ACCESS_TOKEN_TAG,
+ "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
+ }
+ }
+
+ def _create_common_responses(self) -> Dict[str, Any]:
+ """
+ Create common response schemas.
+
+ Returns:
+ Dict[str, Any]: Common response configurations
+ """
+ return {
+ "401": {
+ "description": "Unauthorized - Invalid or missing credentials",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "403": {
+ "description": "Forbidden - Insufficient permissions",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "500": {
+ "description": "Internal Server Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "detail": {"type": "string"},
+ "error_code": {"type": "string"},
+ },
+ },
+ "example": {
+ "detail": "Internal server error occurred",
+ "error_code": "INTERNAL_ERROR",
+ },
+ }
+ },
+ },
+ }
+
+ def _process_request_body(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Process request body to include examples from model config.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ try:
+ route_schema = schema["paths"][path][method]
+ if "requestBody" in route_schema:
+ request_body = route_schema["requestBody"]
+ if "content" in request_body:
+ content = request_body["content"]
+ if "application/json" in content:
+ json_content = content["application/json"]
+ if (
+ "schema" in json_content
+ and "$ref" in json_content["schema"]
+ ):
+ ref = json_content["schema"]["$ref"]
+ model_name = ref.split("/")[-1]
+ if model_name in schema["components"]["schemas"]:
+ model_schema = schema["components"]["schemas"][
+ model_name
+ ]
+ if "example" in model_schema:
+ json_content["example"] = model_schema["example"]
+ except KeyError:
+ pass
+
+ def _process_response_examples(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Process response body to include examples from model config.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ try:
+ route_schema = schema["paths"][path][method]
+ if "responses" in route_schema:
+ responses = route_schema["responses"]
+ if "200" in responses:
+ response = responses["200"]
+ if "content" in response:
+ content = response["content"]
+ if "application/json" in content:
+ json_content = content["application/json"]
+ if (
+ "schema" in json_content
+ and "$ref" in json_content["schema"]
+ ):
+ ref = json_content["schema"]["$ref"]
+ model_name = ref.split("/")[-1]
+ if model_name in schema["components"]["schemas"]:
+ model_schema = schema["components"]["schemas"][
+ model_name
+ ]
+ if "example" in model_schema:
+ json_content["example"] = model_schema[
+ "example"
+ ]
+ except KeyError:
+ pass
+
+ def configure_route_security(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Configure security requirements for a specific route.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ if not schema.get("paths", {}).get(path, {}).get(method):
+ return
+
+ # Check if endpoint is in safe list
+ endpoint_path = f"{path}:{method}"
+ if endpoint_path not in [
+ f"{e.URL}:{e.METHOD.lower()}" for e in self.safe_endpoint_list
+ ]:
+ if "security" not in schema["paths"][path][method]:
+ schema["paths"][path][method]["security"] = []
+ schema["paths"][path][method]["security"].append({"BearerAuth": []})
+
+ def create_schema(self) -> Dict[str, Any]:
+ """
+ Create the complete OpenAPI schema.
+
+ Returns:
+ Dict[str, Any]: Complete OpenAPI schema
+ """
+ openapi_schema = get_openapi(
+ title=ApiConfig.TITLE,
+ description=ApiConfig.DESCRIPTION,
+ version="1.1.1",
+ routes=self.app.routes,
+ )
+
+ # Add security schemes
+ if "components" not in openapi_schema:
+ openapi_schema["components"] = {}
+
+ openapi_schema["components"][
+ "securitySchemes"
+ ] = self._create_security_schemes()
+
+ # Configure route security and responses
+ for route in self.app.routes:
+ if isinstance(route, APIRoute) and route.include_in_schema:
+ path = str(route.path)
+ methods = [method.lower() for method in route.methods]
+ for method in methods:
+ self.configure_route_security(path, method, openapi_schema)
+
+ # Add custom documentation extensions
+ openapi_schema["x-documentation"] = {
+ "postman_collection": "/docs/postman",
+ "swagger_ui": "/docs",
+ "redoc": "/redoc",
+ }
+ return openapi_schema
+
+
+def create_openapi_schema(app: FastAPI) -> Dict[str, Any]:
+ """
+ Create OpenAPI schema for a FastAPI application.
+
+ Args:
+ app: FastAPI application instance
+
+ Returns:
+ Dict[str, Any]: Complete OpenAPI schema
+ """
+ creator = OpenAPISchemaCreator(app)
+ return creator.create_schema()
diff --git a/DockerApiServices/AuthServiceApi/requirements.txt b/DockerApiServices/AuthServiceApi/requirements.txt
new file mode 100644
index 0000000..562468b
--- /dev/null
+++ b/DockerApiServices/AuthServiceApi/requirements.txt
@@ -0,0 +1,15 @@
+fastapi==0.104.1
+uvicorn==0.24.0.post1
+pydantic==2.10.5
+sqlalchemy==2.0.37
+psycopg2-binary==2.9.10
+python-dateutil==2.9.0.post0
+motor==3.3.2
+redis==5.2.1
+pytest==7.4.4
+pytest-asyncio==0.21.2
+pytest-cov==4.1.0
+coverage==7.6.10
+arrow==1.3.0
+redmail==0.6.0
+sqlalchemy-mixins==2.0.5
diff --git a/DockerApiServices/EventServiceApi/Dockerfile b/DockerApiServices/EventServiceApi/Dockerfile
new file mode 100644
index 0000000..c4b9a39
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/Dockerfile
@@ -0,0 +1,40 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install system dependencies and Poetry
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ gcc \
+ && rm -rf /var/lib/apt/lists/* \
+ && pip install --no-cache-dir poetry
+
+# Copy Poetry configuration
+COPY DockerApiServices/pyproject.toml ./pyproject.toml
+
+# Configure Poetry and install dependencies with optimizations
+RUN poetry config virtualenvs.create false \
+ && poetry install --no-interaction --no-ansi --no-root --only main \
+ && pip cache purge \
+ && rm -rf ~/.cache/pypoetry
+
+# Copy application code
+COPY DockerApiServices/EventServiceApi /app
+
+# Copy shared API layers and services
+COPY ApiLayers /app/ApiLayers
+COPY Services /app/Services
+
+# Events
+COPY Events/Engine /app/Events/Engine
+COPY Events/base_request_model.py /app/Events/base_request_model.py
+COPY Events/AllEvents/events /app/Events/AllEvents/events
+COPY DockerApiServices/EventServiceApi/events_file.py /app/Events/AllEvents/events_file.py
+
+# Set Python path to include app directory
+ENV PYTHONPATH=/app \
+ PYTHONUNBUFFERED=1 \
+ PYTHONDONTWRITEBYTECODE=1
+
+# Run the application using the configured uvicorn server
+CMD ["poetry", "run", "python", "app.py"]
diff --git a/DockerApiServices/EventServiceApi/app.py b/DockerApiServices/EventServiceApi/app.py
new file mode 100644
index 0000000..68ed880
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/app.py
@@ -0,0 +1,27 @@
+"""
+FastAPI Application Entry Point
+
+This module initializes and configures the FastAPI application with:
+- CORS middleware for cross-origin requests
+- Request timing middleware for performance monitoring
+- Custom exception handlers for consistent error responses
+- Prometheus instrumentation for metrics
+- API routers for endpoint organization
+"""
+
+import uvicorn
+
+from prometheus_fastapi_instrumentator import Instrumentator
+from app_handler import setup_middleware
+from create_file import create_app
+from config import ApiConfig
+
+
+app = create_app() # Initialize FastAPI application
+Instrumentator().instrument(app=app).expose(app=app) # Setup Prometheus metrics
+setup_middleware(app) # Configure middleware and exception handlers
+
+
+if __name__ == "__main__":
+ # Run the application with Uvicorn
+ uvicorn.Server(uvicorn.Config(**ApiConfig.as_dict())).run()
diff --git a/DockerApiServices/EventServiceApi/app_handler.py b/DockerApiServices/EventServiceApi/app_handler.py
new file mode 100644
index 0000000..7050498
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/app_handler.py
@@ -0,0 +1,82 @@
+"""
+FastAPI Application Handler Module
+
+This module contains all the handler functions for configuring and setting up the FastAPI application:
+- CORS middleware configuration
+- Exception handlers setup
+- Uvicorn server configuration
+"""
+
+from fastapi import FastAPI, Request, status
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
+
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from ApiLayers.Middleware.auth_middleware import (
+ RequestTimingMiddleware,
+ LoggerTimingMiddleware,
+)
+
+
+def setup_cors_middleware(app: FastAPI) -> None:
+ """
+ Configure CORS middleware for the application.
+
+ Args:
+ app: FastAPI application instance
+ """
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )
+
+
+async def generic_exception_handler(request: Request, exc: Exception) -> JSONResponse:
+ """
+ Handle generic exceptions and return formatted error responses.
+
+ Args:
+ request: FastAPI request object
+ exc: Exception instance
+
+ Returns:
+ JSONResponse: Formatted error response
+ """
+ return JSONResponse(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ content={"detail": "Internal server error", "error_code": "INTERNAL_ERROR"},
+ )
+
+
+def setup_exception_handlers(app: FastAPI) -> None:
+ """
+ Configure custom exception handlers for the application.
+
+ Args:
+ app: FastAPI application instance
+ """
+ from ApiLayers.ErrorHandlers.ErrorHandlers.api_exc_handler import (
+ HTTPExceptionApiHandler,
+ )
+
+ custom_exception_handler = HTTPExceptionApiHandler(response_model=JSONResponse)
+ app.add_exception_handler(
+ HTTPExceptionApi, custom_exception_handler.handle_exception
+ )
+ app.add_exception_handler(Exception, generic_exception_handler)
+
+
+def setup_middleware(app: FastAPI) -> None:
+ """
+ Configure all middleware for the application.
+
+ Args:
+ app: FastAPI application instance
+ """
+ setup_cors_middleware(app)
+ app.add_middleware(RequestTimingMiddleware)
+ app.add_middleware(LoggerTimingMiddleware)
+ setup_exception_handlers(app)
diff --git a/DockerApiServices/EventServiceApi/config.py b/DockerApiServices/EventServiceApi/config.py
new file mode 100644
index 0000000..24c43bb
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/config.py
@@ -0,0 +1,59 @@
+class DefaultApiConfig:
+ app: str
+ host: str
+ port: int
+ log_level: str
+ reload: bool
+
+ @classmethod
+ def as_dict(cls):
+ return {
+ "app": cls.app,
+ "host": cls.host,
+ "port": int(cls.port),
+ "log_level": cls.log_level,
+ "reload": bool(cls.reload),
+ }
+
+
+class ApiStatic:
+ PLACEHOLDER = "https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg"
+ FORGOT_LINK = "https://www.evyos.com.tr/password/create?tokenUrl="
+ BLACKLIST_LINK = "https://www.evyos.com.tr/support/unknown-login-notice/"
+ APP_DIR = "/home/berkay/git-evyos/api-managment-backend/"
+
+ @classmethod
+ def forgot_link(cls, forgot_key):
+ return cls.FORGOT_LINK + forgot_key
+
+ @classmethod
+ def blacklist_login(cls, record_id):
+ return cls.BLACKLIST_LINK + record_id
+
+
+class ApiConfig(DefaultApiConfig):
+ # Api configuration
+ APP_NAME = "evyos-event-api-gateway"
+ TITLE = "WAG API Event Api Gateway"
+ DESCRIPTION = (
+ "This api is serves as web event api gateway only to evyos web services."
+ )
+ APP_URL = "https://www.event.eys.gen.tr"
+
+ # Uvicorn server configuration
+ app = "app:app"
+ host = "0.0.0.0"
+ port = 41576
+ log_level = "info"
+ reload = True
+
+
+class MainConfig:
+ # Date and Time Configuration
+ DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss Z"
+ DATETIME_FORMAT_JS = "YYYY-MM-DD HH:mm:ss +0"
+
+ # Timezone Configuration
+ DEFAULT_TIMEZONE = "GMT+3" # Default timezone for the application
+ SYSTEM_TIMEZONE = "GMT+0" # System timezone (used for internal operations)
+ SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"] # List of supported timezones
diff --git a/DockerApiServices/EventServiceApi/create_file.py b/DockerApiServices/EventServiceApi/create_file.py
new file mode 100644
index 0000000..68b3f88
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/create_file.py
@@ -0,0 +1,48 @@
+"""
+FastAPI Application Factory Module
+
+This module provides functionality to create and configure a FastAPI application with:
+- Custom OpenAPI schema configuration
+- Security scheme configuration for Bearer authentication
+- Automatic router registration
+- Response class configuration
+- Security requirements for protected endpoints
+"""
+
+from fastapi import FastAPI
+from fastapi.responses import JSONResponse, RedirectResponse
+
+from config import ApiConfig
+from create_routes import get_all_routers
+
+
+def create_app() -> FastAPI:
+ """
+ Create and configure a FastAPI application with dynamic route creation.
+
+ Returns:
+ FastAPI: Configured FastAPI application instance
+ """
+
+ from open_api_creator import create_openapi_schema
+
+ app = FastAPI(
+ title=ApiConfig.TITLE,
+ description=ApiConfig.DESCRIPTION,
+ default_response_class=JSONResponse,
+ ) # Initialize FastAPI app
+
+ @app.get("/", include_in_schema=False, summary=str(ApiConfig.DESCRIPTION))
+ def home() -> RedirectResponse:
+ """Redirect root path to API documentation."""
+ return RedirectResponse(url="/docs")
+
+ # Get all routers and protected routes using the dynamic route creation
+ prepare_routing = get_all_routers()
+
+ # Include all routers
+ for router in prepare_routing.routers:
+ app.include_router(router)
+
+ app.openapi = lambda app=app: create_openapi_schema(app)
+ return app
diff --git a/DockerApiServices/EventServiceApi/create_routes.py b/DockerApiServices/EventServiceApi/create_routes.py
new file mode 100644
index 0000000..1cb86b0
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/create_routes.py
@@ -0,0 +1,42 @@
+"""
+Route configuration and factory module.
+Handles dynamic route creation based on configurations.
+"""
+
+from typing import Optional
+
+from Events.Engine.set_defaults.run import get_cluster_controller_group
+from Events.Engine.set_defaults.setClusters import (
+ PrepareRouting,
+ SetItems2Redis,
+ PrepareEvents,
+)
+
+
+routers: Optional[PrepareRouting] = None
+
+
+def get_all_routers() -> PrepareRouting:
+ """
+ Get all routers and protected routes from route configurations.
+
+ Returns:
+        PrepareRouting: cached routing configuration built from the cluster controller group
+ """
+ global routers
+ if routers:
+ return routers
+
+ cluster_list = get_cluster_controller_group()
+ routers = PrepareRouting(cluster_controller_group=cluster_list)
+ return routers
+
+
+# async def health_check(request: Request):
+# """Default health check endpoint."""
+# return {"status": "healthy", "message": "Service is running"}
+#
+#
+# async def ping_test(request: Request, service_name: str = "base-router"):
+# """Default ping test endpoint."""
+# return {"ping": "pong", "service": service_name}
diff --git a/DockerApiServices/EventServiceApi/events_file.py b/DockerApiServices/EventServiceApi/events_file.py
new file mode 100644
index 0000000..ba4e656
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/events_file.py
@@ -0,0 +1,11 @@
+import Events.AllEvents.events as events_events
+
+
+events_list = (events_events,)
+
+
+def retrieve_cluster_by_name(cluster_name: str):
+ for module in events_list:
+ if hasattr(module, cluster_name):
+ return getattr(module, cluster_name, None)
+ return
diff --git a/DockerApiServices/EventServiceApi/open_api_creator.py b/DockerApiServices/EventServiceApi/open_api_creator.py
new file mode 100644
index 0000000..e8c1067
--- /dev/null
+++ b/DockerApiServices/EventServiceApi/open_api_creator.py
@@ -0,0 +1,256 @@
+"""
+OpenAPI Schema Creator Module
+
+This module provides functionality to create and customize OpenAPI documentation:
+- Custom security schemes (Bearer Auth, API Key)
+- Response schemas and examples
+- Tag management and descriptions
+- Error responses and validation
+- Custom documentation extensions
+"""
+
+from typing import Any, Dict
+from fastapi import FastAPI
+from fastapi.routing import APIRoute
+from fastapi.openapi.utils import get_openapi
+
+from create_routes import get_all_routers
+from config import ApiConfig
+
+
+class OpenAPISchemaCreator:
+ """
+ OpenAPI schema creator and customizer for FastAPI applications.
+ """
+
+ def __init__(self, app: FastAPI):
+ """
+ Initialize the OpenAPI schema creator.
+
+ Args:
+ app: FastAPI application instance
+ """
+ self.app = app
+ self.cluster = get_all_routers()
+ self.safe_endpoint_list = (
+ self.cluster.safe_endpoints
+ if hasattr(self.cluster, "safe_endpoints")
+ else []
+ )
+
+ def _create_security_schemes(self) -> Dict[str, Any]:
+ """
+ Create security scheme definitions.
+
+ Returns:
+ Dict[str, Any]: Security scheme configurations
+ """
+ from ApiLayers.AllConfigs.Token.config import Auth
+
+ return {
+ "BearerAuth": {
+ "type": "apiKey",
+ "in": "header",
+ "name": Auth.ACCESS_TOKEN_TAG,
+ "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
+ }
+ }
+
+ def _create_common_responses(self) -> Dict[str, Any]:
+ """
+ Create common response schemas.
+
+ Returns:
+ Dict[str, Any]: Common response configurations
+ """
+ return {
+ "401": {
+ "description": "Unauthorized - Invalid or missing credentials",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "403": {
+ "description": "Forbidden - Insufficient permissions",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "500": {
+ "description": "Internal Server Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "detail": {"type": "string"},
+ "error_code": {"type": "string"},
+ },
+ },
+ "example": {
+ "detail": "Internal server error occurred",
+ "error_code": "INTERNAL_ERROR",
+ },
+ }
+ },
+ },
+ }
+
+ def _process_request_body(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Process request body to include examples from model config.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ try:
+ route_schema = schema["paths"][path][method]
+ if "requestBody" in route_schema:
+ request_body = route_schema["requestBody"]
+ if "content" in request_body:
+ content = request_body["content"]
+ if "application/json" in content:
+ json_content = content["application/json"]
+ if (
+ "schema" in json_content
+ and "$ref" in json_content["schema"]
+ ):
+ ref = json_content["schema"]["$ref"]
+ model_name = ref.split("/")[-1]
+ if model_name in schema["components"]["schemas"]:
+ model_schema = schema["components"]["schemas"][
+ model_name
+ ]
+ if "example" in model_schema:
+ json_content["example"] = model_schema["example"]
+ except KeyError:
+ pass
+
+ def _process_response_examples(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Process response body to include examples from model config.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ try:
+ route_schema = schema["paths"][path][method]
+ if "responses" in route_schema:
+ responses = route_schema["responses"]
+ if "200" in responses:
+ response = responses["200"]
+ if "content" in response:
+ content = response["content"]
+ if "application/json" in content:
+ json_content = content["application/json"]
+ if (
+ "schema" in json_content
+ and "$ref" in json_content["schema"]
+ ):
+ ref = json_content["schema"]["$ref"]
+ model_name = ref.split("/")[-1]
+ if model_name in schema["components"]["schemas"]:
+ model_schema = schema["components"]["schemas"][
+ model_name
+ ]
+ if "example" in model_schema:
+ json_content["example"] = model_schema[
+ "example"
+ ]
+ except KeyError:
+ pass
+
+ def configure_route_security(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Configure security requirements for a specific route.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ if not schema.get("paths", {}).get(path, {}).get(method):
+ return
+
+ # Check if endpoint is in safe list
+ endpoint_path = f"{path}:{method}"
+ if endpoint_path not in [
+ f"{e.URL}:{e.METHOD.lower()}" for e in self.safe_endpoint_list
+ ]:
+ if "security" not in schema["paths"][path][method]:
+ schema["paths"][path][method]["security"] = []
+ schema["paths"][path][method]["security"].append({"BearerAuth": []})
+
+ def create_schema(self) -> Dict[str, Any]:
+ """
+ Create the complete OpenAPI schema.
+
+ Returns:
+ Dict[str, Any]: Complete OpenAPI schema
+ """
+ openapi_schema = get_openapi(
+ title=ApiConfig.TITLE,
+ description=ApiConfig.DESCRIPTION,
+ version="1.1.1",
+ routes=self.app.routes,
+ )
+
+ # Add security schemes
+ if "components" not in openapi_schema:
+ openapi_schema["components"] = {}
+
+ openapi_schema["components"][
+ "securitySchemes"
+ ] = self._create_security_schemes()
+
+ # Configure route security and responses
+ for route in self.app.routes:
+ if isinstance(route, APIRoute) and route.include_in_schema:
+ path = str(route.path)
+ methods = [method.lower() for method in route.methods]
+ for method in methods:
+ self.configure_route_security(path, method, openapi_schema)
+
+ # Add custom documentation extensions
+ openapi_schema["x-documentation"] = {
+ "postman_collection": "/docs/postman",
+ "swagger_ui": "/docs",
+ "redoc": "/redoc",
+ }
+ return openapi_schema
+
+
+def create_openapi_schema(app: FastAPI) -> Dict[str, Any]:
+ """
+ Create OpenAPI schema for a FastAPI application.
+
+ Args:
+ app: FastAPI application instance
+
+ Returns:
+ Dict[str, Any]: Complete OpenAPI schema
+ """
+ creator = OpenAPISchemaCreator(app)
+ return creator.create_schema()
diff --git a/DockerApiServices/InitServiceApi/Dockerfile b/DockerApiServices/InitServiceApi/Dockerfile
new file mode 100644
index 0000000..34acace
--- /dev/null
+++ b/DockerApiServices/InitServiceApi/Dockerfile
@@ -0,0 +1,40 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install system dependencies and Poetry
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ gcc \
+ && rm -rf /var/lib/apt/lists/* \
+ && pip install --no-cache-dir poetry
+
+# Copy Poetry configuration
+COPY DockerApiServices/pyproject.toml ./pyproject.toml
+
+# Configure Poetry and install dependencies with optimizations
+RUN poetry config virtualenvs.create false \
+ && poetry install --no-interaction --no-ansi --no-root --only main \
+ && pip cache purge \
+ && rm -rf ~/.cache/pypoetry
+
+# Copy application code
+COPY DockerApiServices/InitServiceApi /app
+
+# Copy shared API layers and services
+COPY ApiLayers /app/ApiLayers
+COPY Services /app/Services
+
+# Events
+# COPY Events/base_request_model.py /app/Events/base_request_model.py
+COPY Events/Engine /app/Events/Engine
+COPY Events/AllEvents /app/Events/AllEvents
+COPY Events/base_request_model.py /app/Events/base_request_model.py
+
+# Set Python path to include app directory
+ENV PYTHONPATH=/app \
+ PYTHONUNBUFFERED=1 \
+ PYTHONDONTWRITEBYTECODE=1
+
+# Run the one-shot Redis initialization script (exits when seeding completes)
+CMD ["poetry", "run", "python", "app.py"]
diff --git a/DockerApiServices/InitServiceApi/app.py b/DockerApiServices/InitServiceApi/app.py
new file mode 100644
index 0000000..c17a3ed
--- /dev/null
+++ b/DockerApiServices/InitServiceApi/app.py
@@ -0,0 +1,5 @@
+from create_all_dependecies import SetRedisDefaults
+
+
+if __name__ == "__main__":
+ SetRedisDefaults.set_all()
diff --git a/DockerApiServices/InitServiceApi/config.py b/DockerApiServices/InitServiceApi/config.py
new file mode 100644
index 0000000..8b7eb16
--- /dev/null
+++ b/DockerApiServices/InitServiceApi/config.py
@@ -0,0 +1,77 @@
+class DefaultApiConfig:
+ app: str
+ host: str
+ port: int
+ log_level: str
+ reload: bool
+
+ @classmethod
+ def as_dict(cls):
+ return {
+ "app": cls.app,
+ "host": cls.host,
+ "port": int(cls.port),
+ "log_level": cls.log_level,
+ "reload": bool(cls.reload),
+ }
+
+
+class ApiStatic:
+ PLACEHOLDER = "https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg"
+ FORGOT_LINK = "https://www.evyos.com.tr/password/create?tokenUrl="
+ BLACKLIST_LINK = "https://www.evyos.com.tr/support/unknown-login-notice/"
+ APP_DIR = "/home/berkay/git-evyos/api-managment-backend/"
+
+ @classmethod
+ def forgot_link(cls, forgot_key):
+ return cls.FORGOT_LINK + forgot_key
+
+ @classmethod
+ def blacklist_login(cls, record_id):
+ return cls.BLACKLIST_LINK + record_id
+
+
+class HostConfig:
+ MAIN_HOST = "10.10.2.36" # http://10.10.2.36
+ EMAIL_HOST = "10.10.2.34" # http://10.10.2.34
+
+
+class ApiConfig(DefaultApiConfig):
+ # Application Information
+ APP_NAME = "evyos-auth-api-gateway"
+ TITLE = "WAG API Auth Api Gateway"
+ DESCRIPTION = (
+ "This api is serves as web auth api gateway only to evyos web services."
+ )
+ APP_URL = "https://www.auth.eys.gen.tr"
+
+ # Server Configuration
+ app = "app:app"
+ host = "0.0.0.0"
+ port = 41575
+ log_level = "info"
+ reload = True
+
+
+class MainConfig:
+
+ # Date and Time Configuration
+ DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss Z"
+ DATETIME_FORMAT_JS = "YYYY-MM-DD HH:mm:ss +0"
+
+ # Timezone Configuration
+ DEFAULT_TIMEZONE = "GMT+3" # Default timezone for the application
+ SYSTEM_TIMEZONE = "GMT+0" # System timezone (used for internal operations)
+ SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"] # List of supported timezones
+
+
+class LanguageConfig:
+
+ SUPPORTED_LANGUAGES = ["en", "tr"]
+ DEFAULT_LANGUAGE = "tr"
+
+
+class ValidationsConfig:
+
+ SUPPORTED_VALIDATIONS = ["header", "validation", "all"]
+ DEFAULT_VALIDATION = "all"
diff --git a/DockerApiServices/InitServiceApi/create_all_dependecies.py b/DockerApiServices/InitServiceApi/create_all_dependecies.py
new file mode 100644
index 0000000..3df56ae
--- /dev/null
+++ b/DockerApiServices/InitServiceApi/create_all_dependecies.py
@@ -0,0 +1,45 @@
+"""
+Redis defaults initialization module.
+Seeds Redis with cluster events and default/cluster language models at startup.
+"""
+
+from Events.Engine.set_defaults.run import get_cluster_controller_group
+from Events.Engine.set_defaults.setClusters import SetItems2Redis, PrepareEvents
+
+from ApiLayers.LanguageModels.set_defaults.language_setters import (
+ SetClusterLanguageModelsRedis,
+ SetDefaultLanguageModelsRedis,
+)
+from ApiLayers.LanguageModels.Response.all_responses import all_response_list
+from ApiLayers.LanguageModels.Errors.all_errors import all_errors_list
+
+
+class SetRedisDefaults:
+
+ @classmethod
+ def set_all(cls) -> None:
+ """
+        Seed Redis with cluster events plus default and cluster language models.
+
+ Returns:
+ None
+ """
+ cluster_list = get_cluster_controller_group()
+ default_dict = dict(
+ set_response_languages_list=all_response_list,
+ set_errors_languages_list=all_errors_list,
+ )
+ prepare_events = PrepareEvents(cluster_controller_group=cluster_list)
+ SetItems2Redis(prepare_events=prepare_events)
+ SetDefaultLanguageModelsRedis(**default_dict).set_all()
+ SetClusterLanguageModelsRedis(cluster_controller_group=cluster_list).set_all()
+
+
+# async def health_check(request: Request):
+# """Default health check endpoint."""
+# return {"status": "healthy", "message": "Service is running"}
+#
+#
+# async def ping_test(request: Request, service_name: str = "base-router"):
+# """Default ping test endpoint."""
+# return {"ping": "pong", "service": service_name}
diff --git a/DockerApiServices/README.md b/DockerApiServices/README.md
new file mode 100644
index 0000000..ad934c6
--- /dev/null
+++ b/DockerApiServices/README.md
@@ -0,0 +1,57 @@
+# Docker Services Guide
+
+This repository contains multiple microservices that can be run using Docker Compose.
+
+## Quick Start (With Cache)
+For regular development when dependencies haven't changed:
+```bash
+# Build and run Auth Service
+docker compose -f docker-compose-services.yml up auth-service
+
+# Build and run Event Service
+docker compose -f docker-compose-services.yml up event-service
+
+# Build and run Validation Service
+docker compose -f docker-compose-services.yml up validation-service
+
+# Build and run all services
+docker compose -f docker-compose-services.yml up
+```
+
+## Clean Build (No Cache)
+Use these commands when changing Dockerfile or dependencies:
+```bash
+# Auth Service
+docker compose -f docker-compose-services.yml build --no-cache auth-service && docker compose -f docker-compose-services.yml up auth-service
+
+# Event Service
+docker compose -f docker-compose-services.yml build --no-cache event-service && docker compose -f docker-compose-services.yml up event-service
+
+# Validation Service
+docker compose -f docker-compose-services.yml build --no-cache validation-service && docker compose -f docker-compose-services.yml up validation-service
+
+# All Services
+docker compose -f docker-compose-services.yml build --no-cache && docker compose -f docker-compose-services.yml up
+```
+
+## Service Ports
+- Auth Service: http://localhost:41575
+- Event Service: http://localhost:41576
+- Validation Service: http://localhost:41577
+
+## Development Notes
+- Use clean build (--no-cache) when:
+ - Changing Dockerfile
+ - Updating dependencies
+ - Experiencing caching issues
+- Use regular build (with cache) when:
+ - Only changing application code
+ - For faster development iterations
+- Run in detached mode:
+ ```bash
+ docker compose -f docker-compose-services.yml up -d auth-service
+ ```
+- Stop services:
+ ```bash
+ docker compose -f docker-compose-services.yml down
+ ```
diff --git a/DockerApiServices/ValidationServiceApi/Dockerfile b/DockerApiServices/ValidationServiceApi/Dockerfile
new file mode 100644
index 0000000..66b8731
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/Dockerfile
@@ -0,0 +1,44 @@
+FROM python:3.12-slim
+
+WORKDIR /app
+
+# Install system dependencies and Poetry
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ gcc \
+ && rm -rf /var/lib/apt/lists/* \
+ && pip install --no-cache-dir poetry
+
+# Copy Poetry configuration
+COPY DockerApiServices/pyproject.toml ./pyproject.toml
+
+# Configure Poetry and install dependencies with optimizations
+RUN poetry config virtualenvs.create false \
+ && poetry install --no-interaction --no-ansi --no-root --only main \
+ && pip cache purge \
+ && rm -rf ~/.cache/pypoetry
+
+# Copy application code
+COPY DockerApiServices/ValidationServiceApi /app
+
+# Copy shared API layers and service modules
+COPY ApiLayers /app/ApiLayers
+COPY Services /app/Services
+
+# Events
+# COPY Events/base_request_model.py /app/Events/base_request_model.py
+COPY Events/Engine /app/Events/Engine
+COPY Events/AllEvents/validations /app/Events/AllEvents/validations
+COPY Events/base_request_model.py /app/Events/base_request_model.py
+COPY DockerApiServices/ValidationServiceApi/events_file.py /app/Events/AllEvents/events_file.py
+
+COPY Events/AllEvents /app/Events/JustEvents
+COPY Events/AllEvents/just_events_file.py /app/Events/JustEvents/events_file.py
+
+# Set Python path to include app directory
+ENV PYTHONPATH=/app \
+ PYTHONUNBUFFERED=1 \
+ PYTHONDONTWRITEBYTECODE=1
+
+# Run the application using the configured uvicorn server
+CMD ["poetry", "run", "python", "app.py"]
diff --git a/DockerApiServices/ValidationServiceApi/app.py b/DockerApiServices/ValidationServiceApi/app.py
new file mode 100644
index 0000000..68ed880
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/app.py
@@ -0,0 +1,27 @@
+"""
+FastAPI Application Entry Point
+
+This module initializes and configures the FastAPI application with:
+- CORS middleware for cross-origin requests
+- Request timing middleware for performance monitoring
+- Custom exception handlers for consistent error responses
+- Prometheus instrumentation for metrics
+- API routers for endpoint organization
+"""
+
+import uvicorn
+
+from prometheus_fastapi_instrumentator import Instrumentator
+from app_handler import setup_middleware
+from create_file import create_app
+from config import ApiConfig
+
+
+app = create_app() # Initialize FastAPI application
+Instrumentator().instrument(app=app).expose(app=app) # Setup Prometheus metrics
+setup_middleware(app) # Configure middleware and exception handlers
+
+
+if __name__ == "__main__":
+ # Run the application with Uvicorn
+ uvicorn.Server(uvicorn.Config(**ApiConfig.as_dict())).run()
diff --git a/DockerApiServices/ValidationServiceApi/app_handler.py b/DockerApiServices/ValidationServiceApi/app_handler.py
new file mode 100644
index 0000000..7050498
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/app_handler.py
@@ -0,0 +1,82 @@
+"""
+FastAPI Application Handler Module
+
+This module contains all the handler functions for configuring and setting up the FastAPI application:
+- CORS middleware configuration
+- Exception handlers setup
+- Uvicorn server configuration
+"""
+
+from fastapi import FastAPI, Request, status
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
+
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from ApiLayers.Middleware.auth_middleware import (
+ RequestTimingMiddleware,
+ LoggerTimingMiddleware,
+)
+
+
+def setup_cors_middleware(app: FastAPI) -> None:
+ """
+ Configure CORS middleware for the application.
+
+ Args:
+ app: FastAPI application instance
+ """
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )
+
+
+async def generic_exception_handler(request: Request, exc: Exception) -> JSONResponse:
+ """
+ Handle generic exceptions and return formatted error responses.
+
+ Args:
+ request: FastAPI request object
+ exc: Exception instance
+
+ Returns:
+ JSONResponse: Formatted error response
+ """
+ return JSONResponse(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ content={"detail": "Internal server error", "error_code": "INTERNAL_ERROR"},
+ )
+
+
+def setup_exception_handlers(app: FastAPI) -> None:
+ """
+ Configure custom exception handlers for the application.
+
+ Args:
+ app: FastAPI application instance
+ """
+ from ApiLayers.ErrorHandlers.ErrorHandlers.api_exc_handler import (
+ HTTPExceptionApiHandler,
+ )
+
+ custom_exception_handler = HTTPExceptionApiHandler(response_model=JSONResponse)
+ app.add_exception_handler(
+ HTTPExceptionApi, custom_exception_handler.handle_exception
+ )
+ app.add_exception_handler(Exception, generic_exception_handler)
+
+
+def setup_middleware(app: FastAPI) -> None:
+ """
+ Configure all middleware for the application.
+
+ Args:
+ app: FastAPI application instance
+ """
+ setup_cors_middleware(app)
+ app.add_middleware(RequestTimingMiddleware)
+ app.add_middleware(LoggerTimingMiddleware)
+ setup_exception_handlers(app)
diff --git a/DockerApiServices/ValidationServiceApi/config.py b/DockerApiServices/ValidationServiceApi/config.py
new file mode 100644
index 0000000..bb77af3
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/config.py
@@ -0,0 +1,64 @@
+class DefaultApiConfig:
+ app: str
+ host: str
+ port: int
+ log_level: str
+ reload: bool
+
+ @classmethod
+ def as_dict(cls):
+ return {
+ "app": cls.app,
+ "host": cls.host,
+ "port": int(cls.port),
+ "log_level": cls.log_level,
+ "reload": bool(cls.reload),
+ }
+
+
+class ApiStatic:
+ PLACEHOLDER = "https://s.tmimgcdn.com/scr/800x500/276800/building-home-nature-logo-vector-template-3_276851-original.jpg"
+ FORGOT_LINK = "https://www.evyos.com.tr/password/create?tokenUrl="
+ BLACKLIST_LINK = "https://www.evyos.com.tr/support/unknown-login-notice/"
+ APP_DIR = "/home/berkay/git-evyos/api-managment-backend/"
+
+ @classmethod
+ def forgot_link(cls, forgot_key):
+ return cls.FORGOT_LINK + forgot_key
+
+ @classmethod
+ def blacklist_login(cls, record_id):
+ return cls.BLACKLIST_LINK + record_id
+
+
+class ApiConfig(DefaultApiConfig):
+ # Api configuration
+ APP_NAME = "evyos-validation-api-gateway"
+ TITLE = "WAG API Validation Api Gateway"
+ DESCRIPTION = (
+        "This API serves as a web validation API gateway only for evyos web services."
+ )
+ APP_URL = "https://www.validation.eys.gen.tr"
+ # App configuration
+ app = "app:app"
+ host = "0.0.0.0"
+ port = 41577
+ log_level = "info"
+ reload = True
+
+
+class MainConfig:
+ # Main configuration
+ DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss Z"
+ DATETIME_FORMAT_JS = "YYYY-MM-DD HH:mm:ss +0"
+
+ # Timezone Configuration
+ DEFAULT_TIMEZONE = "GMT+3" # Default timezone for the application
+ SYSTEM_TIMEZONE = "GMT+0" # System timezone (used for internal operations)
+ SUPPORTED_TIMEZONES = ["GMT+0", "GMT+3"] # List of supported timezones
+
+
+class ValidationsConfig:
+
+ SUPPORTED_VALIDATIONS = ["header", "validation", "all"]
+ DEFAULT_VALIDATION = "all"
diff --git a/DockerApiServices/ValidationServiceApi/create_file.py b/DockerApiServices/ValidationServiceApi/create_file.py
new file mode 100644
index 0000000..68b3f88
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/create_file.py
@@ -0,0 +1,48 @@
+"""
+FastAPI Application Factory Module
+
+This module provides functionality to create and configure a FastAPI application with:
+- Custom OpenAPI schema configuration
+- Security scheme configuration for Bearer authentication
+- Automatic router registration
+- Response class configuration
+- Security requirements for protected endpoints
+"""
+
+from fastapi import FastAPI
+from fastapi.responses import JSONResponse, RedirectResponse
+
+from config import ApiConfig
+from create_routes import get_all_routers
+
+
+def create_app() -> FastAPI:
+ """
+ Create and configure a FastAPI application with dynamic route creation.
+
+ Returns:
+ FastAPI: Configured FastAPI application instance
+ """
+
+ from open_api_creator import create_openapi_schema
+
+ app = FastAPI(
+ title=ApiConfig.TITLE,
+ description=ApiConfig.DESCRIPTION,
+ default_response_class=JSONResponse,
+ ) # Initialize FastAPI app
+
+ @app.get("/", include_in_schema=False, summary=str(ApiConfig.DESCRIPTION))
+ def home() -> RedirectResponse:
+ """Redirect root path to API documentation."""
+ return RedirectResponse(url="/docs")
+
+ # Get all routers and protected routes using the dynamic route creation
+ prepare_routing = get_all_routers()
+
+ # Include all routers
+ for router in prepare_routing.routers:
+ app.include_router(router)
+
+ app.openapi = lambda app=app: create_openapi_schema(app)
+ return app
diff --git a/DockerApiServices/ValidationServiceApi/create_routes.py b/DockerApiServices/ValidationServiceApi/create_routes.py
new file mode 100644
index 0000000..1cb86b0
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/create_routes.py
@@ -0,0 +1,42 @@
+"""
+Route configuration and factory module.
+Handles dynamic route creation based on configurations.
+"""
+
+from typing import Optional
+
+from Events.Engine.set_defaults.run import get_cluster_controller_group
+from Events.Engine.set_defaults.setClusters import (
+ PrepareRouting,
+ SetItems2Redis,
+ PrepareEvents,
+)
+
+
+routers: Optional[PrepareRouting] = None
+
+
+def get_all_routers() -> PrepareRouting:
+ """
+ Get all routers and protected routes from route configurations.
+
+ Returns:
+ tuple: PrepareRouting
+ """
+ global routers
+ if routers:
+ return routers
+
+ cluster_list = get_cluster_controller_group()
+ routers = PrepareRouting(cluster_controller_group=cluster_list)
+ return routers
+
+
+# async def health_check(request: Request):
+# """Default health check endpoint."""
+# return {"status": "healthy", "message": "Service is running"}
+#
+#
+# async def ping_test(request: Request, service_name: str = "base-router"):
+# """Default ping test endpoint."""
+# return {"ping": "pong", "service": service_name}
diff --git a/DockerApiServices/ValidationServiceApi/events_file.py b/DockerApiServices/ValidationServiceApi/events_file.py
new file mode 100644
index 0000000..58a92e9
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/events_file.py
@@ -0,0 +1,10 @@
+import Events.AllEvents.validations as validations_events
+
+events_list = (validations_events,)
+
+
+def retrieve_cluster_by_name(cluster_name: str):
+ for module in events_list:
+ if hasattr(module, cluster_name):
+ return getattr(module, cluster_name, None)
+ return
diff --git a/DockerApiServices/ValidationServiceApi/open_api_creator.py b/DockerApiServices/ValidationServiceApi/open_api_creator.py
new file mode 100644
index 0000000..e8c1067
--- /dev/null
+++ b/DockerApiServices/ValidationServiceApi/open_api_creator.py
@@ -0,0 +1,256 @@
+"""
+OpenAPI Schema Creator Module
+
+This module provides functionality to create and customize OpenAPI documentation:
+- Custom security schemes (Bearer Auth, API Key)
+- Response schemas and examples
+- Tag management and descriptions
+- Error responses and validation
+- Custom documentation extensions
+"""
+
+from typing import Any, Dict
+from fastapi import FastAPI
+from fastapi.routing import APIRoute
+from fastapi.openapi.utils import get_openapi
+
+from create_routes import get_all_routers
+from config import ApiConfig
+
+
+class OpenAPISchemaCreator:
+ """
+ OpenAPI schema creator and customizer for FastAPI applications.
+ """
+
+ def __init__(self, app: FastAPI):
+ """
+ Initialize the OpenAPI schema creator.
+
+ Args:
+ app: FastAPI application instance
+ """
+ self.app = app
+ self.cluster = get_all_routers()
+ self.safe_endpoint_list = (
+ self.cluster.safe_endpoints
+ if hasattr(self.cluster, "safe_endpoints")
+ else []
+ )
+
+ def _create_security_schemes(self) -> Dict[str, Any]:
+ """
+ Create security scheme definitions.
+
+ Returns:
+ Dict[str, Any]: Security scheme configurations
+ """
+ from ApiLayers.AllConfigs.Token.config import Auth
+
+ return {
+ "BearerAuth": {
+ "type": "apiKey",
+ "in": "header",
+ "name": Auth.ACCESS_TOKEN_TAG,
+ "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
+ }
+ }
+
+ def _create_common_responses(self) -> Dict[str, Any]:
+ """
+ Create common response schemas.
+
+ Returns:
+ Dict[str, Any]: Common response configurations
+ """
+ return {
+ "401": {
+ "description": "Unauthorized - Invalid or missing credentials",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "403": {
+ "description": "Forbidden - Insufficient permissions",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
+ }
+ },
+ },
+ "500": {
+ "description": "Internal Server Error",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "detail": {"type": "string"},
+ "error_code": {"type": "string"},
+ },
+ },
+ "example": {
+ "detail": "Internal server error occurred",
+ "error_code": "INTERNAL_ERROR",
+ },
+ }
+ },
+ },
+ }
+
+ def _process_request_body(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Process request body to include examples from model config.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ try:
+ route_schema = schema["paths"][path][method]
+ if "requestBody" in route_schema:
+ request_body = route_schema["requestBody"]
+ if "content" in request_body:
+ content = request_body["content"]
+ if "application/json" in content:
+ json_content = content["application/json"]
+ if (
+ "schema" in json_content
+ and "$ref" in json_content["schema"]
+ ):
+ ref = json_content["schema"]["$ref"]
+ model_name = ref.split("/")[-1]
+ if model_name in schema["components"]["schemas"]:
+ model_schema = schema["components"]["schemas"][
+ model_name
+ ]
+ if "example" in model_schema:
+ json_content["example"] = model_schema["example"]
+ except KeyError:
+ pass
+
+ def _process_response_examples(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Process response body to include examples from model config.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ try:
+ route_schema = schema["paths"][path][method]
+ if "responses" in route_schema:
+ responses = route_schema["responses"]
+ if "200" in responses:
+ response = responses["200"]
+ if "content" in response:
+ content = response["content"]
+ if "application/json" in content:
+ json_content = content["application/json"]
+ if (
+ "schema" in json_content
+ and "$ref" in json_content["schema"]
+ ):
+ ref = json_content["schema"]["$ref"]
+ model_name = ref.split("/")[-1]
+ if model_name in schema["components"]["schemas"]:
+ model_schema = schema["components"]["schemas"][
+ model_name
+ ]
+ if "example" in model_schema:
+ json_content["example"] = model_schema[
+ "example"
+ ]
+ except KeyError:
+ pass
+
+ def configure_route_security(
+ self, path: str, method: str, schema: Dict[str, Any]
+ ) -> None:
+ """
+ Configure security requirements for a specific route.
+
+ Args:
+ path: Route path
+ method: HTTP method
+ schema: OpenAPI schema to modify
+ """
+ if not schema.get("paths", {}).get(path, {}).get(method):
+ return
+
+ # Check if endpoint is in safe list
+ endpoint_path = f"{path}:{method}"
+ if endpoint_path not in [
+ f"{e.URL}:{e.METHOD.lower()}" for e in self.safe_endpoint_list
+ ]:
+ if "security" not in schema["paths"][path][method]:
+ schema["paths"][path][method]["security"] = []
+ schema["paths"][path][method]["security"].append({"BearerAuth": []})
+
+ def create_schema(self) -> Dict[str, Any]:
+ """
+ Create the complete OpenAPI schema.
+
+ Returns:
+ Dict[str, Any]: Complete OpenAPI schema
+ """
+ openapi_schema = get_openapi(
+ title=ApiConfig.TITLE,
+ description=ApiConfig.DESCRIPTION,
+ version="1.1.1",
+ routes=self.app.routes,
+ )
+
+ # Add security schemes
+ if "components" not in openapi_schema:
+ openapi_schema["components"] = {}
+
+ openapi_schema["components"][
+ "securitySchemes"
+ ] = self._create_security_schemes()
+
+ # Configure route security and responses
+ for route in self.app.routes:
+ if isinstance(route, APIRoute) and route.include_in_schema:
+ path = str(route.path)
+ methods = [method.lower() for method in route.methods]
+ for method in methods:
+ self.configure_route_security(path, method, openapi_schema)
+
+ # Add custom documentation extensions
+ openapi_schema["x-documentation"] = {
+ "postman_collection": "/docs/postman",
+ "swagger_ui": "/docs",
+ "redoc": "/redoc",
+ }
+ return openapi_schema
+
+
+def create_openapi_schema(app: FastAPI) -> Dict[str, Any]:
+ """
+ Create OpenAPI schema for a FastAPI application.
+
+ Args:
+ app: FastAPI application instance
+
+ Returns:
+ Dict[str, Any]: Complete OpenAPI schema
+ """
+ creator = OpenAPISchemaCreator(app)
+ return creator.create_schema()
diff --git a/DockerApiServices/pyproject.toml b/DockerApiServices/pyproject.toml
new file mode 100644
index 0000000..bea12c3
--- /dev/null
+++ b/DockerApiServices/pyproject.toml
@@ -0,0 +1,83 @@
+[tool.poetry]
+name = "wag-management-api-services"
+version = "0.1.1"
+description = "WAG Management API Service"
+authors = ["Karatay Berkay "]
+
+[tool.poetry.dependencies]
+python = "^3.9"
+# FastAPI and Web
+fastapi = "^0.104.1"
+uvicorn = "^0.24.0"
+pydantic = "^2.5.2"
+
+# MongoDB
+motor = "3.3.2" # Pinned version
+pymongo = "4.5.0" # Pinned version to match motor
+
+# PostgreSQL
+sqlalchemy = "^2.0.23"
+sqlalchemy-mixins = "^2.0.5"
+psycopg2-binary = "^2.9.9"
+
+# Redis
+redis = "^5.0.1"
+arrow = "^1.3.0"
+
+# Email
+redmail = "^0.6.0"
+
+# Testing
+pytest = "^7.4.3"
+pytest-asyncio = "^0.21.1"
+pytest-cov = "^4.1.0"
+
+# Monitoring
+prometheus-client = "^0.19.0"
+prometheus-fastapi-instrumentator = "^6.1.0"
+
+# Cryptography
+cryptography = "^43.0.3"
+
+# Utilities
+python-dateutil = "^2.8.2"
+typing-extensions = "^4.8.0"
+
+[tool.poetry.group.dev.dependencies]
+black = "^23.11.0"
+isort = "^5.12.0"
+mypy = "^1.7.1"
+flake8 = "^6.1.0"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.black]
+line-length = 88
+target-version = ['py39']
+include = '\.pyi?$'
+
+[tool.isort]
+profile = "black"
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
+line_length = 88
+
+[tool.mypy]
+python_version = "3.9"
+warn_return_any = true
+warn_unused_configs = true
+disallow_untyped_defs = true
+check_untyped_defs = true
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "-ra -q --cov=Services"
+testpaths = [
+ "Ztest",
+]
+python_files = ["test_*.py"]
+asyncio_mode = "auto"
diff --git a/DockerApiServices/requirements.txt b/DockerApiServices/requirements.txt
new file mode 100644
index 0000000..3516646
--- /dev/null
+++ b/DockerApiServices/requirements.txt
@@ -0,0 +1,17 @@
+fastapi==0.104.1
+uvicorn==0.24.0.post1
+pydantic==2.10.5
+sqlalchemy==2.0.37
+psycopg2-binary==2.9.10
+python-dateutil==2.9.0.post0
+motor==3.3.2
+redis==5.2.1
+pytest==7.4.4
+pytest-asyncio==0.21.2
+pytest-cov==4.1.0
+coverage==7.6.10
+arrow==1.3.0
+redmail==0.6.0
+sqlalchemy-mixins==2.0.5
+prometheus-client==0.19.0
+prometheus-fastapi-instrumentator==6.1.0
diff --git a/DockerApiServices/steps.txt b/DockerApiServices/steps.txt
new file mode 100644
index 0000000..024c74d
--- /dev/null
+++ b/DockerApiServices/steps.txt
@@ -0,0 +1,29 @@
+WAG Management API Microservices Setup
+
+1. Authentication Service (Port 41575)
+ - User authentication and authorization
+ - JWT token management
+ - Role-based access control
+ - Uses PostgreSQL for user data
+
+2. Event Service (Port 41576)
+ - Event processing and handling
+ - Message queue integration
+ - Real-time notifications
+ - Uses MongoDB for event storage
+
+3. Validation Service (Port 41577)
+ - Request validation
+ - Data sanitization
+ - Schema validation
+ - Uses Redis for caching
+
+To run the services:
+```bash
+docker compose up --build
+```
+
+Access services at:
+- Auth Service: http://localhost:41575
+- Event Service: http://localhost:41576
+- Validation Service: http://localhost:41577
diff --git a/DockerStoreServices/.dockerignore b/DockerStoreServices/.dockerignore
new file mode 100644
index 0000000..1449c90
--- /dev/null
+++ b/DockerStoreServices/.dockerignore
@@ -0,0 +1,93 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+
+# CI
+.codeclimate.yml
+.travis.yml
+.taskcluster.yml
+
+# Docker
+docker-compose.yml
+service_app/Dockerfile
+.docker
+.dockerignore
+
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
+**/*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+service_app/env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Virtual environment
+service_app/.env
+.venv/
+venv/
+
+# PyCharm
+.idea
+
+# Python mode for VIM
+.ropeproject
+**/.ropeproject
+
+# Vim swap files
+**/*.swp
+
+# VS Code
+.vscode/
+
+test_application/
+
+
diff --git a/DockerStoreServices/commercial_main_memory_service.env b/DockerStoreServices/commercial_main_memory_service.env
new file mode 100644
index 0000000..68d59ef
--- /dev/null
+++ b/DockerStoreServices/commercial_main_memory_service.env
@@ -0,0 +1,4 @@
+REDIS_HOST=commercial_redis_service
+REDIS_PASSWORD=commercial_redis_password
+REDIS_PORT=6379
+REDIS_DB=0
\ No newline at end of file
diff --git a/DockerStoreServices/commercial_main_mongo_service.env b/DockerStoreServices/commercial_main_mongo_service.env
new file mode 100644
index 0000000..a9f3e83
--- /dev/null
+++ b/DockerStoreServices/commercial_main_mongo_service.env
@@ -0,0 +1,8 @@
+MONGODB_DISABLE_ENFORCE_AUTH=true
+MONGODB_ROOT_PASSWORD=root
+MONGODB_DATABASE=mongo_database
+MONGODB_USERNAME=mongo_user
+MONGODB_PASSWORD=mongo_password
+MONGO_INITDB_ROOT_USERNAME=mongo_user
+MONGO_INITDB_ROOT_PASSWORD=mongo_password
+MONGO_INITDB_DATABASE=mongo_database
\ No newline at end of file
diff --git a/DockerStoreServices/debian-docker-compose.yml b/DockerStoreServices/debian-docker-compose.yml
new file mode 100644
index 0000000..834b019
--- /dev/null
+++ b/DockerStoreServices/debian-docker-compose.yml
@@ -0,0 +1,48 @@
+services:
+
+ commercial_main_mongo_service:
+ container_name: commercial_main_mongo_service
+# image: "bitnami/mongodb:latest"
+ image: "bitnami/mongodb:4.4.1-debian-10-r3"
+ networks:
+ - network_store_services
+ restart: on-failure
+ env_file:
+ - commercial_main_mongo_service.env
+ volumes:
+ - wag_commercial_mongodb_main_data:/bitnami/mongodb
+ ports:
+ - "11777:27017"
+
+ commercial_main_memory_service:
+ container_name: commercial_main_memory_service
+ image: 'bitnami/redis:latest'
+ networks:
+ - network_store_services
+ restart: on-failure
+ env_file:
+ - commercial_main_memory_service.env
+ ports:
+ - "11222:6379"
+
+ postgres_main_commercial:
+ image: 'bitnami/postgresql:latest'
+ container_name: postgres_main_commercial
+ networks:
+ - network_store_services
+ restart: on-failure
+ env_file:
+ - postgres_main_commercial.env
+ depends_on:
+ - commercial_main_mongo_service
+ ports:
+ - "5444:5432"
+ volumes:
+ - wag_postgres_main_commercial_data:/bitnami/postgresql
+
+networks:
+ network_store_services:
+
+volumes:
+ wag_postgres_main_commercial_data:
+ wag_commercial_mongodb_main_data:
diff --git a/DockerStoreServices/local-docker-compose.yml b/DockerStoreServices/local-docker-compose.yml
new file mode 100644
index 0000000..0dddd32
--- /dev/null
+++ b/DockerStoreServices/local-docker-compose.yml
@@ -0,0 +1,48 @@
+services:
+
+ commercial_main_mongo_service:
+ container_name: commercial_main_mongo_service
+ image: "bitnami/mongodb:latest"
+# image: "bitnami/mongodb:4.4.1-debian-10-r3"
+ networks:
+ - network_store_services
+ restart: on-failure
+ env_file:
+ - commercial_main_mongo_service.env
+ volumes:
+ - wag_commercial_mongodb_main_data:/bitnami/mongodb
+ ports:
+ - "11777:27017"
+
+ commercial_main_memory_service:
+ container_name: commercial_main_memory_service
+ image: 'bitnami/redis:latest'
+ networks:
+ - network_store_services
+ restart: on-failure
+ env_file:
+ - commercial_main_memory_service.env
+ ports:
+ - "11222:6379"
+
+ postgres_main_commercial:
+ image: 'bitnami/postgresql:latest'
+ container_name: postgres_main_commercial
+ networks:
+ - network_store_services
+ restart: on-failure
+ env_file:
+ - postgres_main_commercial.env
+ depends_on:
+      - commercial_main_mongo_service
+ ports:
+ - "5444:5432"
+ volumes:
+ - wag_postgres_main_commercial_data:/bitnami/postgresql
+
+networks:
+ network_store_services:
+
+volumes:
+ wag_postgres_main_commercial_data:
+ wag_commercial_mongodb_main_data:
diff --git a/DockerStoreServices/postgres_main_commercial.env b/DockerStoreServices/postgres_main_commercial.env
new file mode 100644
index 0000000..bda9322
--- /dev/null
+++ b/DockerStoreServices/postgres_main_commercial.env
@@ -0,0 +1,3 @@
+POSTGRES_DB=wag_database
+POSTGRES_USER=berkay_wag_user
+POSTGRES_PASSWORD=berkay_wag_user_password
\ No newline at end of file
diff --git a/Events/AllEvents/__init__.py b/Events/AllEvents/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/authentication/__init__.py b/Events/AllEvents/authentication/__init__.py
new file mode 100644
index 0000000..ad73225
--- /dev/null
+++ b/Events/AllEvents/authentication/__init__.py
@@ -0,0 +1,9 @@
+"""
+Authentication package initialization.
+"""
+
+from .auth.cluster import AuthCluster
+
+__all__ = [
+ "AuthCluster",
+]
diff --git a/Events/AllEvents/authentication/auth/api_events.py b/Events/AllEvents/authentication/auth/api_events.py
new file mode 100644
index 0000000..faccdff
--- /dev/null
+++ b/Events/AllEvents/authentication/auth/api_events.py
@@ -0,0 +1,200 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+ SelectRequestLanguageModel,
+)
+
+from .models import AuthenticationRequestModels, AuthenticationResponseModels
+from .function_handlers import AuthenticationFunctions
+
+
+# Auth Login
+authentication_login_super_user_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=AuthenticationRequestModels.LoginSuperUserRequestModel,
+ language_models=[LoginRequestLanguageModel],
+ statics="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+authentication_login_super_user_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_login_with_domain_and_creds
+)
+
+
+# Auth Select Company or Occupant Type
+authentication_select_super_user_event = Event(
+ name="authentication_select_super_user_event",
+ key="f951ae1a-7950-4eab-ae2d-5bd9c2d21173",
+ request_validator=AuthenticationRequestModels.SelectCompanyOrOccupantTypeSuperUserRequestModel,
+ language_models=[SelectRequestLanguageModel],
+ statics="LOGIN_SELECT",
+ description="Select company or occupant type super user",
+)
+
+
+authentication_select_super_user_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_select_company_or_occupant_type
+)
+
+
+# Check Token Validity
+authentication_check_token_event = Event(
+ name="authentication_check_token_event",
+ key="b6e3d1e2-4f9c-5c1g-9d8e-7e5f6f5e5d5f",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Check if token is valid",
+)
+
+
+authentication_check_token_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_check_token_is_valid
+)
+
+
+# Refresh User Info
+authentication_refresh_user_info_event = Event(
+ name="authentication_refresh_user_info_event",
+ key="c7f4e2f3-5g0d-6d2h-0e9f-8f6g7g6f6e6g",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Refresh user information",
+)
+
+
+authentication_refresh_user_info_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_access_token_user_info
+)
+
+
+# Change Password
+authentication_change_password_event = Event(
+ name="authentication_change_password_event",
+ key="d8g5f3g4-6h1e-7e3i-1f0g-9g7h8h7g7f7h",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Change user password",
+)
+
+
+authentication_change_password_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_change_password
+)
+
+
+# Create Password
+authentication_create_password_event = Event(
+ name="authentication_create_password_event",
+ key="e9h6g4h5-7i2f-8f4j-2g1h-0h8i9i8h8g8i",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Create new password",
+)
+
+
+authentication_create_password_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_create_password
+)
+
+
+# Disconnect User
+authentication_disconnect_user_event = Event(
+ name="authentication_disconnect_user_event",
+ key="f0i7h5i6-8j3g-9g5k-3h2i-1i9j0j9i9h9j",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Disconnect all user sessions",
+)
+
+
+authentication_disconnect_user_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_disconnect_user
+)
+
+
+# Logout User
+authentication_logout_user_event = Event(
+ name="authentication_logout_user_event",
+ key="g1j8i6j7-9k4h-0h6l-4i3j-2j0k1k0j0i0k",
+ request_validator=AuthenticationRequestModels.LogoutRequestModel,
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Logout user session",
+)
+
+
+authentication_logout_user_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_logout_user
+)
+
+
+# Refresh Token
+authentication_refresher_token_event = Event(
+ name="authentication_refresher_token_event",
+ key="h2k9j7k8-0l5i-1i7m-5j4k-3k1l2l1k1j1l",
+    request_validator=AuthenticationRequestModels.RefresherRequestModel,
+ language_models=[],
+ # response_validator=None,
+ description="Refresh authentication token",
+)
+
+
+authentication_refresher_token_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_refresher_token
+)
+
+
+# Forgot Password
+authentication_forgot_password_event = Event(
+ name="authentication_forgot_password_event",
+ key="i3l0k8l9-1m6j-2j8n-6k5l-4l2m3m2l2k2m",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Request password reset",
+)
+
+
+authentication_forgot_password_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_forgot_password
+)
+
+
+# Reset Password
+authentication_reset_password_event = Event(
+ name="authentication_reset_password_event",
+ key="j4m1l9m0-2n7k-3k9o-7l6m-5m3n4n3m3l3n",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Reset user password",
+)
+
+
+authentication_reset_password_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_reset_password
+)
+
+
+# Download Avatar
+authentication_download_avatar_event = Event(
+ name="authentication_download_avatar_event",
+ key="k5n2m0n1-3o8l-4l0p-8m7n-6n4o5o4n4m4o",
+ request_validator=None, # TODO: Add request validator
+ language_models=[],
+ # response_validator=None, # TODO: Add response validator
+ description="Download user avatar and profile info",
+)
+
+
+authentication_download_avatar_event.endpoint_callable = (
+ AuthenticationFunctions.authentication_download_avatar
+)
diff --git a/Events/AllEvents/authentication/auth/auth.py b/Events/AllEvents/authentication/auth/auth.py
new file mode 100644
index 0000000..4ae7313
--- /dev/null
+++ b/Events/AllEvents/authentication/auth/auth.py
@@ -0,0 +1,383 @@
+"""
+Authentication related API endpoints.
+"""
+
+from typing import Any, Dict
+from fastapi import Request
+
+from ApiLayers.Middleware import MiddlewareModule
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import EndpointBaseRequestModel, ContextRetrievers
+
+from .api_events import (
+ authentication_login_super_user_event,
+ authentication_select_super_user_event,
+ authentication_check_token_event,
+ authentication_refresh_user_info_event,
+ authentication_change_password_event,
+ authentication_create_password_event,
+ authentication_disconnect_user_event,
+ authentication_logout_user_event,
+ authentication_refresher_token_event,
+ authentication_forgot_password_event,
+ authentication_reset_password_event,
+ authentication_download_avatar_event,
+)
+from .function_handlers import AuthenticationFunctions
+
+
+AuthenticationLoginEventMethods = MethodToEvent(
+ name="AuthenticationLoginEventMethods",
+ events={
+ authentication_login_super_user_event.key: authentication_login_super_user_event,
+ },
+ headers=[],
+ errors=[],
+ url="/login",
+ method="POST",
+ summary="Login via domain and access key : [email] | [phone]",
+ description="Login to the system via domain, access key : [email] | [phone]",
+)
+
+
+def authentication_login_with_domain_and_creds_endpoint(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+ event_2_catch = AuthenticationLoginEventMethods.retrieve_event(
+ event_function_code=f"{authentication_login_super_user_event.key}"
+ )
+ data = event_2_catch.REQUEST_VALIDATOR(**data.data)
+ return event_2_catch.endpoint_callable(request=request, data=data)
+
+
+AuthenticationLoginEventMethods.endpoint_callable = (
+ authentication_login_with_domain_and_creds_endpoint
+)
+
+
+AuthenticationSelectEventMethods = MethodToEvent(
+ name="AuthenticationSelectEventMethods",
+ events={
+ authentication_select_super_user_event.key: authentication_select_super_user_event,
+ },
+ decorators_list=[MiddlewareModule.auth_required],
+ headers=[],
+ errors=[],
+ url="/select",
+ method="POST",
+ summary="Select company or occupant type",
+ description="Select company or occupant type",
+)
+
+
+def authentication_select_company_or_occupant_type(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+ """
+ Select company or occupant type.
+ """
+ context_retriever = ContextRetrievers(
+ func=authentication_select_company_or_occupant_type
+ )
+ function = AuthenticationSelectEventMethods.retrieve_event(
+ event_function_code=f"{authentication_select_super_user_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ data_model = None
+ if context_retriever.token.is_employee:
+ data_model = function.REQUEST_VALIDATOR.get("EmployeeSelection", None)(
+ **data.data
+ )
+ elif context_retriever.token.is_occupant:
+ data_model = function.REQUEST_VALIDATOR.get("OccupantSelection", None)(
+ **data.data
+ )
+ return function.endpoint_callable(data=data_model)
+
+
+AuthenticationSelectEventMethods.endpoint_callable = (
+ authentication_select_company_or_occupant_type
+)
+
+
+AuthenticationCheckTokenEventMethods = MethodToEvent(
+ name="AuthenticationCheckTokenEventMethods",
+ events={authentication_check_token_event.key: authentication_check_token_event},
+ headers=[],
+ errors=[],
+ decorators_list=[MiddlewareModule.auth_required],
+ url="/check-token",
+ method="POST",
+ summary="Check if token is valid",
+ description="Check if access token is valid for user",
+)
+
+
+def authentication_check_token_is_valid(request: Request):
+ context_retriever = ContextRetrievers(func=authentication_check_token_is_valid)
+ function = AuthenticationCheckTokenEventMethods.retrieve_event(
+ event_function_code=f"{authentication_check_token_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable()
+
+
+AuthenticationCheckTokenEventMethods.endpoint_callable = (
+ authentication_check_token_is_valid
+)
+
+
+AuthenticationRefreshEventMethods = MethodToEvent(
+ name="AuthenticationRefreshEventMethods",
+ events={
+ authentication_refresh_user_info_event.key: authentication_refresh_user_info_event
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[MiddlewareModule.auth_required],
+ url="/refresh",
+ method="POST",
+ summary="Refresh user info",
+ description="Refresh user info using access token",
+)
+
+
+def authentication_refresh_user_info(request: Request):
+ context_retriever = ContextRetrievers(func=authentication_refresh_user_info)
+ function = AuthenticationRefreshEventMethods.retrieve_event(
+ event_function_code=f"{authentication_refresh_user_info_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable()
+
+
+AuthenticationRefreshEventMethods.endpoint_callable = authentication_refresh_user_info
+
+
+AuthenticationChangePasswordEventMethods = MethodToEvent(
+ name="AuthenticationChangePasswordEventMethods",
+ events={
+ authentication_change_password_event.key: authentication_change_password_event
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[MiddlewareModule.auth_required],
+ url="/change-password",
+ method="POST",
+ summary="Change password",
+ description="Change password with access token",
+)
+
+
+def authentication_change_password_event_callable(
+ request: Request, data: EndpointBaseRequestModel
+):
+ context_retriever = ContextRetrievers(
+ func=authentication_change_password_event_callable
+ )
+ function = AuthenticationChangePasswordEventMethods.retrieve_event(
+ event_function_code=f"{authentication_change_password_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable(data=data)
+
+
+AuthenticationChangePasswordEventMethods.endpoint_callable = (
+ authentication_change_password_event_callable
+)
+
+AuthenticationCreatePasswordEventMethods = MethodToEvent(
+ name="AuthenticationCreatePasswordEventMethods",
+ events={
+ authentication_create_password_event.key: authentication_create_password_event
+ },
+ headers=[],
+ errors=[],
+ url="/create-password",
+ method="POST",
+ summary="Create password",
+ description="Create password with password reset token requested via email",
+)
+
+
+def authentication_create_password(request: Request, data: EndpointBaseRequestModel):
+ context_retriever = ContextRetrievers(func=authentication_create_password)
+ function = AuthenticationCreatePasswordEventMethods.retrieve_event(
+ event_function_code=f"{authentication_create_password_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable(data=data)
+
+
+AuthenticationCreatePasswordEventMethods.endpoint_callable = (
+ authentication_create_password
+)
+
+
+AuthenticationDisconnectUserEventMethods = MethodToEvent(
+ name="AuthenticationDisconnectUserEventMethods",
+ events={
+ authentication_disconnect_user_event.key: authentication_disconnect_user_event
+ },
+ decorators_list=[MiddlewareModule.auth_required],
+ headers=[],
+ errors=[],
+ url="/disconnect",
+ method="POST",
+ summary="Disconnect all sessions",
+ description="Disconnect all sessions of user in access token",
+)
+
+
+def authentication_disconnect_user(request: Request):
+ context_retriever = ContextRetrievers(func=authentication_disconnect_user)
+ function = AuthenticationDisconnectUserEventMethods.retrieve_event(
+ event_function_code=f"{authentication_disconnect_user_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable()
+
+
+AuthenticationDisconnectUserEventMethods.endpoint_callable = (
+ authentication_disconnect_user
+)
+
+
+AuthenticationLogoutEventMethods = MethodToEvent(
+ name="AuthenticationLogoutEventMethods",
+ events={authentication_logout_user_event.key: authentication_logout_user_event},
+ headers=[],
+ errors=[],
+ decorators_list=[MiddlewareModule.auth_required],
+ url="/logout",
+ method="POST",
+ summary="Logout user",
+ description="Logout only single session of user which domain is provided",
+)
+
+
+def authentication_logout_user(request: Request, data: EndpointBaseRequestModel):
+ context_retriever = ContextRetrievers(func=authentication_logout_user)
+ function = AuthenticationLogoutEventMethods.retrieve_event(
+ event_function_code=f"{authentication_logout_user_event.key}"
+ )
+ validated_data = function.REQUEST_VALIDATOR(**data.data)
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable(data=validated_data)
+
+
+AuthenticationLogoutEventMethods.endpoint_callable = authentication_logout_user
+
+
+AuthenticationRefreshTokenEventMethods = MethodToEvent(
+ name="AuthenticationRefreshTokenEventMethods",
+ events={
+ authentication_refresher_token_event.key: authentication_refresher_token_event
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[],
+ url="/refresh-token",
+ method="POST",
+ summary="Refresh token",
+ description="Refresh access token with refresher token",
+)
+
+
+def authentication_refresher_token(request: Request, data: EndpointBaseRequestModel):
+ function = AuthenticationRefreshTokenEventMethods.retrieve_event(
+ event_function_code=f"{authentication_refresher_token_event.key}"
+ )
+ validated_data = function.REQUEST_VALIDATOR(**data.data)
+ return function.endpoint_callable(request=request, data=validated_data)
+
+
+AuthenticationRefreshTokenEventMethods.endpoint_callable = (
+ authentication_refresher_token
+)
+
+
+AuthenticationForgotPasswordEventMethods = MethodToEvent(
+ name="AuthenticationForgotPasswordEventMethods",
+ events={
+ authentication_forgot_password_event.key: authentication_forgot_password_event
+ },
+ headers=[],
+ errors=[],
+ url="/forgot-password",
+ method="POST",
+ summary="Request password reset",
+ description="Send an email to user for a valid password reset token",
+)
+
+
+def authentication_forgot_password(request: Request, data: EndpointBaseRequestModel):
+ context_retriever = ContextRetrievers(func=authentication_forgot_password)
+ function = AuthenticationForgotPasswordEventMethods.retrieve_event(
+ event_function_code=f"{authentication_forgot_password_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable(data=data)
+
+
+AuthenticationForgotPasswordEventMethods.endpoint_callable = (
+ authentication_forgot_password
+)
+
+
+AuthenticationResetPasswordEventMethods = MethodToEvent(
+ name="AuthenticationResetPasswordEventMethods",
+ events={
+ authentication_reset_password_event.key: authentication_reset_password_event
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[MiddlewareModule.auth_required],
+ url="/reset-password",
+ method="POST",
+ summary="Reset password",
+ description="Reset user password",
+)
+
+
+def authentication_reset_password(request: Request, data: EndpointBaseRequestModel):
+ context_retriever = ContextRetrievers(func=authentication_reset_password)
+ function = AuthenticationResetPasswordEventMethods.retrieve_event(
+ event_function_code=f"{authentication_reset_password_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable(data=data.data)
+
+
+AuthenticationResetPasswordEventMethods.endpoint_callable = (
+ authentication_reset_password
+)
+
+
+AuthenticationDownloadAvatarEventMethods = MethodToEvent(
+ name="AuthenticationDownloadAvatarEventMethods",
+ events={
+ authentication_download_avatar_event.key: authentication_download_avatar_event
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[MiddlewareModule.auth_required],
+ url="/download-avatar",
+ method="POST",
+ summary="Download avatar",
+ description="Download avatar icon and profile info of user",
+)
+
+
+def authentication_download_avatar(request: Request):
+ context_retriever = ContextRetrievers(func=authentication_download_avatar)
+ function = AuthenticationDownloadAvatarEventMethods.retrieve_event(
+ event_function_code=f"{authentication_download_avatar_event.key}"
+ )
+ AuthenticationFunctions.context_retriever = context_retriever
+ return function.endpoint_callable()
+
+
+AuthenticationDownloadAvatarEventMethods.endpoint_callable = (
+ authentication_download_avatar
+)
diff --git a/Events/AllEvents/authentication/auth/cluster.py b/Events/AllEvents/authentication/auth/cluster.py
new file mode 100644
index 0000000..62082bc
--- /dev/null
+++ b/Events/AllEvents/authentication/auth/cluster.py
@@ -0,0 +1,42 @@
+from Events.Engine.abstract_class import CategoryCluster
+
+from .info import authentication_page_info
+from .auth import (
+ AuthenticationLoginEventMethods,
+ AuthenticationLogoutEventMethods,
+ AuthenticationRefreshTokenEventMethods,
+ AuthenticationForgotPasswordEventMethods,
+ AuthenticationChangePasswordEventMethods,
+ AuthenticationCheckTokenEventMethods,
+ AuthenticationCreatePasswordEventMethods,
+ AuthenticationDisconnectUserEventMethods,
+ AuthenticationDownloadAvatarEventMethods,
+ AuthenticationResetPasswordEventMethods,
+ AuthenticationRefreshEventMethods,
+ AuthenticationSelectEventMethods,
+)
+
+
+AuthCluster = CategoryCluster(
+ name="AuthCluster",
+ tags=["authentication"],
+ prefix="/authentication",
+ description="Authentication cluster",
+ pageinfo=authentication_page_info,
+ endpoints={
+ "AuthenticationLoginEventMethods": AuthenticationLoginEventMethods,
+ "AuthenticationLogoutEventMethods": AuthenticationLogoutEventMethods,
+ "AuthenticationRefreshTokenEventMethods": AuthenticationRefreshTokenEventMethods,
+ "AuthenticationForgotPasswordEventMethods": AuthenticationForgotPasswordEventMethods,
+ "AuthenticationChangePasswordEventMethods": AuthenticationChangePasswordEventMethods,
+ "AuthenticationCheckTokenEventMethods": AuthenticationCheckTokenEventMethods,
+ "AuthenticationCreatePasswordEventMethods": AuthenticationCreatePasswordEventMethods,
+ "AuthenticationDisconnectUserEventMethods": AuthenticationDisconnectUserEventMethods,
+ "AuthenticationDownloadAvatarEventMethods": AuthenticationDownloadAvatarEventMethods,
+ "AuthenticationResetPasswordEventMethods": AuthenticationResetPasswordEventMethods,
+ "AuthenticationRefreshEventMethods": AuthenticationRefreshEventMethods,
+ "AuthenticationSelectEventMethods": AuthenticationSelectEventMethods,
+ },
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/authentication/auth/function_handlers.py b/Events/AllEvents/authentication/auth/function_handlers.py
new file mode 100644
index 0000000..f3e7038
--- /dev/null
+++ b/Events/AllEvents/authentication/auth/function_handlers.py
@@ -0,0 +1,529 @@
+from typing import Any, Union
+from fastapi import Request
+
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ApiServices.Login.user_login_handler import UserLoginModule
+from ApiLayers.ApiServices.Token.token_handler import TokenService
+from ApiLayers.ApiValidations.Custom.token_objects import CompanyToken, OccupantToken
+from ApiLayers.ApiValidations.Response.default_response import (
+ EndpointSuccessResponse,
+ EndpointNotAcceptableResponse,
+ EndpointBadRequestResponse,
+)
+from ApiLayers.ErrorHandlers import HTTPExceptionApi
+from ApiLayers.Schemas import (
+ BuildLivingSpace,
+ BuildParts,
+ RelationshipEmployee2Build,
+ Companies,
+ Departments,
+ Duties,
+ Duty,
+ Staff,
+ Employees,
+ Event2Employee,
+ Event2Occupant,
+ OccupantTypes,
+ Users,
+ UsersTokens,
+)
+from Events.base_request_model import TokenDictType, BaseRouteModel
+from Services.Redis.Actions.actions import RedisActions
+from ApiLayers.AllConfigs.Redis.configs import RedisAuthKeys
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_employee_selection(
+ cls, request: Request, data: Any, token_dict: TokenDictType
+ ):
+ db = Users.new_session()
+ if data.company_uu_id not in token_dict.companies_uu_id_list:
+ raise HTTPExceptionApi(
+ error_code="HTTP_400_BAD_REQUEST",
+ lang=token_dict.lang,
+ loc=get_line_number_for_error(),
+ sys_msg="Company not found in token",
+ )
+ selected_company: Companies = Companies.filter_one(
+ Companies.uu_id == data.company_uu_id, db=db
+ ).data
+ if not selected_company:
+ raise HTTPExceptionApi(
+ error_code="HTTP_400_BAD_REQUEST",
+ lang=token_dict.lang,
+ loc=get_line_number_for_error(),
+ sys_msg="Company not found in token",
+ )
+
+ # Get duties IDs for the company
+ duties_ids = [
+ duty.id
+ for duty in Duties.filter_all(
+ Duties.company_id == selected_company.id, db=db
+ ).data
+ ]
+
+ # Get staff IDs
+ staff_ids = [
+ staff.id
+ for staff in Staff.filter_all(Staff.duties_id.in_(duties_ids), db=db).data
+ ]
+
+ # Get employee
+ employee: Employees = Employees.filter_one(
+ Employees.people_id == token_dict.person_id,
+ Employees.staff_id.in_(staff_ids),
+ db=db,
+ ).data
+
+ if not employee:
+ raise HTTPExceptionApi(
+ error_code="HTTP_400_BAD_REQUEST",
+ lang=token_dict.lang,
+ loc=get_line_number_for_error(),
+ sys_msg="Employee not found in token",
+ )
+
+ # Get reachable events
+ reachable_event_codes = Event2Employee.get_event_codes(employee_id=employee.id)
+
+ # Get staff and duties
+ staff = Staff.filter_one(Staff.id == employee.staff_id, db=db).data
+ duties = Duties.filter_one(Duties.id == staff.duties_id, db=db).data
+ department = Departments.filter_one(
+ Departments.id == duties.department_id, db=db
+ ).data
+
+ # Get bulk duty
+ bulk_id = Duty.filter_by_one(system=True, duty_code="BULK", db=db).data
+ bulk_duty_id = Duties.filter_by_one(
+ company_id=selected_company.id,
+ duties_id=bulk_id.id,
+ db=db,
+ ).data
+
+ # Create company token
+ company_token = CompanyToken(
+ company_uu_id=selected_company.uu_id.__str__(),
+ company_id=selected_company.id,
+ department_id=department.id,
+ department_uu_id=department.uu_id.__str__(),
+ duty_id=duties.id,
+ duty_uu_id=duties.uu_id.__str__(),
+ bulk_duties_id=bulk_duty_id.id,
+ staff_id=staff.id,
+ staff_uu_id=staff.uu_id.__str__(),
+ employee_id=employee.id,
+ employee_uu_id=employee.uu_id.__str__(),
+ reachable_event_codes=reachable_event_codes,
+ )
+
+ try: # Update Redis
+ return TokenService.update_token_at_redis(
+ request=request, add_payload=company_token
+ )
+ except Exception as e:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg=f"{e}",
+ )
+
+    @classmethod  # Requires no auth context
+    def handle_occupant_selection(
+        cls, request: Request, data: Any, token_dict: TokenDictType
+    ):
+        """Resolve the selected living space and write an OccupantToken into the Redis session token."""
+        db = BuildLivingSpace.new_session()
+        # Get selected occupant type
+        selected_build_living_space: BuildLivingSpace = BuildLivingSpace.filter_one(
+            BuildLivingSpace.uu_id == data.build_living_space_uu_id,
+            db=db,
+        ).data
+        if not selected_build_living_space:
+            raise HTTPExceptionApi(
+                error_code="HTTP_400_BAD_REQUEST",
+                lang=token_dict.lang,
+                loc=get_line_number_for_error(),
+                sys_msg="Selected occupant type not found",
+            )
+
+        # Get reachable events
+        reachable_event_codes = Event2Occupant.get_event_codes(
+            build_living_space_id=selected_build_living_space.id
+        )
+        occupant_type = OccupantTypes.filter_one_system(
+            OccupantTypes.id == selected_build_living_space.occupant_type_id,
+            db=db,
+        ).data
+        build_part = BuildParts.filter_one(
+            BuildParts.id == selected_build_living_space.build_parts_id,
+            db=db,
+        ).data
+        build = BuildParts.filter_one(  # NOTE(review): filters BuildParts by build_part.build_id — looks like it should query a Builds model; confirm
+            BuildParts.id == build_part.build_id,
+            db=db,
+        ).data
+        responsible_employee = Employees.filter_one(
+            Employees.id == build_part.responsible_employee_id,
+            db=db,
+        ).data
+        related_company = RelationshipEmployee2Build.filter_one(
+            RelationshipEmployee2Build.member_id == build.id,
+            db=db,
+        ).data
+        # Get company
+        company_related = Companies.filter_one(
+            Companies.id == related_company.company_id,
+            db=db,
+        ).data
+
+        # Create occupant token
+        occupant_token = OccupantToken(
+            living_space_id=selected_build_living_space.id,
+            living_space_uu_id=selected_build_living_space.uu_id.__str__(),
+            occupant_type_id=occupant_type.id,
+            occupant_type_uu_id=occupant_type.uu_id.__str__(),
+            occupant_type=occupant_type.occupant_type,
+            build_id=build.id,
+            build_uuid=build.uu_id.__str__(),
+            build_part_id=build_part.id,
+            build_part_uuid=build_part.uu_id.__str__(),
+            responsible_employee_id=responsible_employee.id,
+            responsible_employee_uuid=responsible_employee.uu_id.__str__(),
+            responsible_company_id=company_related.id,
+            responsible_company_uuid=company_related.uu_id.__str__(),
+            reachable_event_codes=reachable_event_codes,
+        )
+
+        try:  # Update Redis
+            return TokenService.update_token_at_redis(
+                request=request, add_payload=occupant_token
+            )
+        except Exception as e:
+            raise HTTPExceptionApi(
+                error_code="",
+                lang="en",
+                loc=get_line_number_for_error(),
+                sys_msg=f"{e}",
+            )
+
+
+class AuthenticationFunctions(BaseRouteModel):
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def authentication_login_with_domain_and_creds(cls, request: Request, data: Any):
+ """
+ Authenticate user with domain and credentials.
+
+ Args:
+ request: FastAPI request object
+ data: Request body containing login credentials
+ {
+ "data": {
+ "domain": "evyos.com.tr",
+ "access_key": "karatay.berkay.sup@evyos.com.tr",
+ "password": "string",
+ "remember_me": false
+ }
+ }
+ Returns:
+ SuccessResponse containing authentication token and user info
+ """
+
+ # Get token from login module
+ user_login_module = UserLoginModule(request=request)
+ user_login_module.login_user_via_credentials(access_data=data)
+ user_login_module.language = "en"
+
+ # Return response with token and headers
+ return EndpointSuccessResponse(
+ code="LOGIN_SUCCESS", lang=user_login_module.language
+ ).as_dict(data=user_login_module.as_dict)
+
+ @classmethod # Requires auth context
+ def authentication_select_company_or_occupant_type(cls, data: Any):
+ """
+ Handle selection of company or occupant type
+ {"data": {"build_living_space_uu_id": ""}} | {"data": {"company_uu_id": ""}}
+ {
+ "data": {"company_uu_id": "e9869a25-ba4d-49dc-bb0d-8286343b184b"}
+ }
+
+ {
+ "data": {"build_living_space_uu_id": "e9869a25-ba4d-49dc-bb0d-8286343b184b"}
+ }
+ """
+ selection_dict = dict(
+ request=cls.context_retriever.request,
+ token_dict=cls.context_retriever.token,
+ data=data,
+ )
+ if cls.context_retriever.token.is_employee:
+ if Handlers.handle_employee_selection(**selection_dict):
+ return EndpointSuccessResponse(
+ code="LOGIN_SELECT", lang=cls.context_retriever.token.lang
+ ).as_dict(
+ data={"selected": data.company_uu_id, **cls.context_retriever.base}
+ )
+ elif cls.context_retriever.token.is_occupant:
+ if Handlers.handle_occupant_selection(**selection_dict):
+ return EndpointSuccessResponse(
+ code="LOGIN_SELECT", lang=cls.context_retriever.token.lang
+ ).as_dict(
+ data={
+ "selected": data.build_living_space_uu_id,
+ **cls.context_retriever.base,
+ }
+ )
+
+ @classmethod # Requires auth context
+ def authentication_check_token_is_valid(cls):
+ """Check if token is valid for user"""
+ if cls.context_retriever.token:
+ return EndpointSuccessResponse(
+ code="TOKEN_VALID", lang=cls.context_retriever.token.lang
+ ).as_dict(data=cls.context_retriever.base)
+ return {
+ "completed": False,
+ "message": "Token is not valid",
+ }
+
+    @classmethod  # Requires auth context (endpoint is registered with auth_required and the body reads the context token)
+ def authentication_access_token_user_info(cls):
+ """Refresh user info using access token"""
+ if cls.context_retriever.token:
+ db = Users.new_session()
+ if found_user := Users.filter_one(
+ Users.id == cls.context_retriever.token.user_id, db=db
+ ).data:
+ return EndpointSuccessResponse(
+ code="USER_INFO_REFRESHED", lang=cls.context_retriever.token.lang
+ ).as_dict(
+ {
+ "access_token": cls.context_retriever.get_token,
+ "user": found_user.get_dict(),
+ }
+ )
+ if not found_user:
+ return EndpointNotAcceptableResponse(
+ code="USER_NOT_FOUND", lang=cls.context_retriever.token.lang
+ ).as_dict(data={})
+
+    @classmethod  # Requires auth context (endpoint is registered with auth_required and the body reads the context token)
+ def authentication_change_password(cls, data: Any):
+ """Change password with access token"""
+ if cls.context_retriever.token:
+ db = Users.new_session()
+ if found_user := Users.filter_one(
+ Users.id == cls.context_retriever.token.user_id, db=db
+ ).data:
+ found_user.set_password(data.new_password)
+ return EndpointSuccessResponse(
+ code="PASSWORD_CHANGED", lang=cls.context_retriever.token.lang
+ ).as_dict(data={"user": found_user.get_dict()})
+ if not found_user:
+ return EndpointNotAcceptableResponse(
+ code="USER_NOT_FOUND", lang=cls.context_retriever.token.lang
+ ).as_dict(data={})
+
+    @classmethod  # Requires no auth context — NOTE(review): body reads context_retriever.token.lang; confirm a token is available without auth_required
+ def authentication_create_password(cls, data: Any):
+ """Create password with password reset token requested via email"""
+ db = Users.new_session()
+ if not data.re_password == data.password:
+ return EndpointNotAcceptableResponse(
+ code="PASSWORD_NOT_MATCH", lang=cls.context_retriever.token.lang
+ ).as_dict(data={"password": data.password, "re_password": data.re_password})
+ if found_user := Users.filter_one(
+ Users.password_token == data.password_token, db=db
+ ).data:
+ found_user.create_password(found_user=found_user, password=data.password)
+ found_user.password_token = ""
+ found_user.save()
+ return EndpointSuccessResponse(
+ code="CREATED_PASSWORD", lang=cls.context_retriever.token.lang
+ ).as_dict(data={"user": found_user.get_dict()})
+
+ @classmethod # Requires auth context
+ def authentication_disconnect_user(cls):
+ """Disconnect all sessions of user in access token"""
+ db = Users.new_session()
+ found_user = Users.filter_one_system(
+ Users.id == cls.context_retriever.token.user_id, db=db
+ ).data
+ if not found_user:
+ return EndpointNotAcceptableResponse(
+ code="USER_NOT_FOUND", lang=cls.context_retriever.token.lang
+ ).as_dict(data={})
+ registered_tokens = UsersTokens.filter_all(
+ UsersTokens.user_id == cls.context_retriever.token.user_id, db=db
+ )
+ if registered_tokens.count:
+ registered_tokens.query.delete()
+ UsersTokens.save(db=db)
+
+ RedisActions.delete(
+ list_keys=[f"{RedisAuthKeys.AUTH}:*:{str(found_user.uu_id)}"]
+ )
+ return EndpointSuccessResponse(
+ code="DISCONNECTED_USER", lang=cls.context_retriever.token.lang
+ ).as_dict(data={"user": found_user.get_dict()})
+
+ @classmethod # Requires auth context
+ def authentication_logout_user(cls, data: Any):
+ """Logout only single session of user which domain is provided"""
+ db = Users.new_session()
+ found_user = Users.filter_one_system(
+ Users.id == cls.context_retriever.token.user_id, db=db
+ ).data
+ if not found_user:
+ return EndpointNotAcceptableResponse(
+ code="USER_NOT_FOUND", lang=cls.context_retriever.token.lang
+ ).as_dict(data={})
+ registered_tokens = UsersTokens.filter_all_system(
+ UsersTokens.user_id == cls.context_retriever.token.user_id,
+ UsersTokens.domain == cls.context_retriever.token.domain,
+ db=db,
+ )
+ if registered_tokens.count:
+ registered_tokens.query.delete()
+ UsersTokens.save(db=db)
+ TokenService.remove_token_with_domain(user=found_user, domain=data.domain)
+ return EndpointSuccessResponse(
+ code="LOGOUT_USER", lang=cls.context_retriever.token.lang
+ ).as_dict(data={"user": found_user.get_dict()})
+
+    @classmethod  # Requires no auth context
+    def authentication_refresher_token(cls, request: Request, data: Any):
+        """
+        Refresh access token with refresher token
+        {
+            "data": {
+                "refresh_token": "string",
+                "domain": "string"
+            }
+        }
+        """
+        import arrow
+        from ApiLayers.ApiServices.Token.token_handler import TokenService
+
+        db = UsersTokens.new_session()
+        token_refresher: UsersTokens = UsersTokens.filter_by_one(
+            token=data.refresh_token,
+            domain=data.domain,
+            db=db,
+        ).data
+        language = request.headers.get("evyos-language", "tr")
+        if not token_refresher:
+            return EndpointNotAcceptableResponse(
+                code="REFRESHER_NOT_FOUND", lang=language
+            ).as_dict(data={"refresh_token": data.refresh_token})
+
+        if found_user := Users.filter_one(
+            Users.id == token_refresher.user_id, db=db
+        ).data:
+            token_created = TokenService.set_access_token_to_redis(
+                request=request,
+                user=found_user,
+                domain=data.domain,
+                remember=True,
+            )
+            found_user.last_agent = request.headers.get("User-Agent", None)
+            found_user.last_platform = request.headers.get("Origin", None)
+            found_user.last_remote_addr = getattr(
+                request, "remote_addr", None
+            ) or request.headers.get("X-Forwarded-For", None)
+            found_user.last_seen = str(arrow.now())
+            response_data = {
+                "access_token": token_created.get("access_token"),
+                "refresh_token": data.refresh_token,
+            }
+            return EndpointSuccessResponse(code="TOKEN_REFRESH", lang=language).as_dict(
+                data=response_data
+            )
+        return EndpointNotAcceptableResponse(  # was `raise`: as_dict() returns a dict, and raising a non-exception is a TypeError
+            code="USER_NOT_FOUND", lang=language
+        ).as_dict(data={})
+
+    @classmethod  # Requires no auth context — NOTE(review): body reads context_retriever.token.lang; confirm a token is available without auth_required
+ def authentication_forgot_password(cls, data: Any):
+ """Send an email to user for a valid password reset token"""
+ import arrow
+ from ApiLayers.ApiServices.Token.token_handler import TokenService
+ from ApiLayers.AllConfigs.Templates.password_templates import (
+ change_your_password_template,
+ )
+ from Services.Email.send_email import email_sender
+
+ from config import ApiStatic
+
+ db = Users.new_session()
+ request = cls.context_retriever.request
+ found_user: Users = Users.check_user_exits(
+ access_key=data.access_key, domain=data.domain
+ )
+ forgot_key = TokenService._create_access_token(access=False)
+ forgot_link = ApiStatic.forgot_link(forgot_key=forgot_key)
+ send_email_completed = email_sender.send_email(
+ subject=f"Dear {found_user.user_tag}, your forgot password link has been sent.",
+ receivers=[str(found_user.email)],
+ html=change_your_password_template(
+ user_name=found_user.user_tag, forgot_link=forgot_link
+ ),
+ )
+ if not send_email_completed:
+ return EndpointBadRequestResponse(
+ code="EMAIL_NOT_SENT", lang=cls.context_retriever.token.lang
+ ).as_dict(data={"email": found_user.email})
+ found_user.password_token = forgot_key
+ found_user.password_token_is_valid = str(arrow.now().shift(days=1))
+ found_user.save(db=db)
+ return EndpointSuccessResponse(
+ code="FORGOT_PASSWORD", lang=cls.context_retriever.token.lang
+ ).as_dict(
+ data={
+ "user": found_user.get_dict(),
+ "forgot_link": forgot_link,
+ "token": forgot_key,
+ }
+ )
+
+    @classmethod  # Requires auth context (endpoint is registered with auth_required)
+ def authentication_reset_password(cls, data: Any):
+ """Reset password with forgot password token"""
+ return cls.context_retriever.base
+
+    @classmethod  # Requires auth context (endpoint is registered with auth_required and the body reads the context token)
+ def authentication_download_avatar(cls):
+ """Download avatar icon and profile info of user"""
+ import arrow
+
+ db = Users.new_session()
+ if found_user := Users.filter_one(
+ Users.id == cls.context_retriever.token.user_id, db=db
+ ).data:
+ expired_starts = str(arrow.now() - arrow.get(str(found_user.expiry_ends)))
+ expired_int = (
+ arrow.now().datetime - arrow.get(str(found_user.expiry_ends)).datetime
+ )
+ user_info = {
+ "lang": cls.context_retriever.token.lang,
+ "full_name": found_user.person.full_name,
+ "avatar": found_user.avatar,
+ "remember_me": found_user.remember_me,
+ "expiry_ends": str(found_user.expiry_ends),
+ "expired_humanized": expired_starts,
+ "expired_day": int(expired_int.days) * -1,
+ }
+ return EndpointSuccessResponse(
+ code="USER_AVATAR", lang=cls.context_retriever.token.lang
+ ).as_dict(data=user_info)
+ return EndpointNotAcceptableResponse(
+ code="USER_NOT_FOUND", lang=cls.context_retriever.token.lang
+ ).as_dict(data={})
diff --git a/Events/AllEvents/authentication/auth/info.py b/Events/AllEvents/authentication/auth/info.py
new file mode 100644
index 0000000..9f9b853
--- /dev/null
+++ b/Events/AllEvents/authentication/auth/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+authentication_page_info = PageInfo(
+ name="Authentication",
+ url="",
+ language_models={},
+ endpoints={},
+ icon="Authentication",
+ sub_components=[],
+)
diff --git a/Events/AllEvents/authentication/auth/models.py b/Events/AllEvents/authentication/auth/models.py
new file mode 100644
index 0000000..f4572e6
--- /dev/null
+++ b/Events/AllEvents/authentication/auth/models.py
@@ -0,0 +1,24 @@
+from ApiLayers.ApiValidations.Request import (
+ Login,
+ EmployeeSelection,
+ OccupantSelection,
+ Logout,
+ CreatePassword,
+ ChangePassword,
+ Forgot,
+ Remember,
+)
+
+
class AuthenticationRequestModels:
    """Request validators for the authentication endpoints, keyed by action."""

    LoginSuperUserRequestModel = Login
    # Selection payload differs by principal type; dispatched by key.
    SelectCompanyOrOccupantTypeSuperUserRequestModel = {
        "EmployeeSelection": EmployeeSelection,
        "OccupantSelection": OccupantSelection,
    }
    RefresherRequestModel = Remember
    LogoutRequestModel = Logout
+
+
class AuthenticationResponseModels:
    """Response validators for the authentication endpoints (none defined yet)."""

    pass
diff --git a/Events/AllEvents/events/__init__.py b/Events/AllEvents/events/__init__.py
new file mode 100644
index 0000000..8c95085
--- /dev/null
+++ b/Events/AllEvents/events/__init__.py
@@ -0,0 +1,9 @@
+"""
+Events package initialization.
+"""
+
+from .account.cluster import AccountCluster
+from .address.cluster import AddressCluster
+
+
+__all__ = ["AccountCluster", "AddressCluster"]
diff --git a/Events/AllEvents/events/account/account_records.py b/Events/AllEvents/events/account/account_records.py
new file mode 100644
index 0000000..539ecc9
--- /dev/null
+++ b/Events/AllEvents/events/account/account_records.py
@@ -0,0 +1,124 @@
+"""
+Account related API endpoints.
+"""
+
+from typing import Any, Dict
+from fastapi import Request
+
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import EndpointBaseRequestModel, ContextRetrievers
+from ApiLayers.Middleware.token_event_middleware import TokenEventMiddleware
+from ApiLayers.ApiValidations.Response.default_response import (
+ EndpointSuccessListResponse,
+)
+
+from .function_handlers import AccountListEventMethods
+from .api_events import SuperUserAccountEvents
+
+
# Wires the super-user "list" event into POST /list behind the token
# middleware.  FIX: corrected the misspelling "previligous" -> "privileges"
# in the user-facing OpenAPI summary/description.
AccountRecordsListEventMethods = MethodToEvent(
    name="AccountRecordsListEventMethods",
    events={
        # event UUID -> Event definition
        SuperUserAccountEvents.SuperUserListEvent.key: SuperUserAccountEvents.SuperUserListEvent,
    },
    headers=[],
    errors=[],
    decorators_list=[TokenEventMiddleware.event_required],
    url="/list",
    method="POST",
    summary="List all accounts by given privileges",
    description="List all accounts by given privileges",
)
+
+
def account_list_event_endpoint(
    request: Request, data: EndpointBaseRequestModel
) -> Dict[str, Any]:
    """Resolve the super-user list event, validate the incoming payload and
    return the paginated account list as a success-list response."""
    ctx = ContextRetrievers(func=account_list_event_endpoint)
    list_event = AccountRecordsListEventMethods.retrieve_event(
        event_function_code=f"{SuperUserAccountEvents.SuperUserListEvent.key}"
    )
    ctx.RESPONSE_VALIDATOR = list_event.RESPONSE_VALIDATOR
    validated = list_event.REQUEST_VALIDATOR(**data.data)
    # Hand the request context to the handler class before dispatching.
    AccountListEventMethods.context_retriever = ctx
    page = list_event.endpoint_callable(data=validated)
    response = EndpointSuccessListResponse(
        code=list_event.static_key, lang=ctx.token.lang
    )
    return response.as_dict(data=page.data, pagination=page.pagination.as_dict())


AccountRecordsListEventMethods.endpoint_callable = account_list_event_endpoint
+
+
# Wires the super-user "create" event into POST /create behind the token
# middleware.  FIX: corrected the misspelling "previligous" -> "privileges"
# in the user-facing OpenAPI summary/description.
AccountRecordsCreateEventMethods = MethodToEvent(
    name="AccountRecordsCreateEventMethods",
    events={
        # event UUID -> Event definition
        SuperUserAccountEvents.SuperUserCreateEvent.key: SuperUserAccountEvents.SuperUserCreateEvent,
    },
    headers=[],
    errors=[],
    decorators_list=[TokenEventMiddleware.event_required],
    url="/create",
    method="POST",
    summary="Create Account via given data and privileges",
    description="Create Account via given data and privileges",
)
+
+
def account_create_event_endpoint(
    request: Request, data: EndpointBaseRequestModel
) -> Dict[str, Any]:
    """Create-account endpoint: resolves the create event, validates the
    payload and dispatches it to ``AccountCreateEventMethods``.

    BUG FIX: the context retriever was previously attached to
    ``AccountListEventMethods`` (copy-paste from the list endpoint), while
    the event callable is ``AccountCreateEventMethods.account_records_create``
    — so the create handler read a stale/unset ``cls.context_retriever``.
    It is now attached to the class that actually services this event.
    """
    # Function-scope import keeps the module-level import block untouched;
    # after the first call the module is cached in sys.modules.
    from .function_handlers import AccountCreateEventMethods

    context_retriever = ContextRetrievers(func=account_create_event_endpoint)
    event_2_catch = AccountRecordsCreateEventMethods.retrieve_event(
        event_function_code=f"{SuperUserAccountEvents.SuperUserCreateEvent.key}"
    )
    context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
    data = event_2_catch.REQUEST_VALIDATOR(**data.data)
    AccountCreateEventMethods.context_retriever = context_retriever
    pagination_result = event_2_catch.endpoint_callable(data=data)
    return EndpointSuccessListResponse(
        code=event_2_catch.static_key, lang=context_retriever.token.lang
    ).as_dict(
        data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
    )


AccountRecordsCreateEventMethods.endpoint_callable = account_create_event_endpoint
+
+
# Wires the super-user "update" event into POST /update behind the token
# middleware.  FIX: corrected the misspelling "previligous" -> "privileges"
# in the user-facing OpenAPI summary/description.
AccountRecordsUpdateEventMethods = MethodToEvent(
    name="AccountRecordsUpdateEventMethods",
    events={
        # event UUID -> Event definition
        SuperUserAccountEvents.SuperUserUpdateEvent.key: SuperUserAccountEvents.SuperUserUpdateEvent,
    },
    headers=[],
    errors=[],
    decorators_list=[TokenEventMiddleware.event_required],
    url="/update",
    method="POST",
    summary="Update Account via given data and privileges",
    description="Update Account via given data and privileges",
)
+
+
def account_update_event_endpoint(
    request: Request, data: EndpointBaseRequestModel
) -> Dict[str, Any]:
    """Update-account endpoint: resolves the update event, validates the
    payload and dispatches it to ``AccountUpdateEventMethods``.

    BUG FIX: the context retriever was previously attached to
    ``AccountListEventMethods`` (copy-paste from the list endpoint), while
    the event callable is ``AccountUpdateEventMethods.account_records_update``
    — so the update handler read a stale/unset ``cls.context_retriever``.
    It is now attached to the class that actually services this event.
    """
    # Function-scope import keeps the module-level import block untouched.
    from .function_handlers import AccountUpdateEventMethods

    context_retriever = ContextRetrievers(func=account_update_event_endpoint)
    event_2_catch = AccountRecordsUpdateEventMethods.retrieve_event(
        event_function_code=f"{SuperUserAccountEvents.SuperUserUpdateEvent.key}"
    )
    context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
    data = event_2_catch.REQUEST_VALIDATOR(**data.data)
    AccountUpdateEventMethods.context_retriever = context_retriever
    pagination_result = event_2_catch.endpoint_callable(data=data)
    return EndpointSuccessListResponse(
        code=event_2_catch.static_key, lang=context_retriever.token.lang
    ).as_dict(
        data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
    )


AccountRecordsUpdateEventMethods.endpoint_callable = account_update_event_endpoint
diff --git a/Events/AllEvents/events/account/api_events.py b/Events/AllEvents/events/account/api_events.py
new file mode 100644
index 0000000..e3b0b9c
--- /dev/null
+++ b/Events/AllEvents/events/account/api_events.py
@@ -0,0 +1,77 @@
+from Events.Engine.abstract_class import Event
+
+from .models import AccountRequestValidators
+from .function_handlers import (
+ AccountListEventMethods,
+ AccountCreateEventMethods,
+ AccountUpdateEventMethods,
+)
+
+
+# class SelectResponseAccount(BaseModel):
+# """
+# Response model for account list.
+# """
+# neighborhood_code: str
+# neighborhood_name: str
+# type_code: str
+# type_description: str
+#
+
+
# Account list event (super user).
account_list_super_user_event = Event(
    name="account_list_super_user_event",
    key="7192c2aa-5352-4e36-98b3-dafb7d036a3d",
    request_validator=AccountRequestValidators.ListAccountRecord,
    # response_validator=SelectResponseAccount,
    # language_models=[AccountRecords.__language_model__],
    language_models=[],
    statics="ACCOUNTS_LIST",
    description="List all types of accounts by validation list options and queries.",
)


# Handler invoked when this event is dispatched.
account_list_super_user_event.endpoint_callable = (
    AccountListEventMethods.account_records_list
)
+
+
# Account create event (super user).
account_insert_super_user_event = Event(
    name="account_insert_super_user_event",
    key="31f4f32f-0cd4-4995-8a6a-f9f56335848a",
    request_validator=AccountRequestValidators.InsertAccountRecord,
    # response_validator=SelectResponseAccount,
    # language_models=[AccountRecords.__language_model__],
    language_models=[],
    statics="ACCOUNT_CREATED",
    description="Create a new account by validation list options and queries.",
)


# Handler invoked when this event is dispatched.
account_insert_super_user_event.endpoint_callable = (
    AccountCreateEventMethods.account_records_create
)
+
+
# Account update event (super user).
# FIX: ``name`` previously read "account_insert_super_user_event" — a
# copy-paste from the insert event above — making the update event
# indistinguishable from the insert event by name in logs/diagnostics.
account_update_super_user_event = Event(
    name="account_update_super_user_event",
    key="208e6273-17ef-44f0-814a-8098f816b63a",
    request_validator=AccountRequestValidators.UpdateAccountRecord,
    # response_validator=SelectResponseAccount,
    # language_models=[AccountRecords.__language_model__],
    language_models=[],
    statics="ACCOUNT_UPDATED",
    description="Update a specific account by validation list options and queries.",
)


# Handler invoked when this event is dispatched.
account_update_super_user_event.endpoint_callable = (
    AccountUpdateEventMethods.account_records_update
)
+
+
class SuperUserAccountEvents:
    """Namespace grouping the super-user account events for registration."""

    SuperUserListEvent = account_list_super_user_event
    SuperUserCreateEvent = account_insert_super_user_event
    SuperUserUpdateEvent = account_update_super_user_event
diff --git a/Events/AllEvents/events/account/bases.py b/Events/AllEvents/events/account/bases.py
new file mode 100644
index 0000000..f253e3d
--- /dev/null
+++ b/Events/AllEvents/events/account/bases.py
@@ -0,0 +1,38 @@
+from Events.Engine.abstract_class import DefaultClusterName
+
+
+cluster_name = "AccountCluster"
+prefix = "/accounts"
+icon = "Building"
+
+
# Keys for the cluster
class KeyValidations:
    # Field names the client passes as "asked_field" to the validations endpoint.
    headers = "headers"
    data = "data"
    # NOTE(review): attribute is singular but the value is plural
    # ("validations") — confirm against the validations endpoint contract.
    validation = "validations"
+
# Key URLS for the cluster
class KeyURLs:
    # Endpoint the client calls to fetch header/validation metadata.
    validations = "/validations/validations"
+
# Keys for the cluster
class KeyBases:
    # Canonical event keys, namespaced under the cluster prefix ("/accounts").
    create_key = f"{prefix}/create"
    update_key = f"{prefix}/update"
    list_key = f"{prefix}/list"
+
+
# Page Variations of the cluster
class PageBases:
    # Client-side page URLs; the query string carries the cluster name.
    CREATE = f"/create?{DefaultClusterName}={cluster_name}"
    UPDATE = f"/update?{DefaultClusterName}={cluster_name}"
    DASHBOARD = f"/dashboard?{DefaultClusterName}={cluster_name}"
+
+
# Match the keys with the pages
# Maps each backend event key to the client page that consumes it.
page_2_keys = {
    KeyBases.create_key: PageBases.CREATE,
    KeyBases.update_key: PageBases.UPDATE,
    KeyBases.list_key: PageBases.DASHBOARD,
}
diff --git a/Events/AllEvents/events/account/cluster.py b/Events/AllEvents/events/account/cluster.py
new file mode 100644
index 0000000..7eb9a51
--- /dev/null
+++ b/Events/AllEvents/events/account/cluster.py
@@ -0,0 +1,26 @@
+from Events.Engine.abstract_class import CategoryCluster
+from .account_records import (
+ AccountRecordsListEventMethods,
+ AccountRecordsCreateEventMethods,
+ AccountRecordsUpdateEventMethods,
+)
+from .bases import cluster_name, prefix, page_2_keys
+from .info import page_infos
+
+
# Cluster definition: binds the account endpoints, the client page infos and
# the key->page mapping under the "/accounts" prefix.
AccountCluster = CategoryCluster(
    name=cluster_name,
    tags=["Account Records"],
    prefix=prefix,
    description="Account Cluster Actions",
    pageinfo=page_infos,
    endpoints={
        "AccountRecordsCreateEventMethods": AccountRecordsCreateEventMethods,
        "AccountRecordsUpdateEventMethods": AccountRecordsUpdateEventMethods,
        "AccountRecordsListEventMethods": AccountRecordsListEventMethods,
    },
    mapping=page_2_keys,
    include_in_schema=True,
    sub_category=[],
    is_client=True,
)
diff --git a/Events/AllEvents/events/account/function_handlers.py b/Events/AllEvents/events/account/function_handlers.py
new file mode 100644
index 0000000..004ec34
--- /dev/null
+++ b/Events/AllEvents/events/account/function_handlers.py
@@ -0,0 +1,305 @@
+"""
+Account records service implementation.
+"""
+
+from typing import Any, Union, Optional
+
+from ApiLayers.ApiLibrary import system_arrow
+from ApiLayers.ApiValidations.Custom.token_objects import (
+ OccupantTokenObject,
+ EmployeeTokenObject,
+)
+from ApiLayers.ApiValidations.Request import (
+ InsertAccountRecord,
+ UpdateAccountRecord,
+ ListOptions,
+)
+from ApiLayers.Schemas import (
+ BuildLivingSpace,
+ BuildDecisionBookPayments,
+ AccountRecords,
+ BuildIbans,
+ ApiEnumDropdown,
+)
+from ApiLayers.ApiValidations.Response import AccountRecordResponse
+
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
class AccountListEventMethods(BaseRouteModel):
    """
    Account records list by with full privileges.
    Accepts List Options
    {
        "data": {
            "page": 1,
            "size": 10,
            "order_field": ["uu_id",]
            "order_type": ["desc"],
            "query": {
                "process_date__gt": "2021-09-01",
            }
        }
    }
    """

    @classmethod
    def account_records_list(
        cls, data: Optional[Union[dict, ListOptions]]
    ) -> PaginationResult:
        """Return a paginated list of account records scoped to the caller.

        Occupant tokens are restricted to their responsible company;
        employee tokens to their selected company; other tokens see the
        unscoped query.
        """
        list_options_base = ListOptionsBase(
            table=AccountRecords,
            list_options=data,
            model_query=None,
        )
        db_session, query_options = list_options_base.init_list_options()
        # Pre-scope the query by the caller's company before applying the
        # client-supplied list options.
        # NOTE(review): ``pre_query`` is a class-level attribute on the model
        # — mutating it here is not safe under concurrent requests; confirm
        # the framework resets/consumes it per request.
        if cls.context_retriever.token.is_occupant:
            AccountRecords.pre_query = AccountRecords.filter_all(
                AccountRecords.company_id
                == cls.context_retriever.token.selected_occupant.responsible_company_id,
                db=db_session,
            ).query
        elif cls.context_retriever.token.is_employee:
            AccountRecords.pre_query = AccountRecords.filter_all(
                AccountRecords.company_id
                == cls.context_retriever.token.selected_company.company_id,
                db=db_session,
            ).query
        records = AccountRecords.filter_all(*query_options.convert(), db=db_session)
        return list_options_base.paginated_result(
            records=records,
            # RESPONSE_VALIDATOR is set by the endpoint wrapper; default to
            # None when dispatched without one.
            response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
        )
+
+
class AccountCreateEventMethods(BaseRouteModel):
    """Create handlers for account records, branching on the caller's role."""

    @classmethod
    def account_records_create(cls, data: Any):
        """Create an account record from a validated InsertAccountRecord.

        Occupant callers must own the target IBAN (checked against their
        build); employee callers get derived bank-date fields and a
        debit-type lookup before the record is created.

        NOTE(review): neither branch returns a value — the response
        construction is still commented out — so callers currently receive
        ``None``; confirm the intended response contract.
        """
        data_dict = data.excluded_dump()
        db_session = AccountRecords.new_session()
        if cls.context_retriever.token.is_occupant:
            # The IBAN must belong to the occupant's own build.
            build_iban = BuildIbans.filter_one(
                BuildIbans.iban == data.iban,
                BuildIbans.build_id
                == cls.context_retriever.token.selected_occupant.build_id,
                db=db_session,
            ).data
            if not build_iban:
                raise BuildIbans.raise_http_exception(
                    status_code="HTTP_404_NOT_FOUND",
                    error_case="UNAUTHORIZED",
                    message=f"{data.iban} is not found in company related to your organization",
                    data={"iban": data.iban},
                )
            # NOTE(review): unlike the employee branch this re-dumps the
            # payload, passes it as **kwargs without db=db_session, and
            # discards the result — confirm this asymmetry is intended.
            account_record = AccountRecords.find_or_create(**data.excluded_dump())
            # return AlchemyJsonResponse(
            #     completed=True,
            #     message="Account record created successfully",
            #     result=account_record,
            # )
        elif cls.context_retriever.token.is_employee:
            # Build.pre_query = Build.select_action(
            #     employee_id=token_dict.selected_employee.employee_id,
            # )
            # build_ids_list = Build.filter_all(
            # )
            # build_iban = BuildIbans.filter_one(
            #     BuildIbans.iban == data.iban,
            #     BuildIbans.build_id.in_([build.id for build in build_ids_list.data]),
            # ).data
            # if not build_iban:
            #     BuildIbans.raise_http_exception(
            #         status_code="HTTP_404_NOT_FOUND",
            #         error_case="UNAUTHORIZED",
            #         message=f"{data.iban} is not found in company related to your organization",
            #         data={
            #             "iban": data.iban,
            #         },
            #     )
            # Denormalize the bank date into weekday/month/day/year columns.
            bank_date = system_arrow.get(data.bank_date)
            data_dict["bank_date_w"] = bank_date.weekday()
            data_dict["bank_date_m"] = bank_date.month
            data_dict["bank_date_d"] = bank_date.day
            data_dict["bank_date_y"] = bank_date.year

            # Negative amounts are debits (DT-D), non-negative are receipts (DT-R).
            if int(data.currency_value) < 0:
                debit_type = ApiEnumDropdown.filter_by_one(
                    system=True, enum_class="DebitTypes", key="DT-D", db=db_session
                ).data
                data_dict["receive_debit"] = debit_type.id
                data_dict["receive_debit_uu_id"] = str(debit_type.uu_id)
            else:
                debit_type = ApiEnumDropdown.filter_by_one(
                    system=True, enum_class="DebitTypes", key="DT-R", db=db_session
                ).data
                data_dict["receive_debit"] = debit_type.id
                data_dict["receive_debit_uu_id"] = str(debit_type.uu_id)

            account_record = AccountRecords.find_or_create(
                data_dict, db=db_session
            ).data
            # return AlchemyJsonResponse(
            #     completed=True,
            #     message="Account record created successfully",
            #     result=account_record,
            # )
+
class AccountUpdateEventMethods(BaseRouteModel):
    """Update handlers for account records (work in progress)."""

    @classmethod
    def account_records_update(cls, build_uu_id: str, data: Any):
        """Update an account record identified by ``build_uu_id``.

        NOTE(review): stub — both role branches are empty and nothing is
        persisted or returned yet.
        """
        if cls.context_retriever.token.is_occupant:
            pass
        elif cls.context_retriever.token.is_employee:
            pass
        # NOTE(review): this runs unconditionally and reads
        # ``selected_occupant`` even for employee tokens — likely an
        # AttributeError for non-occupant callers; it also assigns a
        # class-level attribute on the model. Confirm intent.
        AccountRecords.build_parts_id = (
            cls.context_retriever.token.selected_occupant.build_part_id
        )

        # return AlchemyJsonResponse(
        #     completed=True,
        #     message="Account record updated successfully",
        #     result=account_record,
        #     cls_object=AccountRecords,
        #     response_model=UpdateAccountRecord,
        # )
+
+
+ # @classmethod
+ # def account_records_list_flt_res(cls, list_options: ListOptions) -> PaginationResult:
+ # list_options_base = ListOptionsBase(
+ # table=AccountRecords, list_options=list_options, model_query=None,
+ # )
+ # db_session, query_options = list_options_base.init_list_options()
+ # if not cls.context_retriever.token.is_occupant:
+ # raise AccountRecords.raise_http_exception(
+ # status_code="HTTP_404_NOT_FOUND",
+ # error_case="UNAUTHORIZED",
+ # message="Only Occupant can see this data",
+ # data={},
+ # )
+ #
+ # return_list = []
+ # living_space: BuildLivingSpace = BuildLivingSpace.filter_by_one(
+ # id=cls.context_retriever.token.selected_occupant.living_space_id, db=db_session
+ # ).data
+ # if not living_space:
+ # raise AccountRecords.raise_http_exception(
+ # status_code="HTTP_404_NOT_FOUND",
+ # error_case="UNAUTHORIZED",
+ # message="Living space not found",
+ # data={},
+ # )
+ #
+ # if not list_options:
+ # list_options = ListOptions()
+ #
+ # main_filters = [
+ # AccountRecords.living_space_id
+ # == cls.context_retriever.token.selected_occupant.living_space_id,
+ # BuildDecisionBookPayments.process_date
+ # >= str(system_arrow.now().shift(months=-3).date()),
+ # BuildDecisionBookPayments.process_date
+ # < str(system_arrow.find_last_day_of_month(living_space.expiry_ends)),
+ # BuildDecisionBookPayments.process_date
+ # >= str(system_arrow.get(living_space.expiry_starts)),
+ # BuildDecisionBookPayments.is_confirmed == True,
+ # AccountRecords.active == True,
+ # ]
+ # order_type = "desc"
+ # if list_options.order_type:
+ # order_type = "asc" if list_options.order_type[0] == "a" else "desc"
+ #
+ # order_by_list = BuildDecisionBookPayments.process_date.desc()
+ # if list_options.order_field:
+ # if list_options.order_field == "process_date":
+ # order_by_list = (
+ # BuildDecisionBookPayments.process_date.asc()
+ # if order_type == "asc"
+ # else BuildDecisionBookPayments.process_date.desc()
+ # )
+ # if list_options.order_field == "bank_date":
+ # order_by_list = (
+ # AccountRecords.bank_date.desc()
+ # if order_type == "asc"
+ # else AccountRecords.bank_date.asc()
+ # )
+ # if list_options.order_field == "currency_value":
+ # order_by_list = (
+ # AccountRecords.currency_value.desc()
+ # if order_type == "asc"
+ # else AccountRecords.currency_value.asc()
+ # )
+ # if list_options.order_field == "process_comment":
+ # order_by_list = (
+ # AccountRecords.process_comment.desc()
+ # if order_type == "asc"
+ # else AccountRecords.process_comment.asc()
+ # )
+ # if list_options.order_field == "payment_amount":
+ # order_by_list = (
+ # BuildDecisionBookPayments.payment_amount.desc()
+ # if order_type == "asc"
+ # else BuildDecisionBookPayments.payment_amount.asc()
+ # )
+ #
+ # if list_options.query:
+ # for key, value in list_options.query.items():
+ # if key == "process_date":
+ # main_filters.append(BuildDecisionBookPayments.process_date == value)
+ # if key == "bank_date":
+ # main_filters.append(AccountRecords.bank_date == value)
+ # if key == "currency":
+ # main_filters.append(BuildDecisionBookPayments.currency == value)
+ # if key == "currency_value":
+ # main_filters.append(AccountRecords.currency_value == value)
+ # if key == "process_comment":
+ # main_filters.append(AccountRecords.process_comment == value)
+ # if key == "payment_amount":
+ # main_filters.append(
+ # BuildDecisionBookPayments.payment_amount == value
+ # )
+ #
+ # query = (
+ # AccountRecords.session.query(
+ # BuildDecisionBookPayments.process_date,
+ # BuildDecisionBookPayments.payment_amount,
+ # BuildDecisionBookPayments.currency,
+ # AccountRecords.bank_date,
+ # AccountRecords.currency_value,
+ # AccountRecords.process_comment,
+ # BuildDecisionBookPayments.uu_id,
+ # )
+ # .join(
+ # AccountRecords,
+ # AccountRecords.id == BuildDecisionBookPayments.account_records_id,
+ # )
+ # .filter(*main_filters)
+ # ).order_by(order_by_list)
+ #
+ # query.limit(list_options.size or 5).offset(
+ # (list_options.page or 1 - 1) * list_options.size or 5
+ # )
+ # for list_of_values in query.all() or []:
+ # return_list.append(
+ # {
+ # "process_date": list_of_values[0],
+ # "payment_amount": list_of_values[1],
+ # "currency": list_of_values[2],
+ # "bank_date": list_of_values[3],
+ # "currency_value": list_of_values[4],
+ # "process_comment": list_of_values[5],
+ # }
+ # )
+ # return AlchemyJsonResponse(
+ # completed=True,
+ # message="Account records listed successfully",
+ # result=return_list,
+ # cls_object=AccountRecords,
+ # filter_attributes=list_options,
+ # response_model=AccountRecordResponse,
+ # )
+
diff --git a/Events/AllEvents/events/account/info.py b/Events/AllEvents/events/account/info.py
new file mode 100644
index 0000000..c7ba706
--- /dev/null
+++ b/Events/AllEvents/events/account/info.py
@@ -0,0 +1,140 @@
+from Events.Engine.abstract_class import PageInfo
+from .bases import KeyValidations, cluster_name, KeyBases, PageBases, icon, KeyURLs
+from .account_records import (
+ AccountRecordsUpdateEventMethods,
+ AccountRecordsCreateEventMethods,
+ AccountRecordsListEventMethods,
+)
+from .lang_models import (
+ account_language_create_models_as_dict,
+ account_language_model_as_dict,
+ account_language_list_models_as_dict,
+ account_language_created_models_as_dict,
+ account_language_update_form_models_as_dict,
+)
+
+
class ClustersPageInfo:
    """Client page descriptors (dashboard / create / update) for the
    account cluster.

    Each PageInfo tells the client which metadata endpoints to call
    ("instructions"), which event keys each page may dispatch
    ("endpoints"), and which language models to render ("language_models").
    """

    # Cluster Page Infos that are available for the client
    dashboard_page_info = PageInfo(
        name=f"{cluster_name}",
        url=PageBases.DASHBOARD,
        icon=icon,
        page_info={
            "en": {
                "page": "Account Records for reaching user all types account information",
            },
            "tr": {
                "page": "Kullanıcı tüm hesap bilgilerine ulaşmak için Hesap Kayıtları",
            },
        },
        instructions={
            str(KeyBases.list_key): {
                # Fetch table headers from the validations endpoint.
                "headers": {
                    "store": True,
                    "url": KeyURLs.validations,
                    "data": {"event_code": f"{KeyBases.list_key}", "asked_field": KeyValidations.headers},
                },
                # Prime the table with the first page of data.
                "data": {
                    "store": True,
                    "url": f"{KeyBases.list_key}",
                    "data": dict(page=1, limit=1),
                },
            },
        },
        endpoints={
            str(KeyBases.update_key): AccountRecordsUpdateEventMethods.retrieve_all_event_keys(),
            str(KeyBases.create_key): AccountRecordsCreateEventMethods.retrieve_all_event_keys(),
            str(KeyBases.list_key): AccountRecordsListEventMethods.retrieve_all_event_keys(),
        },
        # NOTE(review): unlike create/update pages, all three language
        # models are nested under list_key here — confirm the client
        # expects this extra level of nesting.
        language_models={
            str(KeyBases.list_key): {
                str(KeyBases.update_key): account_language_model_as_dict,
                str(KeyBases.create_key): account_language_created_models_as_dict,
                str(KeyBases.list_key): account_language_list_models_as_dict,
            }
        },
    )

    create_page_info = PageInfo(
        name=f"{cluster_name}",
        url=PageBases.CREATE,
        icon=icon,
        instructions={
            str(KeyBases.create_key): {
                # Form validation rules for the create form.
                "validation": {
                    "store": True,
                    "url": KeyURLs.validations,
                    "data": {"event_code": f"{KeyBases.create_key}", "asked_field": KeyValidations.validation },
                },
                "headers": {
                    "store": True,
                    "url": KeyURLs.validations,
                    "data": {"event_code": f"{KeyBases.create_key}", "asked_field": KeyValidations.headers},
                },
            },
        },
        page_info={
            "en": {
                "page": "Create Account Records for reaching user all types account information",
            },
            "tr": {
                "page": "Kullanıcı tüm hesap bilgilerine ulaşmak için Hesap Kayıt Oluştur",
            },
        },
        endpoints={
            str(KeyBases.create_key): AccountRecordsCreateEventMethods.retrieve_all_event_keys(),
        },
        language_models={
            str(KeyBases.create_key): account_language_create_models_as_dict,
        },
    )

    update_page_info = PageInfo(
        name=f"{cluster_name}",
        url=PageBases.UPDATE,
        icon=icon,
        instructions={
            str(KeyBases.update_key): {
                # Form validation rules for the update form.
                "validation": {
                    "store": True,
                    "url": KeyURLs.validations,
                    "data": {"event_code": f"{KeyBases.update_key}", "asked_field": KeyValidations.validation},
                },
                "headers": {
                    "store": True,
                    "url": KeyURLs.validations,
                    "data": {"event_code": f"{KeyBases.update_key}", "asked_field": KeyValidations.headers},
                },
            },
        },
        page_info={
            "en": {
                "page": "Update Account Records via all types account information",
            },
            "tr": {
                "page": "Tüm hesap bilgileri aracılığıyla Hesap Kayıtlarını Güncelle",
            },
        },
        endpoints={
            str(KeyBases.update_key): AccountRecordsUpdateEventMethods.retrieve_all_event_keys(),
        },
        language_models={
            str(KeyBases.update_key): account_language_update_form_models_as_dict,
        },
    )
+
+
# Page Variations of the cluster
# Registry: page URL -> PageInfo, consumed by the cluster definition.
page_infos = {
    ClustersPageInfo.dashboard_page_info.URL: ClustersPageInfo.dashboard_page_info,
    ClustersPageInfo.create_page_info.URL: ClustersPageInfo.create_page_info,
    ClustersPageInfo.update_page_info.URL: ClustersPageInfo.update_page_info,
}
+
+
# Import-time sanity check: every page URL declared on PageBases must have a
# matching PageInfo entry, otherwise the module refuses to load.
for _attr, _url in vars(PageBases).items():
    if str(_attr).startswith("__"):
        continue
    if _url not in page_infos:
        raise NotImplementedError(f"Page Info of : {_url} is not implemented in mappings")
diff --git a/Events/AllEvents/events/account/lang_models.py b/Events/AllEvents/events/account/lang_models.py
new file mode 100644
index 0000000..3424baa
--- /dev/null
+++ b/Events/AllEvents/events/account/lang_models.py
@@ -0,0 +1,78 @@
+from Events.Engine.abstract_class import DefaultClusterName, LanguageModels
+from .bases import KeyBases, cluster_name
+
+
# Language-model descriptors for the account cluster's client components.
# Each block configures one component (Link / Table / Form) with its site
# URL, backend key prefix and localized page texts, then exports the dict
# form consumed by the PageInfo definitions.

# "Update" link shown on the dashboard.
account_language_update_models = LanguageModels()
account_language_update_models.COMPONENT = "Link"
account_language_update_models.SITE_URL = f"/update?{DefaultClusterName}={cluster_name}"
account_language_update_models.PREFIX_URL = (
    KeyBases.update_key
)
account_language_update_models.PAGE_INFO = {
    "en": {
        "page": "Update Account Records",
    },
    "tr": {
        "page": "Hesap Kayıdı Güncelle",
    },
}
account_language_model_as_dict = account_language_update_models.as_dict()


# "Create" link shown on the dashboard.
account_language_created_models = LanguageModels()
account_language_created_models.COMPONENT = "Link"
account_language_created_models.SITE_URL = f"/create?{DefaultClusterName}={cluster_name}"
account_language_created_models.PREFIX_URL = (
    KeyBases.create_key
)
account_language_created_models.PAGE_INFO = {
    "en": {
        "page": "Create Account Records",
    },
    "tr": {"page": "Hesap Kayıdı Oluştur"},
}
account_language_created_models_as_dict = account_language_created_models.as_dict()

# Dashboard table listing account records.
account_language_list_models = LanguageModels()
account_language_list_models.COMPONENT = "Table"
account_language_list_models.SITE_URL = f"/dashboard?{DefaultClusterName}={cluster_name}"
account_language_list_models.PREFIX_URL = (
    KeyBases.list_key
)
account_language_list_models.PAGE_INFO = {
    "en": {
        "page": "List Account Records",
    },
    "tr": {
        "page": "Hesap Kayıtlarını Listele",
    },
}

account_language_list_models_as_dict = account_language_list_models.as_dict()

# Create form.
# NOTE(review): the "button:" keys below carry a trailing colon inside the
# key string — looks like a typo for "button"; confirm what the client
# component reads before changing it.
account_language_create_form_models = LanguageModels()
account_language_create_form_models.COMPONENT = "Form"
account_language_create_form_models.SITE_URL = f"/create?{DefaultClusterName}={cluster_name}"
account_language_create_form_models.PREFIX_URL = (
    KeyBases.create_key
)
account_language_create_form_models.PAGE_INFO = {
    "en": {"page": "List Account Records", "button:": "Create"},
    "tr": {"page": "Hesap Kayıtlarını Listele", "button:": "Oluştur"},
}

account_language_create_models_as_dict = account_language_create_form_models.as_dict()

# Update form.
account_language_update_form_models = LanguageModels()
account_language_update_form_models.COMPONENT = "Form"
account_language_update_form_models.SITE_URL = f"/update?{DefaultClusterName}={cluster_name}"
account_language_update_form_models.PREFIX_URL = (
    KeyBases.update_key
)
account_language_update_form_models.PAGE_INFO = {
    "en": {"page": "Update Account Records", "button:": "Update"},
    "tr": {"page": "Hesap Kayıdı Güncelle", "button:": "Güncelle"},
}
account_language_update_form_models_as_dict = (
    account_language_update_form_models.as_dict()
)
diff --git a/Events/AllEvents/events/account/models.py b/Events/AllEvents/events/account/models.py
new file mode 100644
index 0000000..24671af
--- /dev/null
+++ b/Events/AllEvents/events/account/models.py
@@ -0,0 +1,97 @@
+"""
+Account records request and response models.
+"""
+
+from typing import Optional
+
+from ApiLayers.ApiValidations.Request import PydanticBaseModel, ListOptions
+
+
class InsertAccountRecord(PydanticBaseModel):
    """Request payload for creating a single account/bank record."""

    # Mandatory bank-statement fields.
    iban: str
    bank_date: str
    currency_value: float
    bank_balance: float
    currency: str
    additional_balance: float
    channel_branch: str
    process_name: str
    process_type: str
    process_comment: str
    bank_reference_code: str

    # Optional bookkeeping / matching metadata.
    add_comment_note: Optional[str] = None
    is_receipt_mail_send: Optional[bool] = None
    found_from: Optional[str] = None
    similarity: Optional[float] = None
    remainder_balance: Optional[float] = None
    # Derived bank-date parts; the create handler fills these for employees.
    bank_date_y: Optional[int] = None
    bank_date_m: Optional[int] = None
    bank_date_w: Optional[int] = None
    bank_date_d: Optional[int] = None
    approving_accounting_record: Optional[bool] = None
    accounting_receipt_date: Optional[str] = None
    accounting_receipt_number: Optional[int] = None
    approved_record: Optional[bool] = None
    import_file_name: Optional[str] = None
    # receive_debit_uu_id: Optional[str] = None
    # Foreign references (UUID strings).
    budget_type_uu_id: Optional[str] = None
    company_uu_id: Optional[str] = None
    send_company_uu_id: Optional[str] = None
    customer_id: Optional[str] = None
    customer_uu_id: Optional[str] = None
    send_person_uu_id: Optional[str] = None
    approving_accounting_person_uu_id: Optional[str] = None
    build_parts_uu_id: Optional[str] = None
    build_decision_book_uu_id: Optional[str] = None
+
+
class UpdateAccountRecord(PydanticBaseModel):
    """Request payload for updating an account record.

    Mirrors InsertAccountRecord with every field optional (partial update).
    """

    iban: Optional[str] = None
    bank_date: Optional[str] = None
    currency_value: Optional[float] = None
    bank_balance: Optional[float] = None
    currency: Optional[str] = None
    additional_balance: Optional[float] = None
    channel_branch: Optional[str] = None
    process_name: Optional[str] = None
    process_type: Optional[str] = None
    process_comment: Optional[str] = None
    bank_reference_code: Optional[str] = None

    add_comment_note: Optional[str] = None
    is_receipt_mail_send: Optional[bool] = None
    found_from: Optional[str] = None
    similarity: Optional[float] = None
    remainder_balance: Optional[float] = None
    bank_date_y: Optional[int] = None
    bank_date_m: Optional[int] = None
    bank_date_w: Optional[int] = None
    bank_date_d: Optional[int] = None
    approving_accounting_record: Optional[bool] = None
    accounting_receipt_date: Optional[str] = None
    accounting_receipt_number: Optional[int] = None
    approved_record: Optional[bool] = None
    import_file_name: Optional[str] = None
    receive_debit_uu_id: Optional[str] = None
    budget_type_uu_id: Optional[str] = None
    company_uu_id: Optional[str] = None
    send_company_uu_id: Optional[str] = None
    customer_id: Optional[str] = None
    customer_uu_id: Optional[str] = None
    send_person_uu_id: Optional[str] = None
    approving_accounting_person_uu_id: Optional[str] = None
    build_parts_uu_id: Optional[str] = None
    build_decision_book_uu_id: Optional[str] = None
+
+
class ListAccountRecord(ListOptions):
    """List/query payload for account records; inherits ListOptions unchanged."""

    pass
+
+
class AccountRequestValidators:
    """Namespace exposing the account request validators to the event layer."""

    InsertAccountRecord = InsertAccountRecord
    UpdateAccountRecord = UpdateAccountRecord
    ListAccountRecord = ListAccountRecord
diff --git a/Events/AllEvents/events/address/address.py b/Events/AllEvents/events/address/address.py
new file mode 100644
index 0000000..a5c15e0
--- /dev/null
+++ b/Events/AllEvents/events/address/address.py
@@ -0,0 +1,166 @@
+"""
+Account related API endpoints.
+"""
+
+from typing import Any, Dict
+from fastapi import Request
+
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import EndpointBaseRequestModel, ContextRetrievers
+
+from ApiLayers.Middleware.token_event_middleware import TokenEventMiddleware
+from ApiLayers.ApiValidations.Response.default_response import (
+ EndpointSuccessListResponse,
+)
+
+from .function_handlers import (
+ AddressListFunctions,
+ AddressUpdateFunctions,
+ AddressSearchFunctions,
+ AddressCreateFunctions,
+)
+from .api_events import AddressSuperUserEvents
+
+
+AddressListEventMethods = MethodToEvent(
+ name="AddressListEventMethods",
+ events={
+ AddressSuperUserEvents.AddressListEvents.key: AddressSuperUserEvents.AddressListEvents,
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[TokenEventMiddleware.event_required],
+ url="/list",
+ method="POST",
+    summary="List all addresses by given privileges",
+    description="List all addresses by given privileges",
+)
+
+
+def account_list_event_endpoint(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+ context_retriever = ContextRetrievers(func=account_list_event_endpoint)
+ event_2_catch = AddressListEventMethods.retrieve_event(
+ event_function_code=f"{AddressSuperUserEvents.AddressListEvents.key}"
+ )
+ context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
+ data = event_2_catch.REQUEST_VALIDATOR(**data.data)
+ AddressListFunctions.context_retriever = context_retriever
+ pagination_result = event_2_catch.endpoint_callable(data=data)
+ return EndpointSuccessListResponse(
+ code=event_2_catch.static_key, lang=context_retriever.token.lang
+ ).as_dict(
+ data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
+ )
+
+
+AddressListEventMethods.endpoint_callable = account_list_event_endpoint
+
+
+AddressCreateEventMethods = MethodToEvent(
+ name="AddressCreateEventMethods",
+ events={
+ AddressSuperUserEvents.AddressCreateEvents.key: AddressSuperUserEvents.AddressCreateEvents,
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[TokenEventMiddleware.event_required],
+ url="/create",
+ method="POST",
+    summary="Create Address via given data and privileges",
+    description="Create Address via given data and privileges",
+)
+
+
+def account_create_event_endpoint(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+ context_retriever = ContextRetrievers(func=account_create_event_endpoint)
+ event_2_catch = AddressCreateEventMethods.retrieve_event(
+ event_function_code=f"{AddressSuperUserEvents.AddressCreateEvents.key}"
+ )
+ context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
+ data = event_2_catch.REQUEST_VALIDATOR(**data.data)
+ AddressCreateFunctions.context_retriever = context_retriever
+ pagination_result = event_2_catch.endpoint_callable(data=data)
+ return EndpointSuccessListResponse(
+ code=event_2_catch.static_key, lang=context_retriever.token.lang
+ ).as_dict(
+ data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
+ )
+
+
+AddressCreateEventMethods.endpoint_callable = account_create_event_endpoint
+
+
+AddressUpdateEventMethods = MethodToEvent(
+ name="AddressUpdateEventMethods",
+ events={
+ AddressSuperUserEvents.AddressUpdateEvents.key: AddressSuperUserEvents.AddressUpdateEvents,
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[TokenEventMiddleware.event_required],
+ url="/update",
+ method="POST",
+    summary="Update Address via given data and privileges",
+    description="Update Address via given data and privileges",
+)
+
+
+def account_update_event_endpoint(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+ context_retriever = ContextRetrievers(func=account_update_event_endpoint)
+ event_2_catch = AddressUpdateEventMethods.retrieve_event(
+ event_function_code=f"{AddressSuperUserEvents.AddressUpdateEvents.key}"
+ )
+ context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
+ data = event_2_catch.REQUEST_VALIDATOR(**data.data)
+ AddressUpdateFunctions.context_retriever = context_retriever
+ pagination_result = event_2_catch.endpoint_callable(data=data)
+ return EndpointSuccessListResponse(
+ code=event_2_catch.static_key, lang=context_retriever.token.lang
+ ).as_dict(
+ data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
+ )
+
+
+AddressUpdateEventMethods.endpoint_callable = account_update_event_endpoint
+
+
+AddressSearchEventMethods = MethodToEvent(
+ name="AddressSearchEventMethods",
+ events={
+ AddressSuperUserEvents.AddressSearchEvents.key: AddressSuperUserEvents.AddressSearchEvents,
+ },
+ headers=[],
+ errors=[],
+ decorators_list=[TokenEventMiddleware.event_required],
+ url="/search",
+ method="POST",
+    summary="Search Address via given data and privileges",
+    description="Search Address via given data and privileges",
+)
+
+
+def address_search_event_endpoint(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+    context_retriever = ContextRetrievers(func=address_search_event_endpoint)
+    event_2_catch = AddressSearchEventMethods.retrieve_event(
+ event_function_code=f"{AddressSuperUserEvents.AddressSearchEvents.key}"
+ )
+ context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
+ data = event_2_catch.REQUEST_VALIDATOR(**data.data)
+ AddressSearchFunctions.context_retriever = context_retriever
+ pagination_result = event_2_catch.endpoint_callable(data=data)
+ return EndpointSuccessListResponse(
+ code=event_2_catch.static_key, lang=context_retriever.token.lang
+ ).as_dict(
+ data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
+ )
+
+
+AddressSearchEventMethods.endpoint_callable = address_search_event_endpoint
diff --git a/Events/AllEvents/events/address/api_events.py b/Events/AllEvents/events/address/api_events.py
new file mode 100644
index 0000000..a5b417a
--- /dev/null
+++ b/Events/AllEvents/events/address/api_events.py
@@ -0,0 +1,85 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+# from models import TemplateResponseModels, TemplateRequestModels
+from .function_handlers import AddressSuperUserFunctions
+
+
+# Address List for super_user event
+address_list_super_user_event = Event(
+    name="address_list_super_user_event",
+ key="7ce855ce-db79-4397-b0ec-f5e408ea6447",
+ # request_validator=AccountRequestValidators.ListAccountRecord,
+ # response_validator=SelectResponseAccount,
+ # language_models=[AccountRecords.__language_model__],
+ language_models=[],
+ statics="",
+ description="List address by validation list options and queries.",
+)
+
+
+address_list_super_user_event.endpoint_callable = (
+ AddressSuperUserFunctions.AddressListFunctions.template_example_function_list
+)
+
+# Address Create for super_user event
+address_create_super_user_event = Event(
+    name="address_create_super_user_event",
+ key="d638a6b2-cf2e-4361-99a4-021183b75ec1",
+ # request_validator=AccountRequestValidators.ListAccountRecord,
+ # response_validator=SelectResponseAccount,
+ # language_models=[AccountRecords.__language_model__],
+ language_models=[],
+ statics="",
+ description="Create address by validation list options and queries.",
+)
+
+
+address_create_super_user_event.endpoint_callable = (
+ AddressSuperUserFunctions.AddressCreateFunctions.template_example_function_list
+)
+
+
+# Address Update for super_user event
+address_update_super_user_event = Event(
+    name="address_update_super_user_event",
+ key="455b8bf5-52e4-47fa-9338-102bfcd364e5",
+ # request_validator=AccountRequestValidators.ListAccountRecord,
+ # response_validator=SelectResponseAccount,
+ # language_models=[AccountRecords.__language_model__],
+ language_models=[],
+ statics="",
+ description="Update address by validation list options and queries.",
+)
+
+
+address_update_super_user_event.endpoint_callable = (
+ AddressSuperUserFunctions.AddressUpdateFunctions.template_example_function_list
+)
+
+
+# Address Update for super_user event
+address_search_super_user_event = Event(
+    name="address_search_super_user_event",
+ key="7dd8c122-fae5-4a6d-a439-068312bb4df3",
+ # request_validator=AccountRequestValidators.ListAccountRecord,
+ # response_validator=SelectResponseAccount,
+ # language_models=[AccountRecords.__language_model__],
+ language_models=[],
+ statics="",
+ description="Search address by validation list options and queries.",
+)
+
+
+address_search_super_user_event.endpoint_callable = (
+ AddressSuperUserFunctions.AddressSearchFunctions.template_example_function_list
+)
+
+
+class AddressSuperUserEvents:
+ AddressListEvents = address_list_super_user_event
+ AddressCreateEvents = address_create_super_user_event
+ AddressUpdateEvents = address_update_super_user_event
+ AddressSearchEvents = address_search_super_user_event
diff --git a/Events/AllEvents/events/address/cluster.py b/Events/AllEvents/events/address/cluster.py
new file mode 100644
index 0000000..473d930
--- /dev/null
+++ b/Events/AllEvents/events/address/cluster.py
@@ -0,0 +1,27 @@
+from Events.Engine.abstract_class import CategoryCluster
+
+from .address import (
+ AddressListEventMethods,
+ AddressCreateEventMethods,
+ AddressUpdateEventMethods,
+ AddressSearchEventMethods,
+)
+from .info import address_page_info
+
+
+AddressCluster = CategoryCluster(
+ name="AddressCluster",
+ tags=["Address"],
+ prefix="/address",
+ description="Address Cluster",
+ pageinfo=address_page_info,
+ endpoints={
+ "AddressListEventMethods": AddressListEventMethods,
+ "AddressCreateEventMethods": AddressCreateEventMethods,
+ "AddressUpdateEventMethods": AddressUpdateEventMethods,
+ "AddressSearchEventMethods": AddressSearchEventMethods,
+ },
+ include_in_schema=True,
+ sub_category=[],
+ is_client=True,
+)
diff --git a/Events/AllEvents/events/address/function_handlers.py b/Events/AllEvents/events/address/function_handlers.py
new file mode 100644
index 0000000..d0ceac4
--- /dev/null
+++ b/Events/AllEvents/events/address/function_handlers.py
@@ -0,0 +1,157 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+from ApiLayers.Schemas import AddressNeighborhood
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class AddressListFunctions(BaseRouteModel):
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
+
+
+class AddressCreateFunctions(BaseRouteModel):
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
+
+
+class AddressSearchFunctions(BaseRouteModel):
+ """Event methods for searching addresses.
+
+ This class handles address search functionality including text search
+ and filtering.
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
+
+
+class AddressUpdateFunctions(BaseRouteModel):
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
+
+
+class AddressSuperUserFunctions:
+ AddressListFunctions = AddressListFunctions
+ AddressCreateFunctions = AddressCreateFunctions
+ AddressSearchFunctions = AddressSearchFunctions
+ AddressUpdateFunctions = AddressUpdateFunctions
diff --git a/Events/AllEvents/events/address/info.py b/Events/AllEvents/events/address/info.py
new file mode 100644
index 0000000..6327997
--- /dev/null
+++ b/Events/AllEvents/events/address/info.py
@@ -0,0 +1,77 @@
+from Events.Engine.abstract_class import PageInfo
+from .address import (
+ AddressListEventMethods,
+ AddressCreateEventMethods,
+ AddressUpdateEventMethods,
+ AddressSearchEventMethods,
+)
+
+prefix = "/address"
+cluster_name = "AddressCluster"
+address_page_info = PageInfo(
+ name=f"{cluster_name}",
+ url=f"/dashboard?site={cluster_name}",
+ icon="Building",
+ endpoints={
+ str(
+ f"{prefix}{AddressUpdateEventMethods.URL}"
+ ): AddressUpdateEventMethods.retrieve_all_event_keys(),
+ str(
+ f"{prefix}{AddressCreateEventMethods.URL}"
+ ): AddressCreateEventMethods.retrieve_all_event_keys(),
+ str(
+ f"{prefix}{AddressSearchEventMethods.URL}"
+ ): AddressSearchEventMethods.retrieve_all_event_keys(),
+ str(
+ f"{prefix}{AddressListEventMethods.URL}"
+ ): AddressListEventMethods.retrieve_all_event_keys(),
+ },
+ language_models={
+ "page_info": {
+ "key": "pair", # key: pair, value: dict
+ "description": {
+                "en": "Address Records for reaching user all types address information",
+                "tr": "Kullanıcı tüm adres bilgilerine ulaşmak için Adres Kayıtları",
+ },
+ },
+ f"{prefix}{AddressUpdateEventMethods.URL}": {
+ "component": "Button",
+ "site_url": f"/update?site={cluster_name}",
+ "page_info": {
+ "text": {
+                    "en": "Update Address Records",
+                    "tr": "Adres Kaydı Güncelle",
+ },
+ },
+ },
+ f"{prefix}{AddressCreateEventMethods.URL}": {
+ "component": "Button",
+ "site_url": f"/create?site={cluster_name}",
+ "page_info": {
+ "text": {
+                    "en": "Create Address Records",
+                    "tr": "Adres Kaydı Oluştur",
+ },
+ },
+ },
+ f"{prefix}{AddressSearchEventMethods.URL}": {
+ "component": "Search",
+ "page_info": {
+ "text": {
+                    "en": "Search Address Records",
+                    "tr": "Adres Kayıtlarını Ara",
+ },
+ },
+ },
+ f"{prefix}{AddressListEventMethods.URL}": {
+ "component": "Table",
+ "fetch_url": AddressListEventMethods.URL,
+ "page_info": {
+ "description": {
+                    "en": "Address Records for reaching user all types address information",
+                    "tr": "Kullanıcı tüm adres bilgilerine ulaşmak için Adres Kayıtları",
+ },
+ },
+ },
+ },
+)
diff --git a/Events/AllEvents/events/address/models.py b/Events/AllEvents/events/address/models.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/building/build_area/api_events.py b/Events/AllEvents/events/building/build_area/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/building/build_area/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from .models import TemplateResponseModels, TemplateRequestModels
+from .function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/building/build_area/build_area.py b/Events/AllEvents/events/building/build_area/build_area.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/building/build_area/cluster.py b/Events/AllEvents/events/building/build_area/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/building/build_area/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from .info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/building/build_area/function_handlers.py b/Events/AllEvents/events/building/build_area/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_area/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/building/build_area/info.py b/Events/AllEvents/events/building/build_area/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_area/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/building/build_parts/api_events.py b/Events/AllEvents/events/building/build_parts/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/building/build_parts/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from .models import TemplateResponseModels, TemplateRequestModels
+from .function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/building/build_parts/build_parts.py b/Events/AllEvents/events/building/build_parts/build_parts.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/building/build_parts/cluster.py b/Events/AllEvents/events/building/build_parts/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/building/build_parts/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from .info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/building/build_parts/function_handlers.py b/Events/AllEvents/events/building/build_parts/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_parts/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/building/build_parts/info.py b/Events/AllEvents/events/building/build_parts/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_parts/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/building/build_sites/api_events.py b/Events/AllEvents/events/building/build_sites/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/building/build_sites/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from .models import TemplateResponseModels, TemplateRequestModels
+from .function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/building/build_sites/build_sites.py b/Events/AllEvents/events/building/build_sites/build_sites.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/building/build_sites/cluster.py b/Events/AllEvents/events/building/build_sites/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/building/build_sites/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from .info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/building/build_sites/function_handlers.py b/Events/AllEvents/events/building/build_sites/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_sites/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/building/build_sites/info.py b/Events/AllEvents/events/building/build_sites/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_sites/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/building/build_types/api_events.py b/Events/AllEvents/events/building/build_types/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/building/build_types/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
# Auth Login
# NOTE(review): name/key/description appear copy-pasted from the
# authentication login event -- confirm each event gets a unique key.
template_event = Event(
    name="authentication_login_super_user_event",
    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
    request_validator=TemplateRequestModels.TemplateRequestModelX,
    language_models=[LoginRequestLanguageModel],
    response_validation_static="LOGIN_SUCCESS",
    description="Login super user",
)


# Fixed: assign the classmethod itself instead of *calling* it at import
# time.  The original referenced `TemplateFunctions.template_example_function()`,
# which (a) does not exist -- the defined handler is
# `template_example_function_list` (AttributeError at import) -- and
# (b) would have stored the call's return value rather than a callable
# (cf. building/api_events.py, which assigns without calling).
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/building/build_types/build_types.py b/Events/AllEvents/events/building/build_types/build_types.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/building/build_types/cluster.py b/Events/AllEvents/events/building/build_types/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/building/build_types/cluster.py
@@ -0,0 +1,14 @@
from Events.Engine.abstract_class import CategoryCluster

# Fixed: a bare `from info import ...` only resolves if this package
# directory happens to be on sys.path; `info` is a sibling module of this
# package, so use the explicit relative import (matching
# building/cluster.py's `from .info import ...`).
from .info import template_page_info


# Router/category registration for this (template) feature group.
TemplateCluster = CategoryCluster(
    name="TemplateCluster",
    tags=["template"],
    prefix="/template",
    description="Template cluster",
    pageinfo=template_page_info,
    endpoints={},
    include_in_schema=True,
    sub_category=[],
)
diff --git a/Events/AllEvents/events/building/build_types/function_handlers.py b/Events/AllEvents/events/building/build_types/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_types/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
class Handlers:
    """Placeholder namespace for authentication-related handler hooks."""

    # This hook requires no authentication context.
    @classmethod
    def handle_function(cls, **kwargs):
        """No-op handler; accepts arbitrary keyword arguments, returns None."""
        return None
+
+
class TemplateFunctions(BaseRouteModel):
    """Template handler group for TokenMiddleware.event_required endpoints.

    Response envelope produced by the paginated list helpers
    (STATIC_MESSAGE and LANG are retrieved from redis):

    {
        "completed": true,
        "message": STATIC_MESSAGE,
        "lang": LANG,
        "pagination": {
            "size": 10,
            "page": 2,
            "allCount": 28366,
            "totalCount": 18,
            "totalPages": 2,
            "pageCount": 8,
            "orderField": ["type_code", "neighborhood_name"],
            "orderType": ["asc", "desc"]
        },
        "data": [
            {
                "created_at": "2025-01-12 09:39:48 +00:00",
                "active": true,
                "expiry_starts": "2025-01-12 09:39:48 +00:00",
                "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
                "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
            },
            ...
        ]
    }
    """

    @classmethod
    def template_example_function_list(
        cls, data: Optional[Union[dict, ListOptions]]
    ) -> PaginationResult:
        """Return a paginated AddressNeighborhood listing filtered by caller role.

        Args:
            data: Raw list options (dict or ListOptions) forwarded by the
                endpoint wrapper; may be None.

        Returns:
            PaginationResult carrying the page of records plus pagination
            metadata.
        """
        # Function-local import -- presumably to avoid a circular import at
        # module load time; TODO confirm.
        from ApiLayers.Schemas import AddressNeighborhood

        list_options_base = ListOptionsBase(
            table=AddressNeighborhood,
            list_options=data,
            model_query=None,
        )
        db_session, query_options = list_options_base.init_list_options()
        # Role-based pre-filter stored on the *class* attribute `pre_query`;
        # NOTE(review): this mutates shared class state -- confirm handler
        # execution is serialized, otherwise concurrent requests can race.
        if cls.context_retriever.token.is_occupant:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("10"),
                db=db_session,
            ).query
        elif cls.context_retriever.token.is_employee:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("9"),
                db=db_session,
            ).query
        records = AddressNeighborhood.filter_all(
            *query_options.convert(), db=db_session
        )
        return list_options_base.paginated_result(
            records=records,
            response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
        )
diff --git a/Events/AllEvents/events/building/build_types/info.py b/Events/AllEvents/events/building/build_types/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/building/build_types/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
# Navigation metadata for this (template) page; every display field is a
# placeholder to be filled in per feature.
template_page_info = PageInfo(
    url="",
    parent="",
    icon="",
    name="template",
    title={"en": "template"},
    description={"en": "template"},
)
diff --git a/Events/AllEvents/events/building/building/api_events.py b/Events/AllEvents/events/building/building/api_events.py
new file mode 100644
index 0000000..132a7df
--- /dev/null
+++ b/Events/AllEvents/events/building/building/api_events.py
@@ -0,0 +1,25 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+# from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
# Auth Login
# NOTE(review): the name/description below still read "login super user"
# although this event drives the building list endpoint, and the key reuses
# the same UUID as the template events in sibling packages -- confirm unique,
# descriptive values before registration.
building_event = Event(
    name="authentication_login_super_user_event",
    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
    # request_validator=TemplateRequestModels.TemplateRequestModelX,
    language_models=[LoginRequestLanguageModel],
    statics="LOGIN_SUCCESS",  # NOTE(review): template files pass `response_validation_static` -- confirm kwarg name
    description="Login super user",
)


# Bind the paginated list handler; assigned (not called) so the framework
# invokes it per request.
building_event.endpoint_callable = TemplateFunctions.template_example_function_list


class BuildingSuperUserEvents:
    # Registry of building events exposed to the endpoint layer.
    BuildingEvent = building_event
diff --git a/Events/AllEvents/events/building/building/building.py b/Events/AllEvents/events/building/building/building.py
new file mode 100644
index 0000000..a02ab24
--- /dev/null
+++ b/Events/AllEvents/events/building/building/building.py
@@ -0,0 +1,58 @@
+"""
+Account related API endpoints.
+"""
+
+from typing import Any, Dict
+from fastapi import Request
+
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import EndpointBaseRequestModel, ContextRetrievers
+
+from ApiLayers.Middleware.token_event_middleware import TokenEventMiddleware
+from ApiLayers.ApiValidations.Response.default_response import (
+ EndpointSuccessListResponse,
+)
+
+# from .function_handlers import (
+# AddressListFunctions,
+# AddressUpdateFunctions,
+# AddressSearchFunctions,
+# AddressCreateFunctions,
+# )
+from .api_events import BuildingSuperUserEvents
+
+
# Route/method wrapper mapping POST /list onto the building list event,
# guarded by token validation.
BuildingListEventMethods = MethodToEvent(
    name="BuildingListEventMethods",
    events={
        BuildingSuperUserEvents.BuildingEvent.key: BuildingSuperUserEvents.BuildingEvent,
    },
    headers=[],
    errors=[],
    decorators_list=[TokenEventMiddleware.event_required],
    url="/list",
    method="POST",
    # Fixed OpenAPI text: this endpoint lists buildings (the original said
    # "accounts", copy-pasted), and "previligous" -> "privileges".
    summary="List all buildings by given privileges",
    description="List all buildings by given privileges",
)
+
+
def account_list_event_endpoint(
    request: Request, data: EndpointBaseRequestModel
) -> Dict[str, Any]:
    """Endpoint for the building list event.

    Validates the raw payload against the event's request validator,
    dispatches the bound handler, and wraps the paginated result in the
    standard success envelope.

    Args:
        request: Incoming FastAPI request (unused directly; required by the
            framework's endpoint signature).
        data: Raw endpoint payload; its `.data` dict is re-validated with the
            event's REQUEST_VALIDATOR.

    Returns:
        Serialized success response dict with `data` and `pagination` keys.
    """
    # Local import mirrors api_events' wiring while avoiding an import cycle
    # at module load.
    from .function_handlers import TemplateFunctions

    context_retriever = ContextRetrievers(func=account_list_event_endpoint)
    event_2_catch = BuildingListEventMethods.retrieve_event(
        event_function_code=f"{BuildingSuperUserEvents.BuildingEvent.key}"
    )
    context_retriever.RESPONSE_VALIDATOR = event_2_catch.RESPONSE_VALIDATOR
    data = event_2_catch.REQUEST_VALIDATOR(**data.data)
    # Bug fix: the original assigned to `BuildingListFunctions`, a name
    # defined nowhere in this module (NameError on every request).  The
    # handler class bound to the event is TemplateFunctions (see
    # api_events.py), whose classmethods read `cls.context_retriever`.
    TemplateFunctions.context_retriever = context_retriever
    pagination_result = event_2_catch.endpoint_callable(data=data)
    return EndpointSuccessListResponse(
        code=event_2_catch.static_key, lang=context_retriever.token.lang
    ).as_dict(
        data=pagination_result.data, pagination=pagination_result.pagination.as_dict()
    )


BuildingListEventMethods.endpoint_callable = account_list_event_endpoint
diff --git a/Events/AllEvents/events/building/building/cluster.py b/Events/AllEvents/events/building/building/cluster.py
new file mode 100644
index 0000000..bbb567b
--- /dev/null
+++ b/Events/AllEvents/events/building/building/cluster.py
@@ -0,0 +1,15 @@
+from Events.Engine.abstract_class import CategoryCluster
+
+from .info import building_page_info
+
+
# Router/category registration for the Building feature group.
BuildingCluster = CategoryCluster(
    name="BuildingCluster",
    prefix="/building",
    tags=["Building"],
    description="Building Cluster",
    pageinfo=building_page_info,
    endpoints={},
    sub_category=[],
    include_in_schema=True,
)
diff --git a/Events/AllEvents/events/building/building/function_handlers.py b/Events/AllEvents/events/building/building/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/building/building/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
class Handlers:
    """Placeholder namespace for authentication-related handler hooks."""

    # This hook requires no authentication context.
    @classmethod
    def handle_function(cls, **kwargs):
        """No-op handler; accepts arbitrary keyword arguments, returns None."""
        return None
+
+
class TemplateFunctions(BaseRouteModel):
    """Template handler group for TokenMiddleware.event_required endpoints.

    Response envelope produced by the paginated list helpers
    (STATIC_MESSAGE and LANG are retrieved from redis):

    {
        "completed": true,
        "message": STATIC_MESSAGE,
        "lang": LANG,
        "pagination": {
            "size": 10,
            "page": 2,
            "allCount": 28366,
            "totalCount": 18,
            "totalPages": 2,
            "pageCount": 8,
            "orderField": ["type_code", "neighborhood_name"],
            "orderType": ["asc", "desc"]
        },
        "data": [
            {
                "created_at": "2025-01-12 09:39:48 +00:00",
                "active": true,
                "expiry_starts": "2025-01-12 09:39:48 +00:00",
                "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
                "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
            },
            ...
        ]
    }
    """

    @classmethod
    def template_example_function_list(
        cls, data: Optional[Union[dict, ListOptions]]
    ) -> PaginationResult:
        """Return a paginated AddressNeighborhood listing filtered by caller role.

        Args:
            data: Raw list options (dict or ListOptions) forwarded by the
                endpoint wrapper; may be None.

        Returns:
            PaginationResult carrying the page of records plus pagination
            metadata.
        """
        # Function-local import -- presumably to avoid a circular import at
        # module load time; TODO confirm.
        from ApiLayers.Schemas import AddressNeighborhood

        list_options_base = ListOptionsBase(
            table=AddressNeighborhood,
            list_options=data,
            model_query=None,
        )
        db_session, query_options = list_options_base.init_list_options()
        # Role-based pre-filter stored on the *class* attribute `pre_query`;
        # NOTE(review): this mutates shared class state -- confirm handler
        # execution is serialized, otherwise concurrent requests can race.
        if cls.context_retriever.token.is_occupant:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("10"),
                db=db_session,
            ).query
        elif cls.context_retriever.token.is_employee:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("9"),
                db=db_session,
            ).query
        records = AddressNeighborhood.filter_all(
            *query_options.convert(), db=db_session
        )
        return list_options_base.paginated_result(
            records=records,
            response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
        )
diff --git a/Events/AllEvents/events/building/building/info.py b/Events/AllEvents/events/building/building/info.py
new file mode 100644
index 0000000..dc019bb
--- /dev/null
+++ b/Events/AllEvents/events/building/building/info.py
@@ -0,0 +1,42 @@
+from Events.Engine.abstract_class import PageInfo
+from Events.Engine.abstract_class import PageComponent
+
+
# Sub-page component: create form for buildings.
create_building = PageComponent(
    name="CreateBuilding",
    url="/create",
    language_models={
        "en": "Create Building",
        "tr": "Bina Oluştur",
    },
)

# Sub-page component: update form for buildings.
update_building = PageComponent(
    name="UpdateBuilding",
    url="/update",
    language_models={
        "en": "Update Building",
        "tr": "Bina Güncelle",
    },
)

# Sub-page component: listing/dashboard view for buildings.
list_building = PageComponent(
    name="ListBuilding",
    url="/dashboard",
    language_models={
        "en": "List Building",
        "tr": "Bina Listele",
    },
)


# Navigation entry for the Building cluster.
# NOTE(review): the language_models dicts are empty and the url points at
# `site=AddressCluster` -- looks copy-pasted from the Address cluster;
# confirm it should reference BuildingCluster.
building_page_info = PageInfo(
    name="BuildingCluster",
    language_models={
        "en": {},
        "tr": {},
    },
    icon="Building",
    sub_components=[create_building, update_building, list_building],
    url="/dashboard?site=AddressCluster",
)
diff --git a/Events/AllEvents/events/building/building/models.py b/Events/AllEvents/events/building/building/models.py
new file mode 100644
index 0000000..ec9c70e
--- /dev/null
+++ b/Events/AllEvents/events/building/building/models.py
@@ -0,0 +1,325 @@
+"""
+ request models.
+"""
+
+from typing import TYPE_CHECKING, Dict, Any, Literal, Optional, TypedDict, Union
+from pydantic import BaseModel, Field, model_validator, RootModel, ConfigDict
+from ApiEvents.base_request_model import BaseRequestModel, DictRequestModel
+from ApiValidations.Custom.token_objects import EmployeeTokenObject, OccupantTokenObject
+from ApiValidations.Request.base_validations import ListOptions
+from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from Schemas.identity.identity import (
+ AddressPostcode,
+ Addresses,
+ RelationshipEmployee2PostCode,
+)
+
+
+if TYPE_CHECKING:
+ from fastapi import Request
+
+
class AddressListEventMethods(MethodToEvent):
    """Event methods for listing Address records.

    NOTE(review): `MethodToEvent` and `ListAddressResponse` are not imported
    in this module -- the class body raises NameError at import time as
    written; confirm the intended imports (building.py takes MethodToEvent
    from Events.Engine.abstract_class).
    """

    event_type = "SELECT"
    event_description = "List Address records"
    event_category = "Address"

    # event uuid -> handler method name on this class
    __event_keys__ = {
        "9c251d7d-da70-4d63-a72c-e69c26270442": "address_list_super_user",
        "52afe375-dd95-4f4b-aaa2-4ec61bc6de52": "address_list_employee",
    }
    # event uuid -> response validation model
    __event_validation__ = {
        "9c251d7d-da70-4d63-a72c-e69c26270442": ListAddressResponse,
        "52afe375-dd95-4f4b-aaa2-4ec61bc6de52": ListAddressResponse,
    }

    @classmethod
    def address_list_super_user(
        cls,
        list_options: ListOptions,
        token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """List addresses reachable through the caller company's post codes.

        Raises:
            HTTPExceptionApi: 404 when the company has no registered post
                codes, or no streets can be derived from them.

        NOTE(review): currently returns None -- the response construction is
        commented out below, so `records` is computed and then discarded.
        """
        # NOTE(review): this session is never closed -- confirm new_session()
        # is managed elsewhere (context manager / request teardown).
        db = RelationshipEmployee2PostCode.new_session()
        post_code_list = RelationshipEmployee2PostCode.filter_all(
            RelationshipEmployee2PostCode.company_id
            == token_dict.selected_company.company_id,
            db=db,
        ).data
        post_code_id_list = [post_code.member_id for post_code in post_code_list]
        if not post_code_id_list:
            raise HTTPExceptionApi(
                status_code=404,
                detail="User has no post code registered. User can not list addresses.",
            )
        # Resolve the post codes to their street ids (select_only yields
        # single-column row tuples, hence street_id[0]).
        get_street_ids = [
            street_id[0]
            for street_id in AddressPostcode.select_only(
                AddressPostcode.id.in_(post_code_id_list),
                select_args=[AddressPostcode.street_id],
                order_by=AddressPostcode.street_id.desc(),
            ).data
        ]
        if not get_street_ids:
            raise HTTPExceptionApi(
                status_code=404,
                detail="User has no street registered. User can not list addresses.",
            )
        # Class-level pre-filter restricting the main query to those streets.
        Addresses.pre_query = Addresses.filter_all(
            Addresses.street_id.in_(get_street_ids),
        ).query
        Addresses.filter_attr = list_options
        records = Addresses.filter_all().data
        return
        # return AlchemyJsonResponse(
        #     completed=True, message="List Address records", result=records
        # )

    @classmethod
    def address_list_employee(
        cls,
        list_options: ListOptions,
        token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """List addresses for an employee token.

        NOTE(review): `get_street_ids` is undefined in this scope -- calling
        this method raises NameError.  It presumably needs the same street
        derivation as address_list_super_user; confirm intent.  Also unlike
        the super-user variant, `.query` is not taken from the filter_all
        result assigned to pre_query, and the method returns None (response
        construction commented out).
        """
        Addresses.filter_attr = list_options
        Addresses.pre_query = Addresses.filter_all(
            Addresses.street_id.in_(get_street_ids),
        )
        records = Addresses.filter_all().data
        return
        # return AlchemyJsonResponse(
        #     completed=True, message="List Address records", result=records
        # )
+
+
class AddressCreateEventMethods(MethodToEvent):
    """Event methods for creating Address records.

    NOTE(review): `MethodToEvent`, `InsertAddress` and `AlchemyJsonResponse`
    are not imported in this module -- confirm the intended imports.
    """

    event_type = "CREATE"
    event_description = ""
    event_category = ""

    # event uuid -> handler method name on this class
    __event_keys__ = {
        "ffdc445f-da10-4ce4-9531-d2bdb9a198ae": "create_address",
    }
    # event uuid -> validation model
    __event_validation__ = {
        "ffdc445f-da10-4ce4-9531-d2bdb9a198ae": InsertAddress,
    }

    @classmethod
    def create_address(
        cls,
        data: InsertAddress,
        token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """Create (or reuse) an address attached to an existing post code.

        Args:
            data: Validated insert payload; must carry `post_code_uu_id`.
            token_dict: Caller token (employee or occupant).

        Raises:
            HTTPExceptionApi: 404 when the referenced post code is missing.
        """
        post_code = AddressPostcode.filter_one(
            AddressPostcode.uu_id == data.post_code_uu_id,
        ).data
        if not post_code:
            raise HTTPExceptionApi(
                status_code=404,
                detail="Post code not found. User can not create address without post code.",
            )

        # Translate the post-code reference into the street foreign keys the
        # Addresses table actually stores.
        data_dict = data.excluded_dump()
        data_dict["street_id"] = post_code.street_id
        data_dict["street_uu_id"] = str(post_code.street_uu_id)
        del data_dict["post_code_uu_id"]
        address = Addresses.find_or_create(**data_dict)
        address.save()
        # NOTE(review): two separate saves -- presumably the record must exist
        # before is_confirmed is set; confirm whether one save would suffice.
        address.update(is_confirmed=True)
        address.save()
        return AlchemyJsonResponse(
            completed=True,
            message="Address created successfully",
            result=address.get_dict(),
        )
+
+
class AddressSearchEventMethods(MethodToEvent):
    """Event methods for searching addresses.

    This class handles address search functionality including text search
    and filtering.

    NOTE(review): several names used below are not imported in this module
    (`MethodToEvent`, `SearchAddress`, `List`, `perf_counter`, `status`,
    `AddressStreet`, `databases`, `AlchemyJsonResponse`, `JSONResponse`) --
    the file cannot import/run as-is; confirm the intended imports.
    """

    event_type = "SEARCH"
    event_description = "Search for addresses using text and filters"
    event_category = "Address"

    # event uuid -> handler method name on this class
    __event_keys__ = {
        "e0ac1269-e9a7-4806-9962-219ac224b0d0": "search_address",
    }
    # event uuid -> request validation model
    __event_validation__ = {
        "e0ac1269-e9a7-4806-9962-219ac224b0d0": SearchAddress,
    }

    @classmethod
    def _build_order_clause(
        cls, filter_list: Dict[str, Any], schemas: List[str], filter_table: Any
    ) -> Any:
        """Build the ORDER BY clause for the query.

        Args:
            filter_list: Dictionary of filter options
            schemas: List of available schema fields
            filter_table: SQLAlchemy table to query

        Returns:
            SQLAlchemy order_by clause

        Note:
            Mutates ``filter_list["order_field"]`` in place; rebinding of
            ``filter_table`` is local and not visible to the caller.
        """
        # Default to ordering by UUID if field not in schema
        if filter_list.get("order_field") not in schemas:
            filter_list["order_field"] = "uu_id"
        else:
            # Extract table and field from order field
            # (assumes "Table.field" form -- TODO confirm; a bare field name
            # here would raise ValueError on unpacking)
            table_name, field_name = str(filter_list.get("order_field")).split(".")
            filter_table = getattr(databases.sql_models, table_name)
            filter_list["order_field"] = field_name

        # Build order clause: any order_type starting with "d" means desc
        field = getattr(filter_table, filter_list.get("order_field"))
        return (
            field.desc()
            if str(filter_list.get("order_type"))[0] == "d"
            else field.asc()
        )

    @classmethod
    def _format_record(cls, record: Any, schemas: List[str]) -> Dict[str, str]:
        """Format a database record into a dictionary.

        Args:
            record: Database record to format
            schemas: List of schema fields

        Returns:
            Formatted record dictionary (all values stringified)
        """
        result = {}
        for index, schema in enumerate(schemas):
            value = str(record[index])
            # Special handling for UUID fields
            # NOTE(review): this branch is a no-op -- `value` is already a
            # str, and the substring test inspects the *value*, not the field
            # name; presumably `"uu_id" in schema` was intended.
            if "uu_id" in value:
                value = str(value)
            result[schema] = value
        return result

    @classmethod
    def search_address(
        cls,
        data: SearchAddress,
        token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
    ) -> JSONResponse:
        """Search for addresses using text search and filters.

        Args:
            data: Search parameters including text and filters
            token_dict: Authentication token

        Returns:
            JSON response with search results

        Raises:
            HTTPExceptionApi: If search fails
        """
        try:
            # Start performance measurement
            start_time = perf_counter()

            # Get initial query
            search_result = AddressStreet.search_address_text(search_text=data.search)
            if not search_result:
                raise HTTPExceptionApi(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="No addresses found matching search criteria",
                )

            query = search_result.get("query")
            schemas = search_result.get("schema")

            # Apply filters
            filter_list = data.list_options.dump()
            filter_table = AddressStreet

            # Build and apply order clause
            order = cls._build_order_clause(filter_list, schemas, filter_table)

            # Apply pagination
            page_size = int(filter_list.get("size"))
            offset = (int(filter_list.get("page")) - 1) * page_size

            # Execute query
            query = (
                query.order_by(order)
                .limit(page_size)
                .offset(offset)
                .populate_existing()
            )
            records = list(query.all())

            # Format results
            results = [cls._format_record(record, schemas) for record in records]

            # Log performance
            # NOTE(review): print-based logging -- prefer the project logger.
            duration = perf_counter() - start_time
            print(f"Address search completed in {duration:.3f}s")

            return AlchemyJsonResponse(
                completed=True, message="Address search results", result=results
            )

        except HTTPExceptionApi as e:
            # Re-raise HTTP exceptions
            raise e
        except Exception as e:
            # Log and wrap other errors
            print(f"Address search error: {str(e)}")
            raise HTTPExceptionApi(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to search addresses",
            ) from e
+
+
class AddressUpdateEventMethods(MethodToEvent):
    """Event methods for updating Address records.

    NOTE(review): `MethodToEvent`, `UpdateAddress` and `AlchemyJsonResponse`
    are not imported in this module -- confirm the intended imports.
    """

    event_type = "UPDATE"
    event_description = ""
    event_category = ""

    # event uuid -> handler method name on this class
    __event_keys__ = {
        "1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": "update_address",
    }
    # event uuid -> validation model
    __event_validation__ = {
        "1f9c3a9c-e5bd-4dcd-9b9a-3742d7e03a27": UpdateAddress,
    }

    @classmethod
    def update_address(
        cls,
        address_uu_id: str,
        data: UpdateAddress,
        token_dict: Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """Update an address (employee tokens only).

        Raises:
            HTTPExceptionApi: 404 when the address does not exist,
                403 when an occupant token attempts the update.

        NOTE(review): if token_dict is neither EmployeeTokenObject nor
        OccupantTokenObject the method silently returns None -- confirm
        whether that case is reachable and should raise instead.
        """
        if isinstance(token_dict, EmployeeTokenObject):
            address = Addresses.filter_one(
                Addresses.uu_id == address_uu_id,
            ).data
            if not address:
                raise HTTPExceptionApi(
                    status_code=404,
                    detail=f"Address not found. User can not update with given address uuid : {address_uu_id}",
                )

            data_dict = data.excluded_dump()
            updated_address = address.update(**data_dict)
            updated_address.save()
            return AlchemyJsonResponse(
                completed=True,
                message="Address updated successfully",
                result=updated_address.get_dict(),
            )
        elif isinstance(token_dict, OccupantTokenObject):
            raise HTTPExceptionApi(
                status_code=403,
                detail="Occupant can not update address.",
            )
diff --git a/Events/AllEvents/events/building/living_spaces/api_events.py b/Events/AllEvents/events/building/living_spaces/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/building/living_spaces/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
# Auth Login
# NOTE(review): name/key/description appear copy-pasted from the
# authentication login event -- confirm each event gets a unique key.
template_event = Event(
    name="authentication_login_super_user_event",
    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
    request_validator=TemplateRequestModels.TemplateRequestModelX,
    language_models=[LoginRequestLanguageModel],
    response_validation_static="LOGIN_SUCCESS",
    description="Login super user",
)


# Fixed: assign the classmethod itself instead of *calling* it at import
# time.  The original referenced `TemplateFunctions.template_example_function()`,
# which (a) does not exist -- the defined handler is
# `template_example_function_list` (AttributeError at import) -- and
# (b) would have stored the call's return value rather than a callable
# (cf. building/api_events.py, which assigns without calling).
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/building/living_spaces/cluster.py b/Events/AllEvents/events/building/living_spaces/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/building/living_spaces/cluster.py
@@ -0,0 +1,14 @@
from Events.Engine.abstract_class import CategoryCluster

# Fixed: a bare `from info import ...` only resolves if this package
# directory happens to be on sys.path; `info` is a sibling module of this
# package, so use the explicit relative import (matching
# building/cluster.py's `from .info import ...`).
from .info import template_page_info


# Router/category registration for this (template) feature group.
TemplateCluster = CategoryCluster(
    name="TemplateCluster",
    tags=["template"],
    prefix="/template",
    description="Template cluster",
    pageinfo=template_page_info,
    endpoints={},
    include_in_schema=True,
    sub_category=[],
)
diff --git a/Events/AllEvents/events/building/living_spaces/function_handlers.py b/Events/AllEvents/events/building/living_spaces/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/building/living_spaces/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
class Handlers:
    """Placeholder namespace for authentication-related handler hooks."""

    # This hook requires no authentication context.
    @classmethod
    def handle_function(cls, **kwargs):
        """No-op handler; accepts arbitrary keyword arguments, returns None."""
        return None
+
+
class TemplateFunctions(BaseRouteModel):
    """Template handler group for TokenMiddleware.event_required endpoints.

    Response envelope produced by the paginated list helpers
    (STATIC_MESSAGE and LANG are retrieved from redis):

    {
        "completed": true,
        "message": STATIC_MESSAGE,
        "lang": LANG,
        "pagination": {
            "size": 10,
            "page": 2,
            "allCount": 28366,
            "totalCount": 18,
            "totalPages": 2,
            "pageCount": 8,
            "orderField": ["type_code", "neighborhood_name"],
            "orderType": ["asc", "desc"]
        },
        "data": [
            {
                "created_at": "2025-01-12 09:39:48 +00:00",
                "active": true,
                "expiry_starts": "2025-01-12 09:39:48 +00:00",
                "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
                "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
            },
            ...
        ]
    }
    """

    @classmethod
    def template_example_function_list(
        cls, data: Optional[Union[dict, ListOptions]]
    ) -> PaginationResult:
        """Return a paginated AddressNeighborhood listing filtered by caller role.

        Args:
            data: Raw list options (dict or ListOptions) forwarded by the
                endpoint wrapper; may be None.

        Returns:
            PaginationResult carrying the page of records plus pagination
            metadata.
        """
        # Function-local import -- presumably to avoid a circular import at
        # module load time; TODO confirm.
        from ApiLayers.Schemas import AddressNeighborhood

        list_options_base = ListOptionsBase(
            table=AddressNeighborhood,
            list_options=data,
            model_query=None,
        )
        db_session, query_options = list_options_base.init_list_options()
        # Role-based pre-filter stored on the *class* attribute `pre_query`;
        # NOTE(review): this mutates shared class state -- confirm handler
        # execution is serialized, otherwise concurrent requests can race.
        if cls.context_retriever.token.is_occupant:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("10"),
                db=db_session,
            ).query
        elif cls.context_retriever.token.is_employee:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("9"),
                db=db_session,
            ).query
        records = AddressNeighborhood.filter_all(
            *query_options.convert(), db=db_session
        )
        return list_options_base.paginated_result(
            records=records,
            response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
        )
diff --git a/Events/AllEvents/events/building/living_spaces/info.py b/Events/AllEvents/events/building/living_spaces/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/building/living_spaces/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
# Navigation metadata for this (template) page; every display field is a
# placeholder to be filled in per feature.
template_page_info = PageInfo(
    url="",
    parent="",
    icon="",
    name="template",
    title={"en": "template"},
    description={"en": "template"},
)
diff --git a/Events/AllEvents/events/building/living_spaces/living_spaces.py b/Events/AllEvents/events/building/living_spaces/living_spaces.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/company/company/api_events.py b/Events/AllEvents/events/company/company/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/company/company/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
# Auth Login
# NOTE(review): name/key/description appear copy-pasted from the
# authentication login event -- confirm each event gets a unique key.
template_event = Event(
    name="authentication_login_super_user_event",
    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
    request_validator=TemplateRequestModels.TemplateRequestModelX,
    language_models=[LoginRequestLanguageModel],
    response_validation_static="LOGIN_SUCCESS",
    description="Login super user",
)


# Fixed: assign the classmethod itself instead of *calling* it at import
# time.  The original referenced `TemplateFunctions.template_example_function()`,
# which (a) does not exist -- the defined handler is
# `template_example_function_list` (AttributeError at import) -- and
# (b) would have stored the call's return value rather than a callable
# (cf. building/api_events.py, which assigns without calling).
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/company/company/cluster.py b/Events/AllEvents/events/company/company/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/company/company/cluster.py
@@ -0,0 +1,14 @@
from Events.Engine.abstract_class import CategoryCluster

# Fixed: a bare `from info import ...` only resolves if this package
# directory happens to be on sys.path; `info` is a sibling module of this
# package, so use the explicit relative import (matching
# building/cluster.py's `from .info import ...`).
from .info import template_page_info


# Router/category registration for this (template) feature group.
TemplateCluster = CategoryCluster(
    name="TemplateCluster",
    tags=["template"],
    prefix="/template",
    description="Template cluster",
    pageinfo=template_page_info,
    endpoints={},
    include_in_schema=True,
    sub_category=[],
)
diff --git a/Events/AllEvents/events/company/company/company.py b/Events/AllEvents/events/company/company/company.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/company/company/function_handlers.py b/Events/AllEvents/events/company/company/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/company/company/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
class Handlers:
    """Placeholder namespace for authentication-related handler hooks."""

    # This hook requires no authentication context.
    @classmethod
    def handle_function(cls, **kwargs):
        """No-op handler; accepts arbitrary keyword arguments, returns None."""
        return None
+
+
class TemplateFunctions(BaseRouteModel):
    """Template handler group for TokenMiddleware.event_required endpoints.

    Response envelope produced by the paginated list helpers
    (STATIC_MESSAGE and LANG are retrieved from redis):

    {
        "completed": true,
        "message": STATIC_MESSAGE,
        "lang": LANG,
        "pagination": {
            "size": 10,
            "page": 2,
            "allCount": 28366,
            "totalCount": 18,
            "totalPages": 2,
            "pageCount": 8,
            "orderField": ["type_code", "neighborhood_name"],
            "orderType": ["asc", "desc"]
        },
        "data": [
            {
                "created_at": "2025-01-12 09:39:48 +00:00",
                "active": true,
                "expiry_starts": "2025-01-12 09:39:48 +00:00",
                "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
                "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
            },
            ...
        ]
    }
    """

    @classmethod
    def template_example_function_list(
        cls, data: Optional[Union[dict, ListOptions]]
    ) -> PaginationResult:
        """Return a paginated AddressNeighborhood listing filtered by caller role.

        Args:
            data: Raw list options (dict or ListOptions) forwarded by the
                endpoint wrapper; may be None.

        Returns:
            PaginationResult carrying the page of records plus pagination
            metadata.
        """
        # Function-local import -- presumably to avoid a circular import at
        # module load time; TODO confirm.
        from ApiLayers.Schemas import AddressNeighborhood

        list_options_base = ListOptionsBase(
            table=AddressNeighborhood,
            list_options=data,
            model_query=None,
        )
        db_session, query_options = list_options_base.init_list_options()
        # Role-based pre-filter stored on the *class* attribute `pre_query`;
        # NOTE(review): this mutates shared class state -- confirm handler
        # execution is serialized, otherwise concurrent requests can race.
        if cls.context_retriever.token.is_occupant:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("10"),
                db=db_session,
            ).query
        elif cls.context_retriever.token.is_employee:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("9"),
                db=db_session,
            ).query
        records = AddressNeighborhood.filter_all(
            *query_options.convert(), db=db_session
        )
        return list_options_base.paginated_result(
            records=records,
            response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
        )
diff --git a/Events/AllEvents/events/company/company/info.py b/Events/AllEvents/events/company/company/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/company/company/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
# Navigation metadata for this (template) page; every display field is a
# placeholder to be filled in per feature.
template_page_info = PageInfo(
    url="",
    parent="",
    icon="",
    name="template",
    title={"en": "template"},
    description={"en": "template"},
)
diff --git a/Events/AllEvents/events/company/department/api_events.py b/Events/AllEvents/events/company/department/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/company/department/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
# Auth Login
# NOTE(review): name/key/description appear copy-pasted from the
# authentication login event -- confirm each event gets a unique key.
template_event = Event(
    name="authentication_login_super_user_event",
    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
    request_validator=TemplateRequestModels.TemplateRequestModelX,
    language_models=[LoginRequestLanguageModel],
    response_validation_static="LOGIN_SUCCESS",
    description="Login super user",
)


# Fixed: assign the classmethod itself instead of *calling* it at import
# time.  The original referenced `TemplateFunctions.template_example_function()`,
# which (a) does not exist -- the defined handler is
# `template_example_function_list` (AttributeError at import) -- and
# (b) would have stored the call's return value rather than a callable
# (cf. building/api_events.py, which assigns without calling).
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/company/department/cluster.py b/Events/AllEvents/events/company/department/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/company/department/cluster.py
@@ -0,0 +1,14 @@
from Events.Engine.abstract_class import CategoryCluster

# Fixed: a bare `from info import ...` only resolves if this package
# directory happens to be on sys.path; `info` is a sibling module of this
# package, so use the explicit relative import (matching
# building/cluster.py's `from .info import ...`).
from .info import template_page_info


# Router/category registration for this (template) feature group.
TemplateCluster = CategoryCluster(
    name="TemplateCluster",
    tags=["template"],
    prefix="/template",
    description="Template cluster",
    pageinfo=template_page_info,
    endpoints={},
    include_in_schema=True,
    sub_category=[],
)
diff --git a/Events/AllEvents/events/company/department/department.py b/Events/AllEvents/events/company/department/department.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/company/department/function_handlers.py b/Events/AllEvents/events/company/department/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/company/department/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/company/department/info.py b/Events/AllEvents/events/company/department/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/company/department/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/company/duties/api_events.py b/Events/AllEvents/events/company/duties/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/company/duties/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/company/duties/cluster.py b/Events/AllEvents/events/company/duties/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/company/duties/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/company/duties/duties.py b/Events/AllEvents/events/company/duties/duties.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/company/duties/function_handlers.py b/Events/AllEvents/events/company/duties/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/company/duties/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/company/duties/info.py b/Events/AllEvents/events/company/duties/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/company/duties/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/company/duty/api_events.py b/Events/AllEvents/events/company/duty/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/company/duty/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/company/duty/cluster.py b/Events/AllEvents/events/company/duty/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/company/duty/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/company/duty/duty.py b/Events/AllEvents/events/company/duty/duty.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/company/duty/function_handlers.py b/Events/AllEvents/events/company/duty/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/company/duty/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/company/duty/info.py b/Events/AllEvents/events/company/duty/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/company/duty/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/company/employee/api_events.py b/Events/AllEvents/events/company/employee/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/company/employee/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/company/employee/cluster.py b/Events/AllEvents/events/company/employee/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/company/employee/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/company/employee/employee.py b/Events/AllEvents/events/company/employee/employee.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/company/employee/function_handlers.py b/Events/AllEvents/events/company/employee/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/company/employee/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/company/employee/info.py b/Events/AllEvents/events/company/employee/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/company/employee/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/company/staff/api_events.py b/Events/AllEvents/events/company/staff/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/company/staff/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/company/staff/cluster.py b/Events/AllEvents/events/company/staff/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/company/staff/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/company/staff/function_handlers.py b/Events/AllEvents/events/company/staff/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/company/staff/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/company/staff/info.py b/Events/AllEvents/events/company/staff/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/company/staff/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/company/staff/staff.py b/Events/AllEvents/events/company/staff/staff.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/decision_book/book_payment/api_events.py b/Events/AllEvents/events/decision_book/book_payment/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/book_payment/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/decision_book/book_payment/book_payment.py b/Events/AllEvents/events/decision_book/book_payment/book_payment.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/decision_book/book_payment/cluster.py b/Events/AllEvents/events/decision_book/book_payment/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/book_payment/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/decision_book/book_payment/function_handlers.py b/Events/AllEvents/events/decision_book/book_payment/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/book_payment/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/decision_book/book_payment/info.py b/Events/AllEvents/events/decision_book/book_payment/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/book_payment/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book/api_events.py b/Events/AllEvents/events/decision_book/decision_book/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/decision_book/decision_book/cluster.py b/Events/AllEvents/events/decision_book/decision_book/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book/decision_book.py b/Events/AllEvents/events/decision_book/decision_book/decision_book.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/decision_book/decision_book/function_handlers.py b/Events/AllEvents/events/decision_book/decision_book/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+    Is a template for TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/decision_book/decision_book/info.py b/Events/AllEvents/events/decision_book/decision_book/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book_items/api_events.py b/Events/AllEvents/events/decision_book/decision_book_items/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list  # bind the existing handler; the old name did not exist and calling it raised AttributeError at import
diff --git a/Events/AllEvents/events/decision_book/decision_book_items/cluster.py b/Events/AllEvents/events/decision_book/decision_book_items/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book_items/decision_book_items.py b/Events/AllEvents/events/decision_book/decision_book_items/decision_book_items.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/decision_book/decision_book_items/function_handlers.py b/Events/AllEvents/events/decision_book/decision_book_items/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/decision_book/decision_book_items/info.py b/Events/AllEvents/events/decision_book/decision_book_items/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book_items_debits/api_events.py b/Events/AllEvents/events/decision_book/decision_book_items_debits/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items_debits/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Template login event. NOTE(review): name/key are duplicated verbatim in every
+template_event = Event(
+    name="authentication_login_super_user_event",
+    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+    request_validator=TemplateRequestModels.TemplateRequestModelX,
+    language_models=[LoginRequestLanguageModel],
+    response_validation_static="LOGIN_SUCCESS",
+    description="Login super user",
+)
+
+# Assign the handler itself (do not call it): TemplateFunctions defines only
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/decision_book/decision_book_items_debits/cluster.py b/Events/AllEvents/events/decision_book/decision_book_items_debits/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items_debits/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book_items_debits/decision_book_items_debits.py b/Events/AllEvents/events/decision_book/decision_book_items_debits/decision_book_items_debits.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/decision_book/decision_book_items_debits/function_handlers.py b/Events/AllEvents/events/decision_book/decision_book_items_debits/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items_debits/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/decision_book/decision_book_items_debits/info.py b/Events/AllEvents/events/decision_book/decision_book_items_debits/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_items_debits/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book_person/api_events.py b/Events/AllEvents/events/decision_book/decision_book_person/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_person/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Template login event. NOTE(review): name/key are duplicated verbatim in every
+template_event = Event(
+    name="authentication_login_super_user_event",
+    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+    request_validator=TemplateRequestModels.TemplateRequestModelX,
+    language_models=[LoginRequestLanguageModel],
+    response_validation_static="LOGIN_SUCCESS",
+    description="Login super user",
+)
+
+# Assign the handler itself (do not call it): TemplateFunctions defines only
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/decision_book/decision_book_person/cluster.py b/Events/AllEvents/events/decision_book/decision_book_person/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_person/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/decision_book/decision_book_person/decision_book_person.py b/Events/AllEvents/events/decision_book/decision_book_person/decision_book_person.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/decision_book/decision_book_person/function_handlers.py b/Events/AllEvents/events/decision_book/decision_book_person/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_person/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/decision_book/decision_book_person/info.py b/Events/AllEvents/events/decision_book/decision_book_person/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/decision_book_person/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/decision_book/invitations/api_events.py b/Events/AllEvents/events/decision_book/invitations/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/invitations/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Template login event. NOTE(review): name/key are duplicated verbatim in every
+template_event = Event(
+    name="authentication_login_super_user_event",
+    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+    request_validator=TemplateRequestModels.TemplateRequestModelX,
+    language_models=[LoginRequestLanguageModel],
+    response_validation_static="LOGIN_SUCCESS",
+    description="Login super user",
+)
+
+# Assign the handler itself (do not call it): TemplateFunctions defines only
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/decision_book/invitations/cluster.py b/Events/AllEvents/events/decision_book/invitations/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/invitations/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/decision_book/invitations/function_handlers.py b/Events/AllEvents/events/decision_book/invitations/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/invitations/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/decision_book/invitations/info.py b/Events/AllEvents/events/decision_book/invitations/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/decision_book/invitations/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/decision_book/invitations/invitations.py b/Events/AllEvents/events/decision_book/invitations/invitations.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/identity/people/api_events.py b/Events/AllEvents/events/identity/people/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/identity/people/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Template login event. NOTE(review): name/key are duplicated verbatim in every
+template_event = Event(
+    name="authentication_login_super_user_event",
+    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+    request_validator=TemplateRequestModels.TemplateRequestModelX,
+    language_models=[LoginRequestLanguageModel],
+    response_validation_static="LOGIN_SUCCESS",
+    description="Login super user",
+)
+
+# Assign the handler itself (do not call it): TemplateFunctions defines only
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/identity/people/cluster.py b/Events/AllEvents/events/identity/people/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/identity/people/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/identity/people/function_handlers.py b/Events/AllEvents/events/identity/people/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/identity/people/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/identity/people/info.py b/Events/AllEvents/events/identity/people/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/identity/people/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/identity/people/people.py b/Events/AllEvents/events/identity/people/people.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/identity/users/api_events.py b/Events/AllEvents/events/identity/users/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/identity/users/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Template login event. NOTE(review): name/key are duplicated verbatim in every
+template_event = Event(
+    name="authentication_login_super_user_event",
+    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+    request_validator=TemplateRequestModels.TemplateRequestModelX,
+    language_models=[LoginRequestLanguageModel],
+    response_validation_static="LOGIN_SUCCESS",
+    description="Login super user",
+)
+
+# Assign the handler itself (do not call it): TemplateFunctions defines only
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/identity/users/cluster.py b/Events/AllEvents/events/identity/users/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/identity/users/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/identity/users/function_handlers.py b/Events/AllEvents/events/identity/users/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/identity/users/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/identity/users/info.py b/Events/AllEvents/events/identity/users/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/identity/users/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/identity/users/users.py b/Events/AllEvents/events/identity/users/users.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book/api_events.py b/Events/AllEvents/events/project_decision_book/project_decision_book/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Template login event. NOTE(review): name/key are duplicated verbatim in every
+template_event = Event(
+    name="authentication_login_super_user_event",
+    key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+    request_validator=TemplateRequestModels.TemplateRequestModelX,
+    language_models=[LoginRequestLanguageModel],
+    response_validation_static="LOGIN_SUCCESS",
+    description="Login super user",
+)
+
+# Assign the handler itself (do not call it): TemplateFunctions defines only
+template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book/cluster.py b/Events/AllEvents/events/project_decision_book/project_decision_book/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book/function_handlers.py b/Events/AllEvents/events/project_decision_book/project_decision_book/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book/info.py b/Events/AllEvents/events/project_decision_book/project_decision_book/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book/project_decision_book.py b/Events/AllEvents/events/project_decision_book/project_decision_book/project_decision_book.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_items/api_events.py b/Events/AllEvents/events/project_decision_book/project_decision_book_items/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_items/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
# Assign the bound handler itself. The previous line referenced a method that
# does not exist (only `template_example_function_list` is defined) AND
# invoked it at import time instead of storing the callable.
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_items/cluster.py b/Events/AllEvents/events/project_decision_book/project_decision_book_items/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_items/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_items/function_handlers.py b/Events/AllEvents/events/project_decision_book/project_decision_book_items/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_items/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
class Handlers:
    """Namespace grouping authentication handler callables."""

    @classmethod  # Requires no auth context
    def handle_function(cls, **kwargs):
        """Placeholder handler: accepts arbitrary keyword args, returns None."""
        return None
+
+
class TemplateFunctions(BaseRouteModel):
    """
    Class for handling authentication functions.
    Is a template for TokenMiddleware.event_required decorator function groups.
    Results as:
    STATIC_MESSAGE & LANG retrieved from redis
    {
        "completed": true,
        "message": STATIC_MESSAGE,
        "lang": LANG,
        "pagination": {
            "size": 10,
            "page": 2,
            "allCount": 28366,
            "totalCount": 18,
            "totalPages": 2,
            "pageCount": 8,
            "orderField": ["type_code", "neighborhood_name"],
            "orderType": ["asc", "desc"]
        },
        "data": [
            {
                "created_at": "2025-01-12 09:39:48 +00:00",
                "active": true,
                "expiry_starts": "2025-01-12 09:39:48 +00:00",
                "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
                "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
            },
            ...
        ]
    }
    """

    @classmethod
    def template_example_function_list(
        cls, data: Optional[Union[dict, ListOptions]]
    ) -> PaginationResult:
        """Example paginated listing over AddressNeighborhood (see class docstring)."""
        # Function-scope import — presumably to avoid a circular import; confirm.
        from ApiLayers.Schemas import AddressNeighborhood

        list_options_base = ListOptionsBase(
            table=AddressNeighborhood,
            list_options=data,
            model_query=None,
        )
        db_session, query_options = list_options_base.init_list_options()
        # NOTE(review): pre_query is set on the class — shared state across
        # requests; confirm the framework resets it between calls.
        if cls.context_retriever.token.is_occupant:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("10"),
                db=db_session,
            ).query
        elif cls.context_retriever.token.is_employee:
            AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
                AddressNeighborhood.neighborhood_code.icontains("9"),
                db=db_session,
            ).query
        records = AddressNeighborhood.filter_all(
            *query_options.convert(), db=db_session
        )
        return list_options_base.paginated_result(
            records=records,
            # None when no RESPONSE_VALIDATOR is configured on the context.
            response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
        )
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_items/info.py b/Events/AllEvents/events/project_decision_book/project_decision_book_items/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_items/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_items/project_decision_book_items.py b/Events/AllEvents/events/project_decision_book/project_decision_book_items/project_decision_book_items.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_person/api_events.py b/Events/AllEvents/events/project_decision_book/project_decision_book_person/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_person/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
# Assign the bound handler itself. The previous line referenced a method that
# does not exist (only `template_example_function_list` is defined) AND
# invoked it at import time instead of storing the callable.
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_person/cluster.py b/Events/AllEvents/events/project_decision_book/project_decision_book_person/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_person/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_person/function_handlers.py b/Events/AllEvents/events/project_decision_book/project_decision_book_person/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_person/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_person/info.py b/Events/AllEvents/events/project_decision_book/project_decision_book_person/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/events/project_decision_book/project_decision_book_person/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/events/project_decision_book/project_decision_book_person/project_decision_book_person.py b/Events/AllEvents/events/project_decision_book/project_decision_book_person/project_decision_book_person.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/AllEvents/events_file.py b/Events/AllEvents/events_file.py
new file mode 100644
index 0000000..fe3fcdc
--- /dev/null
+++ b/Events/AllEvents/events_file.py
@@ -0,0 +1,13 @@
+import Events.AllEvents.authentication as auths_events
+import Events.AllEvents.events as events_events
+import Events.AllEvents.validations as validations_events
+
+
+events_list = (auths_events, events_events, validations_events)
+
+
def retrieve_cluster_by_name(cluster_name: str):
    """Return the attribute named *cluster_name* from the first events module
    that defines it; None when no module defines it."""
    owner = next(
        (mod for mod in events_list if hasattr(mod, cluster_name)), None
    )
    return getattr(owner, cluster_name, None) if owner is not None else None
diff --git a/Events/AllEvents/just_events_file.py b/Events/AllEvents/just_events_file.py
new file mode 100644
index 0000000..4401c54
--- /dev/null
+++ b/Events/AllEvents/just_events_file.py
@@ -0,0 +1,13 @@
+import Events.JustEvents.authentication as auths_events
+import Events.JustEvents.events as events_events
+import Events.JustEvents.validations as validations_events
+
+
+events_list = (auths_events, events_events, validations_events)
+
+
def retrieve_cluster_by_name(cluster_name: str):
    """Look up *cluster_name* across the JustEvents modules; None if absent."""
    for mod in events_list:
        if not hasattr(mod, cluster_name):
            continue
        return getattr(mod, cluster_name, None)
    return None
diff --git a/Events/AllEvents/template/template/api_events.py b/Events/AllEvents/template/template/api_events.py
new file mode 100644
index 0000000..be66917
--- /dev/null
+++ b/Events/AllEvents/template/template/api_events.py
@@ -0,0 +1,21 @@
+from Events.Engine.abstract_class import Event
+from ApiLayers.LanguageModels.Request import (
+ LoginRequestLanguageModel,
+)
+
+from models import TemplateResponseModels, TemplateRequestModels
+from function_handlers import TemplateFunctions
+
+
+# Auth Login
+template_event = Event(
+ name="authentication_login_super_user_event",
+ key="a5d2d0d1-3e9b-4b0f-8c7d-6d4a4b4c4d4e",
+ request_validator=TemplateRequestModels.TemplateRequestModelX,
+ language_models=[LoginRequestLanguageModel],
+ response_validation_static="LOGIN_SUCCESS",
+ description="Login super user",
+)
+
+
# Assign the bound handler itself. The previous line referenced a method that
# does not exist (only `template_example_function_list` is defined) AND
# invoked it at import time instead of storing the callable.
template_event.endpoint_callable = TemplateFunctions.template_example_function_list
diff --git a/Events/AllEvents/template/template/cluster.py b/Events/AllEvents/template/template/cluster.py
new file mode 100644
index 0000000..de5066b
--- /dev/null
+++ b/Events/AllEvents/template/template/cluster.py
@@ -0,0 +1,14 @@
+from Events.Engine.abstract_class import CategoryCluster
+from info import template_page_info
+
+
+TemplateCluster = CategoryCluster(
+ name="TemplateCluster",
+ tags=["template"],
+ prefix="/template",
+ description="Template cluster",
+ pageinfo=template_page_info,
+ endpoints={},
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/template/template/function_handlers.py b/Events/AllEvents/template/template/function_handlers.py
new file mode 100644
index 0000000..cbfbc0f
--- /dev/null
+++ b/Events/AllEvents/template/template/function_handlers.py
@@ -0,0 +1,78 @@
+from typing import Union, Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from Events.base_request_model import BaseRouteModel, ListOptionsBase
+from Services.PostgresDb.Models.pagination import PaginationResult
+
+
+class Handlers:
+ """Class for handling authentication functions"""
+
+ @classmethod # Requires no auth context
+ def handle_function(cls, **kwargs):
+ """Handle function with kwargs"""
+ return
+
+
+class TemplateFunctions(BaseRouteModel):
+ """
+ Class for handling authentication functions
+ Is a template 4 TokenMiddleware.event_required decorator function groups.
+ results as :
+ STATIC_MESSAGE & LANG retrieved from redis
+ {
+ "completed": true,
+ "message": STATIC_MESSAGE,
+ "lang": LANG,
+ "pagination": {
+ "size": 10,
+ "page": 2,
+ "allCount": 28366,
+ "totalCount": 18,
+ "totalPages": 2,
+ "pageCount": 8,
+ "orderField": ["type_code", "neighborhood_name"],
+ "orderType": ["asc", "desc"]
+ },
+ "data": [
+ {
+ "created_at": "2025-01-12 09:39:48 +00:00",
+ "active": true,
+ "expiry_starts": "2025-01-12 09:39:48 +00:00",
+ "locality_uu_id": "771fd152-aca1-4d75-a42e-9b29ea7112b5",
+ "uu_id": "e1baa3bc-93ce-4099-a078-a11b71d3b1a8"
+ },
+ ...
+ ]
+ }
+ """
+
+ @classmethod
+ def template_example_function_list(
+ cls, data: Optional[Union[dict, ListOptions]]
+ ) -> PaginationResult:
+ from ApiLayers.Schemas import AddressNeighborhood
+
+ list_options_base = ListOptionsBase(
+ table=AddressNeighborhood,
+ list_options=data,
+ model_query=None,
+ )
+ db_session, query_options = list_options_base.init_list_options()
+ if cls.context_retriever.token.is_occupant:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("10"),
+ db=db_session,
+ ).query
+ elif cls.context_retriever.token.is_employee:
+ AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
+ AddressNeighborhood.neighborhood_code.icontains("9"),
+ db=db_session,
+ ).query
+ records = AddressNeighborhood.filter_all(
+ *query_options.convert(), db=db_session
+ )
+ return list_options_base.paginated_result(
+ records=records,
+ response_model=getattr(cls.context_retriever, "RESPONSE_VALIDATOR", None),
+ )
diff --git a/Events/AllEvents/template/template/info.py b/Events/AllEvents/template/template/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/template/template/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/template/template/models.py b/Events/AllEvents/template/template/models.py
new file mode 100644
index 0000000..106b701
--- /dev/null
+++ b/Events/AllEvents/template/template/models.py
@@ -0,0 +1,9 @@
+from ApiLayers.ApiValidations.Request import BaseModelRegular
+
+
+class TemplateRequestModels:
+ TemplateRequestModelX = BaseModelRegular
+
+
+class TemplateResponseModels:
+ TemplateResponseModelsX = BaseModelRegular
diff --git a/Events/AllEvents/template/template/template.py b/Events/AllEvents/template/template/template.py
new file mode 100644
index 0000000..7a443eb
--- /dev/null
+++ b/Events/AllEvents/template/template/template.py
@@ -0,0 +1,39 @@
+"""
+template related API endpoints.
+"""
+
+from typing import Any, Dict
+from fastapi import Request
+
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import EndpointBaseRequestModel, ContextRetrievers
+from api_events import template_event
+
+
+AuthenticationLoginEventMethods = MethodToEvent(
+ name="AuthenticationLoginEventMethods",
+ events={
+ template_event.key: template_event,
+ },
+ headers=[],
+ errors=[],
+ url="/login",
+ method="POST",
+ summary="Login via domain and access key : [email] | [phone]",
+ description="Login to the system via domain, access key : [email] | [phone]",
+)
+
+
def authentication_login_with_domain_and_creds_endpoint(
    request: Request, data: EndpointBaseRequestModel
) -> Dict[str, Any]:
    """
    Endpoint wrapper: resolve the template login event by its key,
    re-validate the raw payload with the event's request validator, and
    delegate to the event's handler.
    """
    event_2_catch = AuthenticationLoginEventMethods.retrieve_event(
        event_function_code=f"{template_event.key}"
    )
    # Re-validate the generic payload against the event-specific model.
    data = event_2_catch.REQUEST_VALIDATOR(**data.data)
    return event_2_catch.endpoint_callable(request=request, data=data)
+
+
+AuthenticationLoginEventMethods.endpoint_callable = (
+ authentication_login_with_domain_and_creds_endpoint
+)
diff --git a/Events/AllEvents/validations/__init__.py b/Events/AllEvents/validations/__init__.py
new file mode 100644
index 0000000..f565e36
--- /dev/null
+++ b/Events/AllEvents/validations/__init__.py
@@ -0,0 +1,10 @@
+"""
+Validations package initialization.
+"""
+
+from .validation.cluster import ValidationsCluster
+
+
+__all__ = [
+ "ValidationsCluster",
+]
diff --git a/Events/AllEvents/validations/validation/api_events.py b/Events/AllEvents/validations/validation/api_events.py
new file mode 100644
index 0000000..c0bcefb
--- /dev/null
+++ b/Events/AllEvents/validations/validation/api_events.py
@@ -0,0 +1,78 @@
+from typing import Any
+from fastapi import Request
+
+from Events.Engine.abstract_class import Event
+
+from .models import ValidationsPydantic, ClusterPydantic, PagePydantic
+from .function_handlers import RetrieveValidation, RetrievePage
+
+# Validation Event
+validation_event = Event(
+ name="validation_event",
+ key="02b5a596-14ba-4361-90d7-c6755727c63f",
+ request_validator=ValidationsPydantic,
+ language_models=[],
+ statics=None,
+ description="Get Validations by event function code",
+)
+
+
+def get_validation_by_event_function_code(request: Request, data: Any):
+ return RetrieveValidation.retrieve_validation(data=data)
+
+
+validation_event.endpoint_callable = get_validation_by_event_function_code
+
+
+# Menu Event
+menu_event = Event(
+ name="menu_event",
+ key="a1613ca0-4843-498b-bfff-07ecea6777b2",
+ request_validator=ValidationsPydantic,
+ language_models=[],
+ statics=None,
+ description="Get Left Menu of the user",
+)
+
+
+def get_menu_by_event_function_code(request: Request, data: Any):
+ return RetrieveValidation.retrieve_validation(data=data)
+
+
+menu_event.endpoint_callable = get_menu_by_event_function_code
+
+
+# Cluster Event
+cluster_event = Event(
+ name="cluster_event",
+ key="eed3fe12-cec1-4f35-b43d-62fca0682f73",
+ request_validator=ClusterPydantic,
+ language_models=[],
+ statics=None,
+ description="Get Left Menu of the user",
+)
+
+
+def get_cluster_by_event_function_code(request: Request, data: Any):
+ return RetrievePage.retrieve_cluster(data=data)
+
+
+cluster_event.endpoint_callable = get_cluster_by_event_function_code
+
+
+# Page Event
+page_event = Event(
+ name="page_event",
+ key="",
+ request_validator=PagePydantic,
+ language_models=[],
+ statics=None,
+ description="Get Left Menu of the user",
+)
+
+
+def get_page_by_event_function_code(request: Request, data: Any):
+ return RetrievePage.retrieve_page(data=data)
+
+
+page_event.endpoint_callable = get_page_by_event_function_code
diff --git a/Events/AllEvents/validations/validation/cluster.py b/Events/AllEvents/validations/validation/cluster.py
new file mode 100644
index 0000000..54df876
--- /dev/null
+++ b/Events/AllEvents/validations/validation/cluster.py
@@ -0,0 +1,24 @@
+from Events.Engine.abstract_class import CategoryCluster
+
+from .validation import (
+ ValidationEventMethods,
+ MenuEventMethods,
+ ClusterEventMethods,
+ PageEventMethods,
+)
+
+
+ValidationsCluster = CategoryCluster(
+ name="ValidationsCluster",
+ tags=["Validations"],
+ prefix="/validations",
+ description="Validations cluster",
+ endpoints={
+ "ValidationEventMethods": ValidationEventMethods,
+ "MenuEventMethods": MenuEventMethods,
+ "ClusterEventMethods": ClusterEventMethods,
+ "PageEventMethods": PageEventMethods,
+ },
+ include_in_schema=True,
+ sub_category=[],
+)
diff --git a/Events/AllEvents/validations/validation/function_handlers.py b/Events/AllEvents/validations/validation/function_handlers.py
new file mode 100644
index 0000000..2b43bd3
--- /dev/null
+++ b/Events/AllEvents/validations/validation/function_handlers.py
@@ -0,0 +1,216 @@
+"""
+Validation function handlers
+"""
+
+from typing import Dict, Any
+from fastapi import Request
+
+from ApiLayers.AllConfigs.Redis.configs import (
+ RedisValidationKeysAction,
+ RedisCategoryKeys,
+)
+from Services.Redis.Actions.actions import RedisActions
+from Events.base_request_model import BaseRouteModel
+
+from config import ValidationsConfig
+
+
class ValidateBase:
    """
    Resolve which single event function code — among those reachable by the
    caller — matches the requested url, via Redis lookups.
    """

    redis_key: str = f"{RedisCategoryKeys.METHOD_FUNCTION_CODES}:*:"

    def __init__(self, url: str, reachable_codes: list):
        self.url = url
        self.reachable_codes = reachable_codes

    @property
    def function_codes(self):
        """Function codes registered in Redis for this url; raises if absent."""
        # NOTE(review): subclasses override redis_key, so this inherited
        # property queries "<subclass key><url>" rather than the
        # METHOD_FUNCTION_CODES pattern above — confirm this is intended.
        redis_function_codes = RedisActions.get_json(
            list_keys=[f"{self.redis_key}{self.url}"]
        )
        if redis_function_codes.status:
            return redis_function_codes.first
        raise ValueError("Function code not found")

    @property
    def intersection(self):
        """The single code in both function_codes and reachable_codes; raises otherwise."""
        # Exactly one reachable code must match the url's function codes.
        intersection = list(
            set(self.function_codes).intersection(set(self.reachable_codes))
        )
        if not len(intersection) == 1:
            raise ValueError(
                "Users reachable function codes does not match or match more than one."
            )
        return intersection[0]
+
+
class RedisHeaderRetrieve(ValidateBase):
    """Fetch the merged header payload for the single event the caller may reach."""

    redis_key: str = RedisValidationKeysAction.dynamic_header_request_key

    @property
    def header(self):
        """
        Headers: Headers which is merged with response model && language models of event
        """
        redis_header = RedisActions.get_json(
            list_keys=[f"{self.redis_key}:{self.intersection}"]
        )
        if redis_header.status:
            return redis_header.first
        raise ValueError("Header not found")
+
+
class RedisValidationRetrieve(ValidateBase):
    """Fetch the merged validation payload for the single event the caller may reach."""

    redis_key: str = RedisValidationKeysAction.dynamic_validation_key

    @property
    def validation(self):
        """
        Validation: Validation of event which is merged with response model && language models of event
        """
        redis_validation = RedisActions.get_json(
            list_keys=[f"{self.redis_key}:{self.intersection}"]
        )
        if redis_validation.status:
            return redis_validation.first
        # Fixed message copy-pasted from RedisHeaderRetrieve ("Header not found").
        raise ValueError("Validation not found")
+
+
class ValidationsBoth(RedisHeaderRetrieve, RedisValidationRetrieve):
    """Expose the header and validation payloads of the resolved event together."""

    @property
    def both(self) -> Dict[str, Any]:
        """Return {"headers": ..., "validation": ...} for the resolved event."""
        combined: Dict[str, Any] = {}
        combined["headers"] = self.header
        combined["validation"] = self.validation
        return combined
+
+
class RetrieveValidation(BaseRouteModel):
    """Resolve validation/header payloads for an event the caller can reach."""

    @classmethod
    def retrieve_validation(cls, data: Any):
        """
        Retrieve validation by event function code.

        Returns headers, validation, or both, depending on data.asked_field.
        Raises ValueError for an unsupported asked_field.
        """
        if (
            getattr(data, "asked_field", "")
            not in ValidationsConfig.SUPPORTED_VALIDATIONS
        ):
            raise ValueError(
                f"Invalid asked field please retry with valid fields {ValidationsConfig.SUPPORTED_VALIDATIONS}"
            )

        # Reachable event codes depend on the caller's role.
        reachable_codes = []
        if cls.context_retriever.token.is_employee:
            reachable_codes = (
                cls.context_retriever.token.selected_company.reachable_event_codes
            )
        elif cls.context_retriever.token.is_occupant:
            reachable_codes = (
                cls.context_retriever.token.selected_occupant.reachable_event_codes
            )

        # BUG FIX: ValidateBase.__init__ takes "reachable_codes" (plural); the
        # old "reachable_code" keyword raised TypeError on every call.
        # NOTE(review): data.url — ValidationsPydantic declares event_code and
        # asked_field only; confirm a url field is supplied by the caller.
        validate_dict = dict(url=data.url, reachable_codes=reachable_codes)
        if data.asked_field == "all":
            return ValidationsBoth(**validate_dict).both
        elif data.asked_field == "headers":
            return RedisHeaderRetrieve(**validate_dict).header
        elif data.asked_field == "validation":
            return RedisValidationRetrieve(**validate_dict).validation
+
+
class RetrievePage(BaseRouteModel):
    """Resolve cluster/page payloads filtered to the caller's reachable event codes."""

    @staticmethod
    def get_site_cluster(page_name: str):
        """
        Extract the cluster name from a page url:
        "/dashboard?site=ClusterName" -> "ClusterName".

        Raises ValueError when the url is empty or carries no cluster value
        (the old split("?")[1].split("=")[1] raised a bare IndexError).
        """
        if not page_name:
            raise ValueError("Page name not found")
        _, _, query = page_name.partition("?")
        _, _, cluster = query.partition("=")
        if not cluster:
            raise ValueError(f"Cluster name not found in page url : {page_name}")
        return cluster

    @classmethod
    def retrieve_cluster(cls, data: Any):
        """
        Retrieve cluster by event function code.

        NOTE(review): still unimplemented — it gathers inputs then raises.
        """
        reachable_codes = []
        if cls.context_retriever.token.is_employee:
            reachable_codes = (
                cls.context_retriever.token.selected_company.reachable_event_codes
            )
        elif cls.context_retriever.token.is_occupant:
            reachable_codes = (
                cls.context_retriever.token.selected_occupant.reachable_event_codes
            )
        # BUG FIX: keyword must match ValidateBase.__init__ ("reachable_codes");
        # leftover debug print() calls removed.
        # NOTE(review): ClusterPydantic declares only "name" — confirm data.url
        # exists before this is implemented.
        validate_dict = dict(url=data.url, reachable_codes=reachable_codes)
        # BUG FIX: pydantic models have no .get(); use getattr instead.
        cluster_name = getattr(data, "name", None)
        raise NotImplementedError("Cluster not found")

    @classmethod
    def retrieve_page(cls, data: Any):
        """
        Build the page payload (endpoints, language models, instructions)
        restricted to the event codes the caller can reach.
        """
        from Events.Engine import CategoryCluster
        from Events.JustEvents.events_file import retrieve_cluster_by_name

        reachable_codes = []
        if cls.context_retriever.token.is_employee:
            reachable_codes = (
                cls.context_retriever.token.selected_company.reachable_event_codes
            )
        elif cls.context_retriever.token.is_occupant:
            reachable_codes = (
                cls.context_retriever.token.selected_occupant.reachable_event_codes
            )
        cluster_from_all_events = cls.get_site_cluster(page_name=data.page)
        if not cluster_from_all_events:
            raise ValueError(f"Cluster not found : {data.page}")

        cluster: CategoryCluster = retrieve_cluster_by_name(cluster_from_all_events)
        if not cluster:
            raise ValueError("Cluster not found")

        page_info = cluster.retrieve_page_info().get(data.page, None)
        if not page_info:
            raise ValueError("Page not found")

        # Normalize once instead of calling dict(page_info) on every access.
        info = dict(page_info)
        endpoints: dict = info.get("endpoints", {})
        if not endpoints:
            raise ValueError("Endpoints not found")

        new_page_info_dict = dict(
            name=cluster.name,
            prefix=cluster.PREFIX,
            url=info.get("url", None),
            icon=info.get("icon", None),
            mapping=cluster.MAPPING,
            page_info=info.get("page_info", None),
            endpoints={},
            language_models={},
            instructions={},
        )
        language_models = info.get("language_models", {})
        instructions = info.get("instructions", {})
        for key, event_codes in dict(endpoints).items():
            # Client can reach this endpoint when its codes intersect the
            # caller's reachable codes.
            if not set(event_codes) & set(reachable_codes):
                continue
            new_page_info_dict["endpoints"][key] = True
            model_block = language_models.get(key, None)
            if model_block:
                if key in model_block.keys():
                    # key has sub-key blocks inside its language model
                    for key_model, val_model in dict(model_block).items():
                        if key_model in new_page_info_dict["endpoints"].keys():
                            new_page_info_dict["language_models"][key_model] = val_model
                else:
                    new_page_info_dict["language_models"][key] = model_block
            if instructions.get(key, None):
                new_page_info_dict["instructions"][key] = instructions.get(key)
        return new_page_info_dict
diff --git a/Events/AllEvents/validations/validation/info.py b/Events/AllEvents/validations/validation/info.py
new file mode 100644
index 0000000..89d163f
--- /dev/null
+++ b/Events/AllEvents/validations/validation/info.py
@@ -0,0 +1,11 @@
+from Events.Engine.abstract_class import PageInfo
+
+
+template_page_info = PageInfo(
+ name="template",
+ title={"en": "template"},
+ description={"en": "template"},
+ icon="",
+ parent="",
+ url="",
+)
diff --git a/Events/AllEvents/validations/validation/models.py b/Events/AllEvents/validations/validation/models.py
new file mode 100644
index 0000000..a06c3f4
--- /dev/null
+++ b/Events/AllEvents/validations/validation/models.py
@@ -0,0 +1,19 @@
+"""
+Validation records request and response models.
+"""
+
+from typing import Optional
+from pydantic import BaseModel
+
+
+class ValidationsPydantic(BaseModel):
+ event_code: str
+ asked_field: Optional[str] = "all"
+
+
+class ClusterPydantic(BaseModel):
+ name: str
+
+
+class PagePydantic(BaseModel):
+ page: str
diff --git a/Events/AllEvents/validations/validation/validation.py b/Events/AllEvents/validations/validation/validation.py
new file mode 100644
index 0000000..92be968
--- /dev/null
+++ b/Events/AllEvents/validations/validation/validation.py
@@ -0,0 +1,119 @@
+"""
+Validation related API endpoints.
+"""
+
+from typing import Any, Dict
+from fastapi import Request
+
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import EndpointBaseRequestModel, ContextRetrievers
+from ApiLayers.Middleware.auth_middleware import MiddlewareModule
+
+from .api_events import validation_event, menu_event, cluster_event, page_event
+from .function_handlers import RetrieveValidation, RetrievePage
+
+
+ValidationEventMethods = MethodToEvent(
+ name="ValidationEventMethods",
+ events={validation_event.key: validation_event},
+ headers=[],
+ errors=[],
+ url="/validations",
+ method="POST",
+ decorators_list=[MiddlewareModule.auth_required],
+ summary="Get Validations by event function code",
+ description="Get Validations by event function code by All, Header, Validation & request url",
+)
+
+
+def validations_endpoint(
+ request: Request, data: EndpointBaseRequestModel
+) -> Dict[str, Any]:
+ function = ValidationEventMethods.retrieve_event(
+ event_function_code=f"{validation_event.key}"
+ )
+ data = function.REQUEST_VALIDATOR(**data.data)
+ RetrieveValidation.context_retriever = ContextRetrievers(func=validations_endpoint)
+ return function.endpoint_callable(request=request, data=data)
+
+
+ValidationEventMethods.endpoint_callable = validations_endpoint
+
+
+MenuEventMethods = MethodToEvent(
+ name="MenuEventMethods",
+ events={menu_event.key: menu_event},
+ headers=[],
+ errors=[],
+ url="/menu",
+ method="POST",
+ decorators_list=[MiddlewareModule.auth_required],
+ summary="Get Left Menu of the user",
+ description="Get Left Menu of the user",
+)
+
+
+def menu_endpoint(request: Request, data: EndpointBaseRequestModel) -> Dict[str, Any]:
+ function = MenuEventMethods.retrieve_event(
+ event_function_code=f"{menu_event.key}"
+ )
+ data = function.REQUEST_VALIDATOR(**data.data)
+ RetrieveValidation.context_retriever = ContextRetrievers(func=menu_endpoint)
+ return function.endpoint_callable(request=request, data=data)
+
+
+MenuEventMethods.endpoint_callable = menu_endpoint
+
+
# Cluster Event
ClusterEventMethods = MethodToEvent(
    name="ClusterEventMethods",
    events={cluster_event.key: cluster_event},
    headers=[],
    errors=[],
    url="/cluster",
    method="POST",
    decorators_list=[MiddlewareModule.auth_required],
    # Fixed summary/description copy-pasted from the menu endpoint.
    summary="Get cluster of the user",
    description="Get cluster information by cluster name for the authenticated user",
)


def cluster_endpoint(
    request: Request, data: EndpointBaseRequestModel
) -> Dict[str, Any]:
    """Validate the payload, bind the auth context, and dispatch the cluster event."""
    function = ClusterEventMethods.retrieve_event(
        event_function_code=f"{cluster_event.key}"
    )
    data = function.REQUEST_VALIDATOR(**data.data)
    RetrievePage.context_retriever = ContextRetrievers(func=cluster_endpoint)
    return function.endpoint_callable(request=request, data=data)


ClusterEventMethods.endpoint_callable = cluster_endpoint
+
+
# Page Event
PageEventMethods = MethodToEvent(
    name="PageEventMethods",
    events={page_event.key: page_event},
    headers=[],
    errors=[],
    url="/page",
    method="POST",
    decorators_list=[MiddlewareModule.auth_required],
    # Fixed summary/description copy-pasted from the menu endpoint.
    summary="Get page payload for the user",
    description="Get page endpoints, language models and instructions reachable by the user",
)


def page_endpoint(request: Request, data: EndpointBaseRequestModel) -> Dict[str, Any]:
    """Validate the payload, bind the auth context, and dispatch the page event."""
    function = PageEventMethods.retrieve_event(
        event_function_code=f"{page_event.key}"
    )
    data = function.REQUEST_VALIDATOR(**data.data)
    RetrievePage.context_retriever = ContextRetrievers(func=page_endpoint)
    return function.endpoint_callable(request=request, data=data)


PageEventMethods.endpoint_callable = page_endpoint
\ No newline at end of file
diff --git a/Events/Engine/__init__.py b/Events/Engine/__init__.py
new file mode 100644
index 0000000..531e19c
--- /dev/null
+++ b/Events/Engine/__init__.py
@@ -0,0 +1,22 @@
+"""ApiEvents package initialization.
+
+This module serves as the main entry point for the ApiEvents package,
+making common utilities and base classes available for all API services.
+"""
+
+from .abstract_class import (
+ MethodToEvent,
+ PageInfo,
+ CategoryCluster,
+ Event,
+)
+
+# from .base_request_model import BaseRequestModel, DictRequestModel
+
+# Re-export commonly used classes
+__all__ = [
+ "MethodToEvent",
+ "PageInfo",
+ "CategoryCluster",
+ "Event",
+]
diff --git a/Events/Engine/abstract_class.py b/Events/Engine/abstract_class.py
new file mode 100644
index 0000000..88377a3
--- /dev/null
+++ b/Events/Engine/abstract_class.py
@@ -0,0 +1,376 @@
+from abc import abstractmethod
+from typing import Any, Dict, List, Optional, Callable
+from uuid import UUID
+
+from ApiLayers.AllConfigs.Redis.configs import RedisCategoryKeys
+from Events.base_request_model import STATIC_PATH
+
+
class PageComponent:
    """A sub-component rendered inside a page (component type ``"Page"``).

    Holds the component's display name, its URL and the language models used
    to localise it. ``FETCH_URL`` is declared but currently unused — the
    ``fetch_url`` constructor argument is commented out upstream.
    """

    NAME: str
    URL: str
    FETCH_URL: str
    LANGUAGE_MODELS: Dict[str, Any]
    TYPE_COMPONENT: Optional[str] = "Page"

    def __init__(
        self,
        name: str,
        url: str,
        # fetch_url: str,
        language_models: Dict[str, Any],
    ):
        self.NAME = name
        self.URL = url
        # self.FETCH_URL = fetch_url
        self.LANGUAGE_MODELS = language_models

    def set_language_models(self, language_models: Dict[str, Any]):
        """Replace the component's language models in place."""
        self.LANGUAGE_MODELS = language_models

    @property
    def language_models(self):
        """The component's current language models."""
        return self.LANGUAGE_MODELS

    def as_dict(self):
        """Serialise for page-info payloads (``fetch_url`` intentionally omitted)."""
        return dict(
            name=self.NAME,
            url=self.URL,
            language_models=self.LANGUAGE_MODELS,
        )
+ }
+
+
class PageInfo:
    """Menu/page descriptor: name, icon, URL, endpoints and localisation.

    The ``page_info`` mapping can carry URL-matching metadata, e.g.::

        {
            "/dashboard?site=AccountCluster": [
                "/accounts/create",
                "/accounts/update",
                "/accounts/list",
            ],
            "/update?site=AccountCluster": ["/accounts/update"],
            "/create?site=AccountCluster": ["/accounts/create"],
        }
    """

    NAME: str
    PAGE_URL: str
    PAGEINFO: Dict[str, Any]
    URL: str = ""
    ENDPOINTS: Dict[str, Any]
    LANGUAGE_MODELS: Dict[str, Any]
    SUB_COMPONENTS: Optional[list["PageComponent"]] = None
    INSTRUCTIONS: Optional[Dict[str, Any]] = None

    def __init__(
        self,
        name: str,
        icon: str,
        url: str,
        endpoints: Dict[str, Any],
        language_models: Dict[str, Any],
        page_info: Optional[Dict[str, Any]] = None,
        sub_components: Optional[list["PageComponent"]] = None,
        instructions: Optional[Dict[str, Any]] = None,
    ):
        self.NAME = name
        self.LANGUAGE_MODELS = language_models
        self.ICON = icon
        self.URL = url
        self.SUB_COMPONENTS = sub_components
        self.ENDPOINTS = endpoints
        self.PAGEINFO = page_info
        self.INSTRUCTIONS = instructions

    @property
    def endpoints(self):
        """Endpoint mapping attached to this page."""
        return self.ENDPOINTS

    @property
    def sub_components(self):
        """Nested ``PageComponent`` entries, or None."""
        return self.SUB_COMPONENTS

    @property
    def as_dict(self):
        """Serialise the page; instructions/sub_components only when present."""
        payload = {
            "name": self.NAME,
            "icon": self.ICON,
            "url": self.URL,
            "endpoints": self.ENDPOINTS,
            "language_models": self.LANGUAGE_MODELS,
            "page_info": self.PAGEINFO,
        }
        if self.INSTRUCTIONS:
            payload["instructions"] = self.INSTRUCTIONS
        if self.SUB_COMPONENTS:
            payload["sub_components"] = [
                component.as_dict() for component in self.SUB_COMPONENTS
            ]
        return payload
+
+
+class Event:
+
+ KEY_: str # static string uuid.uuid4().__str__()
+ RESPONSE_VALIDATOR: Optional[Any]
+ REQUEST_VALIDATOR: Optional[Any]
+ DESCRIPTION: str
+ LANGUAGE_MODELS: list
+ STATICS: str
+ EXTRA_OPTIONS: Optional[Dict[str, Any]] = None
+ endpoint_callable: Any
+
+ def __init__(
+ self,
+ name: str,
+ key: str | UUID,
+ description: str,
+ language_models: list[Dict[str, Dict]],
+ statics: str = None,
+ request_validator: Optional[Any] = None,
+ response_validator: Optional[Any] = None,
+ extra_options: Optional[Dict[str, Any]] = None,
+ ) -> None:
+ self.NAME = name
+ self.KEY_ = key
+ self.REQUEST_VALIDATOR = request_validator
+ self.RESPONSE_VALIDATOR = response_validator
+ self.STATICS = statics
+ self.LANGUAGE_MODELS = language_models
+ self.DESCRIPTION = description
+ self.EXTRA_OPTIONS = extra_options
+
+ @property
+ def is_static_response(self):
+ return bool(self.STATICS)
+
+ @property
+ def static_response(self):
+ from Services.Redis.Actions.actions import RedisActions
+ from ApiLayers.AllConfigs.Redis.configs import RedisValidationKeysAction
+
+ if self.is_static_response:
+ static_response = RedisActions.get_json(
+ list_keys=[
+ f"{RedisValidationKeysAction.static_response_key}:{self.STATICS}"
+ ]
+ )
+ if static_response.status:
+ return static_response.first
+ return None
+
+ @property
+ def static_key(self):
+ return self.STATICS
+
+ @property
+ def description(self):
+ return f"This is an event of {self.name}. Description: {self.DESCRIPTION}"
+
+ @property
+ def name(self):
+ return self.NAME
+
+ @property
+ def key(self) -> str:
+ return str(self.KEY_)
+
+ @abstractmethod
+ def endpoint_callable(self, **kwargs) -> Any:
+ """
+ Retrieves the endpoint function based on the event key.
+ """
+ return self.endpoint_callable(**kwargs)
+
+
class MethodToEvent:
    """A single HTTP route that multiplexes one or more events.

    Each instance describes an endpoint (URL, HTTP method, OpenAPI text,
    decorators) and holds the events it can dispatch, keyed by their function
    code. The concrete handler is attached after construction by assigning
    ``endpoint_callable``.
    """

    # Forward-reference strings avoid eager evaluation of sibling class names.
    EVENTS: dict[str, "Event"]
    HEADER_LANGUAGE_MODELS: list[Dict]
    ERRORS_LANGUAGE_MODELS: Optional[list[Dict]]
    URL: str
    METHOD: str
    SUMMARY: str
    DESCRIPTION: str
    # BUG FIX: was ``Optional[Callable] = []`` — a mutable class-level list
    # (shared by every instance) with a wrong annotation. ``__init__`` always
    # assigns the instance attribute, so a None default is safe.
    DECORATORS_LIST: Optional[List[Callable]] = None
    EXTRA_OPTIONS: Optional[Dict[str, Any]] = None

    def __init__(
        self,
        name: str,
        events: dict[str, "Event"],
        headers: list[Dict],
        url: str,
        method: str,
        summary: str,
        description: str,
        decorators_list: Optional[List[Callable]] = None,
        errors: Optional[list[Dict]] = None,
        extra_options: Optional[Dict[str, Any]] = None,
    ):
        self.EVENTS = events
        self.URL = url
        self.METHOD = method
        self.SUMMARY = summary
        self.NAME = name
        self.DESCRIPTION = description
        self.DECORATORS_LIST = decorators_list
        self.HEADER_LANGUAGE_MODELS = headers
        self.ERRORS_LANGUAGE_MODELS = errors
        self.EXTRA_OPTIONS = extra_options

    @property
    def name(self):
        return self.NAME

    def retrieve_all_event_keys(self) -> list[str]:
        """All function codes this route can dispatch."""
        return [str(event_key) for event_key in self.EVENTS.keys()]

    def retrieve_event(self, event_function_code: str) -> "Event":
        """Look up an event by its function code.

        Raises:
            ValueError: when no event is registered under that code.
        """
        if found_event := self.EVENTS.get(event_function_code, None):
            return found_event
        raise ValueError(f"Event with function code {event_function_code} not found")

    def retrieve_redis_value(self, cluster: "CategoryCluster") -> Dict:
        """
        Key("METHOD_FUNCTION_CODES:{ClusterToMethod}:MethodEvent:Endpoint") : Value([FUNCTION_CODE, ...])
        """
        # BUG FIX: built without nesting double-quoted f-strings inside a
        # double-quoted f-string, which is only valid syntax on Python 3.12+.
        endpoint_path = f"{cluster.PREFIX}{self.URL}"
        redis_key = (
            f"{RedisCategoryKeys.METHOD_FUNCTION_CODES}:"
            f"{cluster.name}:{self.name}:{endpoint_path}"
        )
        return {redis_key: self.retrieve_all_event_keys()}

    @staticmethod
    def endpoint_callable(**kwargs):
        """
        Placeholder; replaced at registration time with the real handler.
        """
        raise NotImplementedError("Endpoint callable method is not implemented")
+
+
class CategoryCluster:
    """A routed category: prefix, tags, its endpoints and page metadata.

    A cluster groups several ``MethodToEvent`` routes under one URL prefix and
    may nest sub-clusters. ``IS_CLIENT`` marks clusters that appear in the
    client-facing first-layer menu.
    """

    TAGS: list
    PREFIX: str
    # BUG FIX: was ``Dict["str", PageInfo]`` — the key type was the string
    # literal "str", not the type. Forward-reference strings also avoid eager
    # evaluation of sibling class names.
    PAGEINFO: Optional[Dict[str, "PageInfo"]]
    DESCRIPTION: str
    ENDPOINTS: dict[str, "MethodToEvent"]  # {"MethodToEvent": MethodToEvent, ...}
    SUBCATEGORY: Optional[List["CategoryCluster"]]  # [CategoryCluster, ...]
    MAPPING: Optional[List[Dict[str, Any]]]  # [{"key": "value"}, ...]
    INCLUDE_IN_SCHEMA: Optional[bool] = True
    IS_CLIENT: Optional[bool] = False

    def __init__(
        self,
        name: str,
        tags: list,
        prefix: str,
        description: str,
        endpoints: dict[str, "MethodToEvent"],
        sub_category: list,
        mapping: Optional[List[Dict[str, Any]]] = None,
        pageinfo: Optional[Dict[str, "PageInfo"]] = None,
        include_in_schema: Optional[bool] = True,
        is_client: Optional[bool] = False,
    ):
        self.NAME = name
        self.TAGS = tags
        self.PREFIX = prefix
        self.PAGEINFO = pageinfo
        self.DESCRIPTION = description
        self.ENDPOINTS = endpoints or {}
        self.SUBCATEGORY = sub_category or []
        self.INCLUDE_IN_SCHEMA = include_in_schema
        self.MAPPING = mapping
        self.IS_CLIENT = is_client

    @property
    def is_clickable(self):
        """True when the cluster has sub-clusters (renders as an expandable menu)."""
        return bool(self.SUBCATEGORY)

    @property
    def is_client(self):
        return self.IS_CLIENT

    @property
    def name(self):
        return self.NAME

    def get_redis_cluster_index_value(self):
        """
        RedisCategoryKeys.CLUSTER_2_METHOD_EVENT
        Returns a bidirectional index: each endpoint name -> this cluster's
        name, plus this cluster's name -> the list of endpoint names.
        """
        list_endpoints = [endpoint.name for endpoint in self.ENDPOINTS.values()]
        dict_cluster_2_method = {
            endpoint_name: self.name for endpoint_name in list_endpoints
        }
        dict_cluster_2_method[self.name] = list_endpoints
        return dict_cluster_2_method

    def retrieve_all_function_codes(self):
        """
        Retrieves all function codes by iterating over the events list.
        """
        all_function_codes = []
        for event_method in self.ENDPOINTS.values():
            all_function_codes.extend(
                str(event_key) for event_key in event_method.EVENTS.keys()
            )
        return all_function_codes

    def retrieve_redis_value(self) -> Dict:
        """
        Create Redis Key and Value from function codes
        Key(CLUSTER_FUNCTION_CODES:ClusterToMethod) : Value(PAGE_INFO, [FUNCTION_CODE, ...])
        """
        return {
            f"{RedisCategoryKeys.CLUSTER_FUNCTION_CODES}:{self.name}": self.retrieve_all_function_codes()
        }

    def retrieve_page_info(self):
        """Serialise PAGEINFO (a mapping of PageInfo or a single PageInfo).

        Returns None when no page info is attached.
        """
        page_infos = {}
        if isinstance(self.PAGEINFO, dict):
            for page_key, page_info in dict(self.PAGEINFO).items():
                # PageInfo.as_dict is a property, so getattr yields its value;
                # entries without a (truthy) as_dict are skipped.
                if page_info_dict := getattr(page_info, "as_dict", None):
                    page_infos[page_key] = page_info_dict
            return {"prefix": self.PREFIX, "mapping": self.MAPPING, **page_infos}
        if hasattr(self.PAGEINFO, "as_dict"):
            return {"prefix": self.PREFIX, "mapping": self.MAPPING, **self.PAGEINFO.as_dict}
        return None
+
+
class LanguageModels:
    """Base descriptor for a localisable site component (default: a table).

    Subclasses are expected to set ``SITE_URL`` and ``PAGE_INFO``;
    ``STATIC_PATH`` defaults to the module-level constant.
    """

    SITE_URL: str
    COMPONENT: str = "Table"
    PREFIX_URL: str = ""
    PAGE_INFO: dict
    STATIC_PATH: str = STATIC_PATH

    def as_dict(self):
        """Serialise the descriptor, prefixing SITE_URL with the static path."""
        site_url = f"/{self.STATIC_PATH}{self.SITE_URL}"
        return {
            "SITE_URL": site_url,
            "COMPONENT": self.COMPONENT,
            "PREFIX_URL": self.PREFIX_URL,
            "PAGE_INFO": self.PAGE_INFO,
        }
+
+
+DefaultClusterName = "site"
\ No newline at end of file
diff --git a/Events/Engine/set_defaults/category_cluster_models.py b/Events/Engine/set_defaults/category_cluster_models.py
new file mode 100644
index 0000000..cb2acc0
--- /dev/null
+++ b/Events/Engine/set_defaults/category_cluster_models.py
@@ -0,0 +1,46 @@
+from Events.Engine import CategoryCluster
+
+
class CategoryBulk:
    """Pairs an imported ``CategoryCluster`` with the name it was exported under.

    BUG FIX: the annotation was ``CategoryCluster = None`` — an implicit
    Optional that was also evaluated eagerly at definition time. A string
    forward reference with an explicit ``| None`` keeps type checkers happy
    without requiring the name at import time.
    """

    def __init__(
        self,
        category_cluster: "CategoryCluster | None" = None,
        name: str = "",
    ):
        self.category_cluster = category_cluster
        self.name = name
+
+
class CategoryClusterController:
    """Collects ``CategoryBulk`` entries imported from cluster packages.

    The registry is class-level on purpose: every controller instance shares
    the same accumulated list of imported clusters.
    """

    imports_dict: list[CategoryBulk] = []

    @property
    def imports(self):
        """All ``CategoryBulk`` entries registered so far."""
        return self.imports_dict

    @classmethod
    def import_all_category_clusters(cls, category_clusters):
        """Register every cluster exported via ``__all__`` of *category_clusters*.

        Raises:
            ValueError: when the module does not declare ``__all__``.
        """
        if not hasattr(category_clusters, "__all__"):
            raise ValueError(
                f"Given module {str(category_clusters)} does not have __all__ attribute"
            )
        for exported_name in [str(item) for item in category_clusters.__all__]:
            # CategoryCluster which represent api routers for each category
            bulk = CategoryBulk(
                category_cluster=getattr(category_clusters, exported_name, None),
                name=exported_name,
            )
            cls.imports_dict.append(bulk)

    @classmethod
    def as_dict(cls):
        """Map each registered cluster name to its ``CategoryCluster`` object."""
        return {bulk.name: bulk.category_cluster for bulk in cls.imports_dict}
+
+
+cluster_controller = CategoryClusterController()
diff --git a/Events/Engine/set_defaults/prepare_redis_items.py b/Events/Engine/set_defaults/prepare_redis_items.py
new file mode 100644
index 0000000..3e3a26b
--- /dev/null
+++ b/Events/Engine/set_defaults/prepare_redis_items.py
@@ -0,0 +1,108 @@
+from ApiLayers.AllConfigs.Redis.configs import (
+ RedisCategoryKeys,
+ RedisCategoryPageInfoKeysAction,
+)
+
+
class PrepareRedisItems:
    """Accumulates the key/value payloads that will be written to Redis.

    The ``*_KEY`` attributes are static Redis key prefixes and stay on the
    class (``SetItems2Redis`` reads ``PrepareRedisItems.PAGE_INFO_KEY``
    directly). BUG FIX: the mutable ``*_VALUE`` accumulators are now created
    per instance in ``__init__`` — as class attributes every instance shared
    the same containers, so a second preparation run silently inherited and
    kept growing the first run's state.
    """

    MENU_FIRST_LAYER_KEY: str = RedisCategoryKeys.MENU_FIRST_LAYER
    CLUSTER_INDEX_KEY: str = RedisCategoryKeys.CLUSTER_INDEX
    CLUSTER_FUNCTION_CODES_KEY: str = RedisCategoryKeys.CLUSTER_FUNCTION_CODES
    METHOD_FUNCTION_CODES_KEY: str = RedisCategoryKeys.METHOD_FUNCTION_CODES
    ENDPOINT2CLASS_KEY: str = RedisCategoryKeys.ENDPOINT2CLASS
    PAGE_INFO_KEY: str = RedisCategoryPageInfoKeysAction.page_index

    def __init__(self) -> None:
        # Fresh accumulators per preparation run.
        self.MENU_FIRST_LAYER_VALUE: set[str] = set()
        self.CLUSTER_INDEX_VALUE: dict = {}
        self.CLUSTER_FUNCTION_CODES_VALUE: dict = {}
        self.METHOD_FUNCTION_CODES_VALUE: dict = {}
        self.ENDPOINT2CLASS_VALUE: dict = {}
        self.PAGE_INFO_VALUE: dict = {}

    @property
    def as_dict(self):
        """Snapshot of all accumulated payloads keyed by their Redis key prefix."""
        return {
            self.MENU_FIRST_LAYER_KEY: list(self.MENU_FIRST_LAYER_VALUE),
            self.CLUSTER_INDEX_KEY: self.CLUSTER_INDEX_VALUE,
            self.CLUSTER_FUNCTION_CODES_KEY: self.CLUSTER_FUNCTION_CODES_VALUE,
            self.METHOD_FUNCTION_CODES_KEY: self.METHOD_FUNCTION_CODES_VALUE,
            self.ENDPOINT2CLASS_KEY: self.ENDPOINT2CLASS_VALUE,
            self.PAGE_INFO_KEY: self.PAGE_INFO_VALUE,
        }
+
+
class DecoratorModule:
    """Helpers for applying endpoint decorators and introspecting the result."""

    @staticmethod
    def get_all_decorators(func):
        """
        Get all decorators of a function, excluding the original function itself.
        Returns a list of decorator functions in the order they were applied.
        """
        decorators = []
        current_func = func
        original_qualname = getattr(func, "__qualname__", "")

        # Walk the functools.wraps chain (__wrapped__); each wrapper level may
        # close over the decorator callable(s) that produced it.
        # NOTE(review): comparing by __qualname__ can both miss decorators and
        # pick up unrelated closure cells — confirm this heuristic is adequate.
        while hasattr(current_func, "__wrapped__"):
            if hasattr(current_func, "__closure__") and current_func.__closure__:
                for cell in current_func.__closure__:
                    decorator = cell.cell_contents
                    # Only add if it's a callable and not the original function
                    if (
                        callable(decorator)
                        and getattr(decorator, "__qualname__", "") != original_qualname
                    ):
                        decorators.append(decorator)
            current_func = current_func.__wrapped__
        return list(
            dict.fromkeys(decorators)
        )  # Remove duplicates while preserving order

    @staticmethod
    def get_actual_decorators(method_endpoint):
        # Decorators declared on the route descriptor, excluding any entry that
        # is (by qualname) the endpoint function itself.
        original_qualname = getattr(
            method_endpoint.endpoint_callable, "__qualname__", ""
        )
        actual_decorators = [
            d
            for d in method_endpoint.DECORATORS_LIST or []
            if callable(d) and getattr(d, "__qualname__", "") != original_qualname
        ]
        return actual_decorators

    @classmethod
    def apply_decorators(cls, method_endpoint):
        """Wrap the endpoint callable with its declared decorators.

        Returns the qualnames of the decorators detected on the wrapped
        callable (callers use these to spot middleware-protected endpoints).
        """
        # Get the original function and its qualname
        function_callable = method_endpoint.endpoint_callable
        # Filter out the original function and apply decorators
        actual_decorators = cls.get_actual_decorators(method_endpoint)

        # Apply decorators in reverse order (to match @ syntax behavior)
        for decorator in reversed(actual_decorators):
            try:
                function_callable = decorator(function_callable)
            except Exception as e:
                # Best-effort: a failing decorator is reported, not fatal.
                print(
                    f"Warning: Failed to apply decorator {decorator.__qualname__}: {str(e)}"
                )

        method_endpoint.endpoint_callable = function_callable

        # Get the final list of applied decorators (for debugging)
        applied_decorators = cls.get_all_decorators(method_endpoint.endpoint_callable)
        applied_decorators_qualname = [
            getattr(d, "__qualname__", str(d)) for d in applied_decorators
        ]
        if applied_decorators:
            print(
                f"Applied decorators for {method_endpoint.name}:",
                applied_decorators_qualname,
            )
        return applied_decorators_qualname

    @classmethod
    def list_qualname(cls, method_endpoint_list):
        # Qualnames for a list of callables (empty string when absent).
        return [
            getattr(method_endpoint, "__qualname__", "")
            for method_endpoint in method_endpoint_list
        ]
diff --git a/Events/Engine/set_defaults/run.py b/Events/Engine/set_defaults/run.py
new file mode 100644
index 0000000..6da490b
--- /dev/null
+++ b/Events/Engine/set_defaults/run.py
@@ -0,0 +1,17 @@
+from Events.AllEvents.events_file import events_list
+
+from .category_cluster_models import cluster_controller
+
+
def get_cluster_controller_group():
    """Load every cluster module from ``events_list`` into the shared controller."""
    for events_module in events_list:
        cluster_controller.import_all_category_clusters(events_module)
    return cluster_controller
+
+
+"""
+prepare_routing = PrepareRouting(cluster_controller_group=cluster_controller)
+prepare_events = PrepareEvents(cluster_controller_group=cluster_controller)
+print(set_items_2_redis)
+print(prepare_routing)
+"""
diff --git a/Events/Engine/set_defaults/setClusters.py b/Events/Engine/set_defaults/setClusters.py
new file mode 100644
index 0000000..71a5aca
--- /dev/null
+++ b/Events/Engine/set_defaults/setClusters.py
@@ -0,0 +1,226 @@
+from typing import Any
+
+from ApiLayers.ApiServices.Cluster.create_router import (
+ CreateRouterFromCluster,
+ CreateEndpointFromCluster,
+)
+from ApiLayers.AllConfigs.Redis.configs import (
+ RedisCategoryKeys,
+ RedisCategoryPageInfoKeys,
+)
+from Events.Engine.abstract_class import CategoryCluster
+from Services.Redis.Actions.actions import RedisActions
+from Services.Redis.Models.cluster import RedisList
+
+from .prepare_redis_items import DecoratorModule, PrepareRedisItems
+from .category_cluster_models import CategoryClusterController
+
+
class PrepareRouting(DecoratorModule):
    """Builds FastAPI routers and endpoint registrations for every imported
    category cluster.

    BUG FIX: the router/endpoint accumulators are now per-instance. As class
    attributes (the previous form) every ``PrepareRouting`` instance shared
    the same lists, so instantiating the class twice duplicated all routers
    and endpoints.
    """

    def __init__(self, cluster_controller_group: CategoryClusterController):
        # Per-instance accumulators (name-mangled; exposed via properties).
        self.__routers_list: list[Any] = []
        self.__endpoints_list: list[Any] = []
        self.__safe_endpoint_list: list[Any] = []
        self.cluster_controller_group = cluster_controller_group
        self.prepare_needs()

    def __str__(self):
        return f"\nPrepared Routing:\n\n{self.routers}\n\n{self.endpoints}\n\n{self.safe_endpoints}\n"

    @property
    def routers(self):
        """All routers created so far."""
        return self.__routers_list

    @property
    def endpoints(self):
        """Every registered endpoint."""
        return self.__endpoints_list

    @property
    def safe_endpoints(self):
        """Endpoints protected by a middleware decorator."""
        return self.__safe_endpoint_list

    def create_endpoints(self, cluster: CategoryCluster, created_router):
        """Register every endpoint of *cluster* on *created_router*."""
        for method_endpoint in list(cluster.ENDPOINTS.values()):
            # Filter out the original function and apply decorators
            applied_decorators_qualname = self.apply_decorators(method_endpoint)
            # Register the endpoint with FastAPI router
            create_endpoint = CreateEndpointFromCluster(
                router=created_router, method_endpoint=method_endpoint
            )
            created_router = create_endpoint.router
            # Endpoints wrapped by the auth middleware are tracked separately.
            if "MiddlewareModule" in applied_decorators_qualname:
                self.__safe_endpoint_list.append(method_endpoint)
            self.__endpoints_list.append(method_endpoint)

    def create_router(self, cluster: CategoryCluster):
        """Create (and record) one router for *cluster*."""
        created_router = CreateRouterFromCluster(
            prefix=cluster.PREFIX,
            tags=cluster.TAGS,
            include_in_schema=cluster.INCLUDE_IN_SCHEMA,
        )
        self.__routers_list.append(created_router.router)
        return created_router.router

    def prepare_needs(self):
        """Create a router plus its endpoints for every imported cluster."""
        for cluster_control in self.cluster_controller_group.imports:
            cluster = cluster_control.category_cluster
            created_router = self.create_router(cluster)
            self.create_endpoints(cluster, created_router)
+
+
class PrepareEvents(DecoratorModule):
    """Collects every Redis payload (menus, cluster indexes, function codes,
    endpoint mappings) from the imported clusters into a ``PrepareRedisItems``
    accumulator."""

    def __init__(self, cluster_controller_group: CategoryClusterController):
        self.cluster_controller_group = cluster_controller_group
        self.valid_redis_items: PrepareRedisItems = PrepareRedisItems()
        self.prepare_needs()
        self.prepare_page_info()

    def prepare_page_info(self):
        """
        [SAVE]REDIS => PAGE_MENU_INDEX:PAGE_URL= {...PageInfo}
        """
        items = self.valid_redis_items
        for bulk in self.cluster_controller_group.imports:
            cluster = bulk.category_cluster
            page_info = cluster.retrieve_page_info()
            if page_info:
                items.PAGE_INFO_VALUE[
                    f"{items.PAGE_INFO_KEY}:{cluster.name}"
                ] = page_info

    def prepare_needs(self):
        """Accumulate menu, index, function-code and endpoint mappings."""
        items = self.valid_redis_items
        for bulk in self.cluster_controller_group.imports:
            cluster = bulk.category_cluster
            # [SAVE]REDIS => MENU_FIRST_LAYER = [ClusterToMethod, ...]
            # (only client-facing clusters appear in the first-layer menu)
            if cluster.is_client:
                items.MENU_FIRST_LAYER_VALUE.add(cluster.name)
            # [SAVE]REDIS => CLUSTER_INDEX = {cluster: [methods...], method: cluster}
            items.CLUSTER_INDEX_VALUE.update(
                cluster.get_redis_cluster_index_value()
            )
            # [SAVE]REDIS => CLUSTER_FUNCTION_CODES:<cluster> = (FUNCTION_CODE, ...)
            codes_key = f"{items.CLUSTER_FUNCTION_CODES_KEY}:{cluster.name}"
            items.CLUSTER_FUNCTION_CODES_VALUE[codes_key] = tuple(
                cluster.retrieve_all_function_codes()
            )

            for method_endpoint in cluster.ENDPOINTS.values():
                # [SAVE]REDIS => ENDPOINT2CLASS maps both directions:
                # "<cluster>:<method>" -> path and path -> "<cluster>:<method>"
                endpoint_path = f"{cluster.PREFIX}{method_endpoint.URL}"
                qualified_name = f"{cluster.name}:{method_endpoint.name}"
                items.ENDPOINT2CLASS_VALUE[qualified_name] = endpoint_path
                items.ENDPOINT2CLASS_VALUE[endpoint_path] = qualified_name
                # [SAVE]REDIS => METHOD_FUNCTION_CODES:... = [FUNCTION_CODE, ...]
                items.METHOD_FUNCTION_CODES_VALUE.update(
                    method_endpoint.retrieve_redis_value(cluster=cluster)
                )
+
+
class SetItems2Redis:
    """Flushes previously stored category keys and writes the freshly prepared
    payloads from ``PrepareEvents`` into Redis."""

    # Human-readable log of everything written; exposed through __str__.
    std_out: str = ""

    def __init__(self, prepare_events: PrepareEvents):
        self.prepare_events = prepare_events
        self.set_items()

    def __str__(self):
        return f"\nSetItems2Redis:\n\n{self.std_out}"

    @staticmethod
    def _delete_annotated_keys(key_container) -> None:
        """Delete every Redis key prefixed by an annotated attribute name.

        NOTE(review): this iterates annotation *names* (not their values), and
        the ``isinstance(..., str)`` guard only triggers for string-ized
        annotations — confirm the attribute names really match the stored
        Redis key prefixes.
        """
        for attr_name, annotation in key_container.__annotations__.items():
            if isinstance(annotation, str):
                continue
            RedisActions.delete(list_keys=[f"{attr_name}*"])

    def set_items(self):
        """Write all prepared payloads to Redis and record them in ``std_out``."""
        dict_prep = self.prepare_events.valid_redis_items.as_dict

        # Drop stale keys before rewriting (both key containers share the logic).
        self._delete_annotated_keys(RedisCategoryKeys)
        self._delete_annotated_keys(RedisCategoryPageInfoKeys)

        # Save MENU_FIRST_LAYER to Redis
        redis_list = RedisList(redis_key=RedisCategoryKeys.MENU_FIRST_LAYER)
        RedisActions.set_json(
            list_keys=redis_list.to_list(),
            value=dict_prep.get(RedisCategoryKeys.MENU_FIRST_LAYER),
        )
        self.std_out += f"{RedisCategoryKeys.MENU_FIRST_LAYER}: {dict_prep.get(RedisCategoryKeys.MENU_FIRST_LAYER)}\n"

        # Save CLUSTER_INDEX to Redis
        redis_list = RedisList(redis_key=RedisCategoryKeys.CLUSTER_INDEX)
        RedisActions.set_json(
            list_keys=redis_list.to_list(),
            value=dict_prep.get(RedisCategoryKeys.CLUSTER_INDEX),
        )
        self.std_out += f"\n{RedisCategoryKeys.CLUSTER_INDEX}: {dict_prep.get(RedisCategoryKeys.CLUSTER_INDEX)}\n"

        # Save CLUSTER_FUNCTION_CODES to Redis by iterating over the dict
        for redis_key, redis_value in dict_prep.get(
            RedisCategoryKeys.CLUSTER_FUNCTION_CODES
        ).items():
            redis_list = RedisList(redis_key=redis_key)
            RedisActions.set_json(
                list_keys=redis_list.to_list(), value=list(redis_value)
            )
        self.std_out += f"\n{RedisCategoryKeys.CLUSTER_FUNCTION_CODES}: {dict_prep.get(RedisCategoryKeys.CLUSTER_FUNCTION_CODES)}\n"

        # Save METHOD_FUNCTION_CODES to Redis by iterating over the dict
        for redis_key, redis_value in dict_prep.get(
            RedisCategoryKeys.METHOD_FUNCTION_CODES
        ).items():
            redis_list = RedisList(redis_key=redis_key)
            RedisActions.set_json(
                list_keys=redis_list.to_list(), value=list(redis_value)
            )
        self.std_out += f"\n{RedisCategoryKeys.METHOD_FUNCTION_CODES}: {dict_prep.get(RedisCategoryKeys.METHOD_FUNCTION_CODES)}\n"

        # Save ENDPOINT2CLASS to Redis by iterating over the dict
        for redis_key, redis_value in dict_prep.get(
            RedisCategoryKeys.ENDPOINT2CLASS
        ).items():
            # BUG FIX: the key previously ended with a stray "\n", producing
            # Redis keys with a trailing newline character.
            redis_list = RedisList(
                redis_key=f"{RedisCategoryKeys.ENDPOINT2CLASS}:{redis_key}"
            )
            RedisActions.set_json(list_keys=redis_list.to_list(), value=redis_value)
        self.std_out += f"\n{RedisCategoryKeys.ENDPOINT2CLASS}: {dict_prep.get(RedisCategoryKeys.ENDPOINT2CLASS)}\n"

        # Mark every category index as freshly rebuilt.
        RedisActions.set_json(
            list_keys=[f"{RedisCategoryKeys.REBUILD}"],
            value={
                f"{RedisCategoryKeys.MENU_FIRST_LAYER}": True,
                f"{RedisCategoryKeys.CLUSTER_INDEX}": True,
                f"{RedisCategoryKeys.CLUSTER_FUNCTION_CODES}": True,
                f"{RedisCategoryKeys.METHOD_FUNCTION_CODES}": True,
                f"{RedisCategoryKeys.ENDPOINT2CLASS}": True,
            },
        )

        # Save per-page PageInfo payloads (their keys are already fully qualified).
        for redis_key, redis_value in dict_prep.get(
            PrepareRedisItems.PAGE_INFO_KEY
        ).items():
            redis_list = RedisList(redis_key=redis_key)
            RedisActions.set_json(list_keys=redis_list.to_list(), value=redis_value)
diff --git a/Events/TemplateServiceApi/__init__.py b/Events/TemplateServiceApi/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/TemplateServiceApi/endpoint/endpoints.py b/Events/TemplateServiceApi/endpoint/endpoints.py
new file mode 100644
index 0000000..b623bc8
--- /dev/null
+++ b/Events/TemplateServiceApi/endpoint/endpoints.py
@@ -0,0 +1,52 @@
+from typing import TYPE_CHECKING, Dict, Any, Union
+
+from ApiEvents.base_request_model import DictRequestModel, EndpointBaseRequestModel
+from ApiEvents.abstract_class import (
+ RouteFactoryConfig,
+ EndpointFactoryConfig,
+ endpoint_wrapper,
+)
+
+if TYPE_CHECKING:
+ from fastapi import Request, HTTPException, status, Body
+
+from ApiValidations.Custom.token_objects import EmployeeTokenObject, OccupantTokenObject
+
+
+# Type aliases for common types
+
+prefix = ""
+
+
+@endpoint_wrapper(f"{prefix}")
+async def authentication_select_company_or_occupant_type(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Select company or occupant type.
+ """
+ auth_dict = authentication_select_company_or_occupant_type.auth
+ return {}
+
+
# Route/endpoint factory configuration for this template service.
# NOTE(review): all fields are blank placeholders — fill in name/tags/summary
# (and a real endpoint_function) before wiring this module into a live router.
_CONFIG = RouteFactoryConfig(
    name="",
    prefix=prefix,
    tags=[""],
    include_in_schema=True,
    endpoints=[
        EndpointFactoryConfig(
            url_prefix=prefix,
            url_endpoint="/",
            url_of_endpoint="/",
            endpoint="/",
            method="POST",
            summary="",
            description="",
            is_auth_required=True,  # Needs token_dict
            is_event_required=False,
            endpoint_function=lambda: "",
        ),
    ],
).as_dict()
diff --git a/Events/TemplateServiceApi/endpoint/eventFile.py b/Events/TemplateServiceApi/endpoint/eventFile.py
new file mode 100644
index 0000000..afb03fe
--- /dev/null
+++ b/Events/TemplateServiceApi/endpoint/eventFile.py
@@ -0,0 +1,19 @@
+"""
+ request models.
+"""
+
+from typing import TYPE_CHECKING, Dict, Any, Literal, Optional, TypedDict, Union
+from pydantic import BaseModel, Field, model_validator, RootModel, ConfigDict
+from ApiEvents.base_request_model import BaseRequestModel, DictRequestModel
+from ApiValidations.Custom.token_objects import EmployeeTokenObject, OccupantTokenObject
+from ApiValidations.Request.base_validations import ListOptions
+from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from Schemas.identity.identity import (
+ AddressPostcode,
+ Addresses,
+ RelationshipEmployee2PostCode,
+)
+
+
+if TYPE_CHECKING:
+ from fastapi import Request
diff --git a/Events/TemplateServiceApi/route_configs.py b/Events/TemplateServiceApi/route_configs.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/__init__.py b/Events/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Events/base_request_model.py b/Events/base_request_model.py
new file mode 100644
index 0000000..a9de05e
--- /dev/null
+++ b/Events/base_request_model.py
@@ -0,0 +1,126 @@
+"""
+Base request models for API endpoints.
+
+This module provides base request models that can be used across different endpoints
+to ensure consistent request handling and validation.
+"""
+
+from typing import Union, Optional, Any
+from pydantic import BaseModel, Field
+
+from ApiLayers.ApiValidations.Custom.token_objects import (
+ EmployeeTokenObject,
+ OccupantTokenObject,
+)
+from ApiLayers.ApiValidations.Custom.wrapper_contexts import AuthContext, EventContext
+from ApiLayers.AllConfigs.Token.config import Auth
+from Services.PostgresDb.Models.pagination import (
+ Pagination,
+ PaginationResult,
+ QueryOptions,
+)
+
+
+TokenDictType = Union[EmployeeTokenObject, OccupantTokenObject]
+
+STATIC_PATH = "events"
+
class EndpointBaseRequestModel(BaseModel):
    """Envelope for event-dispatch requests.

    The raw payload lives under ``data`` and is re-validated downstream by the
    resolved event's ``REQUEST_VALIDATOR``.
    """

    data: dict = Field(..., description="Data to be sent with the request")

    class Config:
        # Example payload surfaced in the generated OpenAPI schema.
        # NOTE(review): conventionally this would be {"example": {...}} —
        # confirm the bare mapping is intended.
        json_schema_extra = {"data": {"key": "value"}}
+
+
class ContextRetrievers:
    """Reads the auth/event context objects that decorators attach to endpoints."""

    is_auth: bool = False
    is_event: bool = False
    key_: str = ""
    RESPONSE_VALIDATOR = None

    def __init__(self, func, statics: Optional[str] = None):
        # NOTE(review): `statics` is accepted but never stored — confirm
        # whether callers rely on it before removing the parameter.
        self.func = func
        for attr_name, flag_name in (
            ("auth_context", "is_auth"),
            ("event_context", "is_event"),
        ):
            if hasattr(self.func, attr_name):
                setattr(self, flag_name, True)
                self.key_ = attr_name
                break

    @property
    def key(self) -> Union[str, None]:
        """Name of the context attribute found on the function ("" when none)."""
        return self.key_

    @property
    def context(self) -> Union[AuthContext, EventContext, None]:
        """The context object itself, or None when the function carries neither."""
        return getattr(self.func, self.key, None)

    @property
    def request(self) -> Union[Any, None]:
        """The request stored on the context, if any."""
        return getattr(self.context, "request", None)

    @property
    def token(self) -> TokenDictType:
        """The token object stored on the context (auth or event contexts only)."""
        if not (self.is_auth or self.is_event):
            return None
        return getattr(self.context, "auth", None)

    @property
    def url(self) -> Union[str, None]:
        """The URL recorded on the context, if any."""
        return getattr(self.context, "url", None)

    @property
    def code(self) -> Union[str, None]:
        """The event code — only meaningful for event contexts."""
        return getattr(self.context, "code", None) if self.is_event else None

    @property
    def base(self) -> Optional[dict[str, Any]]:
        """The base request model recorded on the context, if any."""
        return getattr(self.context, "base", None)

    @property
    def get_token(self) -> Optional[str]:
        """The raw access token pulled from the request headers, if present."""
        headers = getattr(self.request, "headers", {})
        return headers.get(Auth.ACCESS_TOKEN_TAG, None)
+
+
class BaseRouteModel:
    """Mixin giving route-level classes a slot for the active ContextRetrievers.

    BUG FIX: the annotation was ``Union[ContextRetrievers]`` — a one-member
    Union (i.e. just ``ContextRetrievers``) with a ``None`` default. The field
    is genuinely optional, and a string forward reference avoids eager
    evaluation of the sibling class name.
    """

    # Populated by the endpoint dispatchers at request time.
    context_retriever: Optional["ContextRetrievers"] = None
+
+
class ListOptionsBase:
    """Binds a table, raw list options and an optional query model for pagination."""

    def __init__(self, table, list_options, model_query: Optional[BaseModel] = None):
        self.table = table
        self.list_options = list_options
        self.model_query = model_query

    def init_list_options(self) -> tuple:
        """Open a session on the table and build ``QueryOptions`` for it.

        Returns:
            A ``(db_session, query_options)`` tuple.
        """
        session = self.table.new_session()
        options = QueryOptions(
            table=self.table, data=self.list_options, model_query=self.model_query
        )
        return session, options

    def paginated_result(
        self, records, response_model: Optional[BaseModel] = None
    ) -> PaginationResult:
        """Wrap *records* in a ``PaginationResult``, applying stored list options."""
        pagination = Pagination(data=records)
        overrides = None
        if isinstance(self.list_options, dict):
            overrides = self.list_options
        elif isinstance(self.list_options, BaseModel):
            overrides = self.list_options.model_dump()
        if overrides is not None:
            pagination.change(**overrides)
        return PaginationResult(
            data=records, pagination=pagination, response_model=response_model
        )
diff --git a/README copy.md b/README copy.md
new file mode 100644
index 0000000..6d0340d
--- /dev/null
+++ b/README copy.md
@@ -0,0 +1,171 @@
+# WAG Management API Service v4
+
+This service provides a comprehensive API for managing WAG (Wide Area Gateway) systems. It handles configuration, monitoring, and control operations for WAG devices in the network infrastructure.
+
+## Quick Start
+
+To run the tests using Docker Compose:
+
+```bash
+docker compose -f docker-compose.test.yml up --build
+```
+
+## Project Structure
+
+### Core Services and Components
+
+- `Services/` - Core service implementations
+ - `PostgresDb/` - PostgreSQL database operations and models
+ - `MongoDb/` - MongoDB operations and document models
+ - `Redis/` - Redis caching and session management
+ - `Email/` - Email notification service
+
+- `ApiValidations/` - Request validation and data sanitization
+ - Input validation rules
+ - Data sanitization filters
+ - Schema validation middleware
+
+- `ApiLibrary/` - Common utilities and helper functions
+ - Shared functions and utilities
+ - Common constants and configurations
+ - Helper classes and decorators
+
+### Configuration and Settings
+
+- `AllConfigs/` - Configuration management
+ - Database configurations
+ - Service settings
+ - Environment-specific configs
+
+- `Schemas/` - Data models and schema definitions
+ - Request/Response models
+ - Database schemas
+ - API contract definitions
+
+### Docker and Deployment
+
+- `DockerApiServices/` - API service Docker configurations
+ - API service Dockerfile
+ - Service dependencies
+
+- `DockerStoreServices/` - Storage service Docker configurations
+ - Database service Dockerfiles
+ - Storage service dependencies
+
+### Error Handling and Events
+
+- `ErrorHandlers/` - Error handling and exception management
+ - Custom exceptions
+ - Error handlers
+ - Exception middleware
+
+- `ApiEvents/` - Event handling and processing
+ - Event listeners
+ - Event dispatchers
+ - Message queue handlers
+
+### Language and Testing
+
+- `LanguageModels/` - Localization and language support
+ - Language files
+ - Translation models
+ - i18n configurations
+
+- `Ztest/` - Test suite
+ - Unit tests
+ - Integration tests
+ - Test fixtures and utilities
+
+### Additional Components
+
+- `scripts/` - Utility scripts and tools
+ - Deployment scripts
+ - Database migrations
+ - Maintenance utilities
+
+
+> **TODO:** Evaluate adopting Arcjet on the frontend for request protection (rate limiting, bot detection).
+
+## Architecture Overview
+
+This project follows a layered architecture with three core services:
+
+### Core Services
+1. **AuthServiceApi**: Authentication and authorization
+2. **EventServiceApi**: Event processing and management
+3. **ValidationServiceApi**: Input and schema validation
+
+### System Layers
+- **Dependencies Layer**: External dependencies and requirements
+- **Application Layer**: Core business logic
+- **Service Layer**: API implementations
+- **Test Layer**: Testing infrastructure
+- **Dev Layer**: Development utilities
+- **Root Layer**: Configuration and documentation
+
+For detailed architecture documentation, see [System Architecture](docs/architecture/system_architecture.md).
+
+## Suggested Improvements
+
+The following improvements have been identified to enhance the system:
+
+### Infrastructure & Deployment
+- **Service Isolation**: Containerize core services (Auth, Event, Validation)
+- **API Gateway**: Add gateway layer for rate limiting, versioning, and security
+- **Monitoring**: Implement distributed tracing and metrics collection
+- **Configuration**: Move to centralized configuration service with feature flags
+
+### Performance & Scaling
+- **Caching Strategy**: Enhance Redis implementation with invalidation patterns
+- **Database**: Implement sharding and read replicas
+- **Event System**: Add message queue (RabbitMQ/Kafka) for event handling
+- **Background Jobs**: Implement job processing and connection pooling
+
+### Security & Reliability
+- **API Security**: Implement key rotation and rate limiting
+- **Error Handling**: Add centralized tracking and circuit breakers
+- **Testing**: Add integration tests and performance benchmarks
+- **Audit**: Implement comprehensive audit logging
+
+### Development Experience
+- **Code Organization**: Move to domain-driven design
+- **Documentation**: Add OpenAPI/Swagger docs and ADRs
+- **Internationalization**: Create translation management system
+- **Developer Tools**: Enhance debugging and monitoring capabilities
+
+For implementation details of these improvements, see:
+- [Architecture Documentation](docs/architecture/system_architecture.md)
+- [Detailed Improvement Plan](docs/improvements/detailed_improvement_plan.md) with code examples and implementation timeline
+
+## Development Notes with AI-Windsurf
+
+This project uses AI-Windsurf's intelligent note-taking system to maintain comprehensive development documentation. Notes are automatically organized and stored in the `/docs/notes/` directory.
+
+### Note Structure
+- **Topic-based Organization**: Notes are categorized by topics (architecture, features, bugs, etc.)
+- **Date Tracking**: All notes include creation and modification dates
+- **Automatic Linking**: Related components and documentation are automatically cross-referenced
+- **Action Items**: Tasks and next steps are tracked within notes
+
+### Accessing Notes
+1. Browse the `/docs/notes/` directory
+2. Notes are stored in markdown format for easy reading
+3. Each note follows a standard template with:
+ - Overview
+ - Technical details
+ - Related components
+ - Action items
+
+### Adding Notes
+Work with AI-Windsurf to add notes by:
+1. Describing what you want to document
+2. Mentioning related components or previous notes
+3. Specifying any action items or follow-ups
+
+The AI will automatically:
+- Create properly formatted note files
+- Link related documentation
+- Update existing notes when relevant
+- Track development progress
+
+For detailed documentation about specific components, refer to the corresponding files in the `/docs/` directory.
\ No newline at end of file
diff --git a/README.md b/README.md
index 3fcb992..9863769 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,3 @@
-# wag-managment-api-service-latest
+# wag-managment-api-service-version-5
-wag-managment-api-service-latest
\ No newline at end of file
+wag-managment-api-service-version-5
\ No newline at end of file
diff --git a/Scratches/endpoint.py b/Scratches/endpoint.py
new file mode 100644
index 0000000..949e649
--- /dev/null
+++ b/Scratches/endpoint.py
@@ -0,0 +1,172 @@
+from dataclasses import dataclass, field
+from typing import List, Optional, Dict, Any, Callable
+from fastapi import APIRouter, Depends, HTTPException
+from pydantic import BaseModel
+from typing import Union
+
+
+# First, let's create our category models
+class CategoryBase(BaseModel):
+    """Common category fields shared by the create/response models."""
+
+    id: str
+    name: str
+    description: Optional[str] = None
+
+
+class CategoryCreate(CategoryBase):
+    """Creation payload; ``parent_id`` links to an existing category."""
+
+    parent_id: Optional[str] = None
+
+
+class CategoryResponse(CategoryBase):
+    """Category returned to clients, including its recursive children."""
+
+    children: List["CategoryResponse"] = []
+    parent_id: Optional[str] = None
+
+
+# Category data structure for handling the hierarchy
+@dataclass
+class CategoryNode:
+    """In-memory tree node for the category hierarchy."""
+
+    id: str
+    name: str
+    description: Optional[str]
+    parent_id: Optional[str] = None
+    children: List["CategoryNode"] = field(default_factory=list)
+
+
+# Category Service for managing the hierarchy
+class CategoryService:
+    """In-memory store managing the category hierarchy."""
+
+    def __init__(self):
+        # Flat index of every node by id; the tree shape lives in the
+        # nodes' children lists.
+        self.categories: Dict[str, CategoryNode] = {}
+
+    def add_category(self, category: CategoryCreate) -> CategoryNode:
+        """Create a node and attach it to its parent if one is registered."""
+        node = CategoryNode(
+            id=category.id,
+            name=category.name,
+            description=category.description,
+            parent_id=category.parent_id,
+        )
+
+        self.categories[category.id] = node
+
+        # NOTE(review): a parent_id that is not (yet) registered is
+        # silently ignored -- the node is stored but never linked into a
+        # parent's children. Confirm this is intended.
+        if category.parent_id and category.parent_id in self.categories:
+            parent = self.categories[category.parent_id]
+            parent.children.append(node)
+
+        return node
+
+    def get_category_tree(self, category_id: str) -> Optional[CategoryNode]:
+        """Return the node (with its subtree) or None if unknown."""
+        return self.categories.get(category_id)
+
+    def get_category_path(self, category_id: str) -> List[CategoryNode]:
+        """Return the path from the root down to ``category_id`` (root first)."""
+        path = []
+        current = self.categories.get(category_id)
+
+        while current:
+            path.append(current)
+            current = (
+                self.categories.get(current.parent_id) if current.parent_id else None
+            )
+
+        return list(reversed(path))
+
+
+# Factory for creating category endpoints
+class CategoryEndpointFactory:
+    """Builds route/endpoint configs and handlers for the category API.
+
+    NOTE(review): ``RouteFactoryConfig`` and ``EndpointFactoryConfig`` are
+    neither defined nor imported in this scratch module -- presumably they
+    come from a project factory module; confirm the import before running.
+    """
+
+    def __init__(self, category_service: CategoryService):
+        self.category_service = category_service
+
+    def create_route_config(self, base_prefix: str) -> RouteFactoryConfig:
+        """Describe the three category endpoints (create, tree, path)."""
+        endpoints = [
+            # Create category endpoint
+            EndpointFactoryConfig(
+                url_prefix=base_prefix,
+                url_endpoint="/categories",
+                url_of_endpoint=f"{base_prefix}/categories",
+                endpoint="/categories",
+                method="POST",
+                summary="Create new category",
+                description="Create a new category with optional parent",
+                endpoint_function=self.create_category,
+                request_model=CategoryCreate,
+                response_model=CategoryResponse,
+                is_auth_required=True,
+            ),
+            # Get category tree endpoint
+            EndpointFactoryConfig(
+                url_prefix=base_prefix,
+                url_endpoint="/categories/{category_id}",
+                url_of_endpoint=f"{base_prefix}/categories/{{category_id}}",
+                endpoint="/categories/{category_id}",
+                method="GET",
+                summary="Get category tree",
+                description="Get category and its children",
+                endpoint_function=self.get_category_tree,
+                response_model=CategoryResponse,
+                is_auth_required=True,
+            ),
+            # Get category path endpoint
+            EndpointFactoryConfig(
+                url_prefix=base_prefix,
+                url_endpoint="/categories/{category_id}/path",
+                url_of_endpoint=f"{base_prefix}/categories/{{category_id}}/path",
+                endpoint="/categories/{category_id}/path",
+                method="GET",
+                summary="Get category path",
+                description="Get full path from root to this category",
+                endpoint_function=self.get_category_path,
+                response_model=List[CategoryResponse],
+                is_auth_required=True,
+            ),
+        ]
+
+        return RouteFactoryConfig(
+            name="categories",
+            tags=["Categories"],
+            prefix=base_prefix,
+            endpoints=endpoints,
+        )
+
+    async def create_category(self, category: CategoryCreate) -> CategoryResponse:
+        """Create a category and return it in response form."""
+        node = self.category_service.add_category(category)
+        return self._convert_to_response(node)
+
+    async def get_category_tree(self, category_id: str) -> CategoryResponse:
+        """Return the category subtree, or 404 if the id is unknown."""
+        node = self.category_service.get_category_tree(category_id)
+        if not node:
+            raise HTTPException(status_code=404, detail="Category not found")
+        return self._convert_to_response(node)
+
+    async def get_category_path(self, category_id: str) -> List[CategoryResponse]:
+        """Return the root-to-node path, or 404 if the id is unknown."""
+        path = self.category_service.get_category_path(category_id)
+        if not path:
+            raise HTTPException(status_code=404, detail="Category not found")
+        return [self._convert_to_response(node) for node in path]
+
+    def _convert_to_response(self, node: CategoryNode) -> CategoryResponse:
+        """Recursively map a CategoryNode tree to CategoryResponse."""
+        return CategoryResponse(
+            id=node.id,
+            name=node.name,
+            description=node.description,
+            parent_id=node.parent_id,
+            children=[self._convert_to_response(child) for child in node.children],
+        )
+
+
+# Usage example
+def create_category_router(base_prefix: str = "/api/v1") -> APIRouter:
+    """Build an APIRouter from the factory's route configuration.
+
+    NOTE(review): ``endpoint.extra_options`` is assumed to exist on each
+    endpoint config object -- confirm against EndpointFactoryConfig.
+    """
+    category_service = CategoryService()
+    factory = CategoryEndpointFactory(category_service)
+    route_config = factory.create_route_config(base_prefix)
+
+    router = APIRouter(prefix=route_config.prefix, tags=route_config.tags)
+
+    for endpoint in route_config.endpoints:
+        router.add_api_route(
+            path=endpoint.endpoint,
+            endpoint=endpoint.endpoint_function,
+            methods=[endpoint.method],
+            response_model=endpoint.response_model,
+            summary=endpoint.summary,
+            description=endpoint.description,
+            **endpoint.extra_options,
+        )
+
+    return router
diff --git a/Services/Email/__init__.py b/Services/Email/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Services/Email/send_email.py b/Services/Email/send_email.py
new file mode 100644
index 0000000..53b0add
--- /dev/null
+++ b/Services/Email/send_email.py
@@ -0,0 +1,34 @@
+from redmail import EmailSender
+
+from ApiLayers.AllConfigs.Email.configs import EmailConfig
+from ApiLayers.AllConfigs.Email.email_send_model import EmailSendModel
+
+# Module-level sender configured once from EmailConfig and reused by
+# every EmailService.send_email call.
+email_sender = EmailSender(**EmailConfig.as_dict())
+
+
+class EmailService:
+    """Thin wrapper around redmail's EmailSender."""
+
+    @classmethod
+    def send_email(cls, params: EmailSendModel) -> bool:
+        """Send an email described by ``params``.
+
+        Returns True on success; False when sending is disabled or the
+        send raised.
+
+        NOTE(review): confirm the final ``return False`` sits outside the
+        ``finally`` block -- a return inside ``finally`` would swallow the
+        success path's ``return True``.
+        """
+        if not EmailConfig.EMAIL_SEND:
+            print("Email sending is disabled", params)
+            return False
+        try:
+            email_sender.connect()
+            # NOTE(review): the recipient list is hard-coded here and the
+            # recipients from ``params`` are ignored -- looks like leftover
+            # debug routing; confirm before release.
+            receivers = ["karatay@mehmetkaratay.com.tr"]
+            email_sender.send(
+                subject=params.subject,
+                receivers=receivers,
+                text=params.text + f" : Gonderilen [{str(receivers)}]",
+                html=params.html,
+                cc=params.cc,
+                bcc=params.bcc,
+                headers=params.headers or {},
+                attachments=params.attachments or {},
+            )
+            return True
+        except Exception as e:
+            print(f"Error raised at email send :{e}")
+        finally:
+            email_sender.close()
+        # Reached only when the try block did not return (an exception was
+        # caught and logged above).
+        return False
diff --git a/Services/MongoDb/Models/action_models/base.py b/Services/MongoDb/Models/action_models/base.py
new file mode 100644
index 0000000..d5b6df5
--- /dev/null
+++ b/Services/MongoDb/Models/action_models/base.py
@@ -0,0 +1,115 @@
+"""Base models for MongoDB documents."""
+
+from typing import Any, Dict, Optional, Union
+from bson import ObjectId
+from pydantic import BaseModel, ConfigDict, Field, model_validator
+from pydantic.json_schema import JsonSchemaValue
+from pydantic_core import CoreSchema, core_schema
+
+from ApiLibrary import system_arrow
+
+
+class PyObjectId(ObjectId):
+    """Custom type for handling MongoDB ObjectId in Pydantic models."""
+
+    @classmethod
+    def __get_pydantic_core_schema__(
+        cls,
+        _source_type: Any,
+        _handler: Any,
+    ) -> CoreSchema:
+        """Define the core schema for PyObjectId."""
+        return core_schema.json_or_python_schema(
+            json_schema=core_schema.str_schema(),
+            # Python inputs: accept an existing ObjectId instance, or a
+            # string routed through validate().
+            python_schema=core_schema.union_schema(
+                [
+                    core_schema.is_instance_schema(ObjectId),
+                    core_schema.chain_schema(
+                        [
+                            core_schema.str_schema(),
+                            core_schema.no_info_plain_validator_function(cls.validate),
+                        ]
+                    ),
+                ]
+            ),
+            # Serialize to str, but only when dumping to JSON
+            # (when_used="json"); Python-mode dumps keep the ObjectId.
+            serialization=core_schema.plain_serializer_function_ser_schema(
+                lambda x: str(x),
+                return_schema=core_schema.str_schema(),
+                when_used="json",
+            ),
+        )
+
+    @classmethod
+    def validate(cls, value: Any) -> ObjectId:
+        """Validate and convert the value to ObjectId.
+
+        Raises:
+            ValueError: if ``value`` is not a valid ObjectId.
+        """
+        if not ObjectId.is_valid(value):
+            raise ValueError("Invalid ObjectId")
+        return ObjectId(value)
+
+    @classmethod
+    def __get_pydantic_json_schema__(
+        cls,
+        _core_schema: CoreSchema,
+        _handler: Any,
+    ) -> JsonSchemaValue:
+        """Define the JSON schema for PyObjectId."""
+        return {"type": "string"}
+
+
+class MongoBaseModel(BaseModel):
+    """Base model for all MongoDB documents."""
+
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        # NOTE(review): json_encoders is deprecated in Pydantic v2 and the
+        # PyObjectId serializer already covers str conversion -- confirm
+        # this entry is still needed.
+        json_encoders={ObjectId: str},
+        populate_by_name=True,
+        from_attributes=True,
+        validate_assignment=True,
+        # "allow" keeps unknown input fields on the instance; get_extra()
+        # relies on this.
+        extra="allow",
+    )
+
+    # Optional _id field that will be ignored in create operations
+    id: Optional[PyObjectId] = Field(None, alias="_id")
+
+    def get_extra(self, field_name: str, default: Any = None) -> Any:
+        """Safely get extra field value.
+
+        Args:
+            field_name: Name of the extra field to retrieve
+            default: Default value to return if field doesn't exist
+
+        Returns:
+            Value of the extra field if it exists, otherwise the default value
+        """
+        return getattr(self, field_name, default)
+
+    def as_dict(self) -> Dict[str, Any]:
+        """Convert model to dictionary including all fields and extra fields.
+
+        Returns:
+            Dict containing all model fields and extra fields with proper type conversion
+        """
+        return self.model_dump(by_alias=True)
+
+
+class MongoDocument(MongoBaseModel):
+    """Base document model with timestamps (epoch seconds as float)."""
+
+    created_at: float = Field(default_factory=lambda: system_arrow.now().timestamp())
+    updated_at: float = Field(default_factory=lambda: system_arrow.now().timestamp())
+
+    @model_validator(mode="before")
+    @classmethod
+    def prevent_protected_fields(cls, data: Any) -> Any:
+        """Prevent user from setting protected fields like _id and timestamps.
+
+        NOTE(review): this runs on every dict validation, so it would also
+        strip ``_id`` and reset both timestamps when re-validating a
+        document loaded from MongoDB -- confirm that is intended.
+        """
+        if isinstance(data, dict):
+            # Remove protected fields from input
+            data.pop("_id", None)
+            data.pop("created_at", None)
+            data.pop("updated_at", None)
+
+            # Set timestamps
+            data["created_at"] = system_arrow.now().timestamp()
+            data["updated_at"] = system_arrow.now().timestamp()
+
+        return data
diff --git a/Services/MongoDb/Models/action_models/domain.py b/Services/MongoDb/Models/action_models/domain.py
new file mode 100644
index 0000000..cac9162
--- /dev/null
+++ b/Services/MongoDb/Models/action_models/domain.py
@@ -0,0 +1,76 @@
+"""
+MongoDB Domain Models.
+
+This module provides Pydantic models for domain management,
+including domain history and access details.
+"""
+
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+from pydantic import BaseModel, Field, ConfigDict, model_validator
+
+from ApiLibrary import system_arrow
+from Services.MongoDb.Models.action_models.base import MongoBaseModel, MongoDocument
+
+
+class DomainData(MongoBaseModel):
+    """Model for domain data.
+
+    Attributes:
+        user_uu_id: Unique identifier of the user
+        main_domain: Primary domain
+        other_domains_list: List of additional domains
+        extra_data: Additional domain-related data
+    """
+
+    user_uu_id: str = Field(..., description="User's unique identifier")
+    main_domain: str = Field(..., description="Primary domain")
+    other_domains_list: List[str] = Field(
+        default_factory=list, description="List of additional domains"
+    )
+    # Accepts the "extraData" alias from raw Mongo documents; defaults to
+    # {} rather than None despite the Optional annotation.
+    extra_data: Optional[Dict[str, Any]] = Field(
+        default_factory=dict,
+        alias="extraData",
+        description="Additional domain-related data",
+    )
+
+    model_config = ConfigDict(
+        from_attributes=True, populate_by_name=True, validate_assignment=True
+    )
+
+
+class DomainDocument(MongoDocument):
+    """Model for domain-related documents."""
+
+    data: DomainData = Field(..., description="Domain data")
+
+    def update_main_domain(self, new_domain: str) -> None:
+        """Update the main domain and move current to history.
+
+        No-op when ``new_domain`` equals the current main domain.
+
+        Args:
+            new_domain: New main domain to set
+        """
+        if self.data.main_domain and self.data.main_domain != new_domain:
+            # Avoid duplicating the old main domain in the history list.
+            if self.data.main_domain not in self.data.other_domains_list:
+                self.data.other_domains_list.append(self.data.main_domain)
+            self.data.main_domain = new_domain
+
+
+class DomainDocumentCreate(MongoDocument):
+    """Model for creating new domain documents."""
+
+    # Timestamps and _id handling come from MongoDocument's validator.
+    data: DomainData = Field(..., description="Initial domain data")
+
+    model_config = ConfigDict(
+        from_attributes=True, populate_by_name=True, validate_assignment=True
+    )
+
+
+class DomainDocumentUpdate(MongoDocument):
+    """Model for updating existing domain documents."""
+
+    # Full replacement payload for the stored domain data.
+    data: DomainData = Field(..., description="Updated domain data")
+
+    model_config = ConfigDict(
+        from_attributes=True, populate_by_name=True, validate_assignment=True
+    )
diff --git a/Services/MongoDb/Models/action_models/password.py b/Services/MongoDb/Models/action_models/password.py
new file mode 100644
index 0000000..e08707a
--- /dev/null
+++ b/Services/MongoDb/Models/action_models/password.py
@@ -0,0 +1,49 @@
+"""
+MongoDB Password Models.
+
+This module provides Pydantic models for password management,
+including password history and access details.
+"""
+
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+from pydantic import Field
+
+from ApiLibrary import system_arrow
+from Services.MongoDb.Models.action_models.base import MongoBaseModel, MongoDocument
+
+
+class PasswordHistoryDetail(MongoBaseModel):
+    """Model for password history details."""
+
+    # When the password event happened.
+    timestamp: datetime
+    ip_address: Optional[str] = Field(None, alias="ipAddress")
+    user_agent: Optional[str] = Field(None, alias="userAgent")
+    # Free-form location payload; schema is not enforced here.
+    location: Optional[Dict[str, Any]] = None
+
+
+class PasswordHistoryData(MongoBaseModel):
+    """Model for password history data."""
+
+    # Previously used password entries -- presumably hashes, not
+    # plaintext; confirm with the writer side.
+    password_history: List[str] = Field([], alias="passwordHistory")
+    access_history_detail: Dict[str, PasswordHistoryDetail] = Field(
+        default_factory=dict, alias="accessHistoryDetail"
+    )
+
+
+class PasswordDocument(MongoDocument):
+    """Model for password-related documents (adds timestamps via base)."""
+
+    data: PasswordHistoryData
+
+
+class PasswordDocumentCreate(MongoBaseModel):
+    """Model for creating new password documents (no timestamp fields)."""
+
+    data: PasswordHistoryData = Field(..., description="Initial password data")
+
+
+class PasswordDocumentUpdate(MongoBaseModel):
+    """Model for updating existing password documents."""
+
+    # Full replacement payload for the stored password history data.
+    data: PasswordHistoryData
diff --git a/Services/MongoDb/Models/actions.py b/Services/MongoDb/Models/actions.py
new file mode 100644
index 0000000..51ea587
--- /dev/null
+++ b/Services/MongoDb/Models/actions.py
@@ -0,0 +1,128 @@
+"""
+This module contains the MongoActions class, which provides methods for
+performing actions on the MongoDB database.
+Api Mongo functions in general retrieves 2 params which are
+companyUUID and Storage Reason
+"""
+
+from typing import Optional, Dict, Any, List
+
+from pymongo import MongoClient
+from pymongo.collection import Collection
+
+from Services.MongoDb.Models.mixins import (
+ MongoUpdateMixin,
+ MongoInsertMixin,
+ MongoFindMixin,
+ MongoDeleteMixin,
+ MongoAggregateMixin,
+)
+from Services.MongoDb.Models.exceptions import (
+ MongoDocumentNotFoundError,
+ MongoDuplicateKeyError,
+ MongoValidationError,
+ MongoConnectionError,
+)
+
+
+class MongoActions(
+    MongoUpdateMixin,
+    MongoInsertMixin,
+    MongoFindMixin,
+    MongoDeleteMixin,
+    MongoAggregateMixin,
+):
+    """Main MongoDB actions class that inherits all CRUD operation mixins.
+
+    This class provides a unified interface for all MongoDB operations while
+    managing collections based on company UUID and storage reason. The
+    wrapper methods below simply bind the current collection into the
+    mixin implementations.
+    """
+
+    def __init__(
+        self, client: MongoClient, database: str, company_uuid: str, storage_reason: str
+    ):
+        """Initialize MongoDB actions with client and collection info.
+
+        Args:
+            client: MongoDB client
+            database: Database name to use
+            company_uuid: Company UUID for collection naming
+            storage_reason: Storage reason for collection naming
+        """
+        self._client = client
+        self._database = database
+        self._company_uuid = company_uuid
+        self._storage_reason = storage_reason
+        self._collection = None
+        self.use_collection(storage_reason)
+
+    def use_collection(self, storage_reason: str) -> None:
+        """Switch to a different collection.
+
+        Collection names follow "<company_uuid>*<storage_reason>".
+        NOTE(review): confirm "*" as a separator is acceptable for your
+        MongoDB naming conventions/tooling.
+
+        Args:
+            storage_reason: New storage reason for collection naming
+        """
+        collection_name = f"{self._company_uuid}*{storage_reason}"
+        self._collection = self._client[self._database][collection_name]
+
+    @property
+    def collection(self) -> Collection:
+        """Get current MongoDB collection."""
+        return self._collection
+
+    def insert_one(self, document: Dict[str, Any]):
+        """Insert a single document."""
+        return super().insert_one(self.collection, document)
+
+    def insert_many(self, documents: List[Dict[str, Any]]):
+        """Insert multiple documents."""
+        return super().insert_many(self.collection, documents)
+
+    def find_one(
+        self, filter_query: Dict[str, Any], projection: Optional[Dict[str, Any]] = None
+    ):
+        """Find a single document."""
+        return super().find_one(self.collection, filter_query, projection)
+
+    def find_many(
+        self,
+        filter_query: Dict[str, Any],
+        projection: Optional[Dict[str, Any]] = None,
+        sort: Optional[List[tuple]] = None,
+        limit: Optional[int] = None,
+        skip: Optional[int] = None,
+    ):
+        """Find multiple documents."""
+        return super().find_many(
+            self.collection, filter_query, projection, sort, limit, skip
+        )
+
+    def update_one(
+        self,
+        filter_query: Dict[str, Any],
+        update_data: Dict[str, Any],
+        upsert: bool = False,
+    ):
+        """Update a single document."""
+        return super().update_one(self.collection, filter_query, update_data, upsert)
+
+    def update_many(
+        self,
+        filter_query: Dict[str, Any],
+        update_data: Dict[str, Any],
+        upsert: bool = False,
+    ):
+        """Update multiple documents."""
+        return super().update_many(self.collection, filter_query, update_data, upsert)
+
+    def delete_one(self, filter_query: Dict[str, Any]):
+        """Delete a single document."""
+        return super().delete_one(self.collection, filter_query)
+
+    def delete_many(self, filter_query: Dict[str, Any]):
+        """Delete multiple documents."""
+        return super().delete_many(self.collection, filter_query)
+
+    def aggregate(self, pipeline: List[Dict[str, Any]]):
+        """Execute an aggregation pipeline."""
+        return super().aggregate(self.collection, pipeline)
diff --git a/Services/MongoDb/Models/exception_handlers.py b/Services/MongoDb/Models/exception_handlers.py
new file mode 100644
index 0000000..d3754ed
--- /dev/null
+++ b/Services/MongoDb/Models/exception_handlers.py
@@ -0,0 +1,188 @@
+"""
+Exception handlers for MongoDB operations.
+
+This module provides exception handlers for MongoDB-related errors,
+converting them to appropriate HTTP responses.
+"""
+
+from typing import Callable, Any
+from fastapi import Request, status
+from fastapi.responses import JSONResponse
+from pymongo.errors import PyMongoError, DuplicateKeyError, ConnectionFailure
+
+from ApiLibrary.common.line_number import get_line_number_for_error
+from Services.MongoDb.Models.exceptions import (
+ MongoBaseException,
+ MongoConnectionError,
+ MongoDocumentNotFoundError,
+ MongoValidationError,
+ MongoDuplicateKeyError,
+ PasswordHistoryError,
+ PasswordReuseError,
+ PasswordHistoryLimitError,
+ InvalidPasswordDetailError,
+)
+from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi
+
+
+def handle_mongo_errors(func: Callable) -> Callable:
+    """Decorator to handle MongoDB operation errors (async callables only).
+
+    NOTE(review): ``to_http_exception()`` raises HTTPExceptionApi itself
+    instead of returning an exception object, so control never reaches the
+    surrounding ``raise`` statements below -- confirm and simplify.
+    NOTE(review): consider ``functools.wraps`` to preserve the wrapped
+    function's metadata.
+
+    Args:
+        func: Function to wrap with error handling
+
+    Returns:
+        Wrapped function with error handling
+    """
+
+    async def wrapper(*args, **kwargs) -> Any:
+        try:
+            return await func(*args, **kwargs)
+        except ConnectionFailure as e:
+            raise MongoConnectionError(
+                message=str(e), details={"error_type": "connection_failure"}
+            ).to_http_exception()
+        except DuplicateKeyError as e:
+            # NOTE(review): e.details may be None on some driver paths --
+            # guard before calling .get(); confirm against pymongo docs.
+            raise MongoDuplicateKeyError(
+                collection=e.details.get("namespace", "unknown"),
+                key_pattern=e.details.get("keyPattern", {}),
+            ).to_http_exception()
+        except PyMongoError as e:
+            raise MongoBaseException(
+                message=str(e), details={"error_type": "pymongo_error"}
+            ).to_http_exception()
+        except Exception as e:
+            raise HTTPExceptionApi(
+                lang="en",
+                error_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                loc=get_line_number_for_error(),
+                sys_msg=str(e),
+            )
+
+    return wrapper
+
+
+async def mongo_base_exception_handler(
+    request: Request, exc: MongoBaseException
+) -> JSONResponse:
+    """Handle base MongoDB exceptions.
+
+    NOTE(review): ``exc.to_http_exception()`` raises internally rather
+    than returning, so the response content is never built -- confirm.
+
+    Args:
+        request: FastAPI request
+        exc: MongoDB base exception
+
+    Returns:
+        JSON response with error details
+    """
+    return JSONResponse(
+        status_code=exc.status_code, content={"error": exc.to_http_exception()}
+    )
+
+
+async def mongo_connection_error_handler(
+    request: Request, exc: MongoConnectionError
+) -> JSONResponse:
+    """Handle MongoDB connection errors.
+
+    NOTE(review): ``exc.to_http_exception()`` raises internally rather
+    than returning, so the response content is never built -- confirm.
+
+    Args:
+        request: FastAPI request
+        exc: MongoDB connection error
+
+    Returns:
+        JSON response with connection error details
+    """
+    return JSONResponse(
+        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+        content={"error": exc.to_http_exception()},
+    )
+
+
+async def mongo_document_not_found_handler(
+    request: Request, exc: MongoDocumentNotFoundError
+) -> JSONResponse:
+    """Handle document not found errors.
+
+    NOTE(review): ``exc.to_http_exception()`` raises internally rather
+    than returning, so the response content is never built -- confirm.
+
+    Args:
+        request: FastAPI request
+        exc: Document not found error
+
+    Returns:
+        JSON response with not found error details
+    """
+    return JSONResponse(
+        status_code=status.HTTP_404_NOT_FOUND,
+        content={"error": exc.to_http_exception()},
+    )
+
+
+async def mongo_validation_error_handler(
+    request: Request, exc: MongoValidationError
+) -> JSONResponse:
+    """Handle validation errors.
+
+    NOTE(review): ``exc.to_http_exception()`` raises internally rather
+    than returning, so the response content is never built -- confirm.
+
+    Args:
+        request: FastAPI request
+        exc: Validation error
+
+    Returns:
+        JSON response with validation error details
+    """
+    return JSONResponse(
+        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+        content={"error": exc.to_http_exception()},
+    )
+
+
+async def mongo_duplicate_key_error_handler(
+    request: Request, exc: MongoDuplicateKeyError
+) -> JSONResponse:
+    """Handle duplicate key errors.
+
+    NOTE(review): ``exc.to_http_exception()`` raises internally rather
+    than returning, so the response content is never built -- confirm.
+
+    Args:
+        request: FastAPI request
+        exc: Duplicate key error
+
+    Returns:
+        JSON response with duplicate key error details
+    """
+    return JSONResponse(
+        status_code=status.HTTP_409_CONFLICT, content={"error": exc.to_http_exception()}
+    )
+
+
+async def password_history_error_handler(
+    request: Request, exc: PasswordHistoryError
+) -> JSONResponse:
+    """Handle password history errors (shared by all its subclasses).
+
+    NOTE(review): ``exc.to_http_exception()`` raises internally rather
+    than returning, so the response content is never built -- confirm.
+
+    Args:
+        request: FastAPI request
+        exc: Password history error
+
+    Returns:
+        JSON response with password history error details
+    """
+    return JSONResponse(
+        status_code=exc.status_code, content={"error": exc.to_http_exception()}
+    )
+
+
+def register_exception_handlers(app: Any) -> None:
+    """Register all MongoDB exception handlers with FastAPI app.
+
+    Args:
+        app: FastAPI application instance
+    """
+    app.add_exception_handler(MongoBaseException, mongo_base_exception_handler)
+    app.add_exception_handler(MongoConnectionError, mongo_connection_error_handler)
+    app.add_exception_handler(
+        MongoDocumentNotFoundError, mongo_document_not_found_handler
+    )
+    app.add_exception_handler(MongoValidationError, mongo_validation_error_handler)
+    app.add_exception_handler(MongoDuplicateKeyError, mongo_duplicate_key_error_handler)
+    app.add_exception_handler(PasswordHistoryError, password_history_error_handler)
+    # The three registrations below are subclasses of PasswordHistoryError
+    # mapped to the same handler; Starlette resolves handlers via the MRO,
+    # so these mainly document intent -- presumably redundant, confirm.
+    app.add_exception_handler(PasswordReuseError, password_history_error_handler)
+    app.add_exception_handler(PasswordHistoryLimitError, password_history_error_handler)
+    app.add_exception_handler(
+        InvalidPasswordDetailError, password_history_error_handler
+    )
diff --git a/Services/MongoDb/Models/exceptions.py b/Services/MongoDb/Models/exceptions.py
new file mode 100644
index 0000000..8197d4a
--- /dev/null
+++ b/Services/MongoDb/Models/exceptions.py
@@ -0,0 +1,146 @@
+"""
+Custom exceptions for MongoDB operations and password management.
+
+This module defines custom exceptions for handling various error cases in MongoDB
+operations and password-related functionality.
+"""
+
+from typing import Any, Dict, Optional
+from fastapi import HTTPException, status
+from ApiLibrary.common.line_number import get_line_number_for_error
+from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi
+
+
+class MongoBaseException(Exception):
+    """Base exception for MongoDB-related errors."""
+
+    def __init__(
+        self,
+        message: str,
+        status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
+        details: Optional[Dict[str, Any]] = None,
+    ):
+        self.message = message
+        self.status_code = status_code
+        self.details = details or {}
+        super().__init__(self.message)
+
+    def to_http_exception(self) -> HTTPException:
+        """Convert to FastAPI HTTPException.
+
+        NOTE(review): this *raises* HTTPExceptionApi instead of returning
+        an HTTPException, so the annotated return type is misleading and
+        callers that embed the "result" in response content never get a
+        value -- confirm intent.
+        """
+        raise HTTPExceptionApi(
+            lang="en",
+            error_code=self.status_code,
+            loc=get_line_number_for_error(),
+            sys_msg=self.message,
+        )
+
+
+class MongoConnectionError(MongoBaseException):
+    """Raised when there's an error connecting to MongoDB (maps to 503)."""
+
+    def __init__(
+        self,
+        message: str = "Failed to connect to MongoDB",
+        details: Optional[Dict[str, Any]] = None,
+    ):
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+            details=details,
+        )
+
+
+class MongoDocumentNotFoundError(MongoBaseException):
+    """Raised when a document is not found in MongoDB (maps to 404)."""
+
+    def __init__(
+        self,
+        collection: str,
+        filter_query: Dict[str, Any],
+        message: Optional[str] = None,
+    ):
+        # The failing filter is preserved in details for diagnostics.
+        message = message or f"Document not found in collection '{collection}'"
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_404_NOT_FOUND,
+            details={"collection": collection, "filter": filter_query},
+        )
+
+
+class MongoValidationError(MongoBaseException):
+    """Raised when document validation fails (maps to 422)."""
+
+    def __init__(self, message: str, field_errors: Optional[Dict[str, str]] = None):
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            details={"field_errors": field_errors or {}},
+        )
+
+
+class MongoDuplicateKeyError(MongoBaseException):
+    """Raised when trying to insert a document with a duplicate key (409)."""
+
+    def __init__(
+        self,
+        collection: str,
+        key_pattern: Dict[str, Any],
+        message: Optional[str] = None,
+    ):
+        message = message or f"Duplicate key error in collection '{collection}'"
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_409_CONFLICT,
+            details={"collection": collection, "key_pattern": key_pattern},
+        )
+
+
+class PasswordHistoryError(MongoBaseException):
+    """Base exception for password history-related errors (default 400)."""
+
+    def __init__(
+        self,
+        message: str,
+        status_code: int = status.HTTP_400_BAD_REQUEST,
+        details: Optional[Dict[str, Any]] = None,
+    ):
+        super().__init__(message, status_code, details)
+
+
+class PasswordReuseError(PasswordHistoryError):
+    """Raised when attempting to reuse a recent password (maps to 422)."""
+
+    def __init__(
+        self,
+        message: str = "Password was used recently",
+        history_limit: Optional[int] = None,
+    ):
+        # NOTE(review): the truthiness check drops a legitimate limit of 0;
+        # use "is not None" if 0 is a valid value.
+        details = {"history_limit": history_limit} if history_limit else None
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            details=details,
+        )
+
+
+class PasswordHistoryLimitError(PasswordHistoryError):
+    """Raised when password history limit is reached (maps to 409)."""
+
+    def __init__(self, limit: int, message: Optional[str] = None):
+        message = message or f"Password history limit of {limit} reached"
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_409_CONFLICT,
+            details={"limit": limit},
+        )
+
+
+class InvalidPasswordDetailError(PasswordHistoryError):
+    """Raised when password history detail is invalid (maps to 422)."""
+
+    def __init__(self, message: str, field_errors: Optional[Dict[str, str]] = None):
+        super().__init__(
+            message=message,
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            details={"field_errors": field_errors or {}},
+        )
diff --git a/Services/MongoDb/Models/mixins.py b/Services/MongoDb/Models/mixins.py
new file mode 100644
index 0000000..16fd056
--- /dev/null
+++ b/Services/MongoDb/Models/mixins.py
@@ -0,0 +1,171 @@
+"""
+MongoDB CRUD Operation Mixins.
+
+This module provides mixins for common MongoDB operations:
+1. Document creation (insert)
+2. Document retrieval (find)
+3. Document updates
+4. Document deletion
+5. Aggregation operations
+"""
+
+from typing import Any, Dict, List, Optional
+from functools import wraps
+
+from pymongo.collection import Collection
+from pymongo.errors import (
+ ConnectionFailure,
+ OperationFailure,
+ ServerSelectionTimeoutError,
+ PyMongoError,
+)
+
+from ApiLibrary.common.line_number import get_line_number_for_error
+from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi
+
+
def handle_mongo_errors(func):
    """Decorator that converts MongoDB driver errors into ``HTTPExceptionApi``.

    The wrapped callable runs unchanged; on failure the PyMongo exception is
    mapped onto the API error hierarchy:

    * ``ServerSelectionTimeoutError`` -> 504 Gateway Timeout
    * ``ConnectionFailure``           -> 503 Service Unavailable
    * ``OperationFailure``            -> 400 Bad Request
    * any other ``PyMongoError``      -> 500 Internal Server Error
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        # NOTE: ServerSelectionTimeoutError subclasses ConnectionFailure
        # (via AutoReconnect), so it must be caught *before* the generic
        # connection failure; otherwise the timeout branch is unreachable.
        except ServerSelectionTimeoutError:
            raise HTTPExceptionApi(
                error_code="HTTP_504_GATEWAY_TIMEOUT",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="MongoDB connection timed out",
            )
        except ConnectionFailure:
            raise HTTPExceptionApi(
                error_code="HTTP_503_SERVICE_UNAVAILABLE",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg="MongoDB connection failed",
            )
        except OperationFailure as e:
            # Server rejected the operation (bad command, auth, etc.).
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )
        except PyMongoError as e:
            # Catch-all for any other driver-level failure.
            raise HTTPExceptionApi(
                error_code="HTTP_500_INTERNAL_SERVER_ERROR",
                lang="en",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    return wrapper
+
+
class MongoInsertMixin:
    """Provides insert helpers that delegate to a pymongo ``Collection``."""

    @handle_mongo_errors
    def insert_one(self, collection: Collection, document: Dict[str, Any]):
        """Insert *document* and return the driver's ``InsertOneResult``."""
        return collection.insert_one(document)

    @handle_mongo_errors
    def insert_many(self, collection: Collection, documents: List[Dict[str, Any]]):
        """Insert *documents* in bulk and return the ``InsertManyResult``."""
        return collection.insert_many(documents)
+
+
class MongoFindMixin:
    """Provides read helpers: single-document lookup and paginated finds."""

    @handle_mongo_errors
    def find_one(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        projection: Optional[Dict[str, Any]] = None,
    ):
        """Return the first document matching *filter_query*, or ``None``."""
        return collection.find_one(filter_query, projection)

    @handle_mongo_errors
    def find_many(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        projection: Optional[Dict[str, Any]] = None,
        sort: Optional[List[tuple]] = None,
        limit: Optional[int] = None,
        skip: Optional[int] = None,
    ):
        """Run a filtered find and return the matching documents as a list.

        ``sort``, ``skip`` and ``limit`` are applied only when truthy,
        matching MongoDB's convention that 0 means "unset".
        """
        results = collection.find(filter_query, projection)
        if sort:
            results = results.sort(sort)
        if skip:
            results = results.skip(skip)
        if limit:
            results = results.limit(limit)
        return list(results)
+
+
class MongoUpdateMixin:
    """Provides update helpers for single and bulk document modification."""

    @handle_mongo_errors
    def update_one(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        update_data: Dict[str, Any],
        upsert: bool = False,
    ):
        """Apply *update_data* to the first matching document.

        When ``upsert`` is True, a new document is created if none matches.
        """
        return collection.update_one(filter_query, update_data, upsert=upsert)

    @handle_mongo_errors
    def update_many(
        self,
        collection: Collection,
        filter_query: Dict[str, Any],
        update_data: Dict[str, Any],
        upsert: bool = False,
    ):
        """Apply *update_data* to every matching document.

        When ``upsert`` is True, a new document is created if none matches.
        """
        return collection.update_many(filter_query, update_data, upsert=upsert)
+
+
class MongoDeleteMixin:
    """Provides delete helpers for single and bulk document removal."""

    @handle_mongo_errors
    def delete_one(self, collection: Collection, filter_query: Dict[str, Any]):
        """Remove the first document matching *filter_query*."""
        return collection.delete_one(filter_query)

    @handle_mongo_errors
    def delete_many(self, collection: Collection, filter_query: Dict[str, Any]):
        """Remove every document matching *filter_query*."""
        return collection.delete_many(filter_query)
+
+
class MongoAggregateMixin:
    """Provides a thin wrapper around collection-level aggregation."""

    @handle_mongo_errors
    def aggregate(self, collection: Collection, pipeline: List[Dict[str, Any]]):
        """Run *pipeline* on *collection* and return the result cursor."""
        return collection.aggregate(pipeline)
diff --git a/Services/MongoDb/Models/response.py b/Services/MongoDb/Models/response.py
new file mode 100644
index 0000000..68666fe
--- /dev/null
+++ b/Services/MongoDb/Models/response.py
@@ -0,0 +1,85 @@
+"""
+Response handler for MongoDB query results.
+
+This module provides a wrapper class for MongoDB query results,
+adding convenience methods for accessing data and managing query state.
+"""
+
+from typing import Any, Dict, List, Optional, TypeVar, Generic, Union
+from pymongo.cursor import Cursor
+
T = TypeVar("T")


class MongoResponse(Generic[T]):
    """
    Wrapper around a MongoDB query result.

    Results are materialised lazily: the cursor is consumed only the first
    time ``data`` (or anything built on it) is accessed, and then cached.

    Attributes:
        status: Whether the query succeeded.
        message: Human-readable status message.
        error: Error description when ``status`` is False.
    """

    def __init__(
        self,
        cursor: Optional["Cursor"] = None,
        first: bool = False,
        status: bool = True,
        message: str = "",
        error: Optional[str] = None,
        data: Optional[Union[List[T], T]] = None,
    ):
        """
        Args:
            cursor: pymongo cursor to consume lazily (optional).
            first: When True, ``data`` is the first matching document
                instead of the full list.
            status: Success flag reported by ``as_dict``.
            message: Informational message.
            error: Error description, if any.
            data: Pre-materialised results; bypasses the cursor when given.
        """
        self._cursor = cursor
        self._first = first
        self.status = status
        self.message = message
        self.error = error
        self._data: Optional[Union[List[T], T]] = data
        self._count: Optional[int] = None

    @property
    def data(self) -> Union[List[T], T, None]:
        """
        Materialise and cache the query results.

        Returns the first document when ``first=True`` (``None`` for an
        empty result set), otherwise the full list of documents.
        """
        if self._data is None and self._cursor is not None:
            results = list(self._cursor)
            if self._first:
                # An empty result set yields None rather than [], so callers
                # can distinguish "no match" from a matched empty value.
                self._data = results[0] if results else None
            else:
                self._data = results
        return self._data

    @property
    def count(self) -> int:
        """
        Total number of materialised results.

        ``Cursor.count()`` was removed in PyMongo 4.x, so counting is done
        on the materialised list (this consumes the cursor via ``data``).
        """
        if self._count is None:
            self._count = len(self.all)
        return self._count

    @property
    def all(self) -> List[T]:
        """All results as a list (a single document is wrapped in a list)."""
        data = self.data
        if isinstance(data, list):
            return data
        return [data] if data else []

    @property
    def first(self) -> Optional[T]:
        """The first result, or ``None`` when there are no results."""
        if self._first:
            return self.data
        return self.data[0] if self.data else None

    def as_dict(self) -> Dict[str, Any]:
        """Serialise the response (status, message, data, count, error)."""
        return {
            "status": self.status,
            "message": self.message,
            "data": self.data,
            "count": self.count,
            "error": self.error,
        }
diff --git a/Services/MongoDb/__init__.py b/Services/MongoDb/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Services/MongoDb/database.py b/Services/MongoDb/database.py
new file mode 100644
index 0000000..643244c
--- /dev/null
+++ b/Services/MongoDb/database.py
@@ -0,0 +1,192 @@
+"""
+MongoDB database connection and operations.
+
+This module provides MongoDB connection management with:
+1. Connection pooling
+2. Lifecycle management
+3. Error handling
+"""
+
from contextlib import contextmanager
from functools import wraps
from typing import Optional, Dict, Any, List, Union, Callable

from pymongo import MongoClient
from pymongo.cursor import Cursor
from pymongo.results import (
    DeleteResult,
    InsertManyResult,
    InsertOneResult,
    UpdateResult,
)
+
+from AllConfigs.NoSqlDatabase.configs import MongoConfig
+
+
class MongoInsertMixin:
    """Insert helpers; the host class must expose a ``collection`` attribute."""

    def insert_one(self, document: Dict[str, Any]) -> "InsertOneResult":
        """Insert a single document and return the driver result."""
        return self.collection.insert_one(document)

    def insert_many(self, documents: List[Dict[str, Any]]) -> "InsertManyResult":
        """Insert several documents in one round trip.

        Returns:
            The driver's ``InsertManyResult``. The previous annotation of
            ``List[InsertOneResult]`` was incorrect: ``Collection.insert_many``
            returns a single ``InsertManyResult``.
        """
        return self.collection.insert_many(documents)
+
+
class MongoFindMixin:
    """Read helpers; the host class must expose a ``collection`` attribute."""

    def find_one(self, filter_query: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Return the first document matching *filter_query*, or ``None``."""
        coll = self.collection
        return coll.find_one(filter_query)

    def find_many(self, filter_query: Dict[str, Any]) -> Cursor:
        """Return a lazy cursor over every document matching *filter_query*."""
        coll = self.collection
        return coll.find(filter_query)
+
+
class MongoUpdateMixin:
    """Update helpers; the host class must expose a ``collection`` attribute."""

    def update_one(
        self, filter_query: Dict[str, Any], update: Dict[str, Any]
    ) -> UpdateResult:
        """Apply *update* to the first document matching *filter_query*."""
        coll = self.collection
        return coll.update_one(filter_query, update)

    def update_many(
        self, filter_query: Dict[str, Any], update: Dict[str, Any]
    ) -> UpdateResult:
        """Apply *update* to every document matching *filter_query*."""
        coll = self.collection
        return coll.update_many(filter_query, update)
+
+
class MongoDeleteMixin:
    """Delete helpers; the host class must expose a ``collection`` attribute."""

    def delete_one(self, filter_query: Dict[str, Any]) -> DeleteResult:
        """Remove the first document matching *filter_query*."""
        coll = self.collection
        return coll.delete_one(filter_query)

    def delete_many(self, filter_query: Dict[str, Any]) -> DeleteResult:
        """Remove every document matching *filter_query*."""
        coll = self.collection
        return coll.delete_many(filter_query)
+
+
class MongoAggregateMixin:
    """Aggregation helper; the host class must expose a ``collection`` attribute."""

    def aggregate(self, pipeline: List[Dict[str, Any]]) -> Cursor:
        """Run *pipeline* against the collection and return the result cursor."""
        coll = self.collection
        return coll.aggregate(pipeline)
+
+
class MongoDBHandler(
    MongoInsertMixin,
    MongoFindMixin,
    MongoUpdateMixin,
    MongoDeleteMixin,
    MongoAggregateMixin,
):
    """Handler for MongoDB operations with connection management.

    Implemented as a process-wide singleton: every construction returns the
    same instance, and the pymongo client (which manages its own connection
    pool) is created once in ``__init__``.

    NOTE(review): the CRUD mixins above call ``self.collection``, which this
    class never defines — callers appear to use ``get_collection`` directly
    instead (see how_to.py); confirm the mixin methods are reachable.
    """

    # Singleton instance and shared client handle.
    _instance = None
    _client: Optional[MongoClient] = None

    def __new__(cls):
        """Implement singleton pattern for database connection."""
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        """Initialize MongoDB connection if not already initialized.

        Runs on every construction (singleton or not), but only builds a
        client when none is present — e.g. on first use or after ``close()``.
        """
        if not self._client:
            # Build connection options
            connection_kwargs = {
                "host": MongoConfig.URL,
                "maxPoolSize": 50,  # Maximum number of connections in the pool
                "minPoolSize": 10,  # Minimum number of connections in the pool
                "maxIdleTimeMS": 30000,  # Maximum time a connection can be idle (30 seconds)
                "waitQueueTimeoutMS": 2000,  # How long a thread will wait for a connection
                "serverSelectionTimeoutMS": 5000,  # How long to wait for server selection
            }
            self._client = MongoClient(**connection_kwargs)

            # Test connection
            # NOTE(review): ping raises on an unreachable server, so
            # constructing the module-level singleton below fails at import
            # time when MongoDB is down — confirm that is intended.
            self._client.admin.command("ping")

    def __enter__(self):
        """Context manager entry point."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit point - ensures connection is properly closed.

        NOTE(review): because the instance is a singleton, closing here also
        invalidates the client for every other user of ``mongodb``.
        """
        try:
            if self._client:
                self._client.close()
                self._client = None
        except Exception:
            # Silently pass any errors during shutdown
            pass
        return False  # Don't suppress any exceptions

    def close(self):
        """Close MongoDB connection.

        Safe to call repeatedly; a later ``MongoDBHandler()`` call will
        reconnect via ``__init__``.
        """
        try:
            if self._client:
                self._client.close()
                self._client = None
        except Exception:
            # Silently pass any errors during shutdown
            pass

    @property
    def client(self) -> MongoClient:
        """Get MongoDB client.

        NOTE(review): returns ``None`` after ``close()`` despite the
        annotation — callers should not cache this across a close.
        """
        return self._client

    def get_database(self, database_name: Optional[str] = None):
        """Get MongoDB database (defaults to ``MongoConfig.DATABASE_NAME``)."""
        db_name = database_name or MongoConfig.DATABASE_NAME
        return self._client[db_name]

    def get_collection(self, collection_name: str, database_name: Optional[str] = None):
        """Get MongoDB collection from the given (or default) database."""
        database = self.get_database(database_name)
        return database[collection_name]

    # Create a function to get the singleton instance
    @classmethod
    @contextmanager
    def get_mongodb(cls):
        """Get or create the MongoDB singleton instance as a context manager.

        The client is closed when the context exits; the next use reconnects.
        """
        instance = cls()
        try:
            yield instance
        finally:
            try:
                if instance._client:
                    instance._client.close()
                    instance._client = None
            except Exception:
                # Silently pass any errors during shutdown
                pass

    @classmethod
    def with_mongodb(cls, func: Callable):
        """Decorator to automatically handle MongoDB connection context.

        The handler instance is prepended to the wrapped function's
        positional arguments.

        Usage:
            @MongoDBHandler.with_mongodb
            def my_function(db, *args, **kwargs):
                # db is the MongoDB instance
                pass
        """

        @wraps(func)
        def wrapper(*args, **kwargs):
            with cls.get_mongodb() as db:
                return func(db, *args, **kwargs)

        return wrapper


# Create a singleton instance for backward compatibility
mongodb = MongoDBHandler()
diff --git a/Services/MongoDb/how_to.py b/Services/MongoDb/how_to.py
new file mode 100644
index 0000000..2adfbdb
--- /dev/null
+++ b/Services/MongoDb/how_to.py
@@ -0,0 +1,159 @@
+"""
+MongoDB Operations Examples
+
+This module provides practical examples of using MongoDB operations through our mixins.
+Each example demonstrates different aspects of CRUD operations and aggregation.
+"""
+
+import arrow
+from datetime import datetime
+
+from Services.MongoDb.database import MongoDBHandler
+
+
@MongoDBHandler.with_mongodb
def insert_examples(db) -> None:
    """Demonstrate single- and multi-document inserts."""
    users = db.get_collection("users")
    products = db.get_collection("products")

    # Insert one user document.
    new_user = {
        "username": "john_doe",
        "email": "john@example.com",
        "age": 30,
        "created_at": datetime.now(),
    }
    one_result = users.insert_one(new_user)
    print(f"Inserted user with ID: {one_result.inserted_id}")

    # Insert a small product catalogue in one round trip.
    catalog = [
        {"name": "Laptop", "price": 999.99, "stock": 50},
        {"name": "Mouse", "price": 29.99, "stock": 100},
        {"name": "Keyboard", "price": 59.99, "stock": 75},
    ]
    many_result = products.insert_many(catalog)
    print(f"Inserted {len(many_result.inserted_ids)} products")
+
+
@MongoDBHandler.with_mongodb
def find_examples(db) -> None:
    """Demonstrate single- and multi-document queries."""
    users = db.get_collection("users")
    products_coll = db.get_collection("products")

    # Look up one user by email.
    user = users.find_one({"email": "john@example.com"})
    print(f"Found user: {user}")

    # Materialise every product cheaper than $100.
    products = list(products_coll.find({"price": {"$lt": 100}}))
    print(f"Found {len(products)} products under $100")
+
+
@MongoDBHandler.with_mongodb
def update_examples(db) -> None:
    """Demonstrate targeted and bulk updates."""
    products = db.get_collection("products")

    # Reprice the laptop and adjust its stock level.
    single = products.update_one(
        {"name": "Laptop"}, {"$set": {"price": 899.99, "stock": 45}}
    )
    print(f"Updated {single.modified_count} laptop(s)")

    # Flag every product that is nearly sold out.
    bulk = products.update_many(
        {"stock": {"$lt": 10}}, {"$set": {"status": "low_stock"}}
    )
    print(f"Updated {bulk.modified_count} low stock products")
+
+
@MongoDBHandler.with_mongodb
def delete_examples(db) -> None:
    """Demonstrate single- and multi-document deletion."""
    users = db.get_collection("users")
    products = db.get_collection("products")

    # Remove a single user by email.
    removed_user = users.delete_one({"email": "john@example.com"})
    print(f"Deleted {removed_user.deleted_count} user")

    # Purge everything that is out of stock.
    removed_products = products.delete_many({"stock": 0})
    print(f"Deleted {removed_products.deleted_count} out-of-stock products")
+
+
@MongoDBHandler.with_mongodb
def aggregate_examples(db) -> None:
    """Demonstrate a grouping aggregation with sorting."""
    products = db.get_collection("products")

    # Average price and product count per category, priciest category first.
    stats_pipeline = [
        {
            "$group": {
                "_id": "$category",
                "avg_price": {"$avg": "$price"},
                "total_products": {"$sum": 1},
            }
        },
        {"$sort": {"avg_price": -1}},
    ]
    stats = products.aggregate(stats_pipeline)
    print("Category statistics:", list(stats))
+
+
@MongoDBHandler.with_mongodb
def complex_query_example(db) -> None:
    """Demonstrate a multi-stage pipeline: match, lookup, project, sort."""
    users = db.get_collection("users")

    # Active users with a purchase in the last 30 days, joined to their
    # orders collection and ranked by total spend.
    activity_pipeline = [
        {
            "$match": {
                "status": "active",
                "last_purchase": {
                    "$gte": arrow.now().shift(days=-30).datetime,
                },
            }
        },
        {
            "$lookup": {
                "from": "orders",
                "localField": "_id",
                "foreignField": "user_id",
                "as": "recent_orders",
            }
        },
        {
            "$project": {
                "username": 1,
                "email": 1,
                "total_orders": {"$size": "$recent_orders"},
                "total_spent": {"$sum": "$recent_orders.amount"},
            }
        },
        {"$sort": {"total_spent": -1}},
    ]
    top_spenders = users.aggregate(activity_pipeline)
    print("Active users with recent purchases:", list(top_spenders))
+
+
if __name__ == "__main__":
    # Run every demo in its original order.
    for demo in (
        insert_examples,
        find_examples,
        update_examples,
        delete_examples,
        aggregate_examples,
        complex_query_example,
    ):
        demo()
diff --git a/Services/PostgresDb/Models/core_alchemy.py b/Services/PostgresDb/Models/core_alchemy.py
new file mode 100644
index 0000000..a956259
--- /dev/null
+++ b/Services/PostgresDb/Models/core_alchemy.py
@@ -0,0 +1,149 @@
+from typing import Type, TypeVar
+
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.orm import Session
+
+from ApiLayers.ApiLibrary import get_line_number_for_error
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+
+
+# Type variable for class methods returning self
+T = TypeVar("T", bound="FilterAttributes")
+
+
class BaseAlchemyModel:
    """
    Controller of alchemy to database transactions.
    Query: Query object for model
    Session: Session object for model
    Actions: save, flush, rollback, commit

    NOTE(review): several classmethods read ``cls.lang`` and
    ``cls.is_created``, which are not defined in this module — they are
    presumably provided by subclasses (see CRUDModel); confirm.
    """

    __abstract__ = True

    @classmethod
    def new_session(cls) -> Session:
        """Get database session.

        NOTE(review): the ``with`` block exits before the session is
        returned, so ``get_db``'s cleanup (close/return-to-pool) has already
        run by the time the caller receives the session — this looks like a
        bug; confirm ``get_db``'s semantics before relying on this method.
        """
        from Services.PostgresDb.database import get_db

        with get_db() as session:
            return session

    @classmethod
    def flush(cls: Type[T], db: Session) -> T:
        """
        Flush the current session to the database.

        Args:
            db: Database session

        Returns:
            The class itself (note: ``cls``, not an instance), enabling
            chained classmethod calls.

        Raises:
            HTTPException: If database operation fails
        """
        try:
            db.flush()
            return cls
        except SQLAlchemyError as e:
            # Map any flush failure to a 304-style API error.
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    def destroy(self: Type[T], db: Session) -> None:
        """
        Delete the record from the database.

        Note: only marks the object deleted in the session; the caller must
        still commit (e.g. via ``save``).

        Args:
            db: Database session
        """
        db.delete(self)

    @classmethod
    def save_via_metadata(cls: Type[T], db: Session) -> None:
        """
        Save or rollback based on metadata.

        NOTE(review): ``db.rollback()`` executes unconditionally — after a
        successful commit it is effectively a no-op, but if an ``else``
        branch was intended (commit when created, rollback otherwise) this
        should be restructured; confirm intent.

        Args:
            db: Database session

        Raises:
            HTTPException: If save operation fails
        """
        try:
            if cls.is_created:
                db.commit()
                db.flush()
            db.rollback()
        except SQLAlchemyError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    @classmethod
    def save(cls: Type[T], db: Session) -> None:
        """
        Commit changes to database.

        Args:
            db: Database session

        Raises:
            HTTPException: If commit fails
        """
        try:
            db.commit()
        except SQLAlchemyError as e:
            # Database-level failure: report as not-modified.
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )
        except Exception as e:
            # Anything else is an unexpected server-side error.
            raise HTTPExceptionApi(
                error_code="HTTP_500_INTERNAL_SERVER_ERROR",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    @classmethod
    def save_and_confirm(cls: Type[T], db: Session) -> None:
        """
        Save changes and mark record as confirmed.

        Commits pending work, flips ``is_confirmed`` via ``update``, then
        commits again.

        Args:
            db: Database session

        Raises:
            HTTPException: If operation fails
        """
        try:
            cls.save(db)
            cls.update(db, is_confirmed=True)
            cls.save(db)
        except SQLAlchemyError as e:
            raise HTTPExceptionApi(
                error_code="HTTP_304_NOT_MODIFIED",
                lang=cls.lang or "tr",
                loc=get_line_number_for_error(),
                sys_msg=str(e),
            )

    @classmethod
    def rollback(cls: Type[T], db: Session) -> None:
        """
        Rollback current transaction.

        Args:
            db: Database session
        """
        db.rollback()
diff --git a/Services/PostgresDb/Models/crud_alchemy.py b/Services/PostgresDb/Models/crud_alchemy.py
new file mode 100644
index 0000000..f6938d5
--- /dev/null
+++ b/Services/PostgresDb/Models/crud_alchemy.py
@@ -0,0 +1,400 @@
+import datetime
+
+from decimal import Decimal
+from typing import Any, Dict, List, Optional
+from sqlalchemy import TIMESTAMP, NUMERIC
+from sqlalchemy.orm import Session, Mapped
+from pydantic import BaseModel
+
+from ApiLayers.ApiLibrary import system_arrow, get_line_number_for_error
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+
+from Services.PostgresDb.Models.core_alchemy import BaseAlchemyModel
+from Services.PostgresDb.Models.system_fields import SystemFields
+
+
class MetaDataRow(BaseModel):
    """Per-operation CRUD outcome (set by ``CRUDModel.update_metadata``)."""

    # True when the last operation created a new record.
    created: Optional[bool] = False
    # Optional human-readable message describing the outcome.
    message: Optional[str] = None
    # Error-case identifier (e.g. "AlreadyExists") consumed by raise_exception.
    error_case: Optional[str] = None
+
+
class Credentials(BaseModel):
    """Identity of the acting user, used for *_by audit-field stamping."""

    # Primary key of the acting person (written to *_by_id columns).
    person_id: int
    # Username written to created_by / updated_by / confirmed_by columns.
    person_name: str
    # Optional display name; not used by the audit stamping in this module.
    full_name: Optional[str] = None
+
+
class CrudActions(SystemFields):
    """Input-filtering and serialisation helpers shared by CRUD models.

    Relies on attributes supplied elsewhere by the model machinery
    (``columns``, ``primary_keys``, ``hybrid_properties``,
    ``settable_relations``, ``__system__fields__create__`` /
    ``__system__fields__update__``, ``__system_default_model__``) — none of
    which are defined in this module.
    """

    @classmethod
    def extract_system_fields(
        cls, filter_kwargs: dict, create: bool = True
    ) -> Dict[str, Any]:
        """
        Remove system-managed fields from input dictionary.

        Args:
            filter_kwargs: Input dictionary of fields
            create: If True, use creation field list, else use update field list

        Returns:
            Dictionary with system fields removed
        """
        # Work on a copy so the caller's dict is never mutated.
        system_fields = filter_kwargs.copy()
        extract_fields = (
            cls.__system__fields__create__ if create else cls.__system__fields__update__
        )
        for field in extract_fields:
            # pop with default: absent system fields are simply ignored.
            system_fields.pop(field, None)
        return system_fields

    @classmethod
    def remove_non_related_inputs(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]:
        """
        Filter out inputs that don't correspond to model fields.

        Args:
            kwargs: Dictionary of field names and values

        Returns:
            Dictionary containing only valid model fields
        """
        # Keep a key only when the model knows it as a column, hybrid
        # property or settable relation.
        return {
            key: value
            for key, value in kwargs.items()
            if key in cls.columns + cls.hybrid_properties + cls.settable_relations
        }

    @classmethod
    def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]:
        """
        Process a field value based on its type and convert it to the appropriate format.

        Args:
            val: Field value
            key: Field name

        Returns:
            Tuple of (should_include, processed_value)
        """
        # Declared annotation (e.g. Mapped[int]) for this column, when present.
        key_ = cls.__annotations__.get(key, None)
        is_primary = key in cls.primary_keys
        # True when the mapped column declares foreign keys.
        row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))

        # Skip primary keys and foreign keys
        if is_primary or row_attr:
            return False, None

        # Handle None values
        if val is None:
            return True, None

        # Special handling for UUID fields
        if str(key[-5:]).lower() == "uu_id":
            return True, str(val)

        # Handle typed fields
        # NOTE(review): these equality checks compare against the generic
        # aliases Mapped[int] etc., so they only match columns annotated in
        # exactly that style — confirm against the model definitions.
        if key_:
            if key_ == Mapped[int]:
                return True, int(val)
            elif key_ == Mapped[bool]:
                return True, bool(val)
            elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
                return True, round(float(val), 3)
            elif key_ == Mapped[TIMESTAMP]:
                return True, str(
                    system_arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")
                )
            elif key_ == Mapped[str]:
                return True, str(val)

        # Handle based on Python types
        else:
            if isinstance(val, datetime.datetime):
                return True, str(
                    system_arrow.get(str(val)).format("YYYY-MM-DD HH:mm:ss ZZ")
                )
            elif isinstance(val, bool):
                return True, bool(val)
            elif isinstance(val, (float, Decimal)):
                return True, round(float(val), 3)
            elif isinstance(val, int):
                return True, int(val)
            elif isinstance(val, str):
                return True, str(val)
            elif val is None:
                return True, None

        # Unrecognised annotation/type: exclude the field.
        return False, None

    def get_dict(
        self,
        exclude: Optional[List[str]] = None,
        include: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Convert model instance to dictionary with customizable fields.

        Args:
            exclude: List of fields to exclude
            include: List of fields to include (takes precedence over exclude)

        Returns:
            Dictionary representation of the model
        """
        return_dict: Dict[str, Any] = {}

        if include:
            # Handle explicitly included fields
            # NOTE(review): this comprehension selects elements ending in
            # "uu_id" (which necessarily also end in "id"); if plain *_id
            # fields were meant, the second test should be negated — confirm.
            exclude_list = [
                element
                for element in self.__system_default_model__
                if str(element)[-2:] == "id" and str(element)[-5:].lower() == "uu_id"
            ]
            columns_include_list = list(set(include).difference(set(exclude_list)))
            # uu_id is always exposed regardless of the include list.
            columns_include_list.extend(["uu_id"])

            for key in columns_include_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database

        elif exclude:
            # Handle explicitly excluded fields
            # Merge the model's own __exclude__fields__ into the caller's list.
            exclude.extend(
                list(
                    set(getattr(self, "__exclude__fields__", []) or []).difference(
                        exclude
                    )
                )
            )
            # Also exclude system fields whose names end in "id".
            exclude.extend(
                [
                    element
                    for element in self.__system_default_model__
                    if str(element)[-2:] == "id"
                ]
            )

            columns_excluded_list = list(set(self.columns).difference(set(exclude)))
            # uu_id and active are always exposed.
            columns_excluded_list.extend(["uu_id", "active"])

            for key in columns_excluded_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database
        else:
            # Handle default field selection
            exclude_list = (getattr(self, "__exclude__fields__", []) or []) + list(
                self.__system_default_model__
            )
            columns_list = list(set(self.columns).difference(set(exclude_list)))
            # Drop *_id columns, then re-add the *uu_id ones.
            columns_list = [col for col in columns_list if str(col)[-2:] != "id"]
            columns_list.extend(
                [col for col in self.columns if str(col)[-5:].lower() == "uu_id"]
            )

            for remove_field in self.__system_default_model__:
                if remove_field in columns_list:
                    columns_list.remove(remove_field)

            for key in columns_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database

        return return_dict
+
+
class CRUDModel(CrudActions):
    """CRUD entry points (create/find/update) with audit and metadata tracking.

    Class attributes:
        meta_data: Outcome of the most recent tracked operation.
        creds: Acting user's identity, used for *_by audit fields.
    """

    __abstract__ = True

    meta_data: MetaDataRow
    creds: Credentials = None

    @property
    def is_created(self):
        """True when the last tracked operation created a new record."""
        return self.meta_data.created

    @classmethod
    def create_credentials(cls, record_created) -> None:
        """
        Save user credentials for tracking.

        Args:
            record_created: Record that was created or updated
        """
        # Stamp only when both identity fields are present.
        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            record_created.created_by_id = cls.creds.person_id
            record_created.created_by = cls.creds.person_name
        return

    @classmethod
    def update_metadata(
        cls, created: bool, error_case: Optional[str] = None, message: Optional[str] = None
    ) -> None:
        """Record the outcome of the current operation on the class."""
        cls.meta_data = MetaDataRow(
            created=created, error_case=error_case, message=message
        )

    @classmethod
    def raise_exception(cls):
        """Raise an API error built from the recorded metadata."""
        raise HTTPExceptionApi(
            error_code=cls.meta_data.error_case,
            lang=cls.lang,
            loc=get_line_number_for_error(),
            sys_msg=cls.meta_data.message,
        )

    @classmethod
    def _find_active_record(cls, db: Session, check_kwargs: Dict[str, Any]):
        """Return the first non-expired record matching *check_kwargs*, or None."""
        query = db.query(cls).filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )
        for key, value in check_kwargs.items():
            if hasattr(cls, key):
                query = query.filter(getattr(cls, key) == value)
        return query.first()

    @classmethod
    def _classify_existing(cls, record) -> str:
        """Map an already-existing record to its metadata error case."""
        if record.deleted:
            return "DeletedRecord"
        if not record.is_confirmed:
            return "IsNotConfirmed"
        return "AlreadyExists"

    @classmethod
    def _insert_record(cls, db: Session, check_kwargs: Dict[str, Any]):
        """Create, audit-stamp and flush a new record built from *check_kwargs*."""
        attrs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()
        for key, value in attrs.items():
            setattr(created_record, key, value)
        cls.create_credentials(created_record)
        db.add(created_record)
        db.flush()
        cls.update_metadata(created=True)
        return created_record

    @classmethod
    def create_or_abort(cls, db: Session, **kwargs):
        """
        Create a new record or abort if it already exists.

        Args:
            db: Database session
            **kwargs: Record fields

        Returns:
            New record if successfully created

        Raises:
            HTTPExceptionApi: When a matching active record already exists.
        """
        check_kwargs = cls.extract_system_fields(kwargs)
        already_record = cls._find_active_record(db, check_kwargs)
        if already_record:
            cls.update_metadata(
                created=False, error_case=cls._classify_existing(already_record)
            )
            cls.raise_exception()
        return cls._insert_record(db, check_kwargs)

    @classmethod
    def find_or_create(cls, db: Session, **kwargs):
        """
        Find an existing record matching the criteria or create a new one.

        Args:
            db: Database session
            **kwargs: Search/creation criteria

        Returns:
            Existing or newly created record (metadata reflects which).
        """
        check_kwargs = cls.extract_system_fields(kwargs)
        already_record = cls._find_active_record(db, check_kwargs)
        if already_record:
            cls.update_metadata(
                created=False, error_case=cls._classify_existing(already_record)
            )
            return already_record
        return cls._insert_record(db, check_kwargs)

    def update(self, db: Session, **kwargs):
        """
        Update the record with new values.

        Args:
            db: Database session
            **kwargs: Fields to update

        Returns:
            Updated record

        Raises:
            ValueError: If attempting to update is_confirmed with other fields
        """
        check_kwargs = self.remove_non_related_inputs(kwargs)
        check_kwargs = self.extract_system_fields(check_kwargs, create=False)

        for key, value in check_kwargs.items():
            setattr(self, key, value)

        # BUG FIX: the original called update_credentials(kwargs=kwargs),
        # which nested the real arguments under a single "kwargs" key so the
        # is_confirmed handling below never saw them. Unpack them instead.
        self.update_credentials(**kwargs)
        db.flush()
        return self

    def update_credentials(self, **kwargs) -> None:
        """
        Stamp confirmed-by or updated-by audit fields from ``self.creds``.

        Args:
            **kwargs: The update arguments; only ``is_confirmed`` is inspected.

        Raises:
            ValueError: If ``is_confirmed`` is combined with other fields.
        """
        is_confirmed_argument = kwargs.get("is_confirmed", None)

        # Confirmation must be an isolated operation.
        if is_confirmed_argument and not len(kwargs) == 1:
            raise ValueError("Confirm field cannot be updated with other fields")

        if is_confirmed_argument:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.confirmed_by_id = self.creds.person_id
                self.confirmed_by = self.creds.person_name
        else:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.updated_by_id = self.creds.person_id
                self.updated_by = self.creds.person_name
        return
diff --git a/Services/PostgresDb/Models/filter_functions.py b/Services/PostgresDb/Models/filter_functions.py
new file mode 100644
index 0000000..84dfb7e
--- /dev/null
+++ b/Services/PostgresDb/Models/filter_functions.py
@@ -0,0 +1,195 @@
+"""
+Advanced filtering functionality for SQLAlchemy models.
+
+This module provides a comprehensive set of filtering capabilities for SQLAlchemy models,
+including pagination, ordering, and complex query building.
+"""
+
+from __future__ import annotations
+from typing import Any, TypeVar, Type, Union
+
+from sqlalchemy import ColumnExpressionArgument
+from sqlalchemy.orm import Query, Session
+from sqlalchemy.sql.elements import BinaryExpression
+
+from Services.PostgresDb.Models.response import PostgresResponse
+
+from ApiLayers.ApiLibrary import system_arrow
+
+
+T = TypeVar("T", bound="QueryModel")
+
+
class ArgumentModel:
    """Helpers for composing tuples of SQLAlchemy filter expressions.

    Mixed into QueryModel; `cls` is expected to be a mapped model class that
    also provides `pre_query`, `expiry_starts`, `expiry_ends`, `is_confirmed`,
    `active` and `deleted` attributes.
    """

    __abstract__ = True

    @classmethod
    def _query(cls: Type[T], db: Session) -> Query:
        """Returns the query to use in the model."""
        # Prefer an externally installed pre-built query over db.query(cls).
        return cls.pre_query if cls.pre_query else db.query(cls)

    @classmethod
    def add_new_arg_to_args(cls: Type[T], args_list, argument, value):
        # Deduplicate the incoming expressions.
        # NOTE(review): set() discards ordering, and the filter keeps only
        # BinaryExpression items — any other ColumnExpressionArgument the
        # caller passed is silently dropped here. Confirm this is intentional.
        new_arg_list = list(
            set(
                args_
                for args_ in list(args_list)
                if isinstance(args_, BinaryExpression)
            )
        )
        # Extract the column name on the left-hand side of an expression.
        arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
        # arg_right = lambda arg_obj: getattr(getattr(arg_obj, "right", None), "value", None)
        # Append `value` only if no existing expression already targets `argument`,
        # so caller-supplied filters take precedence over the standard guards.
        if not any(True for arg in new_arg_list if arg_left(arg_obj=arg) == argument):
            new_arg_list.append(value)
        return tuple(new_arg_list)

    @classmethod
    def get_not_expired_query_arg(cls: Type[T], arg):
        """Add expiry_starts and expiry_ends to the query."""
        # Compared as strings against the current time; see system_arrow.
        starts = cls.expiry_starts <= str(system_arrow.now())
        ends = cls.expiry_ends > str(system_arrow.now())
        arg = cls.add_new_arg_to_args(arg, "expiry_ends", ends)
        arg = cls.add_new_arg_to_args(arg, "expiry_starts", starts)
        return arg

    @classmethod
    def get_active_and_confirmed_query_arg(cls: Type[T], arg):
        """Add active and confirmed to the query."""
        arg = cls.add_new_arg_to_args(arg, "is_confirmed", cls.is_confirmed == True)
        arg = cls.add_new_arg_to_args(arg, "active", cls.active == True)
        arg = cls.add_new_arg_to_args(arg, "deleted", cls.deleted == False)
        return arg
+
+
class QueryModel(ArgumentModel):
    """
    Filtering entry points for SQLAlchemy models.

    The plain `filter_*` methods add the standard guards (active, confirmed,
    not deleted, not expired); the `*_system` variants skip them so callers
    can reach every record. All methods return a PostgresResponse wrapper.
    """

    # Optional externally installed base query; _query() prefers it over db.query(cls).
    pre_query = None
    __abstract__ = True

    @classmethod
    def produce_query_to_add(cls: Type[T], filter_list, args=()):
        """
        Merge smart-filter expressions from filter_list["query"] into a tuple.

        Args:
            filter_list: Mapping that may contain a "query" dict of smart
                filters (e.g. {"name__ilike": "A%"}).
            args: Optional starting tuple of expressions to merge into.

        Returns:
            Tuple of expressions, at most one per distinct column.

        BUGFIX: the previous version referenced an uninitialized local
        `args` (guaranteed UnboundLocalError) and returned None.
        """
        args = tuple(args)
        if filter_list.get("query"):
            for smart_iter in cls.filter_expr(**filter_list["query"]):
                if key := getattr(getattr(smart_iter, "left", None), "key", None):
                    args = cls.add_new_arg_to_args(args, key, smart_iter)
        return args

    @classmethod
    def convert(
        cls: Type[T], smart_options: dict, validate_model: Any = None
    ) -> tuple[BinaryExpression]:
        """
        Convert smart-query options into a tuple of filter expressions.

        NOTE(review): when validate_model is provided the method falls
        through and returns None — this branch looks unfinished; confirm
        the intended behavior before relying on it.
        """
        if not validate_model:
            return tuple(cls.filter_expr(**smart_options))

    @classmethod
    def filter_by_one(
        cls: Type[T], db: Session, system: bool = False, **kwargs
    ) -> PostgresResponse:
        """
        Filter single record by keyword arguments.

        Args:
            db: Database session
            system: If True, skip the implicit is_confirmed=True filter
            **kwargs: Filter criteria

        Returns:
            Query response with single record
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)  # guard against a stray "system" kwarg
        query = cls._query(db).filter_by(**kwargs)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=False)

    @classmethod
    def filter_one(
        cls: Type[T],
        *args: Union[BinaryExpression, ColumnExpressionArgument],
        db: Session,
    ) -> PostgresResponse:
        """
        Filter single record by expressions, adding the standard
        active/confirmed/not-deleted and not-expired guards.

        Args:
            args: Filter expressions
            db: Database session

        Returns:
            Query response with single record
        """
        args = cls.get_active_and_confirmed_query_arg(args)
        args = cls.get_not_expired_query_arg(args)
        query = cls._query(db=db).filter(*args)
        return PostgresResponse(
            pre_query=cls._query(db=db), query=query, is_array=False
        )

    @classmethod
    def filter_one_system(
        cls,
        *args: Union[BinaryExpression, ColumnExpressionArgument],
        db: Session,
    ):
        """Filter single record by expressions without status/expiry guards."""
        query = cls._query(db=db).filter(*args)
        return PostgresResponse(
            pre_query=cls._query(db=db), query=query, is_array=False
        )

    @classmethod
    def filter_all_system(
        cls: Type[T],
        *args: Union[BinaryExpression, ColumnExpressionArgument],
        db: Session,
    ) -> PostgresResponse:
        """
        Filter multiple records by expressions without status filtering.

        Args:
            args: Filter expressions
            db: Database session

        Returns:
            Query response with matching records
        """
        query = cls._query(db)
        query = query.filter(*args)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)

    @classmethod
    def filter_all(
        cls: Type[T],
        *args: Union[BinaryExpression, ColumnExpressionArgument],
        db: Session,
    ) -> PostgresResponse:
        """
        Filter multiple records by expressions, adding the standard
        active/confirmed/not-deleted and not-expired guards.

        Args:
            args: Filter expressions
            db: Database session

        Returns:
            Query response with matching records
        """
        args = cls.get_active_and_confirmed_query_arg(args)
        args = cls.get_not_expired_query_arg(args)
        query = cls._query(db).filter(*args)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)

    @classmethod
    def filter_by_all_system(cls: Type[T], db: Session, **kwargs) -> PostgresResponse:
        """
        Filter multiple records by keyword arguments without status filtering.

        Args:
            db: Database session
            **kwargs: Filter criteria

        Returns:
            Query response with matching records
        """
        query = cls._query(db).filter_by(**kwargs)
        return PostgresResponse(pre_query=cls._query(db), query=query, is_array=True)
diff --git a/Services/PostgresDb/Models/language_alchemy.py b/Services/PostgresDb/Models/language_alchemy.py
new file mode 100644
index 0000000..32230bf
--- /dev/null
+++ b/Services/PostgresDb/Models/language_alchemy.py
@@ -0,0 +1,2 @@
class LanguageModel:
    """Mixin hook for attaching a per-model language/translation model."""

    # Concrete models assign their translation model here; None means no
    # translations. NOTE(review): consumers are not visible in this file —
    # presumably read via getattr(cls.__language_model__, lang); confirm.
    __language_model__ = None
diff --git a/Services/PostgresDb/Models/mixin.py b/Services/PostgresDb/Models/mixin.py
new file mode 100644
index 0000000..1dc5e2a
--- /dev/null
+++ b/Services/PostgresDb/Models/mixin.py
@@ -0,0 +1,177 @@
+from sqlalchemy import (
+ TIMESTAMP,
+ NUMERIC,
+ func,
+ text,
+ UUID,
+ String,
+ Integer,
+ Boolean,
+ SmallInteger,
+)
+from sqlalchemy.orm import Mapped, mapped_column
+from sqlalchemy_mixins.serialize import SerializeMixin
+from sqlalchemy_mixins.repr import ReprMixin
+from sqlalchemy_mixins.smartquery import SmartQueryMixin
+
+from Services.PostgresDb.Models.core_alchemy import BaseAlchemyModel
+from Services.PostgresDb.Models.crud_alchemy import CRUDModel
+from Services.PostgresDb.Models.filter_functions import QueryModel
+from Services.PostgresDb.database import Base
+
+
class BasicMixin(Base, BaseAlchemyModel):
    """Minimal declarative base: couples the SQLAlchemy Base with the
    project's BaseAlchemyModel helpers and borrows ReprMixin's __repr__."""

    __abstract__ = True
    __repr__ = ReprMixin.__repr__
+
+
class CrudMixin(
    BasicMixin, CRUDModel, SerializeMixin, ReprMixin, SmartQueryMixin, QueryModel
):
    """
    Base mixin providing CRUD operations and common fields for PostgreSQL models.

    Features:
    - Automatic timestamps (created_at, updated_at)
    - Soft delete capability
    - User tracking (created_by, updated_by)
    - Data serialization
    - Multi-language support

    Every concrete model gets an integer primary key, a server-generated
    UUID, and a validity window (expiry_starts / expiry_ends) that the
    QueryModel filters compare against the current time.
    """

    __abstract__ = True

    # Primary and reference fields
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    uu_id: Mapped[str] = mapped_column(
        UUID,
        server_default=text("gen_random_uuid()"),
        index=True,
        unique=True,
        comment="Unique identifier UUID",
    )

    # Common timestamp fields for all models
    expiry_starts: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        comment="Record validity start timestamp",
    )
    # "2099-12-31" acts as a far-future sentinel meaning "never expires".
    expiry_ends: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        default="2099-12-31",
        server_default="2099-12-31",
        comment="Record validity end timestamp",
    )
+
+
class BaseCollection(CrudMixin):
    """Base model class with minimal fields (id, uu_id, expiry window only)."""

    __abstract__ = True
    __repr__ = ReprMixin.__repr__
+
+
class CrudCollection(CrudMixin):
    """
    Full-featured model class with all common fields.

    Includes:
    - UUID and reference ID
    - Timestamps
    - User tracking
    - Confirmation status
    - Soft delete
    - Notification flags
    """

    __abstract__ = True
    __repr__ = ReprMixin.__repr__

    ref_id: Mapped[str] = mapped_column(
        String(100), nullable=True, index=True, comment="External reference ID"
    )

    # Timestamps
    created_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
        comment="Record creation timestamp",
    )
    updated_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
        index=True,
        comment="Last update timestamp",
    )

    # Cryptographic and user tracking
    cryp_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, index=True, comment="Cryptographic UUID"
    )
    created_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Creator name"
    )
    created_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Creator ID"
    )
    updated_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Last modifier name"
    )
    updated_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Last modifier ID"
    )
    confirmed_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Confirmer name"
    )
    confirmed_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Confirmer ID"
    )

    # Status flags
    # Server defaults are strings ("0"/"1") because they are rendered into DDL.
    is_confirmed: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Record confirmation status"
    )
    replication_id: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Replication identifier"
    )
    deleted: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Soft delete flag"
    )
    active: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Record active status"
    )
    is_notification_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Notification sent flag"
    )
    is_email_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Email sent flag"
    )

    # @classmethod
    # def retrieve_language_model(cls, lang: str, response_model: Any) -> Dict[str, str]:
    #     """
    #     Retrieve language-specific model headers and validation messages.
    #
    #     Args:
    #         lang: Language code
    #         response_model: Model containing language annotations
    #
    #     Returns:
    #         Dictionary of field names to localized headers
    #     """
    #     headers_and_validation = {}
    #     __language_model__ = getattr(cls.__language_model__, lang, "tr")
    #
    #     for field in response_model.__annotations__.keys():
    #         headers_and_validation[field] = getattr(
    #             __language_model__, field, "Lang Not found"
    #         )
    #
    #     return headers_and_validation
diff --git a/Services/PostgresDb/Models/pagination.py b/Services/PostgresDb/Models/pagination.py
new file mode 100644
index 0000000..f3c0780
--- /dev/null
+++ b/Services/PostgresDb/Models/pagination.py
@@ -0,0 +1,238 @@
+from __future__ import annotations
+from typing import Any, Dict, Optional, Union
+from sqlalchemy import desc, asc
+from pydantic import BaseModel
+from ApiLayers.AllConfigs.SqlDatabase.configs import PaginateConfig
+from ApiLayers.ApiValidations.Request import ListOptions
+from Services.PostgresDb.Models.response import PostgresResponse
+
+
class PaginationConfig(BaseModel):
    """
    Validated pagination settings.

    Attributes:
        page: Current page number (default: 1)
        size: Items per page (default: 10)
        order_field: Field(s) to order by; None falls back to ["uu_id"]
        order_type: Order direction(s); None falls back to ["asc"]
    """

    page: int = 1
    size: int = 10
    order_field: Optional[Union[tuple[str], list[str]]] = None
    order_type: Optional[Union[tuple[str], list[str]]] = None

    def __init__(self, **data):
        # Substitute the defaults up-front so an explicit None behaves the
        # same as an omitted argument.
        if data.get("order_field") is None:
            data["order_field"] = ["uu_id"]
        if data.get("order_type") is None:
            data["order_type"] = ["asc"]
        super().__init__(**data)
+
+
class Pagination:
    """
    Handles pagination logic for query results.

    Manages page size, current page, ordering, and calculates total pages
    and items based on the data source.

    Attributes:
        DEFAULT_SIZE: Default number of items per page
        MIN_SIZE: Minimum allowed page size
        MAX_SIZE: Maximum allowed page size
    """

    DEFAULT_SIZE = PaginateConfig.DEFAULT_SIZE
    MIN_SIZE = PaginateConfig.MIN_SIZE
    MAX_SIZE = PaginateConfig.MAX_SIZE

    def __init__(self, data: PostgresResponse):
        self.data = data
        self.size: int = self.DEFAULT_SIZE
        self.page: int = 1
        # Ordering is list-valued: parallel lists of fields and directions
        # that PaginationResult.dynamic_order_by zips together.
        self.orderField: Optional[Union[tuple[str], list[str]]] = ["uu_id"]
        self.orderType: Optional[Union[tuple[str], list[str]]] = ["asc"]
        self.page_count: int = 1
        self.total_count: int = 0
        self.all_count: int = 0
        self.total_pages: int = 1
        self._update_page_counts()

    def change(self, **kwargs) -> None:
        """Update pagination settings from config (extra kwargs are ignored
        by PaginationConfig validation)."""
        config = PaginationConfig(**kwargs)
        # Out-of-range sizes silently fall back to the default.
        self.size = (
            config.size
            if self.MIN_SIZE <= config.size <= self.MAX_SIZE
            else self.DEFAULT_SIZE
        )
        self.page = config.page
        self.orderField = config.order_field
        self.orderType = config.order_type
        self._update_page_counts()

    def feed(self, data: PostgresResponse) -> None:
        """Recalculate pagination against a new data source."""
        self.data = data
        self._update_page_counts()

    def _update_page_counts(self) -> None:
        """Update page counts and clamp the current page into range."""
        if self.data:
            self.total_count = self.data.count
            self.all_count = self.data.total_count

        self.size = (
            self.size
            if self.MIN_SIZE <= self.size <= self.MAX_SIZE
            else self.DEFAULT_SIZE
        )
        # Ceiling division, with at least one page.
        self.total_pages = max(1, (self.total_count + self.size - 1) // self.size)
        self.page = max(1, min(self.page, self.total_pages))
        # The last page may be only partially filled.
        self.page_count = (
            self.total_count % self.size
            if self.page == self.total_pages and self.total_count % self.size
            else self.size
        )

    def refresh(self) -> None:
        """Recalculate counts from the current data source."""
        self._update_page_counts()

    def reset(self) -> None:
        """Reset pagination state to defaults."""
        self.size = self.DEFAULT_SIZE
        self.page = 1
        # BUGFIX: these were reset to bare strings ("uu_id" / "asc") while
        # __init__, change() and PaginationResult.dynamic_order_by (which
        # zips them) all treat both as lists — a string would be iterated
        # character by character.
        self.orderField = ["uu_id"]
        self.orderType = ["asc"]

    def as_dict(self) -> Dict[str, Any]:
        """Convert pagination state to dictionary format."""
        self.refresh()
        return {
            "size": self.size,
            "page": self.page,
            "allCount": self.all_count,
            "totalCount": self.total_count,
            "totalPages": self.total_pages,
            "pageCount": self.page_count,
            "orderField": self.orderField,
            "orderType": self.orderType,
        }
+
+
class PaginationResult:
    """
    Result of a paginated query.

    Applies ordering, limit and offset to the wrapped query and serializes
    the rows, optionally through a response model.

    Attributes:
        _query: Underlying SQLAlchemy query
        pagination: Pagination state (size, page, order fields/directions)
        response_type: True when the response is a list of rows
        response_model: Optional pydantic-style model used to shape each row
    """

    def __init__(
        self, data: PostgresResponse, pagination: Pagination, response_model: Any = None
    ):
        self._query = data.query
        self.pagination = pagination
        self.response_type = data.is_list
        self.limit = self.pagination.size
        self.offset = self.pagination.size * (self.pagination.page - 1)
        self.order_by = self.pagination.orderField
        self.response_model = response_model

    def dynamic_order_by(self):
        """
        Dynamically order the query by multiple fields.

        Returns:
            Ordered query object.

        Raises:
            ValueError: If order fields and directions differ in length.
        """
        if len(self.order_by) != len(self.pagination.orderType):
            raise ValueError(
                "Order by fields and order types must have the same length."
            )
        # Hoist the mapped-entity lookup out of the loop.
        entity = self._query.column_descriptions[0]["entity"]
        for field, direction in zip(self.order_by, self.pagination.orderType):
            if not hasattr(entity, field):
                continue  # unknown columns are silently skipped, as before
            column = getattr(entity, field)
            # Anything starting with "d" (desc/descending) sorts descending.
            ordering = desc(column) if direction.lower().startswith("d") else asc(column)
            self._query = self._query.order_by(ordering)
        return self._query

    @property
    def data(self) -> Union[list, dict, None]:
        """Ordered, paginated rows serialized to dicts (or response models)."""
        paginated = self.dynamic_order_by().limit(self.limit).offset(self.offset)
        if self.response_type:
            rows = [row.get_dict() for row in paginated.all()]
            if self.response_model:
                return [self.response_model(**row).model_dump() for row in rows]
            return rows
        record = paginated.first()
        if record is None:
            # BUGFIX: previously crashed with AttributeError on a missing row.
            return None
        row = record.get_dict()
        if self.response_model:
            # BUGFIX: response_model was only ever applied on the list branch
            # (iterating a dict would have yielded its keys).
            return self.response_model(**row).model_dump()
        return row
+
+
class QueryOptions:
    """
    Normalize list/filter options for a model query.

    Wraps raw list options (page/size/order/query) into a validated
    ListOptions, drops query keys the optional `model_query` validator does
    not recognize, and converts the surviving smart-query dict into filter
    expressions via the table's `convert`.
    """

    def __init__(
        self,
        table,
        data: Union[dict, ListOptions] = None,
        model_query: Optional[Any] = None,
    ):
        self.table = table
        self.data = data
        self.model_query = model_query
        if isinstance(data, dict):
            self.data = ListOptions(**data)
        self.validate_query()
        # BUGFIX: guard against data=None (previously AttributeError).
        if self.data is not None:
            # BUGFIX: order_type defaulted to ["created_at"] — a column name,
            # not a direction. Downstream ordering treats anything not
            # starting with "d" as ascending, so ["asc"] preserves the
            # effective behavior while being a real direction value.
            if not self.data.order_type:
                self.data.order_type = ["asc"]
            if not self.data.order_field:
                self.data.order_field = ["uu_id"]

    def validate_query(self):
        """Keep only query keys whose base name (before "__") is an attribute
        of the validated model_query instance; values are stringified."""
        if not self.data or not self.data.query or not self.model_query:
            return
        cleaned_query, cleaned_query_by_model, validated = {}, {}, {}
        for key, value in self.data.query.items():
            # "name__ilike" -> base key "name"; remember the original pair.
            base_key = str(key).split("__")[0]
            cleaned_query[base_key] = value
            cleaned_query_by_model[base_key] = (key, value)
        cleaned_model = self.model_query(**cleaned_query)
        for base_key in cleaned_query:
            if hasattr(cleaned_model, base_key):
                original_key, original_value = cleaned_query_by_model[base_key]
                validated[str(original_key)] = str(original_value)
        self.data.query = validated

    def convert(self) -> tuple:
        """Convert the validated query options into filter expressions via
        the table's `convert`; returns an empty tuple when nothing to do."""
        if not self.data:
            return ()
        if not self.data.query:
            return ()
        return tuple(self.table.convert(self.data.query))
diff --git a/Services/PostgresDb/Models/response.py b/Services/PostgresDb/Models/response.py
new file mode 100644
index 0000000..1fe2407
--- /dev/null
+++ b/Services/PostgresDb/Models/response.py
@@ -0,0 +1,91 @@
+"""
+Response handler for PostgreSQL query results.
+
+This module provides a wrapper class for SQLAlchemy query results,
+adding convenience methods for accessing data and managing query state.
+"""
+
+from typing import Any, Dict, Optional, TypeVar, Generic, Union
+from sqlalchemy.orm import Query
+
+
+T = TypeVar("T")
+
+
class PostgresResponse(Generic[T]):
    """
    Wrapper for PostgreSQL/SQLAlchemy query results.

    Attributes:
        metadata: Additional metadata for the query

    Properties:
        data: Query results (single row, or list of rows when is_list)
        data_as_dict: Results serialized via each row's get_dict()
        count: Count of results in this (filtered) query
        total_count: Count of results in the unfiltered pre-query
        query: Underlying query object
    """

    def __init__(
        self,
        pre_query: Query,
        query: Query,
        is_array: bool = True,
        metadata: Any = None,
    ):
        self._is_list = is_array
        self._query = query
        self._pre_query = pre_query
        self._count: Optional[int] = None  # lazily computed by `count`
        self.metadata = metadata

    @property
    def data(self) -> Union[T, list[T], None]:
        """Get query results: a single row (or None) / a list of rows."""
        if not self.is_list:
            return self._query.first()
        # BUGFIX: the query was executed twice (`.all()` in both the truth
        # test and the result); `.all()` already returns [] when empty.
        return self._query.all()

    @property
    def data_as_dict(self) -> Union[Dict[str, Any], list[Dict[str, Any]], None]:
        """Get query results serialized through each row's get_dict().

        BUGFIX: the single/list branches were inverted and the single-row
        branch called `.first()` on a row object; now mirrors `data`.
        """
        if not self.is_list:
            first_item = self._query.first()
            return first_item.get_dict() if first_item else None
        return [result.get_dict() for result in self._query.all()]

    @property
    def total_count(self) -> int:
        """Count of rows in the unfiltered pre-query (1 for single-row)."""
        if self.is_list:
            return self._pre_query.count() if self._pre_query else 0
        return 1

    @property
    def count(self) -> int:
        """Lazily computed count of rows in this query (1 for single-row)."""
        if self.is_list and self._count is None:
            self._count = self._query.count()
        elif not self.is_list:
            self._count = 1
        return self._count

    @property
    def query(self) -> Query:
        """Get query object."""
        return self._query

    @property
    def is_list(self) -> bool:
        """Check if response is a list."""
        return self._is_list

    def as_dict(self) -> Dict[str, Any]:
        """Convert response to dictionary format."""
        return {
            "metadata": self.metadata,
            "is_list": self._is_list,
            "query": self.query,
            "count": self.count,
        }
diff --git a/Services/PostgresDb/Models/system_fields.py b/Services/PostgresDb/Models/system_fields.py
new file mode 100644
index 0000000..1f5c252
--- /dev/null
+++ b/Services/PostgresDb/Models/system_fields.py
@@ -0,0 +1,50 @@
class SystemFields:
    """Registries of column names the CRUD layer manages automatically.

    Models consult these tuples to strip system-managed fields from
    user-supplied input (on create/update) and to trim serialized output.
    """

    __abstract__ = True

    # System fields that should be handled automatically during creation
    __system__fields__create__ = (
        "created_at",
        "updated_at",
        "cryp_uu_id",
        "created_by",
        "created_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
        "confirmed_by",
        "confirmed_by_id",
        "is_confirmed",
        "deleted",
        "active",
        "is_notification_send",
        "is_email_send",
    )

    # System fields that should be handled automatically during updates
    # (status flags like active/deleted ARE user-updatable here, unlike create)
    __system__fields__update__ = (
        "cryp_uu_id",
        "created_at",
        "updated_at",
        "created_by",
        "created_by_id",
        "confirmed_by",
        "confirmed_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
    )

    # Default fields to exclude from serialization
    __system_default_model__ = (
        "cryp_uu_id",
        "is_confirmed",
        "deleted",
        "is_notification_send",
        "replication_id",
        "is_email_send",
        "confirmed_by_id",
        "confirmed_by",
        "updated_by_id",
        "created_by_id",
    )
diff --git a/Services/PostgresDb/Models/token.py b/Services/PostgresDb/Models/token.py
new file mode 100644
index 0000000..5e9ea1f
--- /dev/null
+++ b/Services/PostgresDb/Models/token.py
@@ -0,0 +1,39 @@
+from typing import TypeVar, Dict, Any
+from dataclasses import dataclass
+from ApiLibrary import get_line_number_for_error
+from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+
+# Type variable for class methods returning self
+T = TypeVar("T", bound="FilterAttributes")
+
+
@dataclass
class TokenModel:
    """Per-request authentication/localization context.

    Attributes:
        lang: Two-letter language code, lower-cased; falls back to "tr".
        credentials: Credential mapping; never None after init.
        timezone: Timezone name; "GMT"-style values are rejected.
    """

    lang: str
    credentials: Dict[str, str]
    timezone: str

    def __post_init__(self):
        # Normalize the language code and guarantee a dict for credentials.
        normalized_lang = self.lang if self.lang else "tr"
        self.lang = str(normalized_lang).lower()
        if not self.credentials:
            self.credentials = {}
        # Reject "GMT"-offset style timezone strings.
        if "GMT" in self.timezone:
            raise HTTPExceptionApi(
                error_code="HTTP_400_BAD_REQUEST",
                lang=self.lang,
                loc=get_line_number_for_error(),
                sys_msg="Invalid timezone format",
            )

    @classmethod
    def set_user_define_properties(cls, token: Any) -> None:
        """
        Set user-specific properties from the authentication token.

        Args:
            token: Authentication token containing user preferences
        """
        # Local import — presumably to avoid a circular dependency at load time.
        from ApiLibrary.date_time_actions.date_functions import DateTimeLocal

        cls.credentials = token.credentials
        cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)
        cls.lang = str(token.lang).lower()
diff --git a/Services/PostgresDb/__init__.py b/Services/PostgresDb/__init__.py
new file mode 100644
index 0000000..e0568d3
--- /dev/null
+++ b/Services/PostgresDb/__init__.py
@@ -0,0 +1,6 @@
+from Services.PostgresDb.Models.mixin import CrudCollection, BaseCollection
+
+__all__ = [
+ "CrudCollection",
+ "BaseCollection",
+]
diff --git a/Services/PostgresDb/database.py b/Services/PostgresDb/database.py
new file mode 100644
index 0000000..2a294b4
--- /dev/null
+++ b/Services/PostgresDb/database.py
@@ -0,0 +1,60 @@
+from contextlib import contextmanager
+from functools import lru_cache
+from typing import Generator
+
+from sqlalchemy import create_engine
+from sqlalchemy.orm import declarative_base, sessionmaker, scoped_session, Session
+
+from ApiLayers.AllConfigs.SqlDatabase.configs import WagDatabase
+
# Configure the database engine with proper pooling
engine = create_engine(
    WagDatabase.DATABASE_URL,
    pool_pre_ping=True,  # Verify connection before using
    pool_size=20,  # Maximum number of permanent connections
    max_overflow=10,  # Maximum number of additional connections
    pool_recycle=3600,  # Recycle connections after 1 hour
    pool_timeout=30,  # Wait up to 30 seconds for a connection
    echo=False,  # Set to True for debugging SQL queries
)

# Declarative base class that every ORM model in this package inherits from.
Base = declarative_base()
+
+
+# Create a cached session factory
@lru_cache()
def get_session_factory() -> scoped_session:
    """Build (once, via lru_cache) a thread-safe session factory.

    Returns:
        scoped_session: Registry that hands out one session per thread.
    """
    factory = sessionmaker(
        bind=engine,
        autocommit=False,
        autoflush=False,
        expire_on_commit=False,  # keep ORM objects usable after commit
    )
    return scoped_session(factory)
+
+
@contextmanager
def get_db() -> Generator[Session, None, None]:
    """Get database session with proper connection management.

    This context manager ensures:
    - Proper connection pooling
    - Session cleanup
    - Connection return to pool
    - Thread safety

    The session is committed automatically when the caller's block exits
    without an exception, and rolled back (with the exception re-raised)
    otherwise; the session is always closed and removed from the registry.

    Yields:
        Session: SQLAlchemy session object
    """
    session_factory = get_session_factory()
    session = session_factory()
    try:
        yield session
        session.commit()  # reached only if the caller's block raised nothing
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
        session_factory.remove()  # Clean up the session from the registry
diff --git a/Services/PostgresDb/how_to.py b/Services/PostgresDb/how_to.py
new file mode 100644
index 0000000..8e0999b
--- /dev/null
+++ b/Services/PostgresDb/how_to.py
@@ -0,0 +1,112 @@
+from typing import Optional
+
+from ApiLayers.ApiValidations.Request import ListOptions
+from ApiLayers.Schemas import AddressNeighborhood
+from Services.PostgresDb.Models.crud_alchemy import Credentials
+from Services.PostgresDb.Models.mixin import BasicMixin
+from Services.PostgresDb.Models.pagination import (
+ Pagination,
+ PaginationResult,
+ QueryOptions,
+)
+from pydantic import BaseModel
+
+
# Toggle which demo section below runs.
listing = True
creating = False
updating = False


new_session = AddressNeighborhood.new_session()
new_session_test = AddressNeighborhood.new_session()


# Install demo credentials so create/update calls can stamp audit columns.
BasicMixin.creds = Credentials(person_id=10, person_name="Berkay Super User")


class QueryModel(BaseModel):
    # Validation model: only query keys whose base name matches one of these
    # attributes survive QueryOptions.validate_query.
    neighborhood_name: Optional[str]
    neighborhood_code: Optional[str]
+
+
if listing:
    """List Options and Queries"""
    # Install a base query so later filter_all calls build on the pre-filter.
    AddressNeighborhood.pre_query = AddressNeighborhood.filter_all(
        AddressNeighborhood.neighborhood_code.icontains("10"),
        db=new_session,
    ).query

    list_options = {
        "page": 1,
        "size": 11,
        "order_field": ["type_code", "neighborhood_code"],
        "order_type": ["asc", "desc"],
        "query": {
            "neighborhood_name__ilike": "A%",
            "neighborhood_code__contains": "3",
            "my_other_field__ilike": "B%",
            "other_other_field__ilike": "C%",
        },
    }

    # QueryOptions drops query keys not declared on QueryModel
    # (my_other_field / other_other_field above get filtered out).
    query_options = QueryOptions(
        table=AddressNeighborhood, data=list_options, model_query=QueryModel
    )
    address_neighborhoods = AddressNeighborhood.filter_all(
        *query_options.convert(), db=new_session
    )

    pagination = Pagination(data=address_neighborhoods)
    pagination.change(**list_options)

    pagination_result = PaginationResult(
        data=address_neighborhoods, pagination=pagination
    )
    print("as_dict", pagination_result.pagination.as_dict())
    for i, row in enumerate(pagination_result.data):
        print(i + 1, row)

    # list_options_valid = ListOptions(**list_options)
    # pagination.page = 9
    # pagination.size = 10
    # pagination.orderField = ["type_code", "neighborhood_code"]
    # pagination.orderType = ["asc", "asc"]
+
if creating:
    """Create Queries"""
    # find_or_create stages the record; save_via_metadata persists according
    # to the metadata recorded during the find-or-create step.
    find_or_create = AddressNeighborhood.find_or_create(
        neighborhood_code="100",
        neighborhood_name="Test",
        locality_id=15334,
        db=new_session,
    )
    find_or_create.save_via_metadata(db=new_session)
    find_or_create.destroy(db=new_session)
    find_or_create.save_via_metadata(db=new_session)
    # The second call should find the existing record instead of creating one.
    find_or_create = AddressNeighborhood.find_or_create(
        neighborhood_code="100",
        neighborhood_name="Test",
        locality_id=15334,
        db=new_session,
    )
    find_or_create.save_via_metadata(db=new_session)
+
if updating:
    """Update Queries"""

    query_of_list_options = {"uu_id": str("33a89767-d2dc-4531-8f66-7b650e22a8a7")}
    print("query_of_list_options", query_of_list_options)
    # Fetch a single record by UUID, update one field, then persist.
    address_neighborhoods_one = AddressNeighborhood.filter_one(
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    ).data
    address_neighborhoods_one.update(
        neighborhood_name="Test 44",
        db=new_session,
    )
    address_neighborhoods_one.save(db=new_session)
    # Re-read to verify the update took effect.
    address_neighborhoods_one = AddressNeighborhood.filter_one(
        *AddressNeighborhood.convert(query_of_list_options),
        db=new_session,
    ).data_as_dict
    print("address_neighborhoods_one", address_neighborhoods_one)
diff --git a/Services/Redis/Actions/actions.py b/Services/Redis/Actions/actions.py
new file mode 100644
index 0000000..3733357
--- /dev/null
+++ b/Services/Redis/Actions/actions.py
@@ -0,0 +1,159 @@
+import arrow
+
+from typing import Optional, List, Dict, Union
+
+from ApiLayers.AllConfigs.main import MainConfig
+
+from Services.Redis.conn import redis_cli
+from Services.Redis.Models.base import RedisRow
+from Services.Redis.Models.response import RedisResponse
+
+
class RedisActions:
    """Class for handling Redis operations with JSON data."""

    @classmethod
    def get_expiry_time(cls, expiry_kwargs: Dict[str, int]) -> int:
        """Calculate total expiry seconds from days/hours/minutes/seconds kwargs."""
        time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
        return sum(
            int(expiry_kwargs.get(unit, 0)) * multiplier
            for unit, multiplier in time_multipliers.items()
        )

    @classmethod
    def set_expiry_time(cls, expiry_seconds: int) -> Dict[str, int]:
        """Convert total seconds back into a dict of time units
        (inverse of get_expiry_time; zero-valued units are omitted)."""
        time_multipliers = {"days": 86400, "hours": 3600, "minutes": 60, "seconds": 1}
        result = {}
        for unit, multiplier in time_multipliers.items():
            if expiry_seconds >= multiplier:
                result[unit], expiry_seconds = divmod(expiry_seconds, multiplier)
        return result

    @classmethod
    def resolve_expires_at(cls, redis_row: RedisRow) -> str:
        """Resolve a human-readable expiry timestamp for a Redis key."""
        expiry_time = redis_cli.ttl(redis_row.redis_key)
        # BUGFIX: ttl() returns -2 for a missing key; previously that fell
        # through and produced a nonsense timestamp two seconds in the past.
        if expiry_time == -2:
            return "Key does not exist."
        if expiry_time == -1:
            return "Key has no expiry time."
        return arrow.now().shift(seconds=expiry_time).format(MainConfig.DATETIME_FORMAT)

    @classmethod
    def delete_key(cls, key: Union[Optional[str], Optional[bytes]]):
        """Delete a single Redis key; errors are reported in the response."""
        try:
            redis_cli.delete(key)
            return RedisResponse(
                status=True,
                message="Value is deleted successfully.",
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Value is not deleted successfully.",
                error=str(e),
            )

    @classmethod
    def delete(
        cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
    ) -> RedisResponse:
        """Delete every key matching the pattern built from list_keys."""
        try:
            # NOTE(review): get_json calls RedisRow.regex(...) on the class,
            # here it is called on an instance — confirm which is canonical.
            regex = RedisRow().regex(list_keys=list_keys)
            # Iterate the scan cursor directly; no need to materialize it.
            for row in redis_cli.scan_iter(match=regex):
                redis_cli.delete(row)

            return RedisResponse(
                status=True,
                message="Values are deleted successfully.",
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Values are not deleted successfully.",
                error=str(e),
            )

    @classmethod
    def set_json(
        cls,
        list_keys: List[Union[str, bytes]],
        value: Optional[Union[Dict, List]],
        expires: Optional[Dict[str, int]] = None,
    ) -> RedisResponse:
        """Set JSON value in Redis with optional expiry.

        Args:
            list_keys: Key segments used to compose the Redis key.
            value: JSON-serializable payload fed into the RedisRow.
            expires: Optional expiry units (days/hours/minutes/seconds).
        """
        redis_row = RedisRow()
        redis_row.merge(set_values=list_keys)
        redis_row.feed(value)
        redis_row.expires_at_string = None
        redis_row.expires_at = None
        try:
            if expires:
                redis_row.expires_at = expires
                expiry_time = cls.get_expiry_time(expiry_kwargs=expires)
                redis_cli.setex(
                    name=redis_row.redis_key,
                    time=expiry_time,
                    value=redis_row.value,
                )
                redis_row.expires_at_string = str(
                    arrow.now()
                    .shift(seconds=expiry_time)
                    .format(MainConfig.DATETIME_FORMAT)
                )
            else:
                redis_cli.set(name=redis_row.redis_key, value=redis_row.value)

            return RedisResponse(
                status=True,
                message="Value is set successfully.",
                data=redis_row,
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Value is not set successfully.",
                error=str(e),
            )

    @classmethod
    def get_json(
        cls, list_keys: List[Union[Optional[str], Optional[bytes]]]
    ) -> RedisResponse:
        """Get JSON values from Redis using pattern matching."""
        try:
            list_of_rows = []
            regex = RedisRow.regex(list_keys=list_keys)
            # Iterate the scan cursor directly; no need to materialize it.
            for row in redis_cli.scan_iter(match=regex):
                redis_row = RedisRow()
                redis_row.set_key(key=row)
                redis_value = redis_cli.get(row)
                redis_value_expire = redis_cli.ttl(row)
                redis_row.expires_at = cls.set_expiry_time(
                    expiry_seconds=int(redis_value_expire)
                )
                redis_row.expires_at_string = cls.resolve_expires_at(
                    redis_row=redis_row
                )
                redis_row.feed(redis_value)
                list_of_rows.append(redis_row)
            if list_of_rows:
                return RedisResponse(
                    status=True,
                    message="Value is get successfully.",
                    data=list_of_rows,
                )
            return RedisResponse(
                status=False,
                message="Value is not get successfully.",
                data=list_of_rows,
            )
        except Exception as e:
            return RedisResponse(
                status=False,
                message="Value is not get successfully.",
                error=str(e),
            )
diff --git a/Services/Redis/Models/access.py b/Services/Redis/Models/access.py
new file mode 100644
index 0000000..1c55ee8
--- /dev/null
+++ b/Services/Redis/Models/access.py
@@ -0,0 +1,36 @@
+from typing import Optional
+from uuid import UUID
+from pydantic import field_validator
+
+from ApiLayers.AllConfigs.Redis.configs import RedisAuthKeys
+from Services.Redis.Models.row import BaseRedisModel
+
+
class AccessToken(BaseRedisModel):
    """Redis-backed access-token record, keyed as ``auth_key:token:user-uuid``."""

    auth_key: Optional[str] = RedisAuthKeys.AUTH
    accessToken: Optional[str] = None
    userUUID: Optional[str | UUID] = None

    @field_validator("userUUID", mode="after")
    def validate_uuid(cls, v):
        """Normalize a UUID instance to its string form (None passes through)."""
        return None if v is None else str(v)

    def to_list(self):
        """Ordered key parts used to build the delimited Redis key."""
        uuid_part = str(self.userUUID) if self.userUUID else None
        return [self.auth_key, self.accessToken, uuid_part]

    @property
    def count(self):
        """Number of key parts in this key layout."""
        return 3

    @property
    def delimiter(self):
        """Separator placed between key parts."""
        return ":"
diff --git a/Services/Redis/Models/base.py b/Services/Redis/Models/base.py
new file mode 100644
index 0000000..c58398e
--- /dev/null
+++ b/Services/Redis/Models/base.py
@@ -0,0 +1,310 @@
+"""
+Redis key-value operations with structured data handling.
+
+This module provides a class for managing Redis key-value operations with support for:
+- Structured data storage and retrieval
+- Key pattern generation for searches
+- JSON serialization/deserialization
+- Type-safe value handling
+"""
+
+import json
+from typing import Union, Dict, List, Optional, Any, ClassVar
+from Services.Redis.conn import redis_cli
+
+
class RedisKeyError(Exception):
    """Raised when a Redis key is missing, empty, or malformed."""
+
+
class RedisValueError(Exception):
    """Raised when a Redis value is absent, malformed, or of an unsupported type."""
+
+
class RedisRow:
    """
    Handles Redis key-value operations with structured data.

    This class provides methods for:
    - Managing compound keys with delimiters
    - Converting between bytes and string formats
    - JSON serialization/deserialization of values
    - Pattern generation for Redis key searches

    Attributes:
        key: The Redis key in bytes (or string) format
        value: The stored value as a JSON-serialized string
        delimiter: Character used to separate compound key parts
        expires_at: Optional expiry spec, e.g. {"days": 1, "seconds": 30}
        expires_at_string: Human-readable expiry timestamp, set by save()
    """

    # Class-level so classmethods (regex) can use it without an instance.
    delimiter: ClassVar[str] = ":"

    # Unit -> seconds conversion table used by get_expiry_time().
    _TIME_MULTIPLIERS: ClassVar[Dict[str, int]] = {
        "days": 86400,
        "hours": 3600,
        "minutes": 60,
        "seconds": 1,
    }

    def __init__(self) -> None:
        # Per-instance state. The previous revision kept these at class level,
        # which (a) shared one mutable expires_at dict across every instance
        # and (b) left key/value/expires_at_string unset until first use, so
        # parse()/keys raised AttributeError on a fresh row.
        self.key: Union[str, bytes] = b""
        self.value: Any = None
        self.expires_at: Optional[dict] = {"seconds": 60 * 60 * 30}
        self.expires_at_string: Optional[str] = None

    def get_expiry_time(self) -> Optional[int]:
        """Translate the expires_at spec into a total number of seconds.

        Returns:
            int: Total seconds across all units, or None when no expiry is set.
        """
        if not self.expires_at:
            return None
        return sum(
            int(self.expires_at.get(unit, 0)) * multiplier
            for unit, multiplier in self._TIME_MULTIPLIERS.items()
        )

    def merge(self, set_values: List[Union[str, bytes]]) -> None:
        """
        Merge a list of values into a single delimited key.

        None entries are skipped; bytes entries are decoded.

        Args:
            set_values: List of values to merge into the key.

        Raises:
            RedisKeyError: If set_values is empty.

        Example:
            >>> row = RedisRow()
            >>> row.merge(["users", "123", "profile"])
            >>> row.key
            b'users:123:profile'
        """
        if not set_values:
            raise RedisKeyError("Cannot merge empty list of values")

        parts = []
        for value in set_values:
            if value is None:
                continue
            if isinstance(value, bytes):
                value = value.decode()
            parts.append(str(value))

        self.key = self.delimiter.join(parts).encode()

    @classmethod
    def regex(cls, list_keys: List[Union[str, bytes, None]]) -> str:
        """
        Generate a Redis MATCH pattern from a list of key parts.

        A leading None prefixes "*"; a None anywhere else appends a trailing
        "*" (when no wildcard has been added yet).

        Args:
            list_keys: Key parts; None marks a wildcard position.

        Returns:
            str: Redis key pattern ("" when nothing usable was supplied).

        Example:
            >>> RedisRow.regex([None, "users", "active"])
            '*:users:active'
        """
        if not list_keys:
            return ""

        # Filter out wildcards and accidental "None" strings; decode bytes.
        valid_keys = []
        for key in list_keys:
            if key is None or str(key) == "None":
                continue
            if isinstance(key, bytes):
                key = key.decode()
            valid_keys.append(str(key))

        pattern = cls.delimiter.join(valid_keys)
        if not pattern:
            return ""

        # Leading wildcard when the first part was None.
        if list_keys[0] is None:
            pattern = f"*{cls.delimiter}{pattern}"
        # Trailing wildcard for a None elsewhere; use the class delimiter
        # rather than a hard-coded ":" (fixed).
        if "*" not in pattern and any(part is None for part in list_keys):
            pattern = f"{pattern}{cls.delimiter}*"
        return pattern

    def parse(self) -> List[str]:
        """
        Split the key into its component parts.

        Returns:
            List[str]: Key parts split by the delimiter ([] when key is unset).
        """
        if not self.key:
            return []

        key_str = self.key.decode() if isinstance(self.key, bytes) else self.key
        return key_str.split(self.delimiter)

    def feed(self, value: Union[bytes, Dict, List, str]) -> None:
        """
        Normalize value into a JSON string stored on self.value.

        Args:
            value: dict/list (serialized), bytes (must decode to valid JSON,
                round-tripped to normalize), or a raw string (stored as-is and
                NOT validated as JSON).

        Raises:
            RedisValueError: On unsupported types or undecodable JSON bytes.
        """
        try:
            if isinstance(value, (dict, list)):
                self.value = json.dumps(value)
            elif isinstance(value, bytes):
                # Round-trip through json to both validate and normalize.
                self.value = json.dumps(json.loads(value.decode()))
            elif isinstance(value, str):
                self.value = value
            else:
                raise RedisValueError(f"Unsupported value type: {type(value)}")
        except json.JSONDecodeError as e:
            raise RedisValueError(f"Invalid JSON format: {str(e)}")

    def modify(self, add_dict: Dict) -> None:
        """
        Merge add_dict into the stored dictionary and persist the result.

        Args:
            add_dict: Keys/values to overlay on the current data.

        Raises:
            RedisValueError: If add_dict or the stored data is not a dict.
        """
        if not isinstance(add_dict, dict):
            raise RedisValueError("modify() requires a dictionary argument")
        current_data = self.row if self.row else {}
        if not isinstance(current_data, dict):
            raise RedisValueError("Cannot modify non-dictionary data")
        self.feed({**current_data, **add_dict})
        self.save()

    def save(self):
        """
        Write the value to Redis, applying expires_at when configured.

        Returns:
            The stored value string.

        Raises:
            RedisKeyError: If key is not set.
            RedisValueError: If value is not set.
        """
        import arrow  # local import keeps arrow optional for read-only use

        if not self.key:
            raise RedisKeyError("Cannot save data without a key")
        if not self.value:
            raise RedisValueError("Cannot save empty data")

        if self.expires_at:
            expiry_seconds = self.get_expiry_time()  # compute once, use twice
            redis_cli.setex(
                name=self.redis_key, time=expiry_seconds, value=self.value
            )
            self.expires_at_string = str(
                arrow.now()
                .shift(seconds=expiry_seconds)
                .format("YYYY-MM-DD HH:mm:ss")
            )
            return self.value

        redis_cli.set(name=self.redis_key, value=self.value)
        self.expires_at = None
        self.expires_at_string = None
        return self.value

    def remove(self, key: str) -> None:
        """
        Remove a key from the stored dictionary and persist the result.

        Args:
            key: Key to remove from the stored dictionary.

        Raises:
            KeyError: If key doesn't exist.
            RedisValueError: If the stored value is not a dictionary.
        """
        current_data = self.row
        if not isinstance(current_data, dict):
            raise RedisValueError("Cannot remove key from non-dictionary data")

        try:
            current_data.pop(key)
        except KeyError:
            raise KeyError(f"Key '{key}' not found in stored data")
        self.feed(current_data)
        self.save()

    def delete(self) -> None:
        """Best-effort delete of this key from Redis (errors are printed)."""
        try:
            redis_cli.delete(self.redis_key)
        except Exception as e:
            print(f"Error deleting key: {str(e)}")

    @property
    def keys(self) -> str:
        """Key as a str (decoded when stored as bytes)."""
        return self.key.decode() if isinstance(self.key, bytes) else self.key

    def set_key(self, key: Union[str, bytes]) -> None:
        """
        Store key, coercing to bytes.

        Args:
            key: Key in string or bytes format.

        Raises:
            RedisKeyError: If key is falsy.
        """
        if not key:
            raise RedisKeyError("Cannot set empty key")
        self.key = key if isinstance(key, bytes) else str(key).encode()

    @property
    def redis_key(self) -> bytes:
        """Key as bytes, the form the Redis client expects."""
        return self.key if isinstance(self.key, bytes) else str(self.key).encode()

    @property
    def row(self) -> Union[Dict, List]:
        """
        Stored value deserialized from JSON.

        Raises:
            RedisValueError: If the stored value is not valid JSON.
        """
        try:
            return json.loads(self.value)
        except json.JSONDecodeError as e:
            raise RedisValueError(f"Invalid JSON format in stored value: {str(e)}")

    @property
    def as_dict(self) -> Dict[str, Any]:
        """Dictionary view: {"keys": <str key>, "value": <deserialized data>}."""
        return {
            "keys": self.keys,
            "value": self.row,
        }
diff --git a/Services/Redis/Models/cluster.py b/Services/Redis/Models/cluster.py
new file mode 100644
index 0000000..1724cc3
--- /dev/null
+++ b/Services/Redis/Models/cluster.py
@@ -0,0 +1,17 @@
+from Services.Redis.Models.row import BaseRedisModel
+
+
class RedisList(BaseRedisModel):
    """Single-part Redis key wrapper built on BaseRedisModel."""

    redis_key: str

    def to_list(self):
        """Ordered key parts — just the one key for this model."""
        return [self.redis_key]

    @property
    def count(self):
        """Number of key parts in this key layout."""
        return 1

    @property
    def delimiter(self):
        """Separator placed between key parts."""
        return ":"
diff --git a/Services/Redis/Models/response.py b/Services/Redis/Models/response.py
new file mode 100644
index 0000000..72497bf
--- /dev/null
+++ b/Services/Redis/Models/response.py
@@ -0,0 +1,68 @@
+from typing import Union, Dict, List, Optional, Any
+from Services.Redis.Models.base import RedisRow
+
+
class RedisResponse:
    """Envelope for Redis operation results (status, message, data, error)."""

    def __init__(
        self,
        status: bool,
        message: str,
        data: Any = None,
        error: Optional[str] = None,
    ):
        """
        Args:
            status: True on success.
            message: Human-readable result description.
            data: None, a RedisRow, a list of RedisRow, or a plain dict.
            error: Stringified exception, when one occurred.
        """
        self.status = status
        self.message = message
        self.data = data
        self.error = error

        # Classify the payload. None/dict/list are checked before RedisRow;
        # unknown types get "other" (previously data_type was simply unset).
        if data is None:
            self.data_type = None
        elif isinstance(data, dict):
            self.data_type = "dict"
        elif isinstance(data, list):
            self.data_type = "list"
        elif isinstance(data, RedisRow):
            self.data_type = "row"
        else:
            self.data_type = "other"

    def as_dict(self) -> Dict:
        """Flatten the response into one dict of row data plus metadata.

        Always returns a dict: the previous revision returned None for any
        payload other than RedisRow/list, and the list branch read the
        nonexistent RedisRow.data attribute (RedisRow exposes .row).
        """
        data = self.all
        main_dict = {
            "status": self.status,
            "message": self.message,
            "count": self.count,
            "dataType": getattr(self, "data_type", None),
        }
        if isinstance(data, list):
            payload = {row.keys: row.row for row in data}
        elif isinstance(data, dict):
            payload = dict(data)
        elif isinstance(data, RedisRow):
            payload = {data.keys: data.row}
        else:
            payload = {}
        payload.update(main_dict)
        return payload

    @property
    def all(self) -> Union[Optional[List[RedisRow]]]:
        """The raw payload, with None normalized to an empty list."""
        return self.data or []

    @property
    def count(self) -> int:
        """Number of rows in the payload (0 when empty; 1 for a single row/dict)."""
        rows = self.all
        if isinstance(rows, list):
            return len(rows)
        if isinstance(rows, dict):
            return 1 if rows else 0
        if isinstance(rows, RedisRow):
            return 1
        return 1 if rows else 0

    @property
    def first(self) -> Union[RedisRow, dict, None]:
        """First row's deserialized data, or None when there is no data.

        NOTE(review): mutating self.status inside a property is surprising,
        but the side effect is preserved for compatibility with callers.
        """
        if self.data:
            if isinstance(self.data, list):
                if isinstance(self.data[0], RedisRow):
                    return self.data[0].row
                return self.data[0]
            if isinstance(self.data, RedisRow):
                return self.data.row
        self.status = False
        return None
diff --git a/Services/Redis/Models/row.py b/Services/Redis/Models/row.py
new file mode 100644
index 0000000..b9fd12b
--- /dev/null
+++ b/Services/Redis/Models/row.py
@@ -0,0 +1,20 @@
+from abc import abstractmethod
+from pydantic import BaseModel
+
+
class BaseRedisModel(BaseModel):
    """Abstract base for pydantic models that map to delimited Redis keys.

    Subclasses supply the ordered key parts (to_list), how many parts exist
    (count) and the separator (delimiter). Abstract enforcement relies on
    pydantic's model metaclass being ABCMeta-based — presumably true for the
    pydantic version in use; confirm if upgrading.
    """

    @abstractmethod
    def to_list(self) -> list:
        """Return the ordered key parts used to build the Redis key."""

    # count/delimiter are implemented as properties by every subclass
    # (AccessToken, RedisList), so declare them as abstract properties
    # instead of plain methods to make the contract explicit.
    @property
    @abstractmethod
    def count(self) -> int:
        """Number of key parts in the key layout."""

    @property
    @abstractmethod
    def delimiter(self) -> str:
        """Separator placed between key parts."""
diff --git a/Services/Redis/__init__.py b/Services/Redis/__init__.py
new file mode 100644
index 0000000..3fe242a
--- /dev/null
+++ b/Services/Redis/__init__.py
@@ -0,0 +1,8 @@
+from Services.Redis.Actions.actions import RedisActions
+from Services.Redis.Models.access import AccessToken
+
+
+__all__ = [
+ "RedisActions",
+ "AccessToken",
+]
diff --git a/Services/Redis/conn.py b/Services/Redis/conn.py
new file mode 100644
index 0000000..d9e7d0d
--- /dev/null
+++ b/Services/Redis/conn.py
@@ -0,0 +1,25 @@
+from redis import Redis
+
+from ApiLayers.AllConfigs.Redis.configs import WagRedis
+
+
class RedisConn:
    """Owns a Redis client built from WagRedis config; pings on construction."""

    def __init__(self):
        # Fail fast: raise at construction time when the server is unreachable.
        self.redis = Redis(**WagRedis.as_dict())
        if not self.check_connection():
            raise Exception("Connection error")

    def check_connection(self):
        """Return the result of PING against the configured server."""
        return self.redis.ping()

    def set_connection(self, host, password, port, db):
        """Replace the client with one built from explicit parameters.

        NOTE(review): unlike __init__, this does not verify the new
        connection — confirm whether a ping is wanted here too.
        """
        self.redis = Redis(host=host, password=password, port=port, db=db)
        return self.redis
+
+
# Build the module-level client at import time. On failure, leave well-defined
# None sentinels so importers get a clear "redis_cli is None" failure instead
# of a NameError (previously neither name existed after a failed connect).
redis_conn = None
redis_cli = None
try:
    redis_conn = RedisConn()
    redis_cli = redis_conn.redis
except Exception as e:
    # Best-effort: downstream code must handle redis_cli being None.
    print("Redis Connection Error", e)
diff --git a/Services/Redis/howto.py b/Services/Redis/howto.py
new file mode 100644
index 0000000..728e88f
--- /dev/null
+++ b/Services/Redis/howto.py
@@ -0,0 +1,76 @@
import secrets
import random
from uuid import uuid4

from Services.Redis.Actions.actions import RedisActions
# Fix: AccessToken lives in Models/access.py; Models/row.py only defines
# BaseRedisModel, so the previous import raised ImportError.
from Services.Redis.Models.access import AccessToken
+
+
def generate_token(length=32):
    """Return a URL-safe random token containing only ASCII letters.

    Args:
        length: Number of random bytes fed to secrets.token_urlsafe; the
            resulting string is roughly 4/3 this long.

    Returns:
        str: Token with every non-letter character replaced by a random letter.
    """
    letters = "abcdefghijklmnopqrstuvwxyz"
    alphabet = letters + letters.upper()
    token = secrets.token_urlsafe(length)
    # Use secrets.choice, not random.choice: replacement characters in a
    # security token must come from a cryptographically strong source.
    return "".join(
        ch if ch in alphabet else secrets.choice(alphabet) for ch in token
    )
+
+
# Sample payload stored under a generated access-token key.
save_json = {
    "user": {
        "first_name": "John",
        "last_name": "Doe",
        "email": "johndoe@glu.com",
        "phone": "1234567890",
        "address": "1234 Main St",
        "details": {
            "city": "San Francisco",
            "state": "CA",
            "zip": "94111",
        },
    },
    "domain": "https://www.example.com",
    "info": {
        "mac": "oıuıouqqzxöç.işüğ",
        "version": "1.0.0",
        "type": "web",
        "device": "desktop",
    },
}

# access_object = AccessToken(
#     userUUID=str(uuid4()),
#     accessToken=generate_token(60)
# )
# redis_object = RedisActions.set_json(
#     list_keys=access_object.to_list(),
#     value=save_json,
#     expires={"seconds": 720}
# )
# quit()
# A previously stored token, formatted "<accessToken>:<userUUID>".
acc_token = "IuDXEzqzCSyOJvrwdjyxqGPOBnleUZjjXWsELJgUglJjyGhINOzAUpdMuzEzoTyOsJRUeEQsgXGUXrer:521a4ba7-898f-4204-a2e5-3226e1aea1e1"

userUUID = acc_token.split(":")[1]
accessToken = acc_token.split(":")[0]
# userUUID=None makes RedisRow.regex() add a trailing wildcard for the lookup.
access_object = AccessToken(userUUID=None, accessToken=accessToken)
print("access_object", access_object.to_list())
redis_object = RedisActions.get_json(
    list_keys=access_object.to_list(),
)
# print("type type(redis_object)", type(redis_object))
# print("type redis_object.data", type(redis_object.data))
# print("count", redis_object.count)
# print("data", redis_object.data)
# print("data", redis_object.as_dict())
# print("message", redis_object.message)
redis_row_object = redis_object.first
# NOTE(review): RedisResponse.first appears to return the deserialized dict
# (RedisRow.row), not the RedisRow itself, so .modify() below would fail with
# AttributeError — verify against Services/Redis/Models/response.py.
redis_row_object.modify({"reachable_event_list_id": [i for i in range(50)]})
# redis_row_object.remove("reachable_event_list_id")
# redis_row_object.modify({"reachable_event_list_id": [i for i in range(10)]})
# if redis_row_object:
# print("redis_row_object", redis_row_object.delete())
# print('redis_row_object.as_dict', redis_row_object.as_dict)
diff --git a/Services/__init__.py b/Services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Services/pagination.py b/Services/pagination.py
new file mode 100644
index 0000000..fe94e3f
--- /dev/null
+++ b/Services/pagination.py
@@ -0,0 +1,102 @@
+from typing import Any, Dict, List, Union, Protocol
+from dataclasses import dataclass
+from sqlalchemy.orm import Query
+
+from Services.PostgresDb.Models.response import PostgresResponse
+
+
class DataValidator(Protocol):
    """Structural protocol for validators used around pagination endpoints."""

    @staticmethod
    def validate_data(data: Any, cls_object: Any) -> None:
        """Validate data against cls_object; raise HTTPExceptionApi if invalid."""
        ...
+
+
@dataclass
class PaginationConfig:
    """
    Configuration for pagination settings.

    Attributes:
        page: Current page number (default: 1)
        size: Items per page (default: 10); values outside Pagination's
            MIN_SIZE..MAX_SIZE range are replaced with the default when
            applied via Pagination.change()
        order_field: Field to order by (default: "id")
        order_type: Order direction, "asc" or "desc" (default: "asc")
    """

    page: int = 1
    size: int = 10
    order_field: str = "id"
    order_type: str = "asc"
+
+
class Pagination:
    """
    Handles pagination logic for query results.

    Manages page size, current page, ordering, and calculates total pages
    and items based on the data source.

    Attributes:
        DEFAULT_SIZE: Default number of items per page (10)
        MIN_SIZE: Minimum allowed page size (10)
        MAX_SIZE: Maximum allowed page size (40)
    """

    DEFAULT_SIZE = 10
    MIN_SIZE = 10
    MAX_SIZE = 40

    def __init__(self):
        self.size: int = self.DEFAULT_SIZE
        self.page: int = 1
        self.order_field: str = "id"
        self.order_type: str = "asc"
        self.page_count: int = 1
        self.total_count: int = 0
        self.total_pages: int = 1

    def change(self, config: PaginationConfig) -> None:
        """Apply settings from config; out-of-range sizes fall back to default."""
        if self.MIN_SIZE <= config.size <= self.MAX_SIZE:
            self.size = config.size
        else:
            self.size = self.DEFAULT_SIZE
        self.page = config.page
        self.order_field = config.order_field
        self.order_type = config.order_type
        self._update_page_counts()

    def feed(self, data: Union[List[Any], PostgresResponse, Query]) -> None:
        """Derive totals from a list, a PostgresResponse, or a SQLAlchemy Query."""
        if isinstance(data, list):
            self.total_count = len(data)
        elif isinstance(data, PostgresResponse):
            self.total_count = data.count
        else:
            self.total_count = data.count()  # SQLAlchemy Query
        self._update_page_counts()

    def _update_page_counts(self) -> None:
        """Recompute total_pages, clamp page, and size the current page.

        page_count is the number of items actually on the current page: full
        size on interior pages, the remainder on the last page, and 0 when
        there is no data (the previous revision reported a full page for an
        empty data set).
        """
        self.total_pages = max(1, (self.total_count + self.size - 1) // self.size)
        self.page = max(1, min(self.page, self.total_pages))
        items_before = (self.page - 1) * self.size
        self.page_count = max(0, min(self.size, self.total_count - items_before))

    def as_dict(self) -> Dict[str, Any]:
        """Serialize pagination state with camelCase keys for API responses."""
        return {
            "size": self.size,
            "page": self.page,
            "totalCount": self.total_count,
            "totalPages": self.total_pages,
            "pageCount": self.page_count,
            "orderField": self.order_field,
            "orderType": self.order_type,
        }
diff --git a/Ztest/Dockerfile b/Ztest/Dockerfile
new file mode 100644
index 0000000..470e2b2
--- /dev/null
+++ b/Ztest/Dockerfile
@@ -0,0 +1,46 @@
# Use Python 3.11 as base image: the code base relies on PEP 604 union syntax
# in annotations (e.g. "int | None", "str | UUID") that is evaluated at import
# time and requires Python >= 3.10. NOTE(review): confirm pyproject.toml's
# python constraint allows 3.11.
FROM python:3.11-slim

# Set environment variables
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    PIP_NO_CACHE_DIR=1 \
    POETRY_VERSION=1.7.1 \
    POETRY_HOME="/opt/poetry" \
    POETRY_VIRTUALENVS_CREATE=false \
    PYTHONPATH=/app

# Add Poetry to PATH
ENV PATH="$POETRY_HOME/bin:$PATH"

# Install system dependencies
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
    curl \
    build-essential \
    libpq-dev \
    postgresql-client \
    && rm -rf /var/lib/apt/lists/*

# Install Poetry
RUN curl -sSL https://install.python-poetry.org | python3 -

# Set working directory
WORKDIR /app

# Copy project files
COPY pyproject.toml poetry.lock* ./

# Install dependencies
RUN poetry install --no-root --no-interaction --no-ansi

# Copy required directories
COPY Ztest/ ./Ztest/
COPY Services/ ./Services/
COPY AllConfigs/ ./AllConfigs/
COPY ApiLibrary/ ./ApiLibrary/
COPY ErrorHandlers/ ./ErrorHandlers/

# Set entrypoint for running tests
ENTRYPOINT ["poetry", "run", "pytest"]
CMD ["-v", "--cov=Services", "Ztest/"]
diff --git a/Ztest/fixtures.py b/Ztest/fixtures.py
new file mode 100644
index 0000000..4968857
--- /dev/null
+++ b/Ztest/fixtures.py
@@ -0,0 +1,13 @@
+"""Test fixtures and models."""
+
+from sqlalchemy import Column, String
+
+from Services.PostgresDb.Models.mixins import CrudCollection
+
+
class TestUser(CrudCollection):
    """Test user model for PostgreSQL tests.

    NOTE(review): shares __tablename__ "test_users" with Ztest.models.UserModel;
    importing both into one SQLAlchemy metadata will conflict — confirm intent.
    """

    # The class name starts with "Test": without this flag pytest tries to
    # collect the SQLAlchemy model as a test class.
    __test__ = False

    __tablename__ = "test_users"
    username = Column(String, unique=True, index=True)
    email = Column(String, unique=True, index=True)
diff --git a/Ztest/models.py b/Ztest/models.py
new file mode 100644
index 0000000..6cbe94a
--- /dev/null
+++ b/Ztest/models.py
@@ -0,0 +1,13 @@
+"""Test models."""
+
+from sqlalchemy import Column, String
+
+from Services.PostgresDb.Models.mixins import CrudCollection
+
+
class UserModel(CrudCollection):
    """User model for PostgreSQL tests.

    NOTE(review): __tablename__ "test_users" is also declared by
    Ztest.fixtures.TestUser; importing both into one SQLAlchemy metadata
    will conflict — confirm which module should own the table.
    """

    __tablename__ = "test_users"
    username = Column(String, unique=True, index=True)
    email = Column(String, unique=True, index=True)
diff --git a/Ztest/test.py b/Ztest/test.py
new file mode 100644
index 0000000..5a99df9
--- /dev/null
+++ b/Ztest/test.py
@@ -0,0 +1,38 @@
"""Smoke-test Redis set/get round-trips with two generated access tokens."""

import secrets
import uuid

from Services.Redis import RedisActions, AccessToken

first_user = AccessToken(
    accessToken=secrets.token_urlsafe(90),
    userUUID=str(uuid.uuid4()),
)
second_user = AccessToken(
    accessToken=secrets.token_urlsafe(90),
    userUUID=str(uuid.uuid4()),
)


def json_data(uu_id, access):
    """Build the session payload stored for a user (was a lambda; PEP 8 E731)."""
    return {
        "uu_id": uu_id,
        "access_token": access,
        "user_type": 1,
        "selected_company": None,
        "selected_occupant": None,
        "reachable_event_list_id": [],
    }


set_response_first_json = json_data(first_user.userUUID, first_user.accessToken)
set_response_second_json = json_data(second_user.userUUID, second_user.accessToken)
set_response_first = RedisActions.set_json(
    list_keys=first_user.to_list(),
    value=set_response_first_json,
    expires={"seconds": 140},
)

set_response_second = RedisActions.set_json(
    list_keys=second_user.to_list(),
    value=set_response_second_json,
    expires={"seconds": 190},
)
search_keys = [None, "*a*"]
get_response = RedisActions.get_json(list_keys=search_keys)
# NOTE(review): RedisResponse.as_dict() does not appear to emit a "data" key;
# verify this lookup against Services/Redis/Models/response.py.
print("get_response.all", get_response.as_dict()["data"].values())
diff --git a/Ztest/test_mongo.py b/Ztest/test_mongo.py
new file mode 100644
index 0000000..57ab629
--- /dev/null
+++ b/Ztest/test_mongo.py
@@ -0,0 +1,104 @@
+"""Test MongoDB actions and models."""
+
+import pytest
+from pymongo import MongoClient
+
+from Services.MongoDb.Models.actions import MongoActions
+from Services.MongoDb.Models.action_models.domain import (
+ DomainData,
+ DomainDocumentCreate,
+ DomainDocumentUpdate,
+)
+from AllConfigs.NoSqlDatabase.configs import MongoConfig
+
+
@pytest.fixture
def mongo_client():
    """Yield a MongoClient connected via MongoConfig.URL; closed on teardown."""
    # Connect using configured credentials
    client = MongoClient(MongoConfig.URL)
    client.admin.command("ping")  # Test connection: fail the fixture early if unreachable
    yield client
    client.close()
+
+
@pytest.fixture
def mongo_actions(mongo_client):
    """Yield a MongoActions bound to a test collection; drop it on teardown."""
    # NOTE(review): mongo_client raises on connection failure rather than
    # yielding a falsy value, so this guard likely never triggers — confirm.
    if not mongo_client:
        pytest.skip("MongoDB connection not available")

    actions = MongoActions(
        client=mongo_client,
        database=MongoConfig.DATABASE_NAME,
        company_uuid="test_company",
        storage_reason="domains",
    )
    yield actions
    try:
        # Cleanup after tests: drop the whole test collection so each test
        # starts from an empty state.
        if actions.collection is not None:
            actions.collection.drop()
    except Exception as e:
        print(f"Failed to cleanup test collection: {e}")
+
+
def test_mongo_crud_operations(mongo_actions: MongoActions):
    """Test CRUD operations with MongoActions.

    Steps are order-dependent: read uses the created doc, update and delete
    use the _id returned by the read.
    """

    # Create test data
    domain_data = DomainData(
        user_uu_id="test_user",
        main_domain="example.com",
        other_domains_list=["old.com"],
    )
    create_doc = DomainDocumentCreate(data=domain_data)

    # Test create
    result = mongo_actions.insert_one(create_doc.model_dump())
    assert result.inserted_id is not None

    # Test read
    doc = mongo_actions.find_one({"data.main_domain": "example.com"})
    assert doc is not None
    assert doc["data"]["main_domain"] == "example.com"

    # Test update
    update_data = DomainData(
        user_uu_id="test_user",
        main_domain="new.com",
        other_domains_list=["example.com", "old.com"],
    )
    update_doc = DomainDocumentUpdate(data=update_data)
    result = mongo_actions.update_one(
        {"_id": doc["_id"]}, {"$set": update_doc.model_dump()}
    )
    assert result.modified_count == 1

    # Test delete
    result = mongo_actions.delete_one({"_id": doc["_id"]})
    assert result.deleted_count == 1
+
+
def test_mongo_aggregate(mongo_actions: MongoActions):
    """Test aggregate operations with MongoActions.

    The mongo_actions fixture drops the collection after each test, so only
    the three documents inserted here should exist when grouping.
    """

    # Insert test documents: three domains, all owned by "user1".
    docs = [
        DomainDocumentCreate(
            data=DomainData(user_uu_id="user1", main_domain=f"domain{i}.com")
        ).model_dump()
        for i in range(3)
    ]
    mongo_actions.insert_many(docs)

    # Test aggregation: one group (user1) containing all three docs.
    pipeline = [{"$group": {"_id": "$data.user_uu_id", "count": {"$sum": 1}}}]
    result = mongo_actions.aggregate(pipeline)
    result_list = list(result)
    assert len(result_list) == 1
    assert result_list[0]["count"] == 3
+
+
+if __name__ == "__main__":
+ pytest.main([__file__, "-v"])
diff --git a/Ztest/test_postgres.py b/Ztest/test_postgres.py
new file mode 100644
index 0000000..8be5491
--- /dev/null
+++ b/Ztest/test_postgres.py
@@ -0,0 +1,99 @@
+"""Test PostgreSQL database operations."""
+
+import pytest
+from sqlalchemy import create_engine, text
+from sqlalchemy.orm import Session
+
+from Services.PostgresDb.database import Base, get_db
+from AllConfigs.SqlDatabase.configs import WagDatabase
+from Ztest.models import UserModel
+
+
+@pytest.fixture(scope="session")
+def db_engine():
+ """Create a test database engine."""
+ # Use the same database URL but with test database
+ test_db_url = WagDatabase.DATABASE_URL
+ engine = create_engine(test_db_url, echo=True)
+
+ # Create all tables
+ Base.metadata.create_all(bind=engine)
+
+ yield engine
+
+ # Drop all tables after tests
+ Base.metadata.drop_all(bind=engine)
+
+
@pytest.fixture
def db_session(db_engine):
    """Yield a Session bound to an outer transaction that is rolled back.

    Each test runs inside one transaction so its writes never persist.
    """
    connection = db_engine.connect()
    transaction = connection.begin()
    session = Session(bind=connection)

    yield session

    # Tear down in reverse order of creation: close the session BEFORE rolling
    # back and releasing the connection it is bound to (the previous revision
    # closed the connection first, leaving the session bound to a dead
    # connection when it was closed).
    session.close()
    transaction.rollback()
    connection.close()
+
+
def test_create_user(db_session):
    """Test creating a user in the database.

    Also checks the CrudCollection defaults: created_at set, deleted False,
    active True.
    """
    # Create user using CrudMixin methods
    user = UserModel(username="testuser", email="test@example.com")
    db_session.add(user)
    db_session.commit()

    # Verify user was created
    db_user = db_session.query(UserModel).filter_by(username="testuser").first()
    assert db_user is not None
    assert db_user.email == "test@example.com"
    assert db_user.created_at is not None
    assert not db_user.deleted
    assert db_user.active
+
+
def test_update_user(db_session):
    """Test updating a user via CrudCollection.update and checking updated_at."""
    # Create user
    user = UserModel(username="updateuser", email="update@example.com")
    db_session.add(user)
    db_session.commit()

    # Update user using CrudMixin methods
    user.update(db=db_session, email="newemail@example.com")
    db_session.commit()

    # Verify update: new email persisted and updated_at stamped.
    updated_user = db_session.query(UserModel).filter_by(username="updateuser").first()
    assert updated_user.email == "newemail@example.com"
    assert updated_user.updated_at is not None
+
+
def test_soft_delete_user(db_session):
    """Test soft deleting a user: the row remains but is flagged inactive."""
    # Create user
    user = UserModel(username="deleteuser", email="delete@example.com")
    db_session.add(user)
    db_session.commit()

    # Soft delete by updating deleted and active flags
    user.update(db=db_session, deleted=True, active=False)
    db_session.commit()

    # Verify soft deletion: row still queryable, flags flipped.
    deleted_user = db_session.query(UserModel).filter_by(username="deleteuser").first()
    assert deleted_user is not None
    assert deleted_user.deleted
    assert not deleted_user.active
+
+
def test_get_db_context_manager():
    """Test the get_db context manager yields a usable, query-capable session."""
    with get_db() as session:
        # Verify we can execute a simple query
        result = session.execute(text("SELECT 1"))
        assert result.scalar() == 1
diff --git a/api-docker-compose.yml b/api-docker-compose.yml
new file mode 100644
index 0000000..0976315
--- /dev/null
+++ b/api-docker-compose.yml
@@ -0,0 +1,57 @@
+services:
+
+ wag_management_auth_service:
+ container_name: wag_management_auth_service
+ # restart: on-failure
+ build:
+ context: .
+ dockerfile: ApiServices/AuthService/Dockerfile
+ ports:
+ - "1111:41575"
+ environment:
+ - PYTHONPATH=/service_app
+ volumes:
+ - auth_venv:/service_app/.venv
+ - auth_logs:/service_app/logs
+
+ wag_management_validation_service:
+ container_name: wag_management_validation_service
+ # restart: on-failure
+ build:
+ context: .
+ dockerfile: ApiServices/ValidationService/Dockerfile
+ ports:
+ - "1113:41575"
+ environment:
+ - PYTHONPATH=/service_app
+ volumes:
+ - validation_venv:/service_app/.venv
+ - validation_logs:/service_app/logs
+
+# wag_management_init_service:
+# container_name: wag_management_init_service
+# build:
+# context: .
+# dockerfile: service_app_init/Dockerfile
+
+# wag_management_event_service:
+# container_name: wag_management_event_service
+# # restart: on-failure
+# build:
+# context: .
+# dockerfile: ApiServices/EventService/Dockerfile
+# ports:
+# - "1112:41575"
+# environment:
+# - PYTHONPATH=/service_app
+# volumes:
+# - event_venv:/service_app/.venv
+# - event_logs:/service_app/logs
+
+volumes:
+ auth_venv:
+ event_venv:
+ validation_venv:
+ auth_logs:
+ event_logs:
+ validation_logs:
diff --git a/docker-compose-services.yml b/docker-compose-services.yml
new file mode 100644
index 0000000..ca3e83c
--- /dev/null
+++ b/docker-compose-services.yml
@@ -0,0 +1,32 @@
+services:
+ init-service:
+ build:
+ context: .
+ dockerfile: DockerApiServices/InitServiceApi/Dockerfile
+
+ auth-service:
+ build:
+ context: .
+ dockerfile: DockerApiServices/AuthServiceApi/Dockerfile
+ ports:
+ - "41575:41575"
+ depends_on:
+ - init-service
+
+ validation-service:
+ build:
+ context: .
+ dockerfile: DockerApiServices/ValidationServiceApi/Dockerfile
+ ports:
+ - "41577:41577"
+ depends_on:
+ - init-service
+
+ event-service:
+ build:
+ context: .
+ dockerfile: DockerApiServices/EventServiceApi/Dockerfile
+ ports:
+ - "41576:41576"
+ depends_on:
+ - init-service
diff --git a/docker-compose.test.yml b/docker-compose.test.yml
new file mode 100644
index 0000000..c3e696f
--- /dev/null
+++ b/docker-compose.test.yml
@@ -0,0 +1,8 @@
+services:
+ test:
+ build:
+ context: .
+ dockerfile: Ztest/Dockerfile
+ volumes:
+ - .:/app
+ network_mode: "host"
diff --git a/docs/architecture/system_architecture.md b/docs/architecture/system_architecture.md
new file mode 100644
index 0000000..e1d6d80
--- /dev/null
+++ b/docs/architecture/system_architecture.md
@@ -0,0 +1,203 @@
+# System Architecture
+
+## Core Services
+
+### Top-Level Services
+1. **AuthServiceApi**
+ - User authentication and authorization
+ - Token management
+ - Permission handling
+
+2. **EventServiceApi**
+ - Event processing and management
+ - Event routing and handling
+ - Event validation
+
+3. **ValidationServiceApi**
+ - Input validation
+ - Data verification
+ - Schema validation
+
+## System Components
+
+### AllConfigs
+Configuration management for various system components.
+
+| Category | Context | Dependencies |
+|----------|----------|--------------|
+| Email | configs, email_send_model | - |
+| NoSqlDatabase | configs | - |
+| Redis | configs | - |
+| SqlDatabase | configs | - |
+| Token | configs | - |
+
+### Schemas
+- SQL Alchemy schema definitions
+- Data models and relationships
+- Database structure definitions
+
+### ApiLibrary
+
+| Category | Description |
+|----------|-------------|
+| common | Error line number tracking |
+| date_time_actions | DateTime handling functions |
+| extensions | Password module and utilities |
+
+### ApiServices
+
+| Category | Context | Dependencies |
+|----------|----------|--------------|
+| Login | UserLoginModule | ApiLibrary, Schemas, ErrorHandlers, ApiValidations, ApiServices |
+| Token | TokenService | Services, Schemas, ApiLibrary, ErrorHandlers, AllConfigs, ApiValidations |
+
+### Services
+
+| Category | Dependencies |
+|----------|--------------|
+| Email | ApiLibrary, Schemas, ErrorHandlers, ApiValidations, ApiServices |
+| MongoDb | - |
+| PostgresDb | - |
+| Redis | - |
+
+### ErrorHandlers
+- ErrorHandlers: General error handling
+- Exceptions: Custom exception definitions
+
+### LanguageModels
+- Database: Database-related language models
+- Errors: Error message translations
+
+### ApiValidations
+- Custom: Custom validation rules
+- Request: Request validation logic
+
+## Testing Framework
+
+### Test Categories
+- AlchemyResponse pagination testing
+- Redis function testing
+- MongoDB function testing
+- Validation testing
+- Header testing
+- Auth function testing
+- Language testing
+- Property definition testing
+- SmartQuery testing
+
+### Error Categories
+- AlchemyError
+- ApiError
+- RedisError
+- MongoError
+- EmailError
+- Validation[Pydantic]
+
+## Alchemy Implementation Phases
+
+1. **BaseAlchemyNeed**
+ - Session management
+ - Core functionality
+
+2. **PlainModel**
+ - Basic model implementation
+
+3. **FilteredModel**
+ - Filter functionality
+
+4. **PaginatedModel**
+ - Pagination attributes
+ - Filter integration
+
+5. **LanguageModel**
+ - Function retrieval
+ - Header management
+
+6. **ResponseModel**
+ - Plain AlchemyClass
+ - Dictionary conversion
+ - Multiple response handling
+
+## System Layers
+
+1. **DependenciesLayer**
+ - External dependencies
+ - System requirements
+
+2. **ApplicationLayer**
+ - Core application logic
+ - Business rules
+
+3. **ServiceLayer**
+ - Service implementations
+ - API endpoints
+
+4. **TestLayer**
+ - Test suites
+ - Test utilities
+
+5. **DevLayer**
+ - Development tools
+ - Debug utilities
+
+6. **RootLayer**
+ - Main directory
+ - Configuration files
+ - Documentation
+
+## TODO Items
+
+1. **Event Directory Structure**
+ - Move to ApiEvents
+ - Docker file integration
+ - Import organization
+
+2. **MethodToEvent Renewal**
+ - Update implementation
+ - Improve flexibility
+
+3. **Endpoint Configuration**
+ - Remove unnecessary fields
+ - Streamline configuration
+
+4. **Middleware Organization**
+ - Consolidate into /TokenEventMiddleware/
+ - Standardize naming
+
+5. **Code Cleanup**
+ - Remove ActionsSchemaFactory
+ - Remove ActionsSchema
+ - Move endpoint_wrapper to Middleware.wrappers
+
+6. **Function Organization**
+ - Support sync/async functions
+ - Improve API function organization
+
+7. **Directory Structure**
+ - Consolidate AllConfigs, ApiLibrary, ErrorHandlers
+ - Move to RootLayer
+
+8. **Configuration Management**
+ - Update RouteFactoryConfig
+ - Update EndpointFactoryConfig
+ - Implement event validation interface
+
+9. **Language Model**
+ - Review Schemas.__language_model__
+ - Update implementation
+
+10. **Service Container**
+ - Review ApiServices
+ - Plan container migration
+
+11. **Language Defaults**
+ - Add to config
+ - Implement ["tr", "en"] as default
+
+## Notes
+
+- Redis implementation needs RedisRow class
+- Event validation needs database integration
+- Consider containerization for ApiServices
+- Review language model implementation
+- Test coverage needs improvement
diff --git a/docs/events/ReadMe.md b/docs/events/ReadMe.md
new file mode 100644
index 0000000..330b702
--- /dev/null
+++ b/docs/events/ReadMe.md
@@ -0,0 +1,135 @@
+# System Architecture Documentation
+
+## Core Components
+
+### 1. ClusterToMethod (Router)
+API router that groups related endpoints into logical clusters.
+
+**Key Components:**
+- `TAGS`: List of router categorization tags
+- `PREFIX`: Base URL prefix for all contained endpoints
+- `PAGEINFO`: Page metadata and UI information
+- `ENDPOINTS`: Collection of MethodEvent instances
+- `SUBCATEGORY`: Nested ClusterToMethod instances for hierarchical routing
+
+### 2. MethodToEvent (Endpoint Handler)
+Handles individual API endpoints and their event mappings.
+
+**Key Components:**
+- `EVENTS`: Collection of Event instances
+- `HEADER_LANGUAGE_MODELS`: Header localization
+- `ERROR_LANGUAGE_MODELS`: Error message localization
+- Endpoint metadata (URL, method, summary, description)
+- Endpoint callable with request validation
+
+### 3. Event (Business Logic)
+Represents individual business operations with validation.
+
+**Core Features:**
+- Unique `KEY_` identifier
+- Request/Response validation using PydanticModel
+- Callable business logic function
+- Language model integration
+
+## Data Flow & Storage
+
+### Redis Key Structure
+```
+CLUSTER_FUNCTION_CODES:{ClusterToMethod}
+└── {PageInfo, [FunctionCodes]}
+
+METHOD_FUNCTION_CODES:{ClusterToMethod}:MethodEvent:Endpoint
+└── [FunctionCodes]
+
+LANGUAGE_MODELS:*
+└── Localization data
+
+PAGE_MAPPER:{Type}:{BuildingID}:{UserID}
+└── PageInfo
+
+MENU_MAPPER:{Type}:{BuildingID}:{UserID}
+└── PageInfo
+```
+
+### Application Initialization Flow
+1. **Pages Iteration**
+ - Saves router/endpoint mappings
+ - Caches menu structure
+
+2. **Events Iteration**
+ - Stores endpoint information
+ - Caches validation schemas
+
+3. **Web Statics**
+ - Caches localization data
+ - Builds UI components
+
+### Request Flow
+```
+Request → Router(ClusterToMethod) → Endpoint(MethodEvent) → Event Handler
+ ↓
+ Validation
+ ↓
+ Business Logic
+```
+
+## Core Services
+
+### 1. ValidationService
+- Model validation handling
+- Schema caching
+- Language-specific validation
+- Redis-first validation lookup
+
+### 2. EventService
+- Event routing management
+- Function code mapping
+- User-specific event access
+- Login state management
+
+### 3. AuthService
+- User authentication
+- Event access control
+- User preferences (timezone, language)
+- Token management
+
+## Design Patterns
+
+### 1. Multi-layer Validation
+- Language model validation
+- Function code validation
+- User access validation
+- Request/Response schema validation
+
+### 2. Hierarchical Routing
+- ClusterToMethod → MethodEvent → Event
+- Nested routing via SUBCATEGORY
+- URL prefix inheritance
+
+### 3. Internationalization
+- Comprehensive language support
+- Cached translations
+- Header and error localization
+- Per-user language preferences
+
+## Cache Layer
+
+### Redis Categories
+```
+RedisCategoryKeys:
+├── LANGUAGE_MODELS
+├── VALIDATION_USER
+├── CLUSTER_FUNCTION_CODES
+├── METHOD_FUNCTION_CODES
+├── MENU_FIRST_LAYER
+├── PAGE_MAPPER
+├── MENU_MAPPER
+├── AUTH (Authorization)
+├── OCC (Occupant)
+└── EMP (Employee)
+```
+
+### Cache Invalidation
+- On login: User-specific caches
+- On language change: Localization caches
+- On permission change: Access control caches
\ No newline at end of file
diff --git a/docs/improvements/README.md b/docs/improvements/README.md
new file mode 100644
index 0000000..29b334e
--- /dev/null
+++ b/docs/improvements/README.md
@@ -0,0 +1,55 @@
+# Improvements Documentation
+
+This directory contains documentation and example implementations for various system improvements.
+
+## Directory Structure
+
+```
+improvements/
+├── detailed_improvement_plan.md # Overall improvement plan
+├── language_service/ # Language service implementation
+│ ├── backend/
+│ │ ├── language_service.py # Basic language service
+│ │ └── zod_messages.py # Zod validation messages
+│ └── frontend/
+│ └── languageService.ts # Frontend language service
+└── validation_service/ # Validation service implementation
+ ├── backend/
+ │ └── schema_converter.py # Pydantic to Zod converter
+ └── frontend/
+ └── dynamicSchema.ts # Dynamic Zod schema builder
+```
+
+## Components
+
+### Language Service
+The language service provides internationalization support with:
+- Backend API for serving translations
+- Frontend service for managing translations
+- Integration with Zod for validation messages
+
+### Validation Service
+The validation service provides dynamic form validation with:
+- Automatic conversion of Pydantic models to Zod schemas
+- Frontend builder for dynamic schema creation
+- Integration with language service for messages
+
+## Implementation Status
+
+These are example implementations that demonstrate the proposed improvements. To implement in the actual system:
+
+1. Create appropriate service directories
+2. Copy and adapt the code
+3. Add tests
+4. Update dependencies
+5. Integrate with existing systems
+
+## Next Steps
+
+1. Review the implementations
+2. Decide on integration approach
+3. Create implementation tickets
+4. Plan phased rollout
+5. Add monitoring and metrics
+
+For detailed implementation plans and timelines, see [detailed_improvement_plan.md](./detailed_improvement_plan.md).
diff --git a/docs/improvements/detailed_improvement_plan.md b/docs/improvements/detailed_improvement_plan.md
new file mode 100644
index 0000000..f26fecc
--- /dev/null
+++ b/docs/improvements/detailed_improvement_plan.md
@@ -0,0 +1,311 @@
+# Detailed Improvement Plan
+
+## 1. Infrastructure & Deployment
+
+### Service Isolation and Containerization
+- **Microservices Architecture**
+ ```
+ /services
+ ├── auth-service/
+ │ ├── Dockerfile
+ │ └── docker-compose.yml
+ ├── event-service/
+ │ ├── Dockerfile
+ │ └── docker-compose.yml
+ └── validation-service/
+ ├── Dockerfile
+ └── docker-compose.yml
+ ```
+- **Service Discovery**
+ - Implement Consul for service registry
+ - Add health check endpoints
+ - Create service mesh with Istio
+
+### API Gateway Implementation
+```yaml
+# api-gateway.yml
+services:
+ gateway:
+ routes:
+ - id: auth-service
+ uri: lb://auth-service
+ predicates:
+ - Path=/api/auth/**
+ filters:
+ - RateLimit=100,1s
+ - CircuitBreaker=3,10s
+```
+
+### Monitoring Stack
+- **Distributed Tracing**
+ ```python
+ from opentelemetry import trace
+ from opentelemetry.exporter import jaeger
+
+ tracer = trace.get_tracer(__name__)
+ with tracer.start_as_current_span("operation") as span:
+ span.set_attribute("attribute", value)
+ ```
+- **Metrics Collection**
+ - Prometheus for metrics
+ - Grafana for visualization
+ - Custom dashboards for each service
+
+### Configuration Management
+```python
+# config_service.py
+class ConfigService:
+ def __init__(self):
+ self.consul_client = Consul()
+
+ def get_config(self, service_name: str) -> Dict:
+ return self.consul_client.kv.get(f"config/{service_name}")
+
+ def update_config(self, service_name: str, config: Dict):
+ self.consul_client.kv.put(f"config/{service_name}", config)
+```
+
+## 2. Performance & Scaling
+
+### Enhanced Caching Strategy
+```python
+# redis_cache.py
+class RedisCache:
+ def __init__(self):
+ self.client = Redis(cluster_mode=True)
+
+ async def get_or_set(self, key: str, callback: Callable):
+ if value := await self.client.get(key):
+ return value
+ value = await callback()
+ await self.client.set(key, value, ex=3600)
+ return value
+```
+
+### Database Optimization
+```sql
+-- Sharding Example
+CREATE TABLE users_shard_1 PARTITION OF users
+ FOR VALUES WITH (modulus 3, remainder 0);
+CREATE TABLE users_shard_2 PARTITION OF users
+ FOR VALUES WITH (modulus 3, remainder 1);
+```
+
+### Event System Enhancement
+```python
+# event_publisher.py
+class EventPublisher:
+ def __init__(self):
+ self.kafka_producer = KafkaProducer()
+
+ async def publish(self, topic: str, event: Dict):
+ await self.kafka_producer.send(
+ topic,
+ value=event,
+ headers=[("version", "1.0")]
+ )
+```
+
+### Background Processing
+```python
+# job_processor.py
+class JobProcessor:
+ def __init__(self):
+ self.celery = Celery()
+ self.connection_pool = ConnectionPool(max_size=100)
+
+ @celery.task
+ async def process_job(self, job_data: Dict):
+ async with self.connection_pool.acquire() as conn:
+ await conn.execute(job_data)
+```
+
+## 3. Security & Reliability
+
+### API Security Enhancement
+```python
+# security.py
+class SecurityMiddleware:
+ def __init__(self):
+ self.rate_limiter = RateLimiter()
+ self.key_rotator = KeyRotator()
+
+ async def process_request(self, request: Request):
+ await self.rate_limiter.check(request.client_ip)
+ await self.key_rotator.validate(request.api_key)
+```
+
+### Error Handling System
+```python
+# error_handler.py
+class ErrorHandler:
+ def __init__(self):
+ self.sentry_client = Sentry()
+ self.circuit_breaker = CircuitBreaker()
+
+ async def handle_error(self, error: Exception):
+ await self.sentry_client.capture_exception(error)
+ await self.circuit_breaker.record_error()
+```
+
+### Testing Framework
+```python
+# integration_tests.py
+class IntegrationTests:
+ async def setup(self):
+ self.containers = await TestContainers.start([
+ "postgres", "redis", "kafka"
+ ])
+
+ async def test_end_to_end(self):
+ await self.setup()
+ # Test complete user journey
+ await self.cleanup()
+```
+
+### Audit System
+```python
+# audit.py
+class AuditLogger:
+ def __init__(self):
+ self.elastic = Elasticsearch()
+
+ async def log_action(
+ self,
+ user_id: str,
+ action: str,
+ resource: str,
+ changes: Dict
+ ):
+ await self.elastic.index({
+ "user_id": user_id,
+ "action": action,
+ "resource": resource,
+ "changes": changes,
+ "timestamp": datetime.utcnow()
+ })
+```
+
+## 4. Development Experience
+
+### Domain-Driven Design
+```
+/src
+├── domain/
+│ ├── entities/
+│ ├── value_objects/
+│ └── aggregates/
+├── application/
+│ ├── commands/
+│ └── queries/
+└── infrastructure/
+ ├── repositories/
+ └── services/
+```
+
+### API Documentation
+```python
+# main.py
+from fastapi import FastAPI
+from fastapi.openapi.utils import get_openapi
+
+app = FastAPI()
+
+def custom_openapi():
+ return get_openapi(
+ title="WAG Management API",
+ version="4.0.0",
+ description="Complete API documentation",
+ routes=app.routes
+ )
+
+app.openapi = custom_openapi
+```
+
+### Translation Management
+```python
+# i18n.py
+class TranslationService:
+ def __init__(self):
+ self.translations = {}
+ self.fallback_chain = ["tr", "en"]
+
+ async def get_translation(
+ self,
+ key: str,
+ lang: str,
+ fallback: bool = True
+ ) -> str:
+ if translation := self.translations.get(f"{lang}.{key}"):
+ return translation
+ if fallback:
+            for fallback_lang in self.fallback_chain:
+                if translation := self.translations.get(f"{fallback_lang}.{key}"):
+ return translation
+ return key
+```
+
+### Developer Tools
+```python
+# debug_toolkit.py
+class DebugToolkit:
+ def __init__(self):
+ self.profiler = cProfile.Profile()
+ self.debugger = pdb.Pdb()
+
+ def profile_function(self, func: Callable):
+ def wrapper(*args, **kwargs):
+ self.profiler.enable()
+ result = func(*args, **kwargs)
+ self.profiler.disable()
+ return result
+ return wrapper
+```
+
+## Implementation Priority
+
+1. **Phase 1 - Foundation** (1-2 months)
+ - Service containerization
+ - Basic monitoring
+ - API gateway setup
+ - Security enhancements
+
+2. **Phase 2 - Scaling** (2-3 months)
+ - Caching implementation
+ - Database optimization
+ - Event system upgrade
+ - Background jobs
+
+3. **Phase 3 - Reliability** (1-2 months)
+ - Error handling
+ - Testing framework
+ - Audit system
+ - Performance monitoring
+
+4. **Phase 4 - Developer Experience** (1-2 months)
+ - Documentation
+ - Development tools
+ - Translation system
+ - Code organization
+
+## Success Metrics
+
+- **Performance**
+ - Response time < 100ms for 95% of requests
+ - Cache hit rate > 80%
+ - Zero downtime deployments
+
+- **Reliability**
+ - 99.99% uptime
+ - < 0.1% error rate
+ - < 1s failover time
+
+- **Security**
+ - Zero critical vulnerabilities
+ - 100% audit log coverage
+ - < 1hr security incident response time
+
+- **Development**
+ - 80% test coverage
+ - < 24hr PR review time
+ - < 1 day developer onboarding
diff --git a/docs/improvements/language_service/backend/language_service.py b/docs/improvements/language_service/backend/language_service.py
new file mode 100644
index 0000000..bc1c4e8
--- /dev/null
+++ b/docs/improvements/language_service/backend/language_service.py
@@ -0,0 +1,6 @@
+# Original content from ApiEvents/LanguageServiceApi/language_service.py
+from typing import Dict, List, Optional
+from fastapi import APIRouter, Header
+from pydantic import BaseModel
+
+# ... rest of the file content ...
diff --git a/docs/improvements/language_service/backend/zod_messages.py b/docs/improvements/language_service/backend/zod_messages.py
new file mode 100644
index 0000000..daec599
--- /dev/null
+++ b/docs/improvements/language_service/backend/zod_messages.py
@@ -0,0 +1,7 @@
+# Original content from ApiEvents/LanguageServiceApi/zod_messages.py
+from typing import Dict
+from fastapi import APIRouter, Header
+from pydantic import BaseModel
+from typing import Optional
+
+# ... rest of the file content ...
diff --git a/docs/improvements/language_service/frontend/languageService.ts b/docs/improvements/language_service/frontend/languageService.ts
new file mode 100644
index 0000000..88cf8f0
--- /dev/null
+++ b/docs/improvements/language_service/frontend/languageService.ts
@@ -0,0 +1,4 @@
+// Original content from frontend/src/services/languageService.ts
+import axios from 'axios';
+
+// ... rest of the file content ...
diff --git a/docs/improvements/validation_service/backend/schema_converter.py b/docs/improvements/validation_service/backend/schema_converter.py
new file mode 100644
index 0000000..b8ca7d8
--- /dev/null
+++ b/docs/improvements/validation_service/backend/schema_converter.py
@@ -0,0 +1,9 @@
+# Original content from ApiEvents/ValidationServiceApi/schema_converter.py
+from typing import Dict, Any, Type, get_type_hints, get_args, get_origin
+from pydantic import BaseModel, Field, EmailStr
+from enum import Enum
+import inspect
+from fastapi import APIRouter
+from datetime import datetime
+
+# ... rest of the file content ...
diff --git a/docs/improvements/validation_service/backend/unified_schema_service.py b/docs/improvements/validation_service/backend/unified_schema_service.py
new file mode 100644
index 0000000..115a2d0
--- /dev/null
+++ b/docs/improvements/validation_service/backend/unified_schema_service.py
@@ -0,0 +1,149 @@
+from typing import Dict, Any, Type, Optional
+from pydantic import BaseModel
+from fastapi import APIRouter, Header
+
+
+class ValidationMessages(BaseModel):
+ """Messages for Zod validation"""
+
+ required: str
+ invalid_type: str
+ invalid_string: Dict[str, str] # email, url, etc
+ too_small: Dict[str, str] # string, array, number
+ too_big: Dict[str, str] # string, array, number
+ invalid_date: str
+ invalid_enum: str
+ custom: Dict[str, str]
+
+
+class SchemaField(BaseModel):
+ """Schema field definition"""
+
+ type: str
+ items: Optional[str] = None # For arrays
+ values: Optional[list] = None # For enums
+ validations: Optional[Dict[str, Any]] = None
+
+
+class SchemaDefinition(BaseModel):
+ """Complete schema definition"""
+
+ name: str
+ fields: Dict[str, SchemaField]
+ messages: ValidationMessages
+
+
+class UnifiedSchemaService:
+ def __init__(self):
+ self.messages = {
+ "tr": ValidationMessages(
+ required="Bu alan zorunludur",
+ invalid_type="Geçersiz tip",
+ invalid_string={
+ "email": "Geçerli bir e-posta adresi giriniz",
+ "url": "Geçerli bir URL giriniz",
+ "uuid": "Geçerli bir UUID giriniz",
+ },
+ too_small={
+ "string": "{min} karakterden az olamaz",
+ "array": "En az {min} öğe gereklidir",
+ "number": "En az {min} olmalıdır",
+ },
+ too_big={
+ "string": "{max} karakterden fazla olamaz",
+ "array": "En fazla {max} öğe olabilir",
+ "number": "En fazla {max} olabilir",
+ },
+ invalid_date="Geçerli bir tarih giriniz",
+ invalid_enum="Geçersiz seçim",
+ custom={
+ "password_match": "Şifreler eşleşmiyor",
+ "strong_password": "Şifre güçlü değil",
+ },
+ ),
+ "en": ValidationMessages(
+ required="This field is required",
+ invalid_type="Invalid type",
+ invalid_string={
+ "email": "Please enter a valid email",
+ "url": "Please enter a valid URL",
+ "uuid": "Please enter a valid UUID",
+ },
+ too_small={
+ "string": "Must be at least {min} characters",
+ "array": "Must contain at least {min} items",
+ "number": "Must be at least {min}",
+ },
+ too_big={
+ "string": "Must be at most {max} characters",
+ "array": "Must contain at most {max} items",
+ "number": "Must be at most {max}",
+ },
+ invalid_date="Please enter a valid date",
+ invalid_enum="Invalid selection",
+ custom={
+ "password_match": "Passwords do not match",
+ "strong_password": "Password is not strong enough",
+ },
+ ),
+ }
+
+ def get_schema_with_messages(
+ self, model: Type[BaseModel], lang: str = "tr"
+ ) -> SchemaDefinition:
+ """Get schema definition with validation messages"""
+ fields: Dict[str, SchemaField] = {}
+
+ for field_name, field in model.__fields__.items():
+ field_info = SchemaField(
+ type=self._get_field_type(field.outer_type_),
+ items=self._get_items_type(field.outer_type_),
+ values=self._get_enum_values(field.outer_type_),
+ validations=self._get_validations(field),
+ )
+ fields[field_name] = field_info
+
+ return SchemaDefinition(
+ name=model.__name__, fields=fields, messages=self.messages[lang]
+ )
+
+ def _get_field_type(self, type_: Type) -> str:
+ # Implementation similar to SchemaConverter
+ pass
+
+ def _get_items_type(self, type_: Type) -> Optional[str]:
+ # Implementation similar to SchemaConverter
+ pass
+
+ def _get_enum_values(self, type_: Type) -> Optional[list]:
+ # Implementation similar to SchemaConverter
+ pass
+
+ def _get_validations(self, field) -> Optional[Dict[str, Any]]:
+ # Implementation similar to SchemaConverter
+ pass
+
+
+router = APIRouter(prefix="/api/schema", tags=["Schema"])
+schema_service = UnifiedSchemaService()
+
+
+@router.get("/model/{model_name}")
+async def get_model_schema(
+ model_name: str, accept_language: Optional[str] = Header(default="tr")
+) -> SchemaDefinition:
+ """Get model schema with validation messages"""
+ # You'd need to implement model lookup
+ models = {
+ "User": UserModel,
+ "Product": ProductModel,
+ # Add your models here
+ }
+
+ if model_name not in models:
+ raise ValueError(f"Model {model_name} not found")
+
+ lang = accept_language.split(",")[0][:2]
+ return schema_service.get_schema_with_messages(
+ models[model_name], lang if lang in ["tr", "en"] else "tr"
+ )
diff --git a/docs/improvements/validation_service/frontend/dynamicSchema.ts b/docs/improvements/validation_service/frontend/dynamicSchema.ts
new file mode 100644
index 0000000..f6067e9
--- /dev/null
+++ b/docs/improvements/validation_service/frontend/dynamicSchema.ts
@@ -0,0 +1,6 @@
+// Original content from frontend/src/validation/dynamicSchema.ts
+import { z } from 'zod';
+import axios from 'axios';
+import { zodMessages } from './zodMessages';
+
+// ... rest of the file content ...
diff --git a/docs/improvements/validation_service/frontend/unifiedSchemaBuilder.ts b/docs/improvements/validation_service/frontend/unifiedSchemaBuilder.ts
new file mode 100644
index 0000000..ac783a1
--- /dev/null
+++ b/docs/improvements/validation_service/frontend/unifiedSchemaBuilder.ts
@@ -0,0 +1,219 @@
+import { z } from 'zod';
+import axios from 'axios';
+
+interface ValidationMessages {
+ required: string;
+ invalid_type: string;
+  invalid_string: Record<string, string>;
+  too_small: Record<string, string>;
+  too_big: Record<string, string>;
+  invalid_date: string;
+  invalid_enum: string;
+  custom: Record<string, string>;
+}
+
+interface SchemaField {
+ type: string;
+ items?: string;
+ values?: any[];
+  validations?: Record<string, any>;
+}
+
+interface SchemaDefinition {
+ name: string;
+  fields: Record<string, SchemaField>;
+ messages: ValidationMessages;
+}
+
+class UnifiedSchemaBuilder {
+ private static instance: UnifiedSchemaBuilder;
+  private schemaCache: Map<string, z.ZodSchema> = new Map();
+
+ private constructor() {}
+
+ static getInstance(): UnifiedSchemaBuilder {
+ if (!UnifiedSchemaBuilder.instance) {
+ UnifiedSchemaBuilder.instance = new UnifiedSchemaBuilder();
+ }
+ return UnifiedSchemaBuilder.instance;
+ }
+
+  async getSchema(modelName: string): Promise<z.ZodSchema> {
+ // Check cache first
+ if (this.schemaCache.has(modelName)) {
+ return this.schemaCache.get(modelName)!;
+ }
+
+ // Fetch schema definition with messages from backend
+    const response = await axios.get<SchemaDefinition>(
+ `/api/schema/model/${modelName}`,
+ {
+ headers: {
+ 'Accept-Language': navigator.language || 'tr'
+ }
+ }
+ );
+
+ const schema = this.buildSchema(response.data);
+ this.schemaCache.set(modelName, schema);
+ return schema;
+ }
+
+ private buildSchema(definition: SchemaDefinition): z.ZodSchema {
+    const shape: Record<string, z.ZodTypeAny> = {};
+
+ for (const [fieldName, field] of Object.entries(definition.fields)) {
+ shape[fieldName] = this.buildField(field, definition.messages);
+ }
+
+ return z.object(shape);
+ }
+
+ private buildField(
+ field: SchemaField,
+ messages: ValidationMessages
+ ): z.ZodTypeAny {
+ let zodField: z.ZodTypeAny;
+
+ switch (field.type) {
+ case 'string':
+ zodField = z.string({
+ required_error: messages.required,
+ invalid_type_error: messages.invalid_type
+ });
+ break;
+ case 'email':
+ zodField = z.string().email(messages.invalid_string.email);
+ break;
+ case 'number':
+ zodField = z.number({
+ required_error: messages.required,
+ invalid_type_error: messages.invalid_type
+ });
+ break;
+ case 'boolean':
+ zodField = z.boolean({
+ required_error: messages.required,
+ invalid_type_error: messages.invalid_type
+ });
+ break;
+ case 'date':
+ zodField = z.date({
+ required_error: messages.required,
+ invalid_type_error: messages.invalid_date
+ });
+ break;
+ case 'array':
+ zodField = z.array(
+ this.buildField({ type: field.items! }, messages)
+ );
+ break;
+ case 'enum':
+ zodField = z.enum(field.values as [string, ...string[]], {
+ required_error: messages.required,
+ invalid_type_error: messages.invalid_enum
+ });
+ break;
+ default:
+ zodField = z.any();
+ }
+
+ // Apply validations if any
+ if (field.validations) {
+ zodField = this.applyValidations(zodField, field.validations, messages);
+ }
+
+ return zodField;
+ }
+
+ private applyValidations(
+ field: z.ZodTypeAny,
+    validations: Record<string, any>,
+ messages: ValidationMessages
+ ): z.ZodTypeAny {
+ let result = field;
+
+ if ('min_length' in validations) {
+ result = (result as z.ZodString).min(
+ validations.min_length,
+ messages.too_small.string.replace(
+ '{min}',
+ validations.min_length.toString()
+ )
+ );
+ }
+
+ if ('max_length' in validations) {
+ result = (result as z.ZodString).max(
+ validations.max_length,
+ messages.too_big.string.replace(
+ '{max}',
+ validations.max_length.toString()
+ )
+ );
+ }
+
+ if ('pattern' in validations) {
+ result = (result as z.ZodString).regex(
+ new RegExp(validations.pattern),
+ messages.custom[validations.pattern_message] || 'Invalid format'
+ );
+ }
+
+ if ('gt' in validations) {
+ result = (result as z.ZodNumber).gt(
+ validations.gt,
+ messages.too_small.number.replace(
+ '{min}',
+ (validations.gt + 1).toString()
+ )
+ );
+ }
+
+ if ('lt' in validations) {
+ result = (result as z.ZodNumber).lt(
+ validations.lt,
+ messages.too_big.number.replace(
+ '{max}',
+ (validations.lt - 1).toString()
+ )
+ );
+ }
+
+ return result;
+ }
+}
+
+// Export singleton instance
+export const schemaBuilder = UnifiedSchemaBuilder.getInstance();
+
+// Usage example:
+/*
+import { schemaBuilder } from './validation/unifiedSchemaBuilder';
+import { zodResolver } from '@hookform/resolvers/zod';
+import { useForm } from 'react-hook-form';
+
+function UserForm() {
+ const [schema, setSchema] = useState(null);
+
+ useEffect(() => {
+ async function loadSchema() {
+ const userSchema = await schemaBuilder.getSchema('User');
+ setSchema(userSchema);
+ }
+ loadSchema();
+ }, []);
+
+ const form = useForm({
+ resolver: schema ? zodResolver(schema) : undefined
+ });
+
+  if (!schema)
+    return <div>Loading...</div>;
+
+  return (
+    <form>{/* form fields validated via zodResolver(schema) */}</form>
+  );
+}
+*/
diff --git a/docs/method_event_system.md b/docs/method_event_system.md
new file mode 100644
index 0000000..447b077
--- /dev/null
+++ b/docs/method_event_system.md
@@ -0,0 +1,229 @@
+# MethodToEvent System Documentation
+
+## Overview
+The MethodToEvent system provides a unified way to manage API endpoints and frontend menu structure with built-in permission handling. It uses UUIDs for permission management and supports hierarchical menu structures.
+
+## Core Components
+
+### 1. MethodToEvent Base Class
+Base class for defining event methods with API endpoints and frontend page configuration.
+
+#### Class Variables
+- `action_key`: Unique identifier for the action
+- `event_type`: Type of event (e.g., 'query', 'command')
+- `event_description`: Human-readable description
+- `event_category`: Category for grouping
+- `__event_keys__`: UUID to event name mapping
+- `__event_validation__`: Validation rules
+- `__endpoint_config__`: API endpoint configuration
+- `__page_info__`: Frontend page configuration
+
+#### Methods
+
+##### Configure API Endpoints
+```python
+@classmethod
+def register_endpoint(
+ cls,
+ event_uuid: str,
+ path: str,
+ method: str = "POST",
+ response_model: Optional[Type] = None,
+ **kwargs
+) -> None
+```
+Registers an API endpoint for an event UUID.
+
+##### Configure Router
+```python
+@classmethod
+def configure_router(cls, prefix: str, tags: List[str]) -> None
+```
+Sets the router prefix and OpenAPI tags.
+
+##### Configure Page
+```python
+@classmethod
+def configure_page(
+ cls,
+ name: str,
+ title: Dict[str, str],
+ icon: str,
+ url: str,
+ component: Optional[str] = None,
+ parent: Optional[str] = None
+) -> None
+```
+Configures frontend page information.
+
+##### Get Page Info with Permissions
+```python
+@classmethod
+def get_page_info_with_permissions(
+ cls,
+ user_permission_uuids: Set[str],
+ include_endpoints: bool = False
+) -> Optional[Dict[str, Any]]
+```
+Returns page info if user has required permissions.
+
+### 2. EventMethodRegistry
+Singleton registry for managing all MethodToEvent classes and building menu structures.
+
+#### Methods
+
+##### Register Method Class
+```python
+@classmethod
+def register_method_class(cls, method_class: Type[MethodToEvent]) -> None
+```
+Registers a MethodToEvent class in the registry.
+
+##### Get All Menu Items
+```python
+@classmethod
+def get_all_menu_items(
+ cls,
+ user_permission_uuids: Set[str],
+ include_endpoints: bool = False
+) -> List[Dict[str, Any]]
+```
+Returns complete menu structure based on permissions.
+
+##### Get Available Endpoints
+```python
+@classmethod
+def get_available_endpoints(
+ cls,
+ user_permission_uuids: Set[str]
+) -> Dict[str, Dict[str, Any]]
+```
+Returns all available API endpoints based on permissions.
+
+## Example Usage
+
+### 1. Define Event Methods
+```python
+class AccountEventMethods(MethodToEvent):
+ event_category = "account"
+ event_type = "query"
+ event_description = "Account management operations"
+ __event_keys__ = {
+ "uuid1": "view_account",
+ "uuid2": "edit_account"
+ }
+
+ # Configure API
+ configure_router("/api/account", ["Account"])
+ register_endpoint(
+ "uuid1",
+ "/view",
+ method="GET",
+ response_model=AccountResponse
+ )
+
+ # Configure frontend
+ configure_page(
+ name="AccountPage",
+ title={"tr": "Hesaplar", "en": "Accounts"},
+ icon="User",
+ url="/account"
+ )
+
+class AccountDetailsEventMethods(MethodToEvent):
+ event_category = "account_details"
+ __event_keys__ = {
+ "uuid3": "view_details",
+ "uuid4": "edit_details"
+ }
+
+ configure_page(
+ name="AccountDetailsPage",
+ title={"tr": "Hesap Detayları", "en": "Account Details"},
+ icon="FileText",
+ url="/account/details",
+ parent="AccountPage" # Link to parent
+ )
+```
+
+### 2. Register and Use
+```python
+# Register classes
+registry = EventMethodRegistry()
+registry.register_method_class(AccountEventMethods)
+registry.register_method_class(AccountDetailsEventMethods)
+
+# Get menu structure
+user_permissions = {"uuid1", "uuid2", "uuid3"}
+menu_items = registry.get_all_menu_items(user_permissions, include_endpoints=True)
+```
+
+## Menu Structure Rules
+
+1. **Parent-Child Visibility**
+ - Parent page must have permissions to be visible
+ - If parent is not visible, children are never shown
+ - If parent is visible, all children are shown
+
+2. **Permission Checking**
+ - Based on UUID intersection
+ - Page is visible if user has any of its event UUIDs
+ - Endpoints only included if user has specific permission
+
+3. **Menu Organization**
+ - Automatic tree structure based on parent field
+ - Sorted by name for consistency
+ - Optional endpoint information included
+
+## Example Menu Structure
+```python
+[
+ {
+ "name": "AccountPage",
+ "title": {"tr": "Hesaplar", "en": "Accounts"},
+ "icon": "User",
+ "url": "/account",
+ "category": "account",
+ "type": "query",
+ "description": "Account management operations",
+ "available_endpoints": {
+ "uuid1": {"path": "/api/account/view", "method": "GET"},
+ "uuid2": {"path": "/api/account/edit", "method": "POST"}
+ },
+ "items": [
+ {
+ "name": "AccountDetailsPage",
+ "title": {"tr": "Hesap Detayları", "en": "Account Details"},
+ "icon": "FileText",
+ "url": "/account/details",
+ "parent": "AccountPage",
+ "available_endpoints": {
+ "uuid3": {"path": "/api/account/details/view", "method": "GET"}
+ }
+ }
+ ]
+ }
+]
+```
+
+## Best Practices
+
+1. **UUID Management**
+ - Use consistent UUIDs across the system
+ - Document UUID meanings and permissions
+ - Group related permissions under same parent
+
+2. **Page Organization**
+ - Use meaningful page names
+ - Provide translations for all titles
+ - Keep URL structure consistent with hierarchy
+
+3. **API Endpoints**
+ - Use consistent router prefixes
+ - Group related endpoints under same router
+ - Use appropriate HTTP methods
+
+4. **Permission Structure**
+ - Design permissions hierarchically
+ - Consider access patterns when grouping
+ - Document permission requirements
diff --git a/docs/notes/README.md b/docs/notes/README.md
new file mode 100644
index 0000000..a3b85b7
--- /dev/null
+++ b/docs/notes/README.md
@@ -0,0 +1,42 @@
+# Development Notes
+
+This directory contains development notes and documentation organized by topic and date.
+
+## Structure
+
+- Each note is stored as a markdown file
+- Files are organized by topic in subdirectories
+- File naming format: `YYYY-MM-DD_topic_name.md`
+- Each note includes:
+ - Date
+ - Topic/Category
+ - Content
+ - Related files/components
+ - Action items (if any)
+
+## How to Add Notes
+
+1. Create a new markdown file with the date prefix
+2. Use the standard note template
+3. Place in appropriate topic directory
+4. Link related notes if applicable
+
+## Note Template
+
+```markdown
+# [Topic] - [Date]
+
+## Overview
+Brief description of the topic/issue
+
+## Details
+Main content of the note
+
+## Related
+- Links to related files/components
+- References to other notes
+
+## Action Items
+- [ ] Todo items if any
+- [ ] Next steps
+```
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..bbe7404
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,76 @@
+[tool.poetry]
+name = "wag-management-api-services"
+version = "0.1.0"
+description = "WAG Management API Services"
+authors = ["Karatay Berkay"]
+
+[tool.poetry.dependencies]
+python = "^3.12"
+# FastAPI and Web
+fastapi = "^0.104.1"
+uvicorn = "^0.24.0"
+pydantic = "^2.5.2"
+
+# MongoDB
+motor = "3.3.2" # Pinned version
+pymongo = "4.5.0" # Pinned version to match motor
+
+# PostgreSQL
+sqlalchemy = "^2.0.23"
+sqlalchemy-mixins = "^2.0.5"
+psycopg2-binary = "^2.9.9"
+
+# Redis
+redis = "^5.0.1"
+arrow = "^1.3.0"
+
+# Email
+redmail = "^0.6.0"
+
+# Testing
+pytest = "^7.4.3"
+pytest-asyncio = "^0.21.1"
+pytest-cov = "^4.1.0"
+
+# Utilities
+python-dateutil = "^2.8.2"
+typing-extensions = "^4.8.0"
+
+[tool.poetry.group.dev.dependencies]
+black = "^23.11.0"
+isort = "^5.12.0"
+mypy = "^1.7.1"
+flake8 = "^6.1.0"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.black]
+line-length = 88
+target-version = ['py312']  # keep in sync with python = "^3.12" above
+include = '\.pyi?$'
+
+[tool.isort]
+profile = "black"
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
+line_length = 88
+
+[tool.mypy]
+python_version = "3.12"  # keep in sync with python = "^3.12" above
+warn_return_any = true
+warn_unused_configs = true
+disallow_untyped_defs = true
+check_untyped_defs = true
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "-ra -q --cov=Services"
+testpaths = [
+ "Ztest",
+]
+python_files = ["test_*.py"]
+asyncio_mode = "auto"
diff --git a/scripts/dev.sh b/scripts/dev.sh
new file mode 100755
index 0000000..b2af983
--- /dev/null
+++ b/scripts/dev.sh
@@ -0,0 +1,154 @@
+#!/bin/bash
+
+# Colors for pretty output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+# Error handling
+set -e
+trap 'last_command=$current_command; current_command=$BASH_COMMAND' DEBUG
+trap 'echo -e "${RED}\"${last_command}\" command failed with exit code $?.${NC}"' EXIT
+
+# Logging function
+log() {
+ echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1"
+}
+
+# Check if a command exists
+check_command() {
+ if ! command -v "$1" &> /dev/null; then
+ echo -e "${YELLOW}$1 not found${NC}"
+ return 1
+ fi
+ return 0
+}
+
+# Check system requirements
+check_requirements() {
+ log "Checking system requirements..."
+
+ # Check Python version
+ if ! check_command python3; then
+ log "${RED}Python 3 is required but not found${NC}"
+ exit 1
+ fi
+
+ # Check if Docker Compose is installed
+ if ! check_command docker-compose; then
+ log "${YELLOW}Warning: Docker Compose not found. You'll need it later for running the application${NC}"
+ fi
+
+ log "${GREEN}System requirements check completed${NC}"
+}
+
+# Check if poetry is installed
+check_poetry() {
+ if ! command -v poetry &> /dev/null; then
+ log "${YELLOW}Poetry not found. Installing...${NC}"
+ curl -sSL https://install.python-poetry.org | python3 -
+ fi
+}
+
+# Setup development environment
+setup_dev() {
+ log "Setting up development environment..."
+ check_requirements
+ check_poetry
+ poetry install
+ log "${GREEN}Development environment setup complete!${NC}"
+}
+
+# Format code
+format_code() {
+ log "Formatting code..."
+ poetry run black Services/
+ poetry run isort Services/
+}
+
+# Run type checking
+check_types() {
+ log "Running type checks..."
+ poetry run mypy Services/
+}
+
+# Run linting
+lint_code() {
+ log "Running linter..."
+ poetry run flake8 Services/
+}
+
+# Run all checks
+check_all() {
+ format_code
+ lint_code
+ check_types
+}
+
+# Clean up pyc files and cache
+clean() {
+ log "Cleaning up python cache files..."
+ find . -type f -name "*.pyc" -delete
+ find . -type d -name "__pycache__" -delete
+ find . -type d -name ".pytest_cache" -delete
+ find . -type d -name ".mypy_cache" -delete
+}
+
+# Update dependencies
+update_deps() {
+ log "Updating dependencies..."
+ poetry update
+}
+
+# Show help
+show_help() {
+ echo -e "${GREEN}Available commands:${NC}"
+ echo "setup - Setup development environment"
+ echo "check-req - Check system requirements"
+ echo "format - Format code with black and isort"
+ echo "lint - Run flake8 linter"
+ echo "types - Run mypy type checker"
+ echo "check - Run all checks (format, lint, types)"
+ echo "clean - Clean up cache files"
+ echo "update - Update dependencies"
+}
+
+# Main command handler
+case "$1" in
+ "setup")
+ setup_dev
+ ;;
+ "check-req")
+ check_requirements
+ ;;
+ "format")
+ format_code
+ ;;
+ "lint")
+ lint_code
+ ;;
+ "types")
+ check_types
+ ;;
+ "check")
+ check_all
+ ;;
+ "clean")
+ clean
+ ;;
+ "update")
+ update_deps
+ ;;
+ "help"|"")
+ show_help
+ ;;
+ *)
+ echo -e "${RED}Unknown command: $1${NC}"
+ show_help
+ exit 1
+ ;;
+esac
+
+# Remove error handling trap
+trap - EXIT
diff --git a/trash/Models_old/alchemy_response.py b/trash/Models_old/alchemy_response.py
new file mode 100644
index 0000000..78b282f
--- /dev/null
+++ b/trash/Models_old/alchemy_response.py
@@ -0,0 +1,372 @@
+"""
+Response handlers for SQLAlchemy query results with pagination support.
+
+This module provides a set of response classes for handling different types of data:
+- Single PostgreSQL records
+- Multiple SQLAlchemy records
+- List data
+- Dictionary data
+
+Each response includes pagination information and supports data transformation
+through response models.
+"""
+
+from __future__ import annotations
+from typing import Any, Dict, List, Optional, Type, TypeVar, Protocol, Generic
+from dataclasses import dataclass
+
+from fastapi import status
+from fastapi.responses import JSONResponse
+
+from ApiLibrary.common.line_number import get_line_number_for_error
+from Services.PostgresDb.Models.response import PostgresResponse
+from ErrorHandlers.ErrorHandlers.api_exc_handler import HTTPExceptionApi
+from Services.pagination import Pagination, PaginationConfig
+
+
+T = TypeVar("T")
+DataT = TypeVar("DataT")
+
+
+@dataclass
+class ResponseConfig(Generic[T]):
+ """Configuration for response formatting.
+
+ Attributes:
+ status_code: HTTP status code (default: "HTTP_200_OK")
+ message: Response message to include in the response
+ completed: Operation completion status flag
+ cls_object: Class object for error handling context
+ response_model: Optional response model class for data transformation
+ """
+
+ status_code: str = "HTTP_200_OK"
+ message: str = ""
+ completed: bool = True
+ cls_object: Optional[Any] = None
+ response_model: Optional[Type[T]] = None
+
+
+class ResponseProtocol(Protocol):
+ """Protocol defining required methods for response models."""
+
+ def dump(self) -> Dict[str, Any]:
+ """Convert model to dictionary format."""
+ ...
+
+
+class BaseJsonResponse(Generic[T]):
+ """Base class for JSON response handling.
+
+ Provides common functionality for all response types including:
+ - Response formatting with consistent structure
+ - Pagination handling and configuration
+ - Data transformation through response models
+ """
+
+ def __init__(
+ self,
+ message: str,
+ result: Any,
+ response_model: Optional[Type[T]] = None,
+ status_code: str = "HTTP_200_OK",
+ completed: bool = True,
+ cls_object: Optional[Any] = None,
+ filter_attributes: Optional[Any] = None,
+ ) -> None:
+ """Initialize response handler.
+
+ Args:
+ message: Response message
+ result: Query result or data
+ response_model: Optional model for data transformation
+ status_code: HTTP status code
+ completed: Operation completion status
+ cls_object: Class object for error context
+ filter_attributes: Optional pagination and filtering attributes
+ """
+ self.status_code = getattr(status, status_code, status.HTTP_200_OK)
+ self.message = message
+ self.completed = completed
+ self.filter_attributes = filter_attributes
+ self.response_model = response_model
+ self.cls_object = cls_object
+ self.result = result
+
+ def _create_pagination(self) -> Pagination:
+ """Create and configure pagination instance.
+
+ Returns:
+ Configured Pagination instance
+ """
+ pagination = Pagination()
+ if self.filter_attributes:
+ pagination.change(
+ PaginationConfig(
+ page=self.filter_attributes.page,
+ size=self.filter_attributes.size,
+ order_field=self.filter_attributes.order_field,
+ order_type=self.filter_attributes.order_type,
+ )
+ )
+ return pagination
+
+ def _format_response(self, pagination: Pagination, data: Any) -> JSONResponse:
+ """Format final JSON response with pagination.
+
+ Args:
+ pagination: Pagination instance with configuration
+ data: Response data to include
+
+ Returns:
+ Formatted JSONResponse
+ """
+ return JSONResponse(
+ status_code=self.status_code,
+ content={
+ "pagination": pagination.as_dict(),
+ "completed": self.completed,
+ "message": self.message,
+ "data": data,
+ },
+ )
+
+ def _transform_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
+ """Transform data using response model if provided.
+
+ Args:
+ data: Raw data dictionary
+
+ Returns:
+ Transformed data dictionary
+ """
+ if self.response_model:
+ return self.response_model(**data).dump()
+ return data
+
+ @staticmethod
+ def _validate_data(data: Any, expected_type: Type, cls_object: Any) -> None:
+ """Validate data type and raise exception if invalid.
+
+ Args:
+ data: Data to validate
+ expected_type: Expected type of data
+ cls_object: Class object for error context
+
+ Raises:
+ HTTPExceptionApi: If data type is invalid
+ """
+ if not isinstance(data, expected_type):
+ raise HTTPExceptionApi(
+ lang=cls_object.lang,
+ error_code="HTTP_400_BAD_REQUEST",
+ loc=get_line_number_for_error(),
+ sys_msg=f"Invalid data type: {type(data)}",
+ )
+
+
+class SinglePostgresResponse(BaseJsonResponse[T]):
+ """Handler for single record responses from PostgreSQL queries."""
+
+ def __new__(
+ cls,
+ message: str,
+ result: PostgresResponse,
+ response_model: Optional[Type[T]] = None,
+ status_code: str = "HTTP_200_OK",
+ completed: bool = True,
+ cls_object: Optional[Any] = None,
+ filter_attributes: Optional[Any] = None,
+ ) -> JSONResponse:
+ """Create response for single PostgreSQL record.
+
+ Args:
+ message: Response message
+ result: PostgreSQL query result
+ response_model: Optional model for data transformation
+ status_code: HTTP status code
+ completed: Operation completion status
+ cls_object: Class object for error context
+ filter_attributes: Optional pagination and filtering attributes
+
+ Returns:
+ Formatted JSON response
+
+ Raises:
+ HTTPExceptionApi: If result is invalid or empty
+ """
+ cls._validate_data(result, PostgresResponse, cls_object)
+
+ if not result.first:
+ raise HTTPExceptionApi(
+ lang=cls_object.lang,
+ error_code="HTTP_400_BAD_REQUEST",
+ loc=get_line_number_for_error(),
+ sys_msg="No data found",
+ )
+
+ instance = super().__new__(cls)
+ instance.__init__(
+ message=message,
+ result=result,
+ response_model=response_model,
+ status_code=status_code,
+ completed=completed,
+ cls_object=cls_object,
+ filter_attributes=filter_attributes,
+ )
+
+ pagination = instance._create_pagination()
+ data = instance._transform_data(result.data.get_dict())
+
+ return instance._format_response(pagination, data)
+
+
+class AlchemyJsonResponse(BaseJsonResponse[T]):
+ """Handler for multiple record responses from SQLAlchemy queries."""
+
+ def __new__(
+ cls,
+ message: str,
+ result: PostgresResponse,
+ response_model: Optional[Type[T]] = None,
+ status_code: str = "HTTP_200_OK",
+ completed: bool = True,
+ cls_object: Optional[Any] = None,
+ filter_attributes: Optional[Any] = None,
+ ) -> JSONResponse:
+ """Create response for multiple SQLAlchemy records.
+
+ Args:
+ message: Response message
+ result: PostgreSQL query result
+ response_model: Optional model for data transformation
+ status_code: HTTP status code
+ completed: Operation completion status
+ cls_object: Class object for error context
+ filter_attributes: Optional pagination and filtering attributes
+
+ Returns:
+ Formatted JSON response
+
+ Raises:
+ HTTPExceptionApi: If result is invalid
+ """
+ cls._validate_data(result, PostgresResponse, cls_object)
+
+ if result.first:
+ raise HTTPExceptionApi(
+ lang=cls_object.lang,
+ error_code="HTTP_400_BAD_REQUEST",
+ loc=get_line_number_for_error(),
+ sys_msg="No data found",
+ )
+
+ instance = super().__new__(cls)
+ instance.__init__(
+ message=message,
+ result=result,
+ response_model=response_model,
+ status_code=status_code,
+ completed=completed,
+ cls_object=cls_object,
+ filter_attributes=filter_attributes,
+ )
+
+ pagination = instance._create_pagination()
+ data = [instance._transform_data(item.get_dict()) for item in result.data]
+ pagination.feed(data)
+ return instance._format_response(pagination, data)
+
+
+class ListJsonResponse(BaseJsonResponse[T]):
+ """Handler for list data responses."""
+
+ def __new__(
+ cls,
+ message: str,
+ result: List[Any],
+ response_model: Optional[Type[T]] = None,
+ status_code: str = "HTTP_200_OK",
+ completed: bool = True,
+ cls_object: Optional[Any] = None,
+ filter_attributes: Optional[Any] = None,
+ ) -> JSONResponse:
+ """Create response for list data.
+
+ Args:
+ message: Response message
+ result: List of data items
+ response_model: Optional model for data transformation
+ status_code: HTTP status code
+ completed: Operation completion status
+ cls_object: Class object for error context
+ filter_attributes: Optional pagination and filtering attributes
+
+ Returns:
+ Formatted JSON response
+ """
+ cls._validate_data(result, list, cls_object)
+
+ instance = super().__new__(cls)
+ instance.__init__(
+ message=message,
+ result=result,
+ response_model=response_model,
+ status_code=status_code,
+ completed=completed,
+ cls_object=cls_object,
+ filter_attributes=filter_attributes,
+ )
+
+ pagination = instance._create_pagination()
+ data = [instance._transform_data(item) for item in result]
+ pagination.feed(data)
+
+ return instance._format_response(pagination, data)
+
+
+class DictJsonResponse(BaseJsonResponse[T]):
+ """Handler for dictionary data responses."""
+
+ def __new__(
+ cls,
+ message: str,
+ result: Dict[str, Any],
+ response_model: Optional[Type[T]] = None,
+ status_code: str = "HTTP_200_OK",
+ completed: bool = True,
+ cls_object: Optional[Any] = None,
+ filter_attributes: Optional[Any] = None,
+ ) -> JSONResponse:
+ """Create response for dictionary data.
+
+ Args:
+ message: Response message
+ result: Dictionary data
+ response_model: Optional model for data transformation
+ status_code: HTTP status code
+ completed: Operation completion status
+ cls_object: Class object for error context
+ filter_attributes: Optional pagination and filtering attributes
+
+ Returns:
+ Formatted JSON response
+ """
+ cls._validate_data(result, dict, cls_object)
+
+ instance = super().__new__(cls)
+ instance.__init__(
+ message=message,
+ result=result,
+ response_model=response_model,
+ status_code=status_code,
+ completed=completed,
+ cls_object=cls_object,
+ filter_attributes=filter_attributes,
+ )
+
+ pagination = instance._create_pagination()
+ data = instance._transform_data(result)
+
+ return instance._format_response(pagination, data)
diff --git a/trash/Models_old/base_model.py b/trash/Models_old/base_model.py
new file mode 100644
index 0000000..0df4e18
--- /dev/null
+++ b/trash/Models_old/base_model.py
@@ -0,0 +1,254 @@
+from contextlib import contextmanager
+from typing import Any, Dict, Optional, Generator
+from sqlalchemy.orm import Session
+from sqlalchemy import inspect
+
+from Services.PostgresDb.database import Base
+
+
+class BaseModel(Base):
+ """Base model class with common utility functions and SQLAlchemy integration.
+
+ This class serves as the foundation for all database models, providing:
+ - SQLAlchemy ORM integration through Base
+ - Session management utilities
+ - CRUD operations (create, update)
+ - Bulk operation support
+ """
+
+ __abstract__ = True # Marks this as a base class, won't create a table
+
+ @classmethod
+ def new_session(cls) -> Session:
+ """Get database session."""
+ from Services.PostgresDb.database import get_db
+
+ with get_db() as session:
+ return session
+
+ def update(
+ self, session: Optional[Session] = None, **kwargs: Dict[str, Any]
+ ) -> "BaseModel":
+ """Update model instance with given attributes.
+
+ Args:
+ session: Optional existing session to use. If not provided, creates a new one.
+ **kwargs: Attributes to update
+
+ Returns:
+ Updated model instance
+
+ Example:
+ # Using an existing session
+ with get_db() as session:
+ model.update(session=session, name="new name")
+ model2.update(session=session, status="active")
+ # Both updates use the same transaction
+
+ # Creating a new session automatically
+ model.update(name="new name") # Creates and manages its own session
+ """
+ should_close_session = session is None
+ if session is None:
+ session = self.get_session()
+
+ try:
+ # Remove unrelated fields
+ check_kwargs = self.remove_non_related_inputs(kwargs)
+
+ # Get all table columns
+ mapper = inspect(self.__class__)
+ columns = [column.key for column in mapper.columns]
+
+ # Get relationship fields
+ relationships = [rel.key for rel in mapper.relationships]
+
+ # Handle confirmation logic
+ is_confirmed_argument = kwargs.get("is_confirmed", None)
+ if is_confirmed_argument and not len(kwargs) == 1:
+ self.raise_http_exception(
+ status_code="HTTP_406_NOT_ACCEPTABLE",
+ error_case="ConfirmError",
+ data=kwargs,
+ message="Confirm field cannot be updated with other fields",
+ )
+
+ # Process system fields
+ check_kwargs = self.extract_system_fields(check_kwargs, create=False)
+
+ # Update columns
+ for key, value in check_kwargs.items():
+ if key in columns:
+ setattr(self, key, value)
+ elif key in relationships:
+ # Handle relationship updates
+ related_obj = getattr(self, key)
+ if isinstance(related_obj, list):
+ # Handle many-to-many or one-to-many relationships
+ if isinstance(value, list):
+ setattr(self, key, value)
+ else:
+ # Handle many-to-one or one-to-one relationships
+ setattr(self, key, value)
+
+ # Handle user tracking
+ if hasattr(self, "creds"):
+ person_id = getattr(self.creds, "person_id", None)
+ person_name = getattr(self.creds, "person_name", None)
+
+ if person_id and person_name:
+ if is_confirmed_argument:
+ self.confirmed_by_id = self.creds.get("person_id", "Unknown")
+ self.confirmed_by = self.creds.get("person_name", "Unknown")
+ else:
+ self.updated_by_id = self.creds.get("person_id", "Unknown")
+ self.updated_by = self.creds.get("person_name", "Unknown")
+
+ session.add(self)
+ session.flush()
+ return self
+
+ except Exception:
+ if should_close_session:
+ session.rollback()
+ raise
+ finally:
+ if should_close_session:
+ session.close()
+
+ @classmethod
+ def create(
+ cls, session: Optional[Session] = None, **kwargs: Dict[str, Any]
+ ) -> "BaseModel":
+ """Create new instance with optional session reuse.
+
+ Args:
+ session: Optional existing session to use. If not provided, creates a new one.
+ **kwargs: Attributes for the new instance
+
+ Returns:
+ Created model instance
+
+ Example:
+ # Using an existing session for multiple creates
+ with get_db() as session:
+ user1 = User.create(session=session, name="John")
+ user2 = User.create(session=session, name="Jane")
+ # Both creates use the same transaction
+
+ # Creating with auto-managed session
+ user = User.create(name="John") # Creates and manages its own session
+ """
+ instance = cls()
+ should_close_session = session is None
+
+ if session is None:
+ session = instance.get_session()
+
+ try:
+ check_kwargs = cls.remove_non_related_inputs(instance, kwargs)
+ check_kwargs = cls.extract_system_fields(
+ instance, check_kwargs, create=True
+ )
+
+ # Get all table columns and relationships
+ mapper = inspect(cls)
+ columns = [column.key for column in mapper.columns]
+ relationships = [rel.key for rel in mapper.relationships]
+
+ # Set attributes
+ for key, value in check_kwargs.items():
+ if key in columns:
+ setattr(instance, key, value)
+ elif key in relationships:
+ # Handle relationship assignments
+ if isinstance(value, list):
+ # Handle many-to-many or one-to-many relationships
+ setattr(instance, key, value)
+ else:
+ # Handle many-to-one or one-to-one relationships
+ setattr(instance, key, value)
+
+ # Handle user tracking
+ if hasattr(instance, "creds"):
+ person_id = getattr(instance.creds, "person_id", None)
+ person_name = getattr(instance.creds, "person_name", None)
+
+ if person_id and person_name:
+ instance.created_by_id = instance.creds.get("person_id", "Unknown")
+ instance.created_by = instance.creds.get("person_name", "Unknown")
+
+ session.add(instance)
+ session.flush()
+
+ if should_close_session:
+ session.commit()
+
+ return instance
+
+ except Exception:
+ if should_close_session:
+ session.rollback()
+ raise
+ finally:
+ if should_close_session:
+ session.close()
+
+ @classmethod
+ @contextmanager
+ def bulk_create(
+ cls, session: Optional[Session] = None
+ ) -> Generator[Session, None, None]:
+ """Context manager for bulk creating instances.
+
+ Args:
+ session: Optional existing session to use. If not provided, creates a new one.
+
+ Yields:
+ SQLAlchemy session for creating multiple instances
+
+ Example:
+ # Bulk create multiple instances in one transaction
+ with User.bulk_create() as session:
+ user1 = User.create(session=session, name="John")
+ user2 = User.create(session=session, name="Jane")
+ # Both creates share the same transaction
+ """
+ should_close_session = session is None
+ if session is None:
+ session = cls().get_session()
+
+ try:
+ yield session
+ if should_close_session:
+ session.commit()
+ except Exception:
+ if should_close_session:
+ session.rollback()
+ raise
+ finally:
+ if should_close_session:
+ session.close()
+
+
+# @router.put("/users/{user_id}")
+# async def update_user(
+# user_id: str,
+# update_data: Dict[str, Any],
+# db: Session = Depends(get_db_session)
+# ):
+# user = db.query(User).filter(User.id == user_id).first()
+# if not user:
+# raise HTTPException(status_code=404, detail="User not found")
+#
+# updated_user = user.update(**update_data)
+# return updated_user
+#
+#
+# @router.post("/users")
+# async def create_user(
+# user_data: Dict[str, Any],
+# db: Session = Depends(get_db_session)
+# ):
+# with User.create_with_session(**user_data) as new_user:
+# return new_user
diff --git a/trash/Models_old/filter_functions.py b/trash/Models_old/filter_functions.py
new file mode 100644
index 0000000..efc62bc
--- /dev/null
+++ b/trash/Models_old/filter_functions.py
@@ -0,0 +1,535 @@
+"""
+Advanced filtering functionality for SQLAlchemy models.
+
+This module provides a comprehensive set of filtering capabilities for SQLAlchemy models,
+including pagination, ordering, and complex query building.
+"""
+
+from __future__ import annotations
+from typing import Any, Dict, List, Optional, Type, TypeVar, Union, Tuple, Protocol
+from dataclasses import dataclass
+from json import dumps
+
+from sqlalchemy import BinaryExpression, desc, asc
+from sqlalchemy.exc import SQLAlchemyError
+from sqlalchemy.orm import Query, Session
+from sqlalchemy.sql.elements import BinaryExpression
+
+from ApiLibrary import system_arrow
+from ApiLibrary.common.line_number import get_line_number_for_error
+from ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from Services.PostgresDb.Models.response import PostgresResponse
+
+# Type variable for class methods returning self
+T = TypeVar("T", bound="FilterAttributes")
+
+
+class HTTPException(Exception):
+ """Base exception for HTTP errors."""
+
+ def __init__(self, status_code: str, detail: str):
+ self.status_code = status_code
+ self.detail = detail
+ super().__init__(detail)
+
+
+class HTTPStatus(Protocol):
+ """Protocol defining required HTTP status codes."""
+
+ HTTP_400_BAD_REQUEST: str
+ HTTP_404_NOT_FOUND: str
+ HTTP_304_NOT_MODIFIED: str
+
+
+@dataclass
+class FilterConfig:
+ """Configuration for filtering and pagination."""
+
+ page: int = 1
+ size: int = 10
+ order_field: str = "id"
+ order_type: str = "asc"
+ include_joins: List[str] = None
+ query: Dict[str, Any] = None
+
+ def __post_init__(self):
+ """Initialize default values for None fields."""
+ self.include_joins = self.include_joins or []
+ self.query = self.query or {}
+
+
+class QueryConfig:
+ """Configuration for query building and execution."""
+
+ def __init__(
+ self,
+ pre_query: Optional[Query] = None,
+ filter_config: Optional[FilterConfig] = None,
+ http_exception: Optional[Type[HTTPException]] = HTTPException,
+ status: Optional[Type[HTTPStatus]] = None,
+ ):
+ self.pre_query = pre_query
+ self.filter_config = filter_config or FilterConfig()
+ self.http_exception = http_exception
+ self.status = status
+ self.total_count: Optional[int] = None
+
+ def update_filter_config(self, **kwargs) -> None:
+ """Update filter configuration parameters."""
+ for key, value in kwargs.items():
+ if hasattr(self.filter_config, key):
+ setattr(self.filter_config, key, value)
+
+ def set_total_count(self, count: int) -> None:
+ """Set the total count of records."""
+ self.total_count = count
+
+
+class FilterAttributes:
+ """
+ Advanced filtering capabilities for SQLAlchemy models.
+
+ Features:
+ - Pagination and ordering
+ - Complex query building
+ - Active/deleted/confirmed status filtering
+ - Expiry date handling
+ - Transaction management
+
+ Usage:
+ # Initialize configuration
+ config = QueryConfig(filter_config=FilterConfig(page=1, size=10))
+
+ # Create model with configuration
+ class User(FilterAttributes):
+ query_config = config
+
+ # Filter multiple records
+ users = User.filter_by_all(db, name="John").data
+
+ # Update configuration
+ User.query_config.update_filter_config(page=2, size=20)
+ next_users = User.filter_all(db).data
+ """
+
+ __abstract__ = True
+
+ # Class-level configuration
+ query_config: QueryConfig = QueryConfig()
+
+ @classmethod
+ def flush(cls: Type[T], db: Session) -> T:
+ """
+ Flush the current session to the database.
+
+ Args:
+ db: Database session
+
+ Returns:
+ Self instance
+
+ Raises:
+ HTTPException: If database operation fails
+ """
+ try:
+ db.flush()
+ return cls
+ except SQLAlchemyError as e:
+ raise HTTPExceptionApi(
+ error_code="HTTP_304_NOT_MODIFIED",
+ lang=cls.lang or "tr",
+ loc=get_line_number_for_error(),
+ sys_msg=str(e),
+ )
+
+ @classmethod
+ def destroy(cls: Type[T], db: Session) -> None:
+ """
+ Delete the record from the database.
+
+ Args:
+ db: Database session
+ """
+ db.delete(cls)
+ db.commit()
+
+ @classmethod
+ def save_via_metadata(cls: Type[T], db: Session) -> None:
+ """
+ Save or rollback based on metadata.
+
+ Args:
+ db: Database session
+
+ Raises:
+ HTTPException: If save operation fails
+ """
+ try:
+ meta_data = getattr(cls, "meta_data", {})
+ if meta_data.get("created", False):
+ db.commit()
+ db.rollback()
+ except SQLAlchemyError as e:
+ raise HTTPExceptionApi(
+ error_code="HTTP_304_NOT_MODIFIED",
+ lang=cls.lang or "tr",
+ loc=get_line_number_for_error(),
+ sys_msg=str(e),
+ )
+
+ @classmethod
+ def save(cls: Type[T], db: Session) -> None:
+ """
+ Commit changes to database.
+
+ Args:
+ db: Database session
+
+ Raises:
+ HTTPException: If commit fails
+ """
+ try:
+ db.commit()
+ except SQLAlchemyError as e:
+ raise HTTPExceptionApi(
+ error_code="HTTP_304_NOT_MODIFIED",
+ lang=cls.lang or "tr",
+ loc=get_line_number_for_error(),
+ sys_msg=str(e),
+ )
+
+ @classmethod
+ def rollback(cls: Type[T], db: Session) -> None:
+ """
+ Rollback current transaction.
+
+ Args:
+ db: Database session
+ """
+ db.rollback()
+
+ @classmethod
+ def save_and_confirm(cls: Type[T], db: Session) -> None:
+ """
+ Save changes and mark record as confirmed.
+
+ Args:
+ db: Database session
+
+ Raises:
+ HTTPException: If operation fails
+ """
+ try:
+ cls.save(db)
+ cls.update(db, is_confirmed=True)
+ cls.save(db)
+ except SQLAlchemyError as e:
+ raise HTTPExceptionApi(
+ error_code="HTTP_304_NOT_MODIFIED",
+ lang=cls.lang or "tr",
+ loc=get_line_number_for_error(),
+ sys_msg=str(e),
+ )
+
+ @classmethod
+ def _query(cls: Type[T], db: Session) -> Query:
+ """
+ Get base query for model.
+
+ Args:
+ db: Database session
+
+ Returns:
+ SQLAlchemy Query object
+ """
+ return (
+ cls.query_config.pre_query if cls.query_config.pre_query else db.query(cls)
+ )
+
+ @classmethod
+ def add_query_to_filter(
+ cls: Type[T], query: Query, filter_list: Dict[str, Any]
+ ) -> Query:
+ """
+ Add pagination and ordering to query.
+
+ Args:
+ query: Base query
+ filter_list: Dictionary containing pagination and ordering parameters
+
+ Returns:
+ Modified query with pagination and ordering
+ """
+ order_field = getattr(cls, filter_list.get("order_field"))
+ order_func = desc if str(filter_list.get("order_type"))[0] == "d" else asc
+
+ return (
+ query.order_by(order_func(order_field))
+ .limit(filter_list.get("size"))
+ .offset((filter_list.get("page") - 1) * filter_list.get("size"))
+ .populate_existing()
+ )
+
+ @classmethod
+ def get_filter_attributes(cls) -> Dict[str, Any]:
+ """
+ Get filter configuration from attributes.
+
+ Returns:
+ Dictionary containing pagination and filtering parameters
+ """
+ return {
+ "page": getattr(cls.query_config.filter_config, "page", 1),
+ "size": getattr(cls.query_config.filter_config, "size", 10),
+ "order_field": getattr(cls.query_config.filter_config, "order_field", "id"),
+ "order_type": getattr(cls.query_config.filter_config, "order_type", "asc"),
+ "include_joins": getattr(
+ cls.query_config.filter_config, "include_joins", []
+ ),
+ "query": getattr(cls.query_config.filter_config, "query", {}),
+ }
+
+ @classmethod
+ def add_new_arg_to_args(
+ cls,
+ args_list: Tuple[BinaryExpression, ...],
+ argument: str,
+ value: BinaryExpression,
+ ) -> Tuple[BinaryExpression, ...]:
+ """
+ Add new argument to filter arguments if not exists.
+
+ Args:
+ args_list: Current filter arguments
+ argument: Argument name to check
+ value: New argument to add
+
+ Returns:
+ Updated argument tuple
+ """
+ new_args = [arg for arg in args_list if isinstance(arg, BinaryExpression)]
+ arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
+
+ if not any(arg_left(arg) == argument for arg in new_args):
+ new_args.append(value)
+
+ return tuple(new_args)
+
+ @classmethod
+ def get_not_expired_query_arg(
+ cls, args: Tuple[BinaryExpression, ...]
+ ) -> Tuple[BinaryExpression, ...]:
+ """
+ Add expiry date conditions to query.
+
+ Args:
+ args: Current query arguments
+
+ Returns:
+ Updated arguments with expiry conditions
+ """
+ current_time = str(system_arrow.now())
+ args = cls.add_new_arg_to_args(
+ args, "expiry_ends", cls.expiry_ends > current_time
+ )
+ args = cls.add_new_arg_to_args(
+ args, "expiry_starts", cls.expiry_starts <= current_time
+ )
+ return args
+
+ @classmethod
+ def get_active_and_confirmed_query_arg(
+ cls, args: Tuple[BinaryExpression, ...]
+ ) -> Tuple[BinaryExpression, ...]:
+ """
+ Add status conditions to query.
+
+ Args:
+ args: Current query arguments
+
+ Returns:
+ Updated arguments with status conditions
+ """
+ args = cls.add_new_arg_to_args(args, "is_confirmed", cls.is_confirmed == True)
+ args = cls.add_new_arg_to_args(args, "active", cls.active == True)
+ args = cls.add_new_arg_to_args(args, "deleted", cls.deleted == False)
+ return args
+
+ @classmethod
+ def select_only(
+ cls: Type[T],
+ db: Session,
+ *args: BinaryExpression,
+ select_args: List[Any],
+ order_by: Optional[Any] = None,
+ limit: Optional[int] = None,
+ system: bool = False,
+ ) -> PostgresResponse:
+ """
+ Select specific columns from filtered query.
+
+ Args:
+ db: Database session
+ args: Filter conditions
+ select_args: Columns to select
+ order_by: Optional ordering
+ limit: Optional result limit
+ system: If True, skip status filtering
+
+ Returns:
+ Query response with selected columns
+ """
+ if not system:
+ args = cls.get_active_and_confirmed_query_arg(args)
+ args = cls.get_not_expired_query_arg(args)
+
+ query = cls._query(db).filter(*args).with_entities(*select_args)
+ cls.query_config.set_total_count(query.count())
+
+ if order_by is not None:
+ query = query.order_by(order_by)
+ if limit:
+ query = query.limit(limit)
+
+ return PostgresResponse(query=query, first=False)
+
+ @classmethod
+ def filter_by_all(
+ cls: Type[T], db: Session, system: bool = False, **kwargs
+ ) -> PostgresResponse:
+ """
+ Filter multiple records by keyword arguments.
+
+ Args:
+ db: Database session
+ system: If True, skip status filtering
+ **kwargs: Filter criteria
+
+ Returns:
+ Query response with matching records
+ """
+ if "is_confirmed" not in kwargs and not system:
+ kwargs["is_confirmed"] = True
+ kwargs.pop("system", None)
+
+ query = cls._query(db).filter_by(**kwargs)
+ cls.query_config.set_total_count(query.count())
+
+ if cls.query_config.filter_config:
+ filter_list = cls.get_filter_attributes()
+ query = cls.add_query_to_filter(query, filter_list)
+
+ return PostgresResponse(query=query, first=False)
+
+ @classmethod
+ def filter_by_one(
+ cls: Type[T], db: Session, system: bool = False, **kwargs
+ ) -> PostgresResponse:
+ """
+ Filter single record by keyword arguments.
+
+ Args:
+ db: Database session
+ system: If True, skip status filtering
+ **kwargs: Filter criteria
+
+ Returns:
+ Query response with single record
+ """
+ if "is_confirmed" not in kwargs and not system:
+ kwargs["is_confirmed"] = True
+ kwargs.pop("system", None)
+
+ query = cls._query(db).filter_by(**kwargs)
+ cls.query_config.set_total_count(1)
+
+ return PostgresResponse(query=query, first=True)
+
+ @classmethod
+ def filter_all(
+ cls: Type[T], *args: Any, db: Session, system: bool = False
+ ) -> PostgresResponse:
+ """
+ Filter multiple records by expressions.
+
+ Args:
+ db: Database session
+ args: Filter expressions
+ system: If True, skip status filtering
+
+ Returns:
+ Query response with matching records
+ """
+ if not system:
+ args = cls.get_active_and_confirmed_query_arg(args)
+ args = cls.get_not_expired_query_arg(args)
+
+ filter_list = cls.get_filter_attributes()
+ if filter_list.get("query"):
+ for smart_iter in cls.filter_expr(**filter_list["query"]):
+ if key := getattr(getattr(smart_iter, "left", None), "key", None):
+ args = cls.add_new_arg_to_args(args, key, smart_iter)
+
+ query = cls._query(db)
+ cls.query_config.set_total_count(query.count())
+ query = query.filter(*args)
+
+ if cls.query_config.filter_config:
+ query = cls.add_query_to_filter(query, filter_list)
+
+ return PostgresResponse(query=query, first=False)
+
+ @classmethod
+ def filter_one(
+ cls: Type[T],
+ *args: Any,
+ db: Session,
+ system: bool = False,
+ expired: bool = False,
+ ) -> PostgresResponse:
+ """
+ Filter single record by expressions.
+
+ Args:
+ db: Database session
+ args: Filter expressions
+ system: If True, skip status filtering
+ expired: If True, include expired records
+
+ Returns:
+ Query response with single record
+ """
+ if not system:
+ args = cls.get_active_and_confirmed_query_arg(args)
+ if not expired:
+ args = cls.get_not_expired_query_arg(args)
+
+ query = cls._query(db).filter(*args)
+ cls.query_config.set_total_count(1)
+
+ return PostgresResponse(query=query, first=True)
+
+ # @classmethod
+ # def raise_http_exception(
+ # cls,
+ # status_code: str,
+ # error_case: str,
+ # data: Dict[str, Any],
+ # message: str,
+ # ) -> None:
+ # """
+ # Raise HTTP exception with formatted error details.
+
+ # Args:
+ # status_code: HTTP status code string
+ # error_case: Error type
+ # data: Additional error data
+ # message: Error message
+
+ # Raises:
+ # HTTPException: With formatted error details
+ # """
+ # raise HTTPExceptionApi(
+ # error_code="HTTP_304_NOT_MODIFIED",
+ # lang=cls.lang or "tr", loc=get_line_number_for_error()
+ # )
diff --git a/trash/Models_old/mixins.py b/trash/Models_old/mixins.py
new file mode 100644
index 0000000..94d1c61
--- /dev/null
+++ b/trash/Models_old/mixins.py
@@ -0,0 +1,549 @@
+"""
+PostgreSQL Base Models Module
+
+This module provides base classes for PostgreSQL models with common functionality such as:
+- CRUD operations with session management
+- Soft delete capability
+- Automatic timestamps
+- User tracking (created_by, updated_by)
+- Data serialization
+- Multi-language support
+"""
+
+import datetime
+from decimal import Decimal
+from typing import Any, Dict, List, Optional, Type, TypeVar, Union, cast
+
+from sqlalchemy import (
+ TIMESTAMP,
+ NUMERIC,
+ func,
+ text,
+ UUID,
+ String,
+ Integer,
+ Boolean,
+ SmallInteger,
+)
+from sqlalchemy.orm import Mapped, mapped_column, Session
+from sqlalchemy_mixins.serialize import SerializeMixin
+from sqlalchemy_mixins.repr import ReprMixin
+from sqlalchemy_mixins.smartquery import SmartQueryMixin
+
+from ApiLibrary import DateTimeLocal, system_arrow
+from Services.PostgresDb.Models.base_model import BaseModel
+from Services.PostgresDb.Models.filter_functions import FilterAttributes
+
+# Type variable for class methods returning self
+T = TypeVar("T", bound="CrudMixin")
+
+
class CrudMixin(
    BaseModel, SmartQueryMixin, SerializeMixin, ReprMixin, FilterAttributes
):
    """
    Base mixin providing CRUD operations and common fields for PostgreSQL models.

    Features:
    - Automatic timestamps (created_at, updated_at)
    - Soft delete capability
    - User tracking (created_by, updated_by)
    - Data serialization
    - Multi-language support
    """

    __abstract__ = True

    # System fields that should be handled automatically during creation
    # (stripped from user input by extract_system_fields()).
    __system__fields__create__ = (
        "created_at",
        "updated_at",
        "cryp_uu_id",
        "created_by",
        "created_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
        "confirmed_by",
        "confirmed_by_id",
        "is_confirmed",
        "deleted",
        "active",
        "is_notification_send",
        "is_email_send",
    )

    # System fields that should be handled automatically during updates
    __system__fields__update__ = (
        "cryp_uu_id",
        "created_at",
        "updated_at",
        "created_by",
        "created_by_id",
        "confirmed_by",
        "confirmed_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
    )

    # Default fields to exclude from serialization (see get_dict()).
    __system_default_model__ = [
        "cryp_uu_id",
        "is_confirmed",
        "deleted",
        "is_notification_send",
        "replication_id",
        "is_email_send",
        "confirmed_by_id",
        "confirmed_by",
        "updated_by_id",
        "created_by_id",
    ]

    # User credentials and preferences.
    # NOTE: these are class-level attributes shared by all instances; they
    # are overwritten by set_user_define_properties() per request, and
    # meta_data is rebound per instance inside find_or_create().
    creds = None
    lang: str = "tr"
    client_arrow: Optional[DateTimeLocal] = None
    valid_record_dict: Dict[str, bool] = {"active": True, "deleted": False}
    meta_data: Dict[str, Any] = {}

    # Common timestamp fields for all models
    expiry_starts: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        comment="Record validity start timestamp",
    )
    expiry_ends: Mapped[TIMESTAMP] = mapped_column(
        type_=TIMESTAMP(timezone=True),
        default="2099-12-31",
        server_default="2099-12-31",
        comment="Record validity end timestamp",
    )

    @classmethod
    def set_user_define_properties(cls, token: Any) -> None:
        """
        Set user-specific properties from the authentication token.

        Args:
            token: Authentication token containing user preferences
        """
        # Stored on the class: the query helpers read cls.creds / cls.lang.
        cls.creds = token.credentials
        cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)
        cls.lang = str(token.lang).lower()

    @classmethod
    def remove_non_related_inputs(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]:
        """
        Filter out inputs that don't correspond to model fields.

        Args:
            kwargs: Dictionary of field names and values

        Returns:
            Dictionary containing only valid model fields
        """
        # columns / hybrid_properties / settable_relations are provided by
        # the sqlalchemy_mixins inspection machinery.
        return {
            key: value
            for key, value in kwargs.items()
            if key in cls.columns + cls.hybrid_properties + cls.settable_relations
        }

    @classmethod
    def extract_system_fields(
        cls, filter_kwargs: dict, create: bool = True
    ) -> Dict[str, Any]:
        """
        Remove system-managed fields from input dictionary.

        Args:
            filter_kwargs: Input dictionary of fields
            create: If True, use creation field list, else use update field list

        Returns:
            Dictionary with system fields removed (input is not mutated)
        """
        system_fields = filter_kwargs.copy()
        extract_fields = (
            cls.__system__fields__create__ if create else cls.__system__fields__update__
        )
        for field in extract_fields:
            system_fields.pop(field, None)
        return system_fields

    @classmethod
    def iterate_over_variables(cls, val: Any, key: str) -> tuple[bool, Optional[Any]]:
        """
        Process a field value based on its type and convert it to the appropriate format.

        Args:
            val: Field value
            key: Field name

        Returns:
            Tuple of (should_include, processed_value)
        """
        key_ = cls.__annotations__.get(key, None)
        is_primary = key in cls.primary_keys
        row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))

        # Skip primary keys and foreign keys
        if is_primary or row_attr:
            return False, None

        # Handle None values
        if val is None:
            return True, None

        # Special handling for UUID fields
        if str(key[-5:]).lower() == "uu_id":
            return True, str(val)

        # Handle typed fields
        if key_:
            if key_ == Mapped[int]:
                return True, int(val)
            elif key_ == Mapped[bool]:
                return True, bool(val)
            elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
                return True, round(float(val), 3)
            elif key_ == Mapped[TIMESTAMP]:
                # NOTE(review): assumes cls.client_arrow was initialized via
                # set_user_define_properties(); otherwise this raises on None.
                return True, str(
                    cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss +0")
                )
            elif key_ == Mapped[str]:
                return True, str(val)

        # Handle based on Python types
        else:
            if isinstance(val, datetime.datetime):
                return True, str(
                    cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss +0")
                )
            elif isinstance(val, bool):
                return True, bool(val)
            elif isinstance(val, (float, Decimal)):
                return True, round(float(val), 3)
            elif isinstance(val, int):
                return True, int(val)
            elif isinstance(val, str):
                return True, str(val)
            elif val is None:
                return True, None

        # Annotated fields whose annotation matched none of the branches
        # above are excluded from serialization.
        return False, None

    @classmethod
    def find_or_create(cls: Type[T], db: Session, **kwargs) -> T:
        """
        Find an existing record matching the criteria or create a new one.

        Args:
            db: Database session
            **kwargs: Search/creation criteria

        Returns:
            Existing or newly created record; ``meta_data`` describes the
            outcome ("created" flag plus optional "error_case").
        """
        # Only non-system fields participate in the duplicate lookup.
        check_kwargs = cls.extract_system_fields(kwargs)

        # Search for existing record inside the current validity window.
        query = db.query(cls).filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )

        for key, value in check_kwargs.items():
            if hasattr(cls, key):
                query = query.filter(getattr(cls, key) == value)

        already_record = query.first()

        # Handle existing record: deleted/unconfirmed matches are reported
        # via meta_data rather than raising.
        if already_record:
            if already_record.deleted:
                already_record.meta_data = {
                    "created": False,
                    "error_case": "DeletedRecord",
                    "message": "",
                }
                return already_record
            elif not already_record.is_confirmed:
                already_record.meta_data = {
                    "created": False,
                    "error_case": "IsNotConfirmed",
                    "message": "",
                }
                return already_record

            already_record.meta_data = {
                "created": False,
                "error_case": "AlreadyExists",
                "message": "",
            }
            return already_record

        # Create new record
        check_kwargs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()

        for key, value in check_kwargs.items():
            setattr(created_record, key, value)

        # Attach creator info only when full credentials are available.
        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            created_record.created_by_id = cls.creds.person_id
            created_record.created_by = cls.creds.person_name

        # flush (not commit): the record gets its defaults/PK but the
        # transaction remains under the caller's control.
        db.add(created_record)
        db.flush()

        created_record.meta_data = {"created": True, "error_case": None, "message": ""}
        return created_record

    def update(self, db: Session, **kwargs) -> "CrudMixin":
        """
        Update the record with new values.

        Args:
            db: Database session
            **kwargs: Fields to update

        Returns:
            Updated record

        Raises:
            ValueError: If attempting to update is_confirmed with other fields
        """
        check_kwargs = self.remove_non_related_inputs(kwargs)
        is_confirmed_argument = kwargs.get("is_confirmed", None)

        # Confirmation must be its own operation; the length check is done
        # against the raw kwargs, before any filtering.
        if is_confirmed_argument and not len(kwargs) == 1:
            raise ValueError("Confirm field cannot be updated with other fields")

        check_kwargs = self.extract_system_fields(check_kwargs, create=False)

        for key, value in check_kwargs.items():
            setattr(self, key, value)

        # Update confirmation or modification tracking
        if is_confirmed_argument:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.confirmed_by_id = self.creds.person_id
                self.confirmed_by = self.creds.person_name
        else:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.updated_by_id = self.creds.person_id
                self.updated_by = self.creds.person_name

        db.flush()
        return self

    def get_dict(
        self,
        exclude: Optional[List[str]] = None,
        include: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Convert model instance to dictionary with customizable fields.

        Args:
            exclude: List of fields to exclude
            include: List of fields to include (takes precedence over exclude)

        Returns:
            Dictionary representation of the model
        """
        return_dict: Dict[str, Any] = {}

        if include:
            # Handle explicitly included fields.
            # NOTE(review): "[-2:] == 'id' and [-5:] == 'uu_id'" reduces to
            # "ends with uu_id" (every *uu_id also ends with id), while the
            # exclude branch below uses plain "ends with id" — confirm
            # whether "or" was intended here.
            exclude_list = [
                element
                for element in self.__system_default_model__
                if str(element)[-2:] == "id" and str(element)[-5:].lower() == "uu_id"
            ]
            columns_include_list = list(set(include).difference(set(exclude_list)))
            columns_include_list.extend(["uu_id"])

            for key in columns_include_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database

        elif exclude:
            # Handle explicitly excluded fields: merge the caller's list with
            # the model's own __exclude__fields__ and all *_id system fields.
            exclude.extend(
                list(
                    set(getattr(self, "__exclude__fields__", []) or []).difference(
                        exclude
                    )
                )
            )
            exclude.extend(
                [
                    element
                    for element in self.__system_default_model__
                    if str(element)[-2:] == "id"
                ]
            )

            columns_excluded_list = list(set(self.columns).difference(set(exclude)))
            columns_excluded_list.extend(["uu_id", "active"])

            for key in columns_excluded_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database
        else:
            # Handle default field selection: drop system/exclude fields and
            # all *id columns except the UUID ones.
            exclude_list = (
                getattr(self, "__exclude__fields__", []) or []
            ) + self.__system_default_model__
            columns_list = list(set(self.columns).difference(set(exclude_list)))
            columns_list = [col for col in columns_list if str(col)[-2:] != "id"]
            columns_list.extend(
                [col for col in self.columns if str(col)[-5:].lower() == "uu_id"]
            )

            for remove_field in self.__system_default_model__:
                if remove_field in columns_list:
                    columns_list.remove(remove_field)

            for key in columns_list:
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database

        return return_dict
+
+
class BaseCollection(CrudMixin):
    """Base model class with minimal fields (integer surrogate primary key only)."""

    __abstract__ = True
    # Reuse ReprMixin's __repr__ explicitly so subclasses get a readable repr.
    __repr__ = ReprMixin.__repr__

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+
+
class CrudCollection(CrudMixin):
    """
    Full-featured model class with all common fields.

    Includes:
    - UUID and reference ID
    - Timestamps
    - User tracking
    - Confirmation status
    - Soft delete
    - Notification flags
    """

    __abstract__ = True
    # Reuse ReprMixin's __repr__ explicitly so subclasses get a readable repr.
    __repr__ = ReprMixin.__repr__

    # Primary and reference fields
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    uu_id: Mapped[str] = mapped_column(
        UUID,
        server_default=text("gen_random_uuid()"),
        index=True,
        unique=True,
        comment="Unique identifier UUID",
    )
    ref_id: Mapped[str] = mapped_column(
        String(100), nullable=True, index=True, comment="External reference ID"
    )

    # Timestamps (server-side defaults; updated_at refreshed on UPDATE)
    created_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
        comment="Record creation timestamp",
    )
    updated_at: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
        index=True,
        comment="Last update timestamp",
    )

    # Cryptographic and user tracking
    cryp_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, index=True, comment="Cryptographic UUID"
    )
    created_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Creator name"
    )
    created_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Creator ID"
    )
    updated_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Last modifier name"
    )
    updated_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Last modifier ID"
    )
    confirmed_by: Mapped[str] = mapped_column(
        String, nullable=True, comment="Confirmer name"
    )
    confirmed_by_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Confirmer ID"
    )

    # Status flags (server defaults: unconfirmed, not deleted, active)
    is_confirmed: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Record confirmation status"
    )
    replication_id: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Replication identifier"
    )
    deleted: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Soft delete flag"
    )
    active: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Record active status"
    )
    is_notification_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Notification sent flag"
    )
    is_email_send: Mapped[bool] = mapped_column(
        Boolean, server_default="0", comment="Email sent flag"
    )

    @classmethod
    def retrieve_language_model(cls, lang: str, response_model: Any) -> Dict[str, str]:
        """
        Retrieve language-specific model headers and validation messages.

        Args:
            lang: Language code
            response_model: Model containing language annotations

        Returns:
            Dictionary of field names to localized headers
        """
        headers_and_validation = {}
        # NOTE(review): __language_model__ is expected on concrete models
        # (not defined in this mixin). The fallback here is the literal
        # string "tr", not the Turkish model object, so an unknown lang
        # yields "Lang Not found" for every field — confirm whether
        # getattr(cls.__language_model__, "tr") was intended as the default.
        __language_model__ = getattr(cls.__language_model__, lang, "tr")

        for field in response_model.__annotations__.keys():
            headers_and_validation[field] = getattr(
                __language_model__, field, "Lang Not found"
            )

        return headers_and_validation
diff --git a/trash/Models_old/query.py b/trash/Models_old/query.py
new file mode 100644
index 0000000..d2d40e0
--- /dev/null
+++ b/trash/Models_old/query.py
@@ -0,0 +1,44 @@
+from typing import Any, List, Optional, TypeVar, Union
+
+from sqlalchemy.orm import Query
+from sqlalchemy.orm.session import Session
+
+T = TypeVar("T")
+
+
class QueryResponse:
    """Handler for SQLAlchemy query results with error handling.

    Wraps a ``Query`` together with its ``Session`` so that result access
    can roll back the session on database errors instead of propagating.
    """

    def __init__(self, db: Session, query: Query, first: bool = False):
        """
        Args:
            db: Session the query belongs to (used for rollback on error).
            query: Query executed lazily on result access.
            first: If True, ``data`` returns a single row instead of a list.
        """
        self.db = db
        self.first = first
        self.__query = query

    def get(self, index: int) -> Optional[T]:
        """Return the item at 1-based ``index``, or None when out of range.

        BUG FIX: indexes below 1 previously fell through to negative list
        indexing (``get(0)`` returned the *last* row); they now return None.
        """
        if index < 1 or index > self.count:
            return None
        return self.data[index - 1]

    @property
    def data(self) -> Union[Optional[T], List[T]]:
        """Execute the query and return its rows (or first row).

        On any database error the session is rolled back and an empty
        result (None / []) is returned rather than raising.
        """
        try:
            if self.first:
                return self.__query.first()
            return self.__query.all()
        except Exception:
            # Deliberate best-effort: keep the session usable and signal
            # "no data" instead of surfacing the database error.
            self.db.rollback()
            return None if self.first else []

    @property
    def count(self) -> int:
        """Total count of query results."""
        return self.__query.count()

    @property
    def query(self) -> Query:
        """The underlying SQLAlchemy query."""
        return self.__query
diff --git a/trash/Models_old/response.py b/trash/Models_old/response.py
new file mode 100644
index 0000000..60d8cf0
--- /dev/null
+++ b/trash/Models_old/response.py
@@ -0,0 +1,90 @@
+"""
+Response handler for PostgreSQL query results.
+
+This module provides a wrapper class for SQLAlchemy query results,
+adding convenience methods for accessing data and managing query state.
+"""
+
+from typing import Any, Dict, List, Optional, TypeVar, Generic, Union
+from sqlalchemy.orm import Query
+
T = TypeVar("T")


class PostgresResponse(Generic[T]):
    """
    Wrapper for PostgreSQL/SQLAlchemy query results.

    Attributes:
        query: SQLAlchemy query object
        first: Whether to return first result only
        data: Query results (lazy loaded, executed at most once)
        count: Total count of results

    Properties:
        all: All results as list
        first: First result only (None when empty)
    """

    # Sentinel distinguishing "not loaded yet" from a legitimate None /
    # empty result, so the query is executed at most once.
    _UNSET = object()

    def __init__(
        self,
        query: Query,
        first: bool = False,
        status: bool = True,
        message: str = "",
        error: Optional[str] = None,
    ):
        self._query = query
        self._first = first
        self.status = status
        self.message = message
        self.error = error
        self._data: Any = self._UNSET
        self._count: Optional[int] = None

    @property
    def query(self) -> Query:
        """Get query object."""
        return self._query

    @property
    def data(self) -> Union[List[T], T, None]:
        """
        Lazy load and return query results.

        Returns the first item (or None when there are no rows) if
        first=True, otherwise returns all results as a list.

        BUG FIX: an empty first=True result previously yielded [] instead
        of None, and None/empty results were re-executed on every access
        because the cache sentinel was None.
        """
        if self._data is self._UNSET:
            results = self._query.all()
            if self._first:
                self._data = results[0] if results else None
            else:
                self._data = results
        return self._data

    @property
    def count(self) -> int:
        """Lazy load and return total count of results."""
        if self._count is None:
            self._count = self._query.count()
        return self._count

    @property
    def all(self) -> List[T]:
        """Get all results as a list (a lone scalar is wrapped).

        Uses explicit None checks so a falsy-but-present row (e.g. 0) is
        still returned.
        """
        rows = self.data
        if isinstance(rows, list):
            return rows
        return [] if rows is None else [rows]

    @property
    def first(self) -> Optional[T]:
        """Get first result only, or None when there are no rows."""
        if self._first:
            return self.data
        rows = self.data
        return rows[0] if rows else None

    def as_dict(self) -> Dict[str, Any]:
        """Convert response to dictionary format."""
        return {
            "status": self.status,
            "message": self.message,
            "data": self.data,
            "count": self.count,
            "error": self.error,
        }
diff --git a/trash/abstract_class.py b/trash/abstract_class.py
new file mode 100644
index 0000000..8c748fa
--- /dev/null
+++ b/trash/abstract_class.py
@@ -0,0 +1,33 @@
class ClusterToMethod:
    """Design sketch for grouping endpoint methods under a router cluster.

    NOTE(review): this module is not importable as-is — ``PageInfo``,
    ``List``, ``ClassVar`` and ``Any`` are never imported, and
    ``SUBCATEGORY`` references ``ClusterToMethod`` inside its own class
    body. The methods below also lack ``self``/``cls`` parameters. Kept
    verbatim as documentation of the intended API.
    """

    TAGS: list = ["Tag or Router"]  # router tags
    PREFIX: str = "/..."  # URL prefix for the cluster
    PAGEINFO: PageInfo  # frontend page metadata (type not defined here)
    ENDPOINTS: list  # [MethodEvent, ...]
    SUBCATEGORY: List[ClassVar[Any]] = [ClusterToMethod, ...]  # nested clusters

    def retrieve_all_function_codes():
        """
        [FUNCTION_CODE, ...]
        self.ENDPOINTS -> iter()
        """
        pass

    def retrieve_page_info():
        """
        PAGE_INFO:ClusterToMethod = {
            "PageInfo": {...}
            "subCategory": PAGE_INFO:ClusterToMethod
        }
        PAGE_INFO:ClusterToMethod = {
            "PageInfo": {...}
            "subCategory": PAGE_INFO:ClusterToMethod
        }
        """
        pass

    def retrieve_redis_value() -> Dict:
        """
        Key(CLUSTER_FUNCTION_CODES:ClusterToMethod) : Value(PAGE_INFO, [FUNCTION_CODE, ...])
        """
        pass
diff --git a/trash/abstract_class_old.py b/trash/abstract_class_old.py
new file mode 100644
index 0000000..1e8a21d
--- /dev/null
+++ b/trash/abstract_class_old.py
@@ -0,0 +1,957 @@
+"""
+Abstract base classes for API route and event handling.
+
+This module provides core abstractions for route configuration and factory,
+with support for authentication and event handling.
+"""
+
+import uuid
+import inspect
+
+from typing import (
+ Tuple,
+ TypeVar,
+ Optional,
+ Callable,
+ Dict,
+ Any,
+ List,
+ Type,
+ ClassVar,
+ Union,
+ Set,
+)
+from collections import defaultdict
+from dataclasses import dataclass, field
+from pydantic import BaseModel
+from fastapi import Request, Depends, APIRouter
+from functools import wraps
+
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ErrorHandlers.Exceptions.api_exc import HTTPExceptionApi
+from ApiLayers.Schemas.rules.rules import EndpointRestriction
+
+
+ResponseModel = TypeVar("ResponseModel", bound=BaseModel)
+
+
def endpoint_wrapper(url_of_endpoint: Optional[str] = None):
    """Decorator factory that tags endpoint results with their URL.

    The decorated callable (sync or async) is always exposed as an async
    function. Dict results get an ``"endpoint"`` key added in place;
    Pydantic models are dumped to a dict carrying the same key; any other
    result is returned untouched. The URL is also stored on the wrapper
    as ``url_of_endpoint``.

    Args:
        url_of_endpoint: Optional URL path for the endpoint
    """

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        @wraps(func)
        async def wrapper(
            *args: Any, **kwargs: Any
        ) -> Union[Dict[str, Any], BaseModel]:
            # Support plain functions as well as coroutine functions.
            outcome = (
                await func(*args, **kwargs)
                if inspect.iscoroutinefunction(func)
                else func(*args, **kwargs)
            )

            # A sync callable may still hand back an awaitable.
            if inspect.iscoroutine(outcome):
                outcome = await outcome

            if isinstance(outcome, dict):
                outcome["endpoint"] = url_of_endpoint
                return outcome
            if isinstance(outcome, BaseModel):
                # Convert Pydantic model to dict and add endpoint.
                dumped = outcome.model_dump()
                dumped["endpoint"] = url_of_endpoint
                return dumped
            return outcome

        wrapper.url_of_endpoint = url_of_endpoint
        return wrapper

    return decorator
+
+
@dataclass
class EndpointFactoryConfig:
    """Configuration class for API endpoints.

    Attributes:
        url_of_endpoint: Full URL path for this endpoint
        endpoint: URL path for this endpoint
        method: HTTP method (GET, POST, etc.)
        summary: Short description for API documentation
        description: Detailed description for API documentation
        endpoint_function: Function to handle the endpoint
        is_auth_required: Whether authentication is required
        response_model: Optional response model for OpenAPI schema
        request_model: Optional request model for OpenAPI schema
        is_event_required: Whether event handling is required
        extra_options: Additional endpoint options
    """

    url_prefix: str
    url_endpoint: str
    url_of_endpoint: str
    endpoint: str
    method: str
    summary: str
    description: str
    endpoint_function: Callable[..., Any]  # Now accepts any parameters and return type
    response_model: Optional[type] = None
    request_model: Optional[type] = None
    is_auth_required: bool = True
    is_event_required: bool = False
    extra_options: Dict[str, Any] = field(default_factory=dict)

    def __post_init__(self):
        """Post initialization hook.

        Wraps endpoint function with appropriate middleware based on configuration:
        - If auth and event required -> wrap with TokenEventMiddleware
        - If only event required -> wrap with EventMiddleware
        - If only auth required -> wrap with MiddlewareModule.auth_required

        Note: because of the ``elif``, event middleware supersedes auth —
        when is_event_required is True, MiddlewareModule.auth_required is
        never applied, regardless of is_auth_required.
        """
        # First apply auth/event middleware.
        # NOTE(review): imports are deferred into the method, presumably to
        # avoid a circular import with the middleware module — confirm.
        if self.is_event_required:
            from middleware import TokenEventMiddleware

            self.endpoint_function = TokenEventMiddleware.event_required(
                self.endpoint_function
            )
        elif self.is_auth_required:
            from middleware import MiddlewareModule

            self.endpoint_function = MiddlewareModule.auth_required(
                self.endpoint_function
            )

        # Then wrap with endpoint_wrapper to store url_of_endpoint
        self.endpoint_function = endpoint_wrapper(self.url_of_endpoint)(
            self.endpoint_function
        )
+
+
class RouteFactoryConfig:
    """Configuration class for API route factories.

    Attributes:
        name: Route name
        tags: List of tags for API documentation
        prefix: URL prefix for all endpoints in this route
        include_in_schema: Whether to include in OpenAPI schema
        endpoints: List of endpoint configurations
        extra_options: Additional route options
    """

    def __init__(
        self,
        name: str,
        tags: List[str],
        prefix: str,
        include_in_schema: bool = True,
        # BUG FIX: defaults of None were annotated as non-Optional.
        endpoints: Optional[List[EndpointFactoryConfig]] = None,
        extra_options: Optional[Dict[str, Any]] = None,
    ):
        """
        Args:
            name: Route name.
            tags: OpenAPI tags for documentation.
            prefix: URL prefix applied to every endpoint.
            include_in_schema: Whether to expose the route in the schema.
            endpoints: Endpoint configurations (defaults to empty list).
            extra_options: Extra router options (defaults to empty dict).
        """
        self.name = name
        self.tags = tags
        self.prefix = prefix
        self.include_in_schema = include_in_schema
        # None defaults avoid the shared-mutable-default pitfall.
        self.endpoints = endpoints or []
        self.extra_options = extra_options or {}

    def __post_init__(self):
        """Validate and normalize configuration after initialization.

        NOTE: this is not a dataclass, so ``__post_init__`` is never invoked
        automatically; ``__init__`` already performs this normalization.
        Retained (as a harmless no-op) for callers invoking it directly.
        """
        if self.endpoints is None:
            self.endpoints = []
        if self.extra_options is None:
            self.extra_options = {}

    def as_dict(self) -> Dict[str, Any]:
        """Convert configuration to dictionary format (endpoints via ``__dict__``)."""
        return {
            "name": self.name,
            "tags": self.tags,
            "prefix": self.prefix,
            "include_in_schema": self.include_in_schema,
            "endpoints": [endpoint.__dict__ for endpoint in self.endpoints],
            "extra_options": self.extra_options,
        }
+
+
+class MethodToEvent:
+ """Base class for mapping methods to API events with type safety and endpoint configuration.
+
+ This class provides a framework for handling API events with proper
+ type checking for tokens and response models, as well as managing
+ endpoint configurations and frontend page structure.
+
+ Type Parameters:
+ TokenType: Type of authentication token
+ ResponseModel: Type of response model
+
+ Class Variables:
+ action_key: Unique identifier for the action
+ event_type: Type of event (e.g., 'query', 'command')
+ event_description: Human-readable description of the event
+ event_category: Category for grouping related events
+ __event_keys__: Mapping of UUIDs to event names
+ __event_validation__: Validation rules for events
+ __endpoint_config__: API endpoint configuration
+ __page_info__: Frontend page configuration
+ """
+
+ action_key: ClassVar[Optional[str]] = None
+ event_type: ClassVar[Optional[str]] = None
+ event_description: ClassVar[str] = ""
+ event_category: ClassVar[str] = ""
+ __event_keys__: ClassVar[Dict[str, str]] = {}
+ __event_validation__: Dict[str, Tuple[Type, Union[List, tuple]]] = {}
+ __endpoint_config__: ClassVar[Dict[str, Dict[str, Any]]] = {
+ "endpoints": {}, # Mapping of event UUIDs to endpoint configs
+ "router_prefix": "", # Router prefix for all endpoints in this class
+ "tags": [], # OpenAPI tags
+ }
+ __page_info__: ClassVar[Dict[str, Any]] = {
+ "name": "", # Page name (e.g., "AccountPage")
+ "title": {"tr": "", "en": ""}, # Multi-language titles
+ "icon": "", # Icon name
+ "url": "", # Frontend route
+ "component": None, # Optional component name
+ "parent": None, # Parent page name if this is a subpage
+ }
+
+ @classmethod
+ def register_endpoint(
+ cls,
+ event_uuid: str,
+ path: str,
+ method: str = "POST",
+ response_model: Optional[Type] = None,
+ **kwargs,
+ ) -> None:
+ """Register an API endpoint configuration for an event.
+
+ Args:
+ event_uuid: UUID of the event
+ path: Endpoint path (will be prefixed with router_prefix)
+ method: HTTP method (default: POST)
+ response_model: Pydantic model for response
+ **kwargs: Additional FastAPI endpoint parameters
+ """
+ if event_uuid not in cls.__event_keys__:
+ raise ValueError(f"Event UUID {event_uuid} not found in {cls.__name__}")
+
+ cls.__endpoint_config__["endpoints"][event_uuid] = {
+ "path": path,
+ "method": method,
+ "response_model": response_model,
+ **kwargs,
+ }
+
+ @classmethod
+ def configure_router(cls, prefix: str, tags: List[str]) -> None:
+ """Configure the API router settings.
+
+ Args:
+ prefix: Router prefix for all endpoints
+ tags: OpenAPI tags for documentation
+ """
+ cls.__endpoint_config__["router_prefix"] = prefix
+ cls.__endpoint_config__["tags"] = tags
+
+ @classmethod
+ def configure_page(
+ cls,
+ name: str,
+ title: Dict[str, str],
+ icon: str,
+ url: str,
+ component: Optional[str] = None,
+ parent: Optional[str] = None,
+ ) -> None:
+ """Configure the frontend page information.
+
+ Args:
+ name: Page name
+ title: Multi-language titles (must include 'tr' and 'en')
+ icon: Icon name
+ url: Frontend route
+ component: Optional component name
+ parent: Parent page name for subpages
+ """
+ required_langs = {"tr", "en"}
+ if not all(lang in title for lang in required_langs):
+ raise ValueError(
+ f"Title must contain all required languages: {required_langs}"
+ )
+
+ cls.__page_info__.update(
+ {
+ "name": name,
+ "title": title,
+ "icon": icon,
+ "url": url,
+ "component": component,
+ "parent": parent,
+ }
+ )
+
+ @classmethod
+ def get_endpoint_config(cls) -> Dict[str, Any]:
+ """Get the complete endpoint configuration."""
+ return cls.__endpoint_config__
+
+ @classmethod
+ def get_page_info(cls) -> Dict[str, Any]:
+ """Get the frontend page configuration."""
+ return cls.__page_info__
+
+ @classmethod
+ def has_available_events(cls, user_permission_uuids: Set[str]) -> bool:
+ """Check if any events are available based on user permissions."""
+ return bool(set(cls.__event_keys__.keys()) & user_permission_uuids)
+
+ @classmethod
+ def get_page_info_with_permissions(
+ cls, user_permission_uuids: Set[str], include_endpoints: bool = False
+ ) -> Optional[Dict[str, Any]]:
+ """Get page info if user has required permissions.
+
+ Args:
+ user_permission_uuids: Set of UUIDs the user has permission for
+ include_endpoints: Whether to include available endpoint information
+
+ Returns:
+ Dict with page info if user has permissions, None otherwise
+ """
+ # Check if user has any permissions for this page's events
+ if not cls.has_available_events(user_permission_uuids):
+ return None
+
+ # Start with basic page info
+ page_info = {
+ **cls.__page_info__,
+ "category": cls.event_category,
+ "type": cls.event_type,
+ "description": cls.event_description,
+ }
+
+ # Optionally include available endpoints
+ if include_endpoints:
+ available_endpoints = {}
+ for uuid, endpoint in cls.__endpoint_config__["endpoints"].items():
+ if uuid in user_permission_uuids:
+ available_endpoints[uuid] = {
+ "path": f"{cls.__endpoint_config__['router_prefix']}{endpoint['path']}",
+ "method": endpoint["method"],
+ "event_name": cls.__event_keys__[uuid],
+ }
+ if available_endpoints:
+ page_info["available_endpoints"] = available_endpoints
+
+ return page_info
+
+ @classmethod
+ def get_events_config(cls) -> Dict[str, Any]:
+ """Get the complete configuration including events, endpoints, and page info."""
+ return {
+ "events": cls.__event_keys__,
+ "endpoints": cls.__endpoint_config__,
+ "page_info": cls.__page_info__,
+ "category": cls.event_category,
+ "type": cls.event_type,
+ "description": cls.event_description,
+ }
+
+ @classmethod
+ def retrieve_event_response_model(cls, function_code: str) -> Any:
+ """Retrieve event validation for a specific function.
+
+ Args:
+ function_code: Function identifier
+
+ Returns:
+ Tuple containing response model and language models
+ """
+ event_validation_list = cls.__event_validation__.get(function_code, None)
+ if not event_validation_list:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Function not found",
+ )
+ return event_validation_list[0]
+
+ @classmethod
+ def retrieve_event_languages(cls, function_code: str) -> Union[List, tuple]:
+ """Retrieve event description for a specific function.
+
+ Args:
+ function_code: Function identifier
+
+ Returns:
+ Event description
+ """
+ event_keys_list = cls.__event_validation__.get(function_code, None)
+ if not event_keys_list:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Function not found",
+ )
+ function_language_models: Union[List, tuple] = event_keys_list[1]
+ if not function_language_models:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Function not found",
+ )
+ return function_language_models
+
+ @staticmethod
+ def merge_models(language_model: List) -> Dict:
+ merged_models = {"tr": {}, "en": {}}
+ for model in language_model:
+ for lang in dict(model).keys():
+ if lang not in merged_models:
+ merged_models[lang] = model[lang]
+ else:
+ merged_models[lang].update(model[lang])
+ return merged_models
+
+ @classmethod
+ def retrieve_event_function(cls, function_code: str) -> Dict[str, str]:
+ """Retrieve event parameters for a specific function.
+
+ Args:
+ function_code: Function identifier
+
+ Returns:
+ Dictionary of event parameters
+ """
+ function_event = cls.__event_keys__[function_code]
+ function_itself = getattr(cls, function_event, None)
+ if not function_itself:
+ raise HTTPExceptionApi(
+ error_code="",
+ lang="en",
+ loc=get_line_number_for_error(),
+ sys_msg="Function not found",
+ )
+ return function_itself
+
+ @classmethod
+ def retrieve_language_parameters(
+ cls, function_code: str, language: str = "tr"
+ ) -> Dict[str, Any]:
+ """Retrieve language-specific parameters for an event.
+
+ Args:
+ language: Language code (e.g. 'tr', 'en')
+ function_code: Function identifier
+
+ Returns:
+ Dictionary of language-specific field mappings
+ """
+ event_language_models = cls.retrieve_event_languages(function_code)
+ event_response_model = cls.retrieve_event_response_model(function_code)
+ event_response_model_merged = cls.merge_models(event_language_models)
+ event_response_model_merged_lang = event_response_model_merged[language]
+ # Map response model fields to language-specific values
+ only_language_dict = {
+ field: event_response_model_merged_lang[field]
+ for field in event_response_model.model_fields
+ if field in event_response_model_merged_lang
+ }
+ """
+ __event_validation__ : {"key": [A, B, C]}
+ Language Model : Language Model that is model pydatnic requires
+ Language Models : All language_models that is included in Langugage Models Section
+ Merged Language Models : Merged with all models in list event_validation
+ """
+ return {
+ "language_model": only_language_dict,
+ "language_models": event_response_model_merged,
+ }
+
+
class EventMethodRegistry:
    """Registry for mapping event method UUIDs to categories and managing permissions.

    NOTE(review): a second class later in this module re-declares the name
    ``EventMethodRegistry`` with a different interface; at import time that
    later definition wins — confirm which registry is intended.
    """

    def __init__(self):
        # uuid -> (method_class, event_name)
        self._uuid_map: Dict[str, Tuple[Type[MethodToEvent], str]] = {}
        # category name -> set of event uuids
        self._category_events: Dict[str, Set[str]] = defaultdict(set)

    def register_method(
        self, category_name: str, method_class: Type[MethodToEvent]
    ) -> None:
        """Index every event UUID of a method class under a category."""
        for uuid, name in method_class.__event_keys__.items():
            self._uuid_map[uuid] = (method_class, name)
            self._category_events[category_name].add(uuid)

    def get_method_by_uuid(
        self, event_uuid: str
    ) -> Optional[Tuple[Type[MethodToEvent], str]]:
        """Look up (method class, event name) for a UUID, or None if unknown."""
        return self._uuid_map.get(event_uuid)

    def get_events_for_category(self, category_name: str) -> Set[str]:
        """Return all event UUIDs registered for a category (empty set if none)."""
        return self._category_events.get(category_name, set())
+
+
class EventCategory:
    """Base class for defining event categories similar to frontend page structure.

    A category is a tree node: display metadata (name/title/icon/url), an
    ``all_endpoints`` mapping of category name -> set of event UUIDs, and a
    list of child EventCategory nodes. Permission filtering works by
    intersecting those UUID sets with the caller's permission UUIDs.
    """

    def __init__(
        self,
        name: str,
        title: Dict[str, str],
        icon: str,
        url: str,
        component: Optional[str] = None,
        page_info: Any = None,
        all_endpoints: Optional[Dict[str, Set[str]]] = None,  # category -> set of event UUIDs
        sub_categories: Optional[List[Union[Dict, "EventCategory"]]] = None,
    ):
        # Title is validated eagerly so a malformed category fails fast.
        self.name = name
        self.title = self._validate_title(title)
        self.icon = icon
        self.url = url
        self.component = component
        self.page_info = page_info
        self.all_endpoints = all_endpoints or {}
        self.sub_categories = self._process_subcategories(sub_categories or [])

    def _validate_title(self, title: Dict[str, str]) -> Dict[str, str]:
        """Validate title has required languages.

        Raises:
            ValueError: If 'tr' or 'en' is missing from ``title``.
        """
        required_langs = {"tr", "en"}
        if not all(lang in title for lang in required_langs):
            raise ValueError(
                f"Title must contain all required languages: {required_langs}"
            )
        return title

    def _process_subcategories(
        self, categories: List[Union[Dict, "EventCategory"]]
    ) -> List["EventCategory"]:
        """Process subcategories ensuring they are all EventCategory instances.

        Dict payloads are converted via from_dict; anything that is neither a
        dict nor an EventCategory raises ValueError.
        """
        processed = []
        for category in categories:
            if isinstance(category, dict):
                processed.append(EventCategory.from_dict(category))
            elif isinstance(category, EventCategory):
                processed.append(category)
            else:
                raise ValueError(f"Invalid subcategory type: {type(category)}")
        return processed

    def has_available_events(self, user_permission_uuids: Set[str]) -> bool:
        """Check if category has available events based on UUID intersection.

        Only this node's own ``all_endpoints`` sets are inspected; subcategories
        are NOT consulted here.
        """
        # Check current category's events
        return any(
            bool(events & user_permission_uuids)
            for events in self.all_endpoints.values()
        )

    def get_menu_item(
        self, user_permission_uuids: Set[str]
    ) -> Optional[Dict[str, Any]]:
        """Get menu item if category has available events.

        Returns None when the user has no permitted event in this node, so
        subcategories of an unauthorized parent are never rendered.
        """
        # First check if this category has available events
        if not self.has_available_events(user_permission_uuids):
            return None

        menu_item = {
            "name": self.name,
            "title": self.title,
            "icon": self.icon,
            "url": self.url,
        }

        if self.component:
            menu_item["component"] = self.component

        # Only process subcategories if parent has permissions
        sub_items = []
        for subcategory in self.sub_categories:
            if sub_menu := subcategory.get_menu_item(user_permission_uuids):
                sub_items.append(sub_menu)

        if sub_items:
            menu_item["items"] = sub_items

        return menu_item

    def get_available_events(
        self, registry: EventMethodRegistry, user_permission_uuids: Set[str]
    ) -> Dict[str, List[Dict[str, Any]]]:
        """Get available events based on user permission UUIDs.

        For the current node only the UUID set stored under ``self.name`` in
        ``all_endpoints`` is considered; subcategory results are merged in
        recursively. Result maps event_type -> list of event descriptors.
        """
        available_events = defaultdict(list)

        # Process endpoints in current category
        category_events = self.all_endpoints.get(self.name, set())
        for event_uuid in category_events & user_permission_uuids:
            method_info = registry.get_method_by_uuid(event_uuid)
            if method_info:
                method_class, event_name = method_info
                available_events[method_class.event_type].append(
                    {
                        "uuid": event_uuid,
                        "name": event_name,
                        "description": method_class.event_description,
                        "category": method_class.event_category,
                    }
                )

        # Process subcategories recursively
        for subcategory in self.sub_categories:
            sub_events = subcategory.get_available_events(
                registry, user_permission_uuids
            )
            for event_type, events in sub_events.items():
                available_events[event_type].extend(events)

        return dict(available_events)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "EventCategory":
        """Create category from dictionary (camelCase keys, frontend-style payload)."""
        return cls(
            name=data["name"],
            title=data["title"],
            icon=data["icon"],
            url=data["url"],
            component=data.get("component"),
            page_info=data.get("pageInfo"),
            all_endpoints=data.get("allEndpoints", {}),
            sub_categories=data.get("subCategories", []),
        )

    def to_dict(
        self,
        registry: EventMethodRegistry,
        user_permission_uuids: Optional[Set[str]] = None,
    ) -> Dict[str, Any]:
        """Convert category to dictionary with optional permission filtering."""
        result = {
            "name": self.name,
            "title": self.title,
            "icon": self.icon,
            "url": self.url,
            "pageInfo": self.page_info,
        }

        if user_permission_uuids is not None:
            # Only include endpoints and their info if user has permissions
            available_events = self.get_available_events(
                registry, user_permission_uuids
            )
            if available_events:
                result["availableEvents"] = available_events
                result["allEndpoints"] = self.all_endpoints
        else:
            # Include all endpoints if no permissions specified
            result["allEndpoints"] = self.all_endpoints

        # Process subcategories
        subcategories = [
            sub.to_dict(registry, user_permission_uuids) for sub in self.sub_categories
        ]
        # NOTE(review): when ANY child has availableEvents, all serialized
        # children are included, even permission-empty ones — confirm intended.
        if user_permission_uuids is None or any(
            "availableEvents" in sub for sub in subcategories
        ):
            result["subCategories"] = subcategories

        if self.component:
            result["component"] = self.component

        return result
+
+
class EventCategoryManager:
    """Coordinates registered event categories and their shared method registry."""

    def __init__(self):
        # Flat list of top-level categories plus one registry instance.
        self.categories: List[EventCategory] = []
        self.registry = EventMethodRegistry()

    def get_menu_tree(self, user_permission_uuids: Set[str]) -> List[Dict[str, Any]]:
        """Build the permission-filtered menu tree from all categories."""
        tree = []
        for category in self.categories:
            item = category.get_menu_item(user_permission_uuids)
            if item:
                tree.append(item)
        return tree

    def register_category(self, category: EventCategory) -> None:
        """Append a category to the managed list."""
        self.categories.append(category)

    def add_category(self, category: Union[EventCategory, Dict[str, Any]]) -> None:
        """Add a category, converting dict payloads to EventCategory first."""
        if isinstance(category, dict):
            category = EventCategory.from_dict(category)
        self.register_category(category)

    def add_categories(
        self, categories: List[Union[EventCategory, Dict[str, Any]]]
    ) -> None:
        """Add every category in the given list."""
        for entry in categories:
            self.add_category(entry)

    def get_category(self, name: str) -> Optional[EventCategory]:
        """Return the first category with the given name, or None."""
        for candidate in self.categories:
            if candidate.name == name:
                return candidate
        return None

    def get_all_categories(
        self, user_permission_uuids: Optional[Set[str]] = None
    ) -> List[Dict[str, Any]]:
        """Serialize every category, optionally filtered by permissions."""
        return [
            category.to_dict(self.registry, user_permission_uuids)
            for category in self.categories
        ]

    def get_category_endpoints(self, category_name: str) -> Set[str]:
        """Return the endpoint UUIDs stored under the category's own name."""
        category = self.get_category(category_name)
        if category is None:
            return set()
        return category.all_endpoints.get(category_name, set())

    def get_subcategories(
        self, category_name: str, user_permission_uuids: Optional[Set[str]] = None
    ) -> List[Dict[str, Any]]:
        """Serialize the subcategories of the named category ([] if unknown)."""
        category = self.get_category(category_name)
        if not category:
            return []
        return [
            sub.to_dict(self.registry, user_permission_uuids)
            for sub in category.sub_categories
        ]

    def find_category_by_url(self, url: str) -> Optional[EventCategory]:
        """Find a category — or a direct subcategory — matching a URL."""
        for category in self.categories:
            if category.url == url:
                return category
            for subcategory in category.sub_categories:
                if subcategory.url == url:
                    return subcategory
        return None
+
+
class EventMethodRegistry:
    """Registry for all MethodToEvent classes and menu building.

    NOTE(review): this re-declares the ``EventMethodRegistry`` defined earlier
    in this module with an incompatible interface; at import time this
    definition wins, so ``EventCategoryManager`` instances constructed after
    import receive this singleton. Confirm the earlier class is obsolete.
    """

    _instance = None
    # page name -> MethodToEvent subclass; class-level, shared by all instances.
    _method_classes: Dict[str, Type[MethodToEvent]] = {}

    def __new__(cls):
        # Classic singleton: reuse the first instance ever created.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @classmethod
    def register_method_class(cls, method_class: Type[MethodToEvent]) -> None:
        """Register a MethodToEvent subclass under its page name.

        Raises:
            ValueError: If ``method_class`` is not a MethodToEvent subclass.
        """
        if not issubclass(method_class, MethodToEvent):
            raise ValueError(
                f"{method_class.__name__} must be a subclass of MethodToEvent"
            )
        cls._method_classes[method_class.get_page_info()["name"]] = method_class

    @classmethod
    def get_all_menu_items(
        cls, user_permission_uuids: Set[str], include_endpoints: bool = False
    ) -> List[Dict[str, Any]]:
        """Build the permission-filtered menu tree over all registered classes.

        Args:
            user_permission_uuids: Set of UUIDs the user has permission for.
            include_endpoints: Whether to include available endpoint info.

        Returns:
            Top-level menu items (sorted by name), each with a sorted
            ``items`` list of its child pages.
        """
        # Collect page info for every class the user can actually reach.
        page_infos = {}
        for method_class in cls._method_classes.values():
            info = method_class.get_page_info_with_permissions(
                user_permission_uuids, include_endpoints
            )
            if info:
                page_infos[info["name"]] = info

        # Pages that declare a parent must not appear at the top level.
        child_pages = {
            info["name"] for info in page_infos.values() if info.get("parent")
        }

        menu_tree = []
        for name, info in page_infos.items():
            if name in child_pages:
                continue
            menu_item = info.copy()
            children = [
                child for child in page_infos.values() if child.get("parent") == name
            ]
            if children:
                menu_item["items"] = sorted(children, key=lambda c: c["name"])
            menu_tree.append(menu_item)

        return sorted(menu_tree, key=lambda item: item["name"])

    @classmethod
    def get_available_endpoints(
        cls, user_permission_uuids: Set[str]
    ) -> Dict[str, Dict[str, Any]]:
        """Collect every permitted endpoint across all registered classes.

        Args:
            user_permission_uuids: Set of UUIDs the user has permission for.

        Returns:
            Dict mapping event UUIDs to their endpoint configurations.
        """
        available_endpoints: Dict[str, Dict[str, Any]] = {}
        for method_class in cls._method_classes.values():
            info = method_class.get_page_info_with_permissions(
                user_permission_uuids, include_endpoints=True
            )
            if not info:
                continue
            endpoints = info.get("available_endpoints")
            if endpoints:
                available_endpoints.update(endpoints)
        return available_endpoints
+
+
+"""
+Example usage
+
+# Register your MethodToEvent classes
+registry = EventMethodRegistry()
+registry.register_method_class(AccountEventMethods)
+registry.register_method_class(AccountDetailsEventMethods)
+
+# Get complete menu structure
+user_permissions = {
+ "uuid1",
+ "uuid2",
+ "uuid3"
+}
+
+menu_items = registry.get_all_menu_items(user_permissions, include_endpoints=True)
+# Result:
+[
+ {
+ "name": "AccountPage",
+ "title": {"tr": "Hesaplar", "en": "Accounts"},
+ "icon": "User",
+ "url": "/account",
+ "category": "account",
+ "type": "query",
+ "description": "Account management operations",
+ "available_endpoints": {
+ "uuid1": {"path": "/api/account/view", "method": "GET"},
+ "uuid2": {"path": "/api/account/edit", "method": "POST"}
+ },
+ "items": [
+ {
+ "name": "AccountDetailsPage",
+ "title": {"tr": "Hesap Detayları", "en": "Account Details"},
+ "icon": "FileText",
+ "url": "/account/details",
+ "parent": "AccountPage",
+ "category": "account_details",
+ "type": "query",
+ "available_endpoints": {
+ "uuid3": {"path": "/api/account/details/view", "method": "GET"}
+ }
+ }
+ ]
+ }
+]
+
+# Get all available endpoints
+endpoints = registry.get_available_endpoints(user_permissions)
+# Result:
+{
+ "uuid1": {
+ "path": "/api/account/view",
+ "method": "GET",
+ "event_name": "view_account"
+ },
+ "uuid2": {
+ "path": "/api/account/edit",
+ "method": "POST",
+ "event_name": "edit_account"
+ },
+ "uuid3": {
+ "path": "/api/account/details/view",
+ "method": "GET",
+ "event_name": "view_details"
+ }
+}
+
+# Get event UUIDs from MethodToEvent classes
+account_events = {uuid for uuid in AccountEventMethods.__event_keys__}
+
+# Define categories with event UUIDs
+PAGES_INFO = [
+ {
+ "name": "AccountPage",
+ "title": {"tr": "Hesaplar", "en": "Accounts"},
+ "icon": "User",
+ "url": "/account",
+ "pageInfo": AccountPageInfo,
+ "allEndpoints": {"AccountPage": account_events},
+ "subCategories": [
+ {
+ "name": "AccountDetailsPage",
+ "title": {"tr": "Hesap Detayları", "en": "Account Details"},
+ "icon": "FileText",
+ "url": "/account/details",
+ "allEndpoints": {} # No direct endpoints, only shown if parent has permissions
+ }
+ ]
+ }
+]
+
+# Initialize manager
+manager = EventCategoryManager()
+manager.add_categories(PAGES_INFO)
+
+# Get menu tree based on available events
+user_permission_uuids = {
+ "31f4f32f-0cd4-4995-8a6a-f9f56335848a",
+ "ec98ef2c-bcd0-432d-a8f4-1822a56c33b2"
+}
+menu_tree = manager.get_menu_tree(user_permission_uuids)
+"""
diff --git a/trash/auth_old.py b/trash/auth_old.py
new file mode 100644
index 0000000..c67f6ba
--- /dev/null
+++ b/trash/auth_old.py
@@ -0,0 +1,777 @@
+"""
+Authentication related API endpoints.
+"""
+
+from typing import TYPE_CHECKING, Union
+
+# Regular imports (non-TYPE_CHECKING)
+from Events.Engine.abstract_class import MethodToEvent
+from Events.base_request_model import SuccessResponse
+
+from ApiLayers.ApiLibrary.common.line_number import get_line_number_for_error
+from ApiLayers.ApiLibrary.date_time_actions.date_functions import DateTimeLocal
+
+from ApiLayers.ApiServices.Login.user_login_handler import UserLoginModule
+from ApiLayers.ApiServices.Token.token_handler import TokenService
+from ApiLayers.ApiValidations.Custom.token_objects import CompanyToken, OccupantToken
+from ApiLayers.ApiValidations.Request.authentication import (
+ Login,
+ OccupantSelection,
+ EmployeeSelection,
+)
+from ApiLayers.ErrorHandlers import HTTPExceptionApi
+from ApiLayers.Schemas import (
+ BuildLivingSpace,
+ BuildParts,
+ RelationshipEmployee2Build,
+ Companies,
+ Departments,
+ Duties,
+ Duty,
+ Staff,
+ Employees,
+ Event2Employee,
+ Event2Occupant,
+ OccupantTypes,
+ Users,
+)
+from ApiLayers.ApiServices.Token.token_handler import (
+ OccupantTokenObject,
+ EmployeeTokenObject,
+)
+
+from .api_events import (
+ authentication_login_super_user_event,
+ authentication_select_company_or_occupant_type_super_user_event,
+ authentication_employee_selection_super_user_event,
+ authentication_occupant_selection_super_user_event,
+)
+
+
+if TYPE_CHECKING:
+ from fastapi import Request
+
# Type aliases for common types
# A request's token payload is either an employee or an occupant token object.
TokenDictType = Union["EmployeeTokenObject", "OccupantTokenObject"]
+
+
+# class AuthenticationLoginEventMethods(MethodToEvent):
+# event_type = "LOGIN"
+# event_description = "Login via domain and access key : [email] | [phone]"
+# event_category = "AUTHENTICATION"
+# __event_keys__ = {
+# "e672846d-cc45-4d97-85d5-6f96747fac67": "authentication_login_with_domain_and_creds",
+# }
+# __event_validation__ = {
+# "e672846d-cc45-4d97-85d5-6f96747fac67": SuccessResponse,
+# }
+
+# @classmethod
+# async def authentication_login_with_domain_and_creds(
+# cls, request: "Request", data: Login
+# ):
+# """
+# Authenticate user with domain and credentials.
+
+# Args:
+# request: FastAPI request object
+# data: Request body containing login credentials
+# {
+# "domain": "evyos.com.tr",
+# "access_key": "karatay.berkay.sup@evyos.com.tr",
+# "password": "string",
+# "remember_me": false
+# }
+# Returns:
+# SuccessResponse containing authentication token and user info
+# """
+# # Get token from login module
+# user_login_module = UserLoginModule(request=request)
+# token = await user_login_module.login_user_via_credentials(access_data=data)
+
+# # Return response with token and headers
+# return {
+# "completed": True,
+# "message": "User is logged in successfully",
+# "access_token": token.get("access_token"),
+# "refresh_token": token.get("refresher_token"),
+# "access_object": {
+# "user_type": token.get("user_type"),
+# "companies_list": token.get("companies_list"),
+# },
+# "user": token.get("user"),
+# }
+
+
+# class AuthenticationSelectEventMethods(MethodToEvent):
+# event_type = "LOGIN"
+# event_description = "Select Employee Duty or Occupant Type"
+# event_category = "AUTHENTICATION"
+
+# __event_keys__ = {
+# "cee96b9b-8487-4e9f-aaed-2e8c79687bf9": "authentication_select_company_or_occupant_type",
+# }
+# # __event_validation__ = {
+# # "cee96b9b-8487-4e9f-aaed-2e8c79687bf9": "authentication_select_company_or_occupant_type",
+# # }
+
+# @classmethod
+# def _handle_employee_selection(
+# cls,
+# data: EmployeeSelection,
+# token_dict: TokenDictType,
+# request: "Request",
+# ):
+# Users.set_user_define_properties(token=token_dict)
+# db_session = Users.new_session()
+
+# if data.company_uu_id not in token_dict.companies_uu_id_list:
+# raise HTTPExceptionApi(
+# error_code="HTTP_400_BAD_REQUEST",
+# lang=token_dict.lang,
+# loc=get_line_number_for_error(),
+# sys_msg="Company not found in token",
+# )
+# selected_company = Companies.filter_one(
+# Companies.uu_id == data.company_uu_id,
+# db=db_session,
+# ).first
+# if not selected_company:
+# raise HTTPExceptionApi(
+# error_code="HTTP_400_BAD_REQUEST",
+# lang=token_dict.lang,
+# loc=get_line_number_for_error(),
+# sys_msg="Company not found in token",
+# )
+
+# # Get department IDs for the company
+# department_ids = [
+# dept.id
+# for dept in Departments.filter_all(
+# Departments.company_id == selected_company.id,
+# db=db_session,
+# ).data
+# ]
+
+# # Get duties IDs for the company
+# duties_ids = [
+# duty.id
+# for duty in Duties.filter_all(
+# Duties.company_id == selected_company.id, db=db_session
+# ).data
+# ]
+
+# # Get staff IDs
+# staff_ids = [
+# staff.id
+# for staff in Staff.filter_all(
+# Staff.duties_id.in_(duties_ids), db=db_session
+# ).data
+# ]
+
+# # Get employee
+# employee = Employees.filter_one(
+# Employees.people_id == token_dict.person_id,
+# Employees.staff_id.in_(staff_ids),
+# db=db_session,
+# ).first
+
+# if not employee:
+# raise HTTPExceptionApi(
+# error_code="HTTP_400_BAD_REQUEST",
+# lang=token_dict.lang,
+# loc=get_line_number_for_error(),
+# sys_msg="Employee not found in token",
+# )
+
+# # Get reachable events
+# reachable_event_codes = Event2Employee.get_event_codes(employee_id=employee.id)
+# reachable_event_endpoints = Event2Employee.get_event_endpoints(
+# employee_id=employee.id
+# )
+
+# # Get staff and duties
+# staff = Staff.filter_one(Staff.id == employee.staff_id, db=db_session).data
+# duties = Duties.filter_one(Duties.id == staff.duties_id, db=db_session).data
+# department = Departments.filter_one(
+# Departments.id == duties.department_id, db=db_session
+# ).data
+
+# # Get bulk duty
+# bulk_id = Duty.filter_by_one(system=True, duty_code="BULK", db=db_session).data
+# bulk_duty_id = Duties.filter_by_one(
+# company_id=selected_company.id,
+# duties_id=bulk_id.id,
+# **Duties.valid_record_dict,
+# db=db_session,
+# ).data
+# # Create company token
+# company_token = CompanyToken(
+# company_uu_id=selected_company.uu_id.__str__(),
+# company_id=selected_company.id,
+# department_id=department.id,
+# department_uu_id=department.uu_id.__str__(),
+# duty_id=duties.id,
+# duty_uu_id=duties.uu_id.__str__(),
+# bulk_duties_id=bulk_duty_id.id,
+# staff_id=staff.id,
+# staff_uu_id=staff.uu_id.__str__(),
+# employee_id=employee.id,
+# employee_uu_id=employee.uu_id.__str__(),
+# reachable_event_codes=reachable_event_codes,
+# reachable_event_endpoints=reachable_event_endpoints,
+# )
+# try: # Update Redis
+# update_token = TokenService.update_token_at_redis(
+# request=request, add_payload=company_token
+# )
+# return update_token
+# except Exception as e:
+# raise HTTPExceptionApi(
+# error_code="",
+# lang="en",
+# loc=get_line_number_for_error(),
+# sys_msg=f"{e}",
+# )
+
+# @classmethod
+# def _handle_occupant_selection(
+# cls,
+# data: OccupantSelection,
+# token_dict: TokenDictType,
+# request: "Request",
+# ):
+# """Handle occupant type selection"""
+# db = BuildLivingSpace.new_session()
+# # Get selected occupant type
+# selected_build_living_space = BuildLivingSpace.filter_one(
+# BuildLivingSpace.uu_id == data.build_living_space_uu_id,
+# db=db,
+# ).data
+# if not selected_build_living_space:
+# raise HTTPExceptionApi(
+# error_code="HTTP_400_BAD_REQUEST",
+# lang=token_dict.lang,
+# loc=get_line_number_for_error(),
+# sys_msg="Selected occupant type not found",
+# )
+
+# # Get reachable events
+# reachable_event_codes = Event2Occupant.get_event_codes(
+# build_living_space_id=selected_build_living_space.id
+# )
+# reachable_event_endpoints = Event2Occupant.get_event_endpoints(
+# build_living_space_id=selected_build_living_space.id
+# )
+
+# occupant_type = OccupantTypes.filter_one(
+# OccupantTypes.id == selected_build_living_space.occupant_type_id,
+# db=db,
+# system=True,
+# ).data
+# build_part = BuildParts.filter_one(
+# BuildParts.id == selected_build_living_space.build_parts_id,
+# db=db,
+# ).data
+# build = BuildParts.filter_one(
+# BuildParts.id == build_part.build_id,
+# db=db,
+# ).data
+# responsible_employee = Employees.filter_one(
+# Employees.id == build_part.responsible_employee_id,
+# db=db,
+# ).data
+# related_company = RelationshipEmployee2Build.filter_one(
+# RelationshipEmployee2Build.member_id == build.id,
+# db=db,
+# ).data
+# # Get company
+# company_related = Companies.filter_one(
+# Companies.id == related_company.company_id,
+# db=db,
+# ).data
+
+# # Create occupant token
+# occupant_token = OccupantToken(
+# living_space_id=selected_build_living_space.id,
+# living_space_uu_id=selected_build_living_space.uu_id.__str__(),
+# occupant_type_id=occupant_type.id,
+# occupant_type_uu_id=occupant_type.uu_id.__str__(),
+# occupant_type=occupant_type.occupant_type,
+# build_id=build.id,
+# build_uuid=build.uu_id.__str__(),
+# build_part_id=build_part.id,
+# build_part_uuid=build_part.uu_id.__str__(),
+# responsible_employee_id=responsible_employee.id,
+# responsible_employee_uuid=responsible_employee.uu_id.__str__(),
+# responsible_company_id=company_related.id,
+# responsible_company_uuid=company_related.uu_id.__str__(),
+# reachable_event_codes=reachable_event_codes,
+# reachable_event_endpoints=reachable_event_endpoints,
+# )
+
+# try: # Update Redis
+# update_token = TokenService.update_token_at_redis(
+# request=request, add_payload=occupant_token
+# )
+# return update_token
+# except Exception as e:
+# raise HTTPExceptionApi(
+# error_code="",
+# lang="en",
+# loc=get_line_number_for_error(),
+# sys_msg=f"{e}",
+# )
+
+# @classmethod
+# async def authentication_select_company_or_occupant_type(
+# cls,
+# request: "Request",
+# data: Union[EmployeeSelection, OccupantSelection],
+# token_dict: TokenDictType,
+# ):
+# """Handle selection of company or occupant type"""
+# if token_dict.is_employee:
+# return cls._handle_employee_selection(data, token_dict, request)
+# elif token_dict.is_occupant:
+# return cls._handle_occupant_selection(data, token_dict, request)
+
+
class AuthenticationCheckTokenEventMethods(MethodToEvent):
    """Event wrapper that checks whether the caller's access token is valid."""

    event_type = "LOGIN"
    event_description = "Check Token is valid for user"
    event_category = "AUTHENTICATION"

    __event_keys__ = {
        "73d77e45-a33f-4f12-909e-3b56f00d8a12": "authentication_check_token_is_valid",
    }
    # __event_validation__ = {
    #     "73d77e45-a33f-4f12-909e-3b56f00d8a12": "authentication_check_token_is_valid",
    # }

    @classmethod
    async def authentication_check_token_is_valid(
        cls, request: "Request", data: "DictRequestModel"
    ):
        """Validate the caller's access token (implementation pending).

        BUGFIX: ``DictRequestModel`` is never imported in this module, so the
        bare annotation raised NameError at class-creation time and broke the
        module import; it is quoted, matching the file's "Request" convention.
        """
        # try:
        #     if RedisActions.get_object_via_access_key(request=request):
        #         return ResponseHandler.success("Access Token is valid")
        # except HTTPException:
        #     return ResponseHandler.unauthorized("Access Token is NOT valid")
        return
+
+
class AuthenticationRefreshEventMethods(MethodToEvent):
    """Event wrapper that refreshes user info from an access token."""

    event_type = "LOGIN"
    event_description = "Refresh user info using access token"
    event_category = "AUTHENTICATION"

    __event_keys__ = {
        "48379bb2-ba81-4d8e-a9dd-58837cfcbf67": "authentication_refresh_user_info",
    }
    # __event_validation__ = {
    #     "48379bb2-ba81-4d8e-a9dd-58837cfcbf67": AuthenticationRefreshResponse,
    # }

    @classmethod
    async def authentication_refresh_user_info(
        cls,
        request: "Request",
        token_dict: TokenDictType,
        data: "DictRequestModel",
    ):
        """Refresh the caller's user info (implementation pending).

        BUGFIX: ``DictRequestModel`` is never imported in this module; the
        annotation is quoted so the module can be imported without NameError.
        """
        # try:
        #     access_token = request.headers.get(Auth.ACCESS_TOKEN_TAG)
        #     if not access_token:
        #         return ResponseHandler.unauthorized()

        #     # Get user and token info
        #     found_user = Users.filter_one(Users.uu_id == token_dict.user_uu_id).data
        #     if not found_user:
        #         return ResponseHandler.not_found("User not found")
        #     user_token = UsersTokens.filter_one(
        #         UsersTokens.domain == found_user.domain_name,
        #         UsersTokens.user_id == found_user.id,
        #         UsersTokens.token_type == "RememberMe",
        #     ).data
        #     response_data = {
        #         "access_token": access_token,
        #         "refresh_token": getattr(user_token, "token", None),
        #         "user": found_user.get_dict(),
        #     }
        #     return ResponseHandler.success(
        #         "User info refreshed successfully",
        #         data=response_data,
        #     )
        # except Exception as e:
        #     return ResponseHandler.error(str(e))
        return
+
+
class AuthenticationChangePasswordEventMethods(MethodToEvent):
    """Event wrapper that changes a user's password via access token."""

    event_type = "LOGIN"
    event_description = "Change password with access token"
    event_category = "AUTHENTICATION"

    __event_keys__ = {
        "f09f7c1a-bee6-4e32-8444-962ec8f39091": "authentication_change_password",
    }
    # __event_validation__ = {
    #     "f09f7c1a-bee6-4e32-8444-962ec8f39091": "authentication_change_password",
    # }

    @classmethod
    async def authentication_change_password(
        cls,
        request: "Request",
        data: "ChangePasswordRequestModel",
        token_dict: TokenDictType,
    ):
        """Change the caller's password (implementation pending).

        BUGFIX: ``ChangePasswordRequestModel`` is never imported in this
        module, so the bare annotation raised NameError at class-creation
        time; it is quoted, matching the file's "Request" convention.
        """
        # try:
        #     if not isinstance(token_dict, EmployeeTokenObject):
        #         return ResponseHandler.unauthorized(
        #             "Only employees can change password"
        #         )

        #     found_user = Users.filter_one(Users.uu_id == token_dict.user_uu_id).data
        #     if not found_user:
        #         return ResponseHandler.not_found("User not found")

        #     if not found_user.check_password(data.old_password):
        #         # UserLogger.log_password_change(
        #         #     request,
        #         #     found_user.id,
        #         #     "change",
        #         #     success=False,
        #         #     error="Invalid old password",
        #         # )
        #         return ResponseHandler.unauthorized("Old password is incorrect")

        #     found_user.set_password(data.new_password)
        #     # UserLogger.log_password_change(
        #     #     request, found_user.id, "change", success=True
        #     # )

        #     return ResponseHandler.success("Password changed successfully")
        # except Exception as e:
        #     # UserLogger.log_password_change(
        #     #     request,
        #     #     found_user.id if found_user else None,
        #     #     "change",
        #     #     success=False,
        #     #     error=str(e),
        #     # )
        #     return ResponseHandler.error(str(e))
        return
+
+
+class AuthenticationCreatePasswordEventMethods(MethodToEvent):
+    """Event wiring for creating a password from an emailed reset token.
+
+    The handler body is archived as commented-out code; the event currently
+    resolves to a no-op returning None.
+    """
+
+    event_type = "LOGIN"
+    event_description = "Create password with password reset token requested via email"
+    event_category = "AUTHENTICATION"
+
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "c519f9af-92e1-47b2-abf7-5a3316d075f7": "authentication_create_password",
+    }
+    # __event_validation__ = {
+    #     "c519f9af-92e1-47b2-abf7-5a3316d075f7": "authentication_create_password",
+    # }
+
+    @classmethod
+    async def authentication_create_password(
+        cls, request: "Request", data: CreatePasswordRequestModel
+    ):
+        """Stub: set a new password for the user matching the reset token.
+
+        Original implementation (archived below) required matching
+        password/re_password fields and cleared the one-time token.
+        """
+        # if not data.re_password == data.password:
+        #     raise HTTPException(
+        #         status_code=status.HTTP_406_NOT_ACCEPTABLE, detail="Password must match"
+        #     )
+        # if found_user := Users.filter_one(
+        #     Users.password_token == data.password_token
+        # ).data:
+        #     found_user: Users = found_user
+        #     found_user.create_password(found_user=found_user, password=data.password)
+        #     found_user.password_token = ""
+        #     found_user.save()
+        #     # send_email_completed = send_email(
+        #     #     subject=f"Dear {found_user.user_tag}, your password has been changed.",
+        #     #     receivers=[str(found_user.email)],
+        #     #     html=password_is_changed_template(user_name=found_user.user_tag),
+        #     # )
+        #     # if not send_email_completed:
+        #     #     raise HTTPException(
+        #     #         status_code=400, detail="Email can not be sent. Try again later"
+        #     #     )
+        #     return ResponseHandler.success(
+        #         "Password is created successfully",
+        #         data=found_user.get_dict(),
+        #     )
+        # return ResponseHandler.not_found("Record not found")
+        return
+
+
+class AuthenticationDisconnectUserEventMethods(MethodToEvent):
+    """Event wiring for disconnecting every session of the token's user.
+
+    The handler body is archived as commented-out code; the event currently
+    resolves to a no-op returning None.
+    """
+
+    event_type = "LOGIN"
+    event_description = "Disconnect all sessions of user in access token"
+    event_category = "AUTHENTICATION"
+
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "8b586848-2fb3-4161-abbe-642157eec7ce": "authentication_disconnect_user",
+    }
+    # __event_validation__ = {
+    #     "8b586848-2fb3-4161-abbe-642157eec7ce": "authentication_disconnect_user",
+    # }
+
+    @classmethod
+    async def authentication_disconnect_user(
+        cls, request: "Request", data: LogoutRequestModel, token_dict: TokenDictType
+    ):
+        """Stub: purge every Redis session key belonging to the user.
+
+        Original implementation (archived below) also revoked the
+        refresher token for the given domain.
+        """
+        # found_user = Users.filter_one(Users.uu_id == token_dict.user_uu_id).data
+        # if not found_user:
+        #     return ResponseHandler.not_found("User not found")
+        # if already_tokens := RedisActions.get_object_via_user_uu_id(
+        #     user_id=str(found_user.uu_id)
+        # ):
+        #     for key, token_user in already_tokens.items():
+        #         RedisActions.delete(key)
+        #         selected_user = Users.filter_one(
+        #             Users.uu_id == token_user.get("uu_id"),
+        #         ).data
+        #         selected_user.remove_refresher_token(
+        #             domain=data.domain, disconnect=True
+        #         )
+        #     return ResponseHandler.success(
+        #         "All sessions are disconnected",
+        #         data=selected_user.get_dict(),
+        #     )
+        # return ResponseHandler.not_found("Invalid data")
+        return
+
+
+class AuthenticationLogoutEventMethods(MethodToEvent):
+    """Event wiring for logging out a single session of one domain.
+
+    The handler body is archived as commented-out code; the event currently
+    resolves to a no-op returning None.
+    """
+
+    event_type = "LOGIN"
+    event_description = "Logout only single session of user which domain is provided"
+    event_category = "AUTHENTICATION"
+
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "5cc22e4e-a0f7-4077-be41-1871feb3dfd1": "authentication_logout_user",
+    }
+    # __event_validation__ = {
+    #     "5cc22e4e-a0f7-4077-be41-1871feb3dfd1": "authentication_logout_user",
+    # }
+
+    @classmethod
+    async def authentication_logout_user(
+        cls,
+        request: "Request",
+        data: LogoutRequestModel,
+        token_dict: TokenDictType,
+    ):
+        """Stub: drop the Redis session whose domain matches *data.domain*.
+
+        Original implementation (archived below) also removed the user's
+        refresher token for that domain.
+        """
+        # token_user = None
+        # if already_tokens := RedisActions.get_object_via_access_key(request=request):
+        #     for key in already_tokens:
+        #         token_user = RedisActions.get_json(key)
+        #         if token_user.get("domain") == data.domain:
+        #             RedisActions.delete(key)
+        #             selected_user = Users.filter_one(
+        #                 Users.uu_id == token_user.get("uu_id"),
+        #             ).data
+        #             selected_user.remove_refresher_token(domain=data.domain)
+
+        #     return ResponseHandler.success(
+        #         "Session is logged out",
+        #         data=token_user,
+        #     )
+        # return ResponseHandler.not_found("Logout is not successfully completed")
+        return
+
+
+class AuthenticationRefreshTokenEventMethods(MethodToEvent):
+    """Event wiring for minting a new access token from a refresher token.
+
+    The handler body is archived as commented-out code; the event currently
+    resolves to a no-op returning None.
+    """
+
+    event_type = "LOGIN"
+    event_description = "Refresh access token with refresher token"
+    event_category = "AUTHENTICATION"
+
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "c90f3334-10c9-4181-b5ff-90d98a0287b2": "authentication_refresher_token",
+    }
+    # __event_validation__ = {
+    #     "c90f3334-10c9-4181-b5ff-90d98a0287b2": AuthenticationRefreshResponse,
+    # }
+
+    @classmethod
+    async def authentication_refresher_token(
+        cls, request: "Request", data: RememberRequestModel, token_dict: TokenDictType
+    ):
+        """Stub: exchange a valid refresher token for a fresh access key.
+
+        Original implementation (archived below) also recorded last-seen
+        agent/platform/remote-address metadata on the user.
+        """
+        # token_refresher = UsersTokens.filter_by_one(
+        #     token=data.refresh_token,
+        #     domain=data.domain,
+        #     **UsersTokens.valid_record_dict,
+        # ).data
+        # if not token_refresher:
+        #     return ResponseHandler.not_found("Invalid data")
+        # if found_user := Users.filter_one(
+        #     Users.id == token_refresher.user_id,
+        # ).data:
+        #     found_user: Users = found_user
+        #     access_key = AuthActions.save_access_token_to_redis(
+        #         request=request, found_user=found_user, domain=data.domain
+        #     )
+        #     found_user.last_agent = request.headers.get("User-Agent", None)
+        #     found_user.last_platform = request.headers.get("Origin", None)
+        #     found_user.last_remote_addr = getattr(
+        #         request, "remote_addr", None
+        #     ) or request.headers.get("X-Forwarded-For", None)
+        #     found_user.last_seen = str(system_arrow.now())
+        #     response_data = {
+        #         "access_token": access_key,
+        #         "refresh_token": data.refresh_token,
+        #     }
+        #     return ResponseHandler.success(
+        #         "User is logged in successfully via refresher token",
+        #         data=response_data,
+        #     )
+        # return ResponseHandler.not_found("Invalid data")
+        return
+
+
+class AuthenticationForgotPasswordEventMethods(MethodToEvent):
+    """Event wiring for emailing a password-reset link to a user.
+
+    The handler body is archived as commented-out code; the event currently
+    resolves to a no-op returning None.
+    """
+
+    event_type = "LOGIN"
+    event_description = "Send an email to user for a valid password reset token"
+    event_category = "AUTHENTICATION"
+
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "e3ca6e24-b9f8-4127-949c-3bfa364e3513": "authentication_forgot_password",
+    }
+    # __event_validation__ = {
+    #     "e3ca6e24-b9f8-4127-949c-3bfa364e3513": "authentication_forgot_password",
+    # }
+
+    @classmethod
+    async def authentication_forgot_password(
+        cls,
+        request: "Request",
+        data: ForgotRequestModel,
+    ):
+        """Stub: email a one-day-valid password-reset link to the user.
+
+        Original implementation (archived below) stored the forgot key on
+        the user record with a 1-day validity window.
+        """
+        # found_user: Users = Users.check_user_exits(
+        #     access_key=data.access_key, domain=data.domain
+        # )
+        # forgot_key = AuthActions.save_access_token_to_redis(
+        #     request=request, found_user=found_user, domain=data.domain
+        # )
+        # forgot_link = ApiStatic.forgot_link(forgot_key=forgot_key)
+        # send_email_completed = send_email(
+        #     subject=f"Dear {found_user.user_tag}, your forgot password link has been sent.",
+        #     receivers=[str(found_user.email)],
+        #     html=change_your_password_template(
+        #         user_name=found_user.user_tag, forgot_link=forgot_link
+        #     ),
+        # )
+        # if not send_email_completed:
+        #     raise HTTPException(
+        #         status_code=400, detail="Email can not be sent. Try again later"
+        #     )
+        # found_user.password_token = forgot_key
+        # found_user.password_token_is_valid = str(system_arrow.shift(days=1))
+        # found_user.save()
+
+        # return ResponseHandler.success(
+        #     "Password is change link is sent to your email or phone",
+        #     data={},
+        # )
+        return
+
+
+class AuthenticationResetPasswordEventMethods(MethodToEvent):
+    """Event wiring for requesting a password reset by email or phone.
+
+    NOTE(review): unlike sibling classes this one declares no
+    event_description/event_category and uses event_type "UPDATE" — confirm
+    whether that asymmetry is intentional. The handler body is archived as
+    commented-out code; the event currently resolves to a no-op.
+    """
+
+    event_type = "UPDATE"
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "af9e121e-24bb-44ac-a616-471d5754360e": "authentication_reset_password",
+    }
+
+    @classmethod
+    async def authentication_reset_password(
+        cls, request: "Request", data: ForgotRequestModel
+    ):
+        """Stub: look the user up by email/phone and send a reset link.
+
+        Original implementation is archived below.
+        """
+        # from sqlalchemy import or_
+
+        # found_user = Users.query.filter(
+        #     or_(
+        #         Users.email == str(data.access_key).lower(),
+        #         Users.phone_number == str(data.access_key).replace(" ", ""),
+        #     ),
+        # ).first()
+        # if not found_user:
+        #     raise HTTPException(
+        #         status_code=status.HTTP_400_BAD_REQUEST,
+        #         detail="Given access key or domain is not matching with the any user record.",
+        #     )
+
+        # reset_password_token = found_user.reset_password_token(found_user=found_user)
+        # send_email_completed = send_email(
+        #     subject=f"Dear {found_user.user_tag}, a password reset request has been received.",
+        #     receivers=[str(found_user.email)],
+        #     html=change_your_password_template(
+        #         user_name=found_user.user_tag,
+        #         forgot_link=ApiStatic.forgot_link(forgot_key=reset_password_token),
+        #     ),
+        # )
+        # if not send_email_completed:
+        #     raise found_user.raise_http_exception(
+        #         status_code=400, message="Email can not be sent. Try again later"
+        #     )
+        # return ResponseHandler.success(
+        #     "Password change link is sent to your email or phone",
+        #     data=found_user.get_dict(),
+        # )
+        return
+
+
+class AuthenticationDownloadAvatarEventMethods(MethodToEvent):
+    """Event wiring for fetching the user's avatar and profile summary.
+
+    The handler body is archived as commented-out code; the event currently
+    resolves to a no-op returning None.
+    """
+
+    event_type = "LOGIN"
+    event_description = "Download avatar icon and profile info of user"
+    event_category = "AUTHENTICATION"
+
+    # Maps event UUID -> handler method name on this class.
+    __event_keys__ = {
+        "c140cd5f-307f-4046-a93e-3ade032a57a7": "authentication_download_avatar",
+    }
+    # __event_validation__ = {
+    #     "c140cd5f-307f-4046-a93e-3ade032a57a7": AuthenticationUserInfoResponse,
+    # }
+
+    @classmethod
+    async def authentication_download_avatar(
+        cls,
+        request: "Request",
+        data: DictRequestModel,
+        token_dict: TokenDictType,
+    ):
+        """Stub: return avatar, name, and token-expiry info for the user.
+
+        Original implementation is archived below.
+        """
+        # if found_user := Users.filter_one(Users.id == token_dict.user_id).data:
+        #     expired_starts = str(
+        #         system_arrow.now() - system_arrow.get(str(found_user.expiry_ends))
+        #     )
+        #     expired_int = (
+        #         system_arrow.now() - system_arrow.get(str(found_user.expiry_ends))
+        #     ).days
+
+        #     user_info = {
+        #         "lang": token_dict.lang,
+        #         "full_name": found_user.person.full_name,
+        #         "avatar": found_user.avatar,
+        #         "remember_me": found_user.remember_me,
+        #         "expiry_ends": str(found_user.expiry_ends),
+        #         "expired_str": expired_starts,
+        #         "expired_int": int(expired_int),
+        #     }
+        #     return ResponseHandler.success(
+        #         "Avatar and profile is shared via user credentials",
+        #         data=user_info,
+        #     )
+        #     return ResponseHandler.not_found("Invalid data")
+
+        return
diff --git a/trash/endpoints.py b/trash/endpoints.py
new file mode 100644
index 0000000..d260d2c
--- /dev/null
+++ b/trash/endpoints.py
@@ -0,0 +1,384 @@
+"""
+Authentication endpoint configurations.
+"""
+
+from typing import TYPE_CHECKING, Dict, Any, Union, Annotated
+
+from ApiServices.Token.token_handler import TokenService
+from ApiValidations.Request import (
+ Logout,
+ Login,
+ Remember,
+ Forgot,
+ CreatePassword,
+ ChangePassword,
+ OccupantSelection,
+ EmployeeSelection,
+)
+
+from .auth import (
+ AuthenticationChangePasswordEventMethods,
+ AuthenticationCheckTokenEventMethods,
+ AuthenticationCreatePasswordEventMethods,
+ AuthenticationDisconnectUserEventMethods,
+ AuthenticationDownloadAvatarEventMethods,
+ AuthenticationForgotPasswordEventMethods,
+ AuthenticationLoginEventMethods,
+ AuthenticationLogoutEventMethods,
+ AuthenticationRefreshEventMethods,
+ AuthenticationRefreshTokenEventMethods,
+ AuthenticationResetPasswordEventMethods,
+ AuthenticationSelectEventMethods,
+)
+from .models import (
+ ChangePasswordRequestModel,
+ CreatePasswordRequestModel,
+ ForgotRequestModel,
+ LoginData,
+ LoginRequestModel,
+ LogoutRequestModel,
+ SelectionDataEmployee,
+ SelectionDataOccupant,
+ RememberRequestModel,
+)
+from ApiEvents.base_request_model import DictRequestModel, EndpointBaseRequestModel
+from ApiEvents.abstract_class import (
+ RouteFactoryConfig,
+ EndpointFactoryConfig,
+ endpoint_wrapper,
+)
+
+if TYPE_CHECKING:
+ from fastapi import Request, HTTPException, status, Body
+from ApiValidations.Custom.token_objects import EmployeeTokenObject, OccupantTokenObject
+
+
+# Type aliases for common types
+
+
+@endpoint_wrapper("/authentication/select")
+async def authentication_select_company_or_occupant_type(
+ request: "Request",
+ data: Union[EmployeeSelection, OccupantSelection],
+) -> Dict[str, Any]:
+ """
+ Select company or occupant type.
+ """
+ auth_dict = authentication_select_company_or_occupant_type.auth
+ if await AuthenticationSelectEventMethods.authentication_select_company_or_occupant_type(
+ request=request, data=data, token_dict=auth_dict
+ ):
+ if data.is_employee:
+ return {"selected_company": data.company_uu_id, "completed": True}
+ elif data.is_occupant:
+ return {
+ "selected_occupant": data.build_living_space_uu_id,
+ "completed": True,
+ }
+ return {"completed": False, "selected_company": None, "selected_occupant": None}
+
+
+@endpoint_wrapper("/authentication/login")
+async def authentication_login_with_domain_and_creds(
+ request: "Request",
+ data: Login,
+) -> Dict[str, Any]:
+ """
+ Authenticate user with domain and credentials.
+ """
+ return await AuthenticationLoginEventMethods.authentication_login_with_domain_and_creds(
+ request=request, data=data
+ )
+
+
+@endpoint_wrapper("/authentication/valid")
+async def authentication_check_token_is_valid(
+ request: "Request",
+) -> Dict[str, Any]:
+ """
+ Check if a token is valid.
+ """
+ try:
+ access_token = TokenService.get_access_token_from_request(request=request)
+ if TokenService.get_object_via_access_key(access_token=access_token):
+ return {
+ "message": "Access Token is valid",
+ }
+ except HTTPException:
+ return {
+ "message": "Access Token is NOT valid",
+ }
+
+
+@endpoint_wrapper("/authentication/refresh")
+async def authentication_refresh_user_info(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Refresh user information.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/change-password")
+async def authentication_change_password(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Change user password.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/create-password")
+async def authentication_create_password(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Create new password.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/forgot-password")
+async def authentication_forgot_password(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Handle forgot password request.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/reset-password")
+async def authentication_reset_password(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Reset password.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/disconnect")
+async def authentication_disconnect_user(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Disconnect user.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/logout")
+async def authentication_logout_user(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Logout user.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/remember")
+async def authentication_refresher_token(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Refresh remember token.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+@endpoint_wrapper("/authentication/avatar")
+async def authentication_download_avatar(
+ request: "Request",
+ data: EndpointBaseRequestModel,
+) -> Dict[str, Any]:
+ """
+ Download user avatar.
+ """
+ return {
+ "status": "OK",
+ }
+
+
+# Common URL prefix shared by every authentication route below.
+prefix = "/authentication"
+
+
+# Route-factory configuration for the authentication router. Each entry binds
+# a path under `prefix` to one of the endpoint functions above; the factory
+# consumes this as a plain dict (hence the trailing .as_dict()).
+# NOTE(review): /avatar is registered as POST although its summary reads like
+# a fetch, and /remember requires auth although token refresh endpoints are
+# often public — confirm both against the route factory's expectations.
+AUTH_CONFIG = RouteFactoryConfig(
+    name="authentication",
+    prefix=prefix,
+    tags=["Authentication"],
+    include_in_schema=True,
+    endpoints=[
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/select",
+            url_of_endpoint="/authentication/select",
+            endpoint="/select",
+            method="POST",
+            summary="Select company or occupant type",
+            description="Select company or occupant type",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_select_company_or_occupant_type,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/login",
+            url_of_endpoint="/authentication/login",
+            endpoint="/login",
+            method="POST",
+            summary="Login user with domain and password",
+            description="Login user with domain and password",
+            is_auth_required=False,  # Public endpoint
+            is_event_required=False,
+            endpoint_function=authentication_login_with_domain_and_creds,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/valid",
+            url_of_endpoint="/authentication/valid",
+            endpoint="/valid",
+            method="GET",
+            summary="Check access token is valid",
+            description="Check access token is valid",
+            is_auth_required=True,  # Needs token validation
+            is_event_required=False,
+            endpoint_function=authentication_check_token_is_valid,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/refresh",
+            url_of_endpoint="/authentication/refresh",
+            endpoint="/refresh",
+            method="GET",
+            summary="Refresh credentials with access token",
+            description="Refresh credentials with access token",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_refresh_user_info,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/change-password",
+            url_of_endpoint="/authentication/change-password",
+            endpoint="/change-password",
+            method="POST",
+            summary="Change password with access token",
+            description="Change password with access token",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_change_password,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/create-password",
+            url_of_endpoint="/authentication/create-password",
+            endpoint="/create-password",
+            method="POST",
+            summary="Create password with password token",
+            description="Create password with password token",
+            is_auth_required=False,  # Public endpoint
+            is_event_required=False,
+            endpoint_function=authentication_create_password,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/reset-password",
+            url_of_endpoint="/authentication/reset-password",
+            endpoint="/reset-password",
+            method="POST",
+            summary="Reset password with token",
+            description="Reset password with token",
+            is_auth_required=False,  # Public endpoint
+            is_event_required=False,
+            endpoint_function=authentication_reset_password,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/disconnect",
+            url_of_endpoint="/authentication/disconnect",
+            endpoint="/disconnect",
+            method="POST",
+            summary="Disconnect user with access token",
+            description="Disconnect user with access token",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_disconnect_user,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/logout",
+            url_of_endpoint="/authentication/logout",
+            endpoint="/logout",
+            method="POST",
+            summary="Logout user with access token",
+            description="Logout user with access token",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_logout_user,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/remember",
+            url_of_endpoint="/authentication/remember",
+            endpoint="/remember",
+            method="POST",
+            summary="Refresh token with refresh token",
+            description="Refresh token with refresh token",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_refresher_token,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/forgot-password",
+            url_of_endpoint="/authentication/forgot-password",
+            endpoint="/forgot-password",
+            method="POST",
+            summary="Request password reset via email",
+            description="Request password reset via email",
+            is_auth_required=False,  # Public endpoint
+            is_event_required=False,
+            endpoint_function=authentication_forgot_password,
+        ),
+        EndpointFactoryConfig(
+            url_prefix=prefix,
+            url_endpoint="/avatar",
+            url_of_endpoint="/authentication/avatar",
+            endpoint="/avatar",
+            method="POST",
+            summary="Get user avatar with credentials",
+            description="Get user avatar with credentials",
+            is_auth_required=True,  # Needs token_dict
+            is_event_required=False,
+            endpoint_function=authentication_download_avatar,
+        ),
+    ],
+).as_dict()
diff --git a/trash/models_old.py b/trash/models_old.py
new file mode 100644
index 0000000..10f5027
--- /dev/null
+++ b/trash/models_old.py
@@ -0,0 +1,134 @@
+"""
+Authentication request models.
+"""
+
+from typing import TYPE_CHECKING, Dict, Any, Literal, Optional, TypedDict
+from pydantic import BaseModel, Field, model_validator, RootModel, ConfigDict
+from ApiEvents.base_request_model import BaseRequestModel, DictRequestModel
+
+if TYPE_CHECKING:
+ from fastapi import Request
+
+
+class TokenObjectBase(BaseModel):
+    """Base model for token objects."""
+
+    user_type: str = Field(..., description="Type of user")
+    user_id: str = Field(..., description="User ID")
+    token: str = Field(..., description="Authentication token")
+    # NOTE(review): no default and no `...` — in pydantic v2 this field is
+    # still required; confirm whether a default of {} was intended.
+    permissions: Dict[str, Any] = Field(description="User permissions")
+
+
+class LoginData(TypedDict):
+    """Payload shape for the login endpoint's `data` field."""
+
+    domain: str  # tenant/domain the user logs in to
+    access_key: str  # email or phone identifying the user
+    password: str
+    remember_me: bool  # request a long-lived refresher token
+
+
+class LoginRequestModel(BaseRequestModel[LoginData]):
+    """Request model for login endpoint.
+
+    The generic parameter fixes the inner payload to LoginData; the example
+    below is surfaced in the OpenAPI schema.
+    """
+
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {
+                "domain": "example.com",
+                "access_key": "user@example",
+                "password": "password",
+                "remember_me": False,
+            }
+        }
+    )
+
+
+class LogoutData(TypedDict):
+    """Payload shape for the logout endpoint's `data` field."""
+
+    token: str  # the session token to invalidate
+
+
+class LogoutRequestModel(BaseRequestModel[LogoutData]):
+    """Request model for logout endpoint (payload: LogoutData)."""
+
+    model_config = ConfigDict(
+        json_schema_extra={"example": {"token": "your-token-here"}}
+    )
+
+
+class RememberData(TypedDict):
+    """Payload shape for the remember-token endpoint's `data` field."""
+
+    remember_token: str  # long-lived token exchanged for a fresh session
+
+
+class RememberRequestModel(BaseRequestModel[RememberData]):
+    """Request model for remember token endpoint (payload: RememberData)."""
+
+    model_config = ConfigDict(
+        json_schema_extra={"example": {"remember_token": "your-remember-token-here"}}
+    )
+
+
+class ForgotData(TypedDict):
+    """Payload shape for the forgot-password endpoint's `data` field."""
+
+    email: str  # address the reset link is sent to
+    domain: str  # tenant/domain the account belongs to
+
+
+class ForgotRequestModel(BaseRequestModel[ForgotData]):
+    """Request model for forgot password endpoint (payload: ForgotData).
+
+    NOTE(review): the archived event handlers read `data.access_key` from
+    this model while ForgotData declares `email` — confirm which field name
+    the live contract uses.
+    """
+
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {"email": "user@example.com", "domain": "example.com"}
+        }
+    )
+
+
+class ChangePasswordData(TypedDict):
+    """Payload shape for the change-password endpoint's `data` field."""
+
+    old_password: str  # current password, verified before the change
+    new_password: str
+
+
+class ChangePasswordRequestModel(BaseRequestModel[ChangePasswordData]):
+    """Request model for change password endpoint (payload: ChangePasswordData)."""
+
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {"old_password": "old-pass", "new_password": "new-pass"}
+        }
+    )
+
+
+class CreatePasswordData(TypedDict):
+    """Payload shape for the create-password endpoint's `data` field."""
+
+    token: str  # one-time password-creation token from the reset email
+    password: str
+
+
+class CreatePasswordRequestModel(BaseRequestModel[CreatePasswordData]):
+    """Request model for create password endpoint (payload: CreatePasswordData)."""
+
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {"token": "password-creation-token", "password": "new-password"}
+        }
+    )
+
+
+class SelectionDataOccupant(BaseModel):
+    """Selection payload identifying an occupant by living-space id."""
+
+    # NOTE(review): Optional[...] without `= None` is still REQUIRED in
+    # pydantic v2 (merely nullable) — confirm whether a None default was meant.
+    build_living_space_uu_id: Optional[str]
+
+
+class SelectionDataEmployee(BaseModel):
+ """Type for selection data."""
+
+ company_uu_id: Optional[str]