diff --git a/.idea/misc.xml b/.idea/misc.xml
index 01f61c8..5cfe588 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -3,5 +3,5 @@
-
+
\ No newline at end of file
diff --git a/.idea/wag-managment-api-service-version-3.iml b/.idea/wag-managment-api-service-version-3.iml
index f552ee6..001c2cd 100644
--- a/.idea/wag-managment-api-service-version-3.iml
+++ b/.idea/wag-managment-api-service-version-3.iml
@@ -2,7 +2,7 @@
-
+
\ No newline at end of file
diff --git a/ApiServices/.dockerignore b/ApiServices/.dockerignore
new file mode 100644
index 0000000..1449c90
--- /dev/null
+++ b/ApiServices/.dockerignore
@@ -0,0 +1,93 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+
+# CI
+.codeclimate.yml
+.travis.yml
+.taskcluster.yml
+
+# Docker
+docker-compose.yml
+service_app/Dockerfile
+.docker
+.dockerignore
+
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
+**/*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+service_app/env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Virtual environment
+service_app/.env
+.venv/
+venv/
+
+# PyCharm
+.idea
+
+# Python mode for VIM
+.ropeproject
+**/.ropeproject
+
+# Vim swap files
+**/*.swp
+
+# VS Code
+.vscode/
+
+test_application/
+
+
diff --git a/ApiServices/AuthService/.dockerignore b/ApiServices/AuthService/.dockerignore
new file mode 100644
index 0000000..1449c90
--- /dev/null
+++ b/ApiServices/AuthService/.dockerignore
@@ -0,0 +1,93 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+
+# CI
+.codeclimate.yml
+.travis.yml
+.taskcluster.yml
+
+# Docker
+docker-compose.yml
+service_app/Dockerfile
+.docker
+.dockerignore
+
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
+**/*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+service_app/env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Virtual environment
+service_app/.env
+.venv/
+venv/
+
+# PyCharm
+.idea
+
+# Python mode for VIM
+.ropeproject
+**/.ropeproject
+
+# Vim swap files
+**/*.swp
+
+# VS Code
+.vscode/
+
+test_application/
+
+
diff --git a/ApiServices/AuthService/.gitignore b/ApiServices/AuthService/.gitignore
new file mode 100644
index 0000000..f852d0e
--- /dev/null
+++ b/ApiServices/AuthService/.gitignore
@@ -0,0 +1,162 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.idea/
+.Python
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+service_app/.env
+.venv
+service_app/env/
+venv/
+service_app/env/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
diff --git a/ApiServices/AuthService/Dockerfile b/ApiServices/AuthService/Dockerfile
new file mode 100644
index 0000000..18d9b89
--- /dev/null
+++ b/ApiServices/AuthService/Dockerfile
@@ -0,0 +1,32 @@
FROM python:3.12-slim-bookworm

# Modern `ENV key=value` form — the space-separated `ENV key value` form is
# deprecated by Docker.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# uv / uvx binaries from the official astral-sh image.
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/

# NOTE(review): `..` escapes the build context — Docker forbids COPY from
# outside the context, so this Dockerfile only builds when the compose file
# sets `context:` to the parent directory and `dockerfile:` to this file.
# Confirm the intended build context before changing these paths.
COPY ../service_app/requirements.txt .

# Create the venv and install pinned requirements in one layer, before the
# sources are copied, so the dependency layer is cached on code-only rebuilds.
RUN uv venv && uv pip install -r requirements.txt

COPY ../service_app ./service_app

COPY ../databases ./service_app/databases
COPY ../api_services ./service_app/api_services
COPY ../api_objects ./service_app/api_objects
COPY ../api_configs ./service_app/api_configs
COPY ../api_events ./service_app/api_events
COPY ../api_library ./service_app/api_library
COPY ../api_validations ./service_app/api_validations

WORKDIR /service_app

CMD ["uv", "run", "app.py"]
diff --git a/ApiServices/AuthService/app.py b/ApiServices/AuthService/app.py
new file mode 100644
index 0000000..7c3e4f7
--- /dev/null
+++ b/ApiServices/AuthService/app.py
@@ -0,0 +1,37 @@
import uvicorn
import routers

from fastapi.middleware.cors import CORSMiddleware
from fastapi.exceptions import HTTPException

from middlewares.token_middleware import AuthHeaderMiddleware
from application.create_file import create_app
from api_objects.errors.errors_dictionary import ErrorHandlers
from prometheus_fastapi_instrumentator import Instrumentator

# Build the FastAPI app from the package routers and expose /metrics.
app = create_app(routers=routers)
Instrumentator().instrument(app=app).expose(app=app)

# CORS first, then the session-key middleware.  Starlette wraps middlewares
# LIFO, so AuthHeaderMiddleware (added last) sees each request first.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # NOTE(review): wide open — confirm for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.add_middleware(AuthHeaderMiddleware)

# Project-wide exception handlers: HTTP errors, then a last-resort catch-all.
app.add_exception_handler(HTTPException, ErrorHandlers.exception_handler_http)
app.add_exception_handler(Exception, ErrorHandlers.exception_handler_exception)

if __name__ == "__main__":
    # Development entry point (`uv run app.py` in the container lands here).
    server_config = uvicorn.Config(
        app="app:app",
        host="0.0.0.0",
        port=41575,
        log_level="info",
        reload=True,
    )
    uvicorn.Server(server_config).run()
diff --git a/ApiServices/AuthService/application/__init__.py b/ApiServices/AuthService/application/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiServices/AuthService/application/create_file.py b/ApiServices/AuthService/application/create_file.py
new file mode 100644
index 0000000..b492475
--- /dev/null
+++ b/ApiServices/AuthService/application/create_file.py
@@ -0,0 +1,64 @@
def create_app(routers):
    """Build and configure the service's FastAPI application.

    Includes every router exported through ``routers.__all__``, then patches
    the generated OpenAPI schema so every non-public operation advertises the
    ``evyos-session-key`` header scheme and a 403 response.

    Args:
        routers: Module exposing router objects via ``__all__``.

    Returns:
        The configured ``FastAPI`` instance with a pre-built OpenAPI schema.
    """
    from fastapi import FastAPI
    from fastapi.responses import JSONResponse, RedirectResponse
    from fastapi.openapi.utils import get_openapi

    from api_configs import Config

    api_app = FastAPI(title=str(Config.TITLE), default_response_class=JSONResponse)

    @api_app.get("/", include_in_schema=False, summary=str(Config.DESCRIPTION))
    async def home():
        # Root is a convenience redirect to the interactive docs.
        return RedirectResponse(url="/docs")

    for router in (getattr(routers, name) for name in routers.__all__):
        if router:  # skip names exported but bound to a falsy placeholder
            api_app.include_router(router)

    openapi_schema = get_openapi(
        title=Config.TITLE,
        description=Config.DESCRIPTION,
        version="0.0.1",
        routes=api_app.routes,
    )

    if "components" in openapi_schema:
        openapi_schema["components"]["securitySchemes"] = {
            "Bearer Auth": {
                "type": "apiKey",
                "in": "header",
                "name": "evyos-session-key",
                "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
            }
        }

    # "/authentication/select" is open at runtime but still documented as
    # secured.  Hoisted out of the loop: the original copied and .remove()d
    # the list once per method per route, and list.remove would raise
    # ValueError if the sentinel path were ever missing from the config.
    documented_insecure = [
        p for p in Config.INSECURE_PATHS if p != "/authentication/select"
    ]

    for route in api_app.routes:
        path = str(getattr(route, "path", ""))
        # Mounts and websocket routes expose no `methods`; guard instead of
        # crashing with AttributeError as the original direct access did.
        methods = getattr(route, "methods", None)
        if not getattr(route, "include_in_schema", False) or not methods:
            continue
        for method in (m.lower() for m in methods):
            if path in documented_insecure:
                continue
            operation = openapi_schema["paths"][path][method]
            operation["security"] = [{"Bearer Auth": []}]
            # setdefault: a missing `responses` key cannot break schema build.
            operation.setdefault("responses", {})["403"] = {
                "content": {
                    "application/json": {
                        "schema": {
                            "$ref": "#/components/schemas/HTTPValidationError"
                        }
                    }
                },
                "description": "Returned if user is unauthorized.",
            }

    api_app.openapi_schema = openapi_schema
    return api_app
diff --git a/ApiServices/AuthService/middlewares/__init__.py b/ApiServices/AuthService/middlewares/__init__.py
new file mode 100644
index 0000000..c3b4cfe
--- /dev/null
+++ b/ApiServices/AuthService/middlewares/__init__.py
@@ -0,0 +1 @@
+from .token_middleware import AuthHeaderMiddleware
diff --git a/ApiServices/AuthService/middlewares/token_middleware.py b/ApiServices/AuthService/middlewares/token_middleware.py
new file mode 100644
index 0000000..6e2b20d
--- /dev/null
+++ b/ApiServices/AuthService/middlewares/token_middleware.py
@@ -0,0 +1,116 @@
+import json
+
+from time import perf_counter
+from api_configs import Config
+from fastapi import status
+from fastapi.exceptions import HTTPException
+from starlette.middleware.base import BaseHTTPMiddleware
+
+
class MiddlewareLogs:
    """Minimal logging facade for the auth middleware (stdout for now)."""

    @staticmethod
    def log_error(log_message):
        """Print one log message.

        Bug fix: the original declared ``log_error(self, log_message)`` on a
        ``@staticmethod``; the single-argument call sites in this module
        (``MiddlewareLogs.log_error(str(...))``) therefore raised
        ``TypeError: log_error() missing 1 required positional argument``.
        A staticmethod receives no ``self``.
        """
        print(log_message)
+
+
def log_middlewares_exception(endpoint, token_user, message, request):
    """Emit a structured authentication log entry for a middleware event.

    The entry is a stringified dict whose ``log_data`` field carries a JSON
    payload with the endpoint, the session user and the raw request headers.
    """
    payload = json.dumps(
        {
            "endpoint": endpoint,
            "user": str(token_user),
            "request": str(request.headers),
        }
    )
    entry = {
        "log_type": "Authentication",
        "log_message": message,
        "log_action": "User",
        "log_data": payload,
    }
    MiddlewareLogs.log_error(str(entry))
+
+
class AuthHeaderMiddleware(BaseHTTPMiddleware):
    # Per-request session gate: any request whose path is NOT listed in
    # Config.INSECURE_PATHS must resolve to a live session, otherwise a 401
    # is raised before the endpoint runs.  Timing headers are attached to
    # every response via prepare_response_needs().

    async def dispatch(self, request, call_next):
        # start_time anchors the timing headers; token_user/endpoint are only
        # populated for secured paths.
        start_time, token_user, endpoint = perf_counter(), None, None

        if check_if_path_secure(request=request, insecure_paths=Config.INSECURE_PATHS):
            endpoint = str(getattr(getattr(request, "url", None), "path", None))
            # NOTE(review): check_if_token_is_not_valid always returns a
            # 2-tuple, which is truthy, so this walrus guard never skips.
            if un_auth := check_if_token_is_not_valid(
                request=request, endpoint_name=endpoint
            ):
                auth, token_user = un_auth
                if not auth == "valid":
                    # Reject with the (Turkish) message produced by the check.
                    # log_middlewares_exception(endpoint, token_user, "auth", request)
                    raise HTTPException(
                        status_code=status.HTTP_401_UNAUTHORIZED, detail=auth
                    )

        response = await call_next(request)
        prepare_response_needs(response, start_time)
        # if endpoint and token_user:
        # log_middlewares_exception(endpoint, token_user, "Request is completed", request)
        return response
+
+
def prepare_response_needs(response, start_time):
    """Attach request-timing headers to the outgoing response.

    Args:
        response: Response-like object exposing a mutable ``headers`` mapping.
        start_time: ``perf_counter()`` value captured when the request arrived.

    Side effects:
        Sets the ``request-starts``, ``request-ends`` and ``elapsed-Time``
        headers on *response*.
    """
    end_time = perf_counter()
    response.headers["request-starts"] = str(start_time)
    response.headers["request-ends"] = str(end_time)
    # Bug fix: perf_counter() differences are in SECONDS, but the original
    # labelled the raw difference " ms".  Convert before labelling.
    elapsed_ms = (end_time - start_time) * 1000.0
    response.headers["elapsed-Time"] = str(elapsed_ms) + " ms"
+
+
def check_if_path_secure(request, insecure_paths) -> bool:
    """Return True when the request path is NOT in the public (insecure) list.

    Missing ``url``/``path`` attributes degrade to the string "None" rather
    than raising, matching the defensive getattr chain used elsewhere.
    """
    url = getattr(request, "url", None)
    current_path = str(getattr(url, "path", None))
    return current_path not in insecure_paths
+
+
def check_if_token_is_not_valid(request, endpoint_name):
    """Resolve the session user behind the request's access key.

    Returns a ``(status, user)`` tuple: ``status`` is the literal ``"valid"``
    on success, otherwise a user-facing (Turkish) error message; ``user`` is
    whatever the Redis lookup produced (falsy when the session is unknown).

    NOTE(review): despite the name, ``"valid"`` means the token IS valid, and
    ``endpoint_name`` is currently unused — per-endpoint authorization checks
    that used to live here have been disabled.
    """
    from api_services.redis.functions import get_object_via_access_key

    token_user = get_object_via_access_key(request)
    if not token_user:
        return "Session geçerli değil. Lütfen tekrar giriş yapınız.", token_user
    return "valid", token_user
diff --git a/ApiServices/AuthService/pyproject.toml b/ApiServices/AuthService/pyproject.toml
new file mode 100644
index 0000000..f105854
--- /dev/null
+++ b/ApiServices/AuthService/pyproject.toml
@@ -0,0 +1,25 @@
+[project]
+name = "wag-managment-api-service-version-3"
+version = "0.1.0"
+description = "Wag Python API Service"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "alembic>=1.14.0",
+ "arrow>=1.3.0",
+ "cryptography>=43.0.3",
+ "faker>=30.8.2",
+ "fastapi>=0.115.4",
+ "pandas>=2.2.3",
+ "prometheus-fastapi-instrumentator>=7.0.0",
+ "psycopg2-binary>=2.9.10",
+ "pymongo>=4.10.1",
+ "redis>=5.2.0",
+ "redmail>=0.6.0",
+ "requests>=2.32.3",
+ "rsa>=4.9",
+ "sqlalchemy-mixins>=2.0.5",
+ "textdistance>=4.6.3",
+ "unidecode>=1.3.8",
+ "uvicorn>=0.32.0",
+]
diff --git a/ApiServices/AuthService/routers/__init__.py b/ApiServices/AuthService/routers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiServices/EventService/.dockerignore b/ApiServices/EventService/.dockerignore
new file mode 100644
index 0000000..1449c90
--- /dev/null
+++ b/ApiServices/EventService/.dockerignore
@@ -0,0 +1,93 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+
+# CI
+.codeclimate.yml
+.travis.yml
+.taskcluster.yml
+
+# Docker
+docker-compose.yml
+service_app/Dockerfile
+.docker
+.dockerignore
+
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
+**/*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+service_app/env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Virtual environment
+service_app/.env
+.venv/
+venv/
+
+# PyCharm
+.idea
+
+# Python mode for VIM
+.ropeproject
+**/.ropeproject
+
+# Vim swap files
+**/*.swp
+
+# VS Code
+.vscode/
+
+test_application/
+
+
diff --git a/ApiServices/EventService/.gitignore b/ApiServices/EventService/.gitignore
new file mode 100644
index 0000000..f852d0e
--- /dev/null
+++ b/ApiServices/EventService/.gitignore
@@ -0,0 +1,162 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.idea/
+.Python
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+service_app/.env
+.venv
+service_app/env/
+venv/
+service_app/env/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
diff --git a/ApiServices/EventService/Dockerfile b/ApiServices/EventService/Dockerfile
new file mode 100644
index 0000000..18d9b89
--- /dev/null
+++ b/ApiServices/EventService/Dockerfile
@@ -0,0 +1,32 @@
FROM python:3.12-slim-bookworm

# Modern `ENV key=value` form — the space-separated `ENV key value` form is
# deprecated by Docker.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# uv / uvx binaries from the official astral-sh image.
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/

# NOTE(review): `..` escapes the build context — Docker forbids COPY from
# outside the context, so this Dockerfile only builds when the compose file
# sets `context:` to the parent directory and `dockerfile:` to this file.
# Confirm the intended build context before changing these paths.
COPY ../service_app/requirements.txt .

# Create the venv and install pinned requirements in one layer, before the
# sources are copied, so the dependency layer is cached on code-only rebuilds.
RUN uv venv && uv pip install -r requirements.txt

COPY ../service_app ./service_app

COPY ../databases ./service_app/databases
COPY ../api_services ./service_app/api_services
COPY ../api_objects ./service_app/api_objects
COPY ../api_configs ./service_app/api_configs
COPY ../api_events ./service_app/api_events
COPY ../api_library ./service_app/api_library
COPY ../api_validations ./service_app/api_validations

WORKDIR /service_app

CMD ["uv", "run", "app.py"]
diff --git a/ApiServices/EventService/app.py b/ApiServices/EventService/app.py
new file mode 100644
index 0000000..7c3e4f7
--- /dev/null
+++ b/ApiServices/EventService/app.py
@@ -0,0 +1,37 @@
import uvicorn
import routers

from fastapi.middleware.cors import CORSMiddleware
from fastapi.exceptions import HTTPException

from middlewares.token_middleware import AuthHeaderMiddleware
from application.create_file import create_app
from api_objects.errors.errors_dictionary import ErrorHandlers
from prometheus_fastapi_instrumentator import Instrumentator

# Build the FastAPI app from the package routers and expose /metrics.
app = create_app(routers=routers)
Instrumentator().instrument(app=app).expose(app=app)

# CORS first, then the session-key middleware.  Starlette wraps middlewares
# LIFO, so AuthHeaderMiddleware (added last) sees each request first.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # NOTE(review): wide open — confirm for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.add_middleware(AuthHeaderMiddleware)

# Project-wide exception handlers: HTTP errors, then a last-resort catch-all.
app.add_exception_handler(HTTPException, ErrorHandlers.exception_handler_http)
app.add_exception_handler(Exception, ErrorHandlers.exception_handler_exception)

if __name__ == "__main__":
    # Development entry point (`uv run app.py` in the container lands here).
    # NOTE(review): this port (41575) is identical to AuthService's — confirm
    # each service is meant to bind the same port in its own container.
    server_config = uvicorn.Config(
        app="app:app",
        host="0.0.0.0",
        port=41575,
        log_level="info",
        reload=True,
    )
    uvicorn.Server(server_config).run()
diff --git a/ApiServices/EventService/application/__init__.py b/ApiServices/EventService/application/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiServices/EventService/application/create_file.py b/ApiServices/EventService/application/create_file.py
new file mode 100644
index 0000000..b492475
--- /dev/null
+++ b/ApiServices/EventService/application/create_file.py
@@ -0,0 +1,64 @@
def create_app(routers):
    """Build and configure the service's FastAPI application.

    Includes every router exported through ``routers.__all__``, then patches
    the generated OpenAPI schema so every non-public operation advertises the
    ``evyos-session-key`` header scheme and a 403 response.

    Args:
        routers: Module exposing router objects via ``__all__``.

    Returns:
        The configured ``FastAPI`` instance with a pre-built OpenAPI schema.
    """
    from fastapi import FastAPI
    from fastapi.responses import JSONResponse, RedirectResponse
    from fastapi.openapi.utils import get_openapi

    from api_configs import Config

    api_app = FastAPI(title=str(Config.TITLE), default_response_class=JSONResponse)

    @api_app.get("/", include_in_schema=False, summary=str(Config.DESCRIPTION))
    async def home():
        # Root is a convenience redirect to the interactive docs.
        return RedirectResponse(url="/docs")

    for router in (getattr(routers, name) for name in routers.__all__):
        if router:  # skip names exported but bound to a falsy placeholder
            api_app.include_router(router)

    openapi_schema = get_openapi(
        title=Config.TITLE,
        description=Config.DESCRIPTION,
        version="0.0.1",
        routes=api_app.routes,
    )

    if "components" in openapi_schema:
        openapi_schema["components"]["securitySchemes"] = {
            "Bearer Auth": {
                "type": "apiKey",
                "in": "header",
                "name": "evyos-session-key",
                "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
            }
        }

    # "/authentication/select" is open at runtime but still documented as
    # secured.  Hoisted out of the loop: the original copied and .remove()d
    # the list once per method per route, and list.remove would raise
    # ValueError if the sentinel path were ever missing from the config.
    documented_insecure = [
        p for p in Config.INSECURE_PATHS if p != "/authentication/select"
    ]

    for route in api_app.routes:
        path = str(getattr(route, "path", ""))
        # Mounts and websocket routes expose no `methods`; guard instead of
        # crashing with AttributeError as the original direct access did.
        methods = getattr(route, "methods", None)
        if not getattr(route, "include_in_schema", False) or not methods:
            continue
        for method in (m.lower() for m in methods):
            if path in documented_insecure:
                continue
            operation = openapi_schema["paths"][path][method]
            operation["security"] = [{"Bearer Auth": []}]
            # setdefault: a missing `responses` key cannot break schema build.
            operation.setdefault("responses", {})["403"] = {
                "content": {
                    "application/json": {
                        "schema": {
                            "$ref": "#/components/schemas/HTTPValidationError"
                        }
                    }
                },
                "description": "Returned if user is unauthorized.",
            }

    api_app.openapi_schema = openapi_schema
    return api_app
diff --git a/ApiServices/EventService/middlewares/__init__.py b/ApiServices/EventService/middlewares/__init__.py
new file mode 100644
index 0000000..c3b4cfe
--- /dev/null
+++ b/ApiServices/EventService/middlewares/__init__.py
@@ -0,0 +1 @@
+from .token_middleware import AuthHeaderMiddleware
diff --git a/ApiServices/EventService/middlewares/token_middleware.py b/ApiServices/EventService/middlewares/token_middleware.py
new file mode 100644
index 0000000..6e2b20d
--- /dev/null
+++ b/ApiServices/EventService/middlewares/token_middleware.py
@@ -0,0 +1,116 @@
+import json
+
+from time import perf_counter
+from api_configs import Config
+from fastapi import status
+from fastapi.exceptions import HTTPException
+from starlette.middleware.base import BaseHTTPMiddleware
+
+
class MiddlewareLogs:
    """Minimal logging facade for the auth middleware (stdout for now)."""

    @staticmethod
    def log_error(log_message):
        """Print one log message.

        Bug fix: the original declared ``log_error(self, log_message)`` on a
        ``@staticmethod``; the single-argument call sites in this module
        (``MiddlewareLogs.log_error(str(...))``) therefore raised
        ``TypeError: log_error() missing 1 required positional argument``.
        A staticmethod receives no ``self``.
        """
        print(log_message)
+
+
def log_middlewares_exception(endpoint, token_user, message, request):
    """Emit a structured authentication log entry for a middleware event.

    The entry is a stringified dict whose ``log_data`` field carries a JSON
    payload with the endpoint, the session user and the raw request headers.
    """
    payload = json.dumps(
        {
            "endpoint": endpoint,
            "user": str(token_user),
            "request": str(request.headers),
        }
    )
    entry = {
        "log_type": "Authentication",
        "log_message": message,
        "log_action": "User",
        "log_data": payload,
    }
    MiddlewareLogs.log_error(str(entry))
+
+
class AuthHeaderMiddleware(BaseHTTPMiddleware):
    # Per-request session gate: any request whose path is NOT listed in
    # Config.INSECURE_PATHS must resolve to a live session, otherwise a 401
    # is raised before the endpoint runs.  Timing headers are attached to
    # every response via prepare_response_needs().

    async def dispatch(self, request, call_next):
        # start_time anchors the timing headers; token_user/endpoint are only
        # populated for secured paths.
        start_time, token_user, endpoint = perf_counter(), None, None

        if check_if_path_secure(request=request, insecure_paths=Config.INSECURE_PATHS):
            endpoint = str(getattr(getattr(request, "url", None), "path", None))
            # NOTE(review): check_if_token_is_not_valid always returns a
            # 2-tuple, which is truthy, so this walrus guard never skips.
            if un_auth := check_if_token_is_not_valid(
                request=request, endpoint_name=endpoint
            ):
                auth, token_user = un_auth
                if not auth == "valid":
                    # Reject with the (Turkish) message produced by the check.
                    # log_middlewares_exception(endpoint, token_user, "auth", request)
                    raise HTTPException(
                        status_code=status.HTTP_401_UNAUTHORIZED, detail=auth
                    )

        response = await call_next(request)
        prepare_response_needs(response, start_time)
        # if endpoint and token_user:
        # log_middlewares_exception(endpoint, token_user, "Request is completed", request)
        return response
+
+
def prepare_response_needs(response, start_time):
    """Attach request-timing headers to the outgoing response.

    Args:
        response: Response-like object exposing a mutable ``headers`` mapping.
        start_time: ``perf_counter()`` value captured when the request arrived.

    Side effects:
        Sets the ``request-starts``, ``request-ends`` and ``elapsed-Time``
        headers on *response*.
    """
    end_time = perf_counter()
    response.headers["request-starts"] = str(start_time)
    response.headers["request-ends"] = str(end_time)
    # Bug fix: perf_counter() differences are in SECONDS, but the original
    # labelled the raw difference " ms".  Convert before labelling.
    elapsed_ms = (end_time - start_time) * 1000.0
    response.headers["elapsed-Time"] = str(elapsed_ms) + " ms"
+
+
def check_if_path_secure(request, insecure_paths) -> bool:
    """Return True when the request path is NOT in the public (insecure) list.

    Missing ``url``/``path`` attributes degrade to the string "None" rather
    than raising, matching the defensive getattr chain used elsewhere.
    """
    url = getattr(request, "url", None)
    current_path = str(getattr(url, "path", None))
    return current_path not in insecure_paths
+
+
def check_if_token_is_not_valid(request, endpoint_name):
    """Resolve the session user behind the request's access key.

    Returns a ``(status, user)`` tuple: ``status`` is the literal ``"valid"``
    on success, otherwise a user-facing (Turkish) error message; ``user`` is
    whatever the Redis lookup produced (falsy when the session is unknown).

    NOTE(review): despite the name, ``"valid"`` means the token IS valid, and
    ``endpoint_name`` is currently unused — per-endpoint authorization checks
    that used to live here have been disabled.
    """
    from api_services.redis.functions import get_object_via_access_key

    token_user = get_object_via_access_key(request)
    if not token_user:
        return "Session geçerli değil. Lütfen tekrar giriş yapınız.", token_user
    return "valid", token_user
diff --git a/ApiServices/EventService/pyproject.toml b/ApiServices/EventService/pyproject.toml
new file mode 100644
index 0000000..f105854
--- /dev/null
+++ b/ApiServices/EventService/pyproject.toml
@@ -0,0 +1,25 @@
+[project]
+name = "wag-managment-api-service-version-3"
+version = "0.1.0"
+description = "Wag Python API Service"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "alembic>=1.14.0",
+ "arrow>=1.3.0",
+ "cryptography>=43.0.3",
+ "faker>=30.8.2",
+ "fastapi>=0.115.4",
+ "pandas>=2.2.3",
+ "prometheus-fastapi-instrumentator>=7.0.0",
+ "psycopg2-binary>=2.9.10",
+ "pymongo>=4.10.1",
+ "redis>=5.2.0",
+ "redmail>=0.6.0",
+ "requests>=2.32.3",
+ "rsa>=4.9",
+ "sqlalchemy-mixins>=2.0.5",
+ "textdistance>=4.6.3",
+ "unidecode>=1.3.8",
+ "uvicorn>=0.32.0",
+]
diff --git a/ApiServices/EventService/routers/__init__.py b/ApiServices/EventService/routers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiServices/ValidationService/.dockerignore b/ApiServices/ValidationService/.dockerignore
new file mode 100644
index 0000000..1449c90
--- /dev/null
+++ b/ApiServices/ValidationService/.dockerignore
@@ -0,0 +1,93 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+
+# CI
+.codeclimate.yml
+.travis.yml
+.taskcluster.yml
+
+# Docker
+docker-compose.yml
+service_app/Dockerfile
+.docker
+.dockerignore
+
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
+**/*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+service_app/env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Virtual environment
+service_app/.env
+.venv/
+venv/
+
+# PyCharm
+.idea
+
+# Python mode for VIM
+.ropeproject
+**/.ropeproject
+
+# Vim swap files
+**/*.swp
+
+# VS Code
+.vscode/
+
+test_application/
+
+
diff --git a/ApiServices/ValidationService/.gitignore b/ApiServices/ValidationService/.gitignore
new file mode 100644
index 0000000..f852d0e
--- /dev/null
+++ b/ApiServices/ValidationService/.gitignore
@@ -0,0 +1,162 @@
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.idea/
+.Python
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+service_app/.env
+.venv
+service_app/env/
+venv/
+service_app/env/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
diff --git a/ApiServices/ValidationService/Dockerfile b/ApiServices/ValidationService/Dockerfile
new file mode 100644
index 0000000..18d9b89
--- /dev/null
+++ b/ApiServices/ValidationService/Dockerfile
@@ -0,0 +1,32 @@
+FROM python:3.12-slim-bookworm
+
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+
+COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
+
+COPY ../service_app/requirements.txt .
+
+RUN uv venv
+RUN uv pip install -r requirements.txt
+
+COPY ../service_app ./service_app
+
+COPY ../databases ./service_app/databases
+COPY ../api_services ./service_app/api_services
+COPY ../api_objects ./service_app/api_objects
+COPY ../api_configs ./service_app/api_configs
+COPY ../api_events ./service_app/api_events
+COPY ../api_library ./service_app/api_library
+COPY ../api_validations ./service_app/api_validations
+
+WORKDIR /service_app
+
+CMD ["uv", "run", "app.py"]
+
+# Old File
+#FROM python:3.10
+
+#RUN pip install --upgrade pip
+#RUN pip install --no-cache-dir --upgrade -r requirements.txt
+#CMD ["python", "-m", "app"]
diff --git a/ApiServices/ValidationService/app.py b/ApiServices/ValidationService/app.py
new file mode 100644
index 0000000..7c3e4f7
--- /dev/null
+++ b/ApiServices/ValidationService/app.py
@@ -0,0 +1,37 @@
import uvicorn
import routers

from fastapi.middleware.cors import CORSMiddleware
from fastapi.exceptions import HTTPException

from middlewares.token_middleware import AuthHeaderMiddleware
from application.create_file import create_app
from api_objects.errors.errors_dictionary import ErrorHandlers
from prometheus_fastapi_instrumentator import Instrumentator

# Build the FastAPI app from the routers package and expose Prometheus
# metrics on /metrics.
app = create_app(routers=routers)
Instrumentator().instrument(app=app).expose(app=app)

# CORS: allow every origin, method and header; credentials permitted.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.add_middleware(AuthHeaderMiddleware)

# Route framework and uncaught exceptions through the shared handlers.
app.add_exception_handler(HTTPException, ErrorHandlers.exception_handler_http)
app.add_exception_handler(Exception, ErrorHandlers.exception_handler_exception)

if __name__ == "__main__":
    server_config = uvicorn.Config(
        app="app:app",
        host="0.0.0.0",
        port=41575,
        log_level="info",
        reload=True,
    )
    uvicorn.Server(server_config).run()
diff --git a/ApiServices/ValidationService/application/__init__.py b/ApiServices/ValidationService/application/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiServices/ValidationService/application/create_file.py b/ApiServices/ValidationService/application/create_file.py
new file mode 100644
index 0000000..b492475
--- /dev/null
+++ b/ApiServices/ValidationService/application/create_file.py
@@ -0,0 +1,64 @@
def create_app(routers):
    """Build the FastAPI application and patch its OpenAPI schema.

    Args:
        routers: a package/module exposing ``__all__`` naming the router
            objects to mount on the application.

    Returns:
        The configured ``FastAPI`` instance with the session-key security
        scheme applied to every path not listed in ``Config.INSECURE_PATHS``.
    """
    from fastapi import FastAPI
    from fastapi.responses import JSONResponse, RedirectResponse
    from fastapi.openapi.utils import get_openapi

    from api_configs import Config

    api_app = FastAPI(title=str(Config.TITLE), default_response_class=JSONResponse)

    @api_app.get("/", include_in_schema=False, summary=str(Config.DESCRIPTION))
    async def home():
        # The service root simply forwards to the interactive docs.
        return RedirectResponse(url="/docs")

    for router in [
        getattr(routers, name) for name in routers.__all__ if getattr(routers, name)
    ]:
        api_app.include_router(router)

    openapi_schema = get_openapi(
        title=Config.TITLE,
        description=Config.DESCRIPTION,
        version="0.0.1",
        routes=api_app.routes,
    )

    if "components" in openapi_schema:
        openapi_schema["components"]["securitySchemes"] = {
            "Bearer Auth": {
                "type": "apiKey",
                "in": "header",
                "name": "evyos-session-key",
                "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
            }
        }

    # Bug fix / hoist: the original rebuilt Config.INSECURE_PATHS.copy()
    # and called list.remove() once per route *and* per method, and
    # list.remove raises ValueError if "/authentication/select" is ever
    # missing from the config. Build the filtered list once, safely.
    insecure_paths = [p for p in Config.INSECURE_PATHS if p != "/authentication/select"]

    for route in api_app.routes:
        path = str(getattr(route, "path"))
        if route.include_in_schema:
            methods = [method.lower() for method in getattr(route, "methods")]
            for method in methods:
                if path not in insecure_paths:
                    openapi_schema["paths"][path][method]["security"] = [
                        {"Bearer Auth": []}
                    ]
                    openapi_schema["paths"][path][method]["responses"]["403"] = {
                        "content": {
                            "application/json": {
                                "schema": {
                                    "$ref": "#/components/schemas/HTTPValidationError"
                                }
                            }
                        },
                        "description": "Returned if user is unauthorized.",
                    }

    api_app.openapi_schema = openapi_schema
    return api_app
diff --git a/ApiServices/ValidationService/middlewares/__init__.py b/ApiServices/ValidationService/middlewares/__init__.py
new file mode 100644
index 0000000..c3b4cfe
--- /dev/null
+++ b/ApiServices/ValidationService/middlewares/__init__.py
@@ -0,0 +1 @@
+from .token_middleware import AuthHeaderMiddleware
diff --git a/ApiServices/ValidationService/middlewares/token_middleware.py b/ApiServices/ValidationService/middlewares/token_middleware.py
new file mode 100644
index 0000000..6e2b20d
--- /dev/null
+++ b/ApiServices/ValidationService/middlewares/token_middleware.py
@@ -0,0 +1,116 @@
+import json
+
+from time import perf_counter
+from api_configs import Config
+from fastapi import status
+from fastapi.exceptions import HTTPException
+from starlette.middleware.base import BaseHTTPMiddleware
+
+
class MiddlewareLogs:
    """Minimal logging facade used by the auth middleware.

    Currently just prints; replace with a real logger when one exists.
    """

    @staticmethod
    def log_error(log_message):
        """Emit *log_message* to stdout.

        Bug fix: the original declared ``self`` as the first parameter of a
        @staticmethod, so every call site — which passes a single argument —
        raised ``TypeError: log_error() missing 1 required positional
        argument``.
        """
        print(log_message)
+
+
def log_middlewares_exception(endpoint, token_user, message, request):
    """Emit a structured authentication log entry for *request*.

    The entry carries the endpoint path, the (possibly None) token user and
    the raw request headers, serialized under the ``log_data`` key.
    """
    log_data = json.dumps(
        {
            "endpoint": endpoint,
            "user": str(token_user),
            "request": str(request.headers),
        }
    )
    entry = {
        "log_type": "Authentication",
        "log_message": message,
        "log_action": "User",
        "log_data": log_data,
    }
    MiddlewareLogs.log_error(str(entry))
+
+
class AuthHeaderMiddleware(BaseHTTPMiddleware):
    """Starlette middleware that rejects requests to secured paths when the
    session token cannot be resolved, and stamps timing headers on every
    response (secured or not)."""

    async def dispatch(self, request, call_next):
        # start_time feeds the timing headers attached after the handler runs.
        start_time, token_user, endpoint = perf_counter(), None, None

        # Only paths outside Config.INSECURE_PATHS require a valid session.
        if check_if_path_secure(request=request, insecure_paths=Config.INSECURE_PATHS):
            endpoint = str(getattr(getattr(request, "url", None), "path", None))
            # check_if_token_is_not_valid always returns a truthy
            # (status, user) tuple, so this branch always executes here.
            if un_auth := check_if_token_is_not_valid(
                request=request, endpoint_name=endpoint
            ):
                auth, token_user = un_auth
                # Anything other than the literal "valid" is an error message
                # that becomes the 401 detail (Turkish, user-facing).
                if not auth == "valid":
                    # log_middlewares_exception(endpoint, token_user, "auth", request)
                    raise HTTPException(
                        status_code=status.HTTP_401_UNAUTHORIZED, detail=auth
                    )

        response = await call_next(request)
        # Attach request-starts / request-ends / elapsed-Time headers.
        prepare_response_needs(response, start_time)
        # if endpoint and token_user:
        # log_middlewares_exception(endpoint, token_user, "Request is completed", request)
        return response
+
+
def prepare_response_needs(response, start_time):
    """Attach timing headers to *response*.

    Headers set:
        request-starts / request-ends: raw ``perf_counter()`` readings
            (seconds, monotonic — meaningful only relative to each other).
        elapsed-Time: handler duration in milliseconds.
    """
    end_time = perf_counter()
    response.headers["request-starts"] = str(start_time)
    response.headers["request-ends"] = str(end_time)
    # Bug fix: perf_counter() returns seconds, but the original reported the
    # raw difference with an " ms" suffix. Convert to milliseconds so the
    # header value matches its unit.
    response.headers["elapsed-Time"] = str((end_time - start_time) * 1000.0) + " ms"
+
+
def check_if_path_secure(request, insecure_paths) -> bool:
    """Return True when the request path is NOT in *insecure_paths*,
    i.e. when the path requires authentication.

    A request without a ``url`` attribute yields the path ``"None"``.
    """
    url = getattr(request, "url", None)
    path = str(getattr(url, "path", None))
    return path not in insecure_paths
+
+
def check_if_token_is_not_valid(request, endpoint_name):
    """Resolve the session user for *request* from Redis.

    Returns a ``(status, token_user)`` tuple: ``("valid", user)`` when the
    access key resolves, otherwise a Turkish error message and the falsy
    user. NOTE(review): despite the negative name, "valid" is the success
    value — callers compare against that literal.

    ``endpoint_name`` is currently unused by the live code path; it feeds
    the commented-out duty/authorization logic below.
    """
    from api_services.redis.functions import get_object_via_access_key

    token_user = get_object_via_access_key(request)
    if not token_user:
        return "Session geçerli değil. Lütfen tekrar giriş yapınız.", token_user

    return "valid", token_user

    # Everything below is unreachable legacy authorization logic kept for
    # reference (duty/endpoint permission checks).
    # on_token_user: Users = Users.find_one(uu_id=token_user["uu_id"])
    # on_token_people: People = on_token_user.person
    # #
    # # if on_token_people.priority == 78:
    # #     return "valid", token_user
    #
    # if not token_user.get("duty_id", None):
    #     return (
    #         "Kullanıcı hiçbir yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
    #         token_user,
    #     )

    #
    # if endpoint_name in release_endpoint:
    #     return "valid", token_user
    #
    # if company_duty_app := CompanyDutyApp.find_one(
    #     endpoint_name=str("".join(endpoint_name.split("/")[:-1])),
    #     company_duty_id=int(token_user.get("duty_id")),
    # ):
    #     if not company_duty_app.is_access_valid(
    #         endpoint_ext=endpoint_name.split("/")[-1]
    #     ):
    #         return (
    #             "Kullanıcı yetkili değildir. Supervisor ile iletişime geçiniz.",
    #             token_user,
    #         )
    # else:
    #     return (
    #         "Kullanıcıya yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
    #         token_user,
    #     )

    # return "valid", token_user
diff --git a/ApiServices/ValidationService/pyproject.toml b/ApiServices/ValidationService/pyproject.toml
new file mode 100644
index 0000000..f105854
--- /dev/null
+++ b/ApiServices/ValidationService/pyproject.toml
@@ -0,0 +1,25 @@
+[project]
+name = "wag-managment-api-service-version-3"
+version = "0.1.0"
+description = "Wag Python API Service"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "alembic>=1.14.0",
+ "arrow>=1.3.0",
+ "cryptography>=43.0.3",
+ "faker>=30.8.2",
+ "fastapi>=0.115.4",
+ "pandas>=2.2.3",
+ "prometheus-fastapi-instrumentator>=7.0.0",
+ "psycopg2-binary>=2.9.10",
+ "pymongo>=4.10.1",
+ "redis>=5.2.0",
+ "redmail>=0.6.0",
+ "requests>=2.32.3",
+ "rsa>=4.9",
+ "sqlalchemy-mixins>=2.0.5",
+ "textdistance>=4.6.3",
+ "unidecode>=1.3.8",
+ "uvicorn>=0.32.0",
+]
diff --git a/ApiServices/ValidationService/routers/__init__.py b/ApiServices/ValidationService/routers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ApiServices/debian-docker-compose.yml b/ApiServices/debian-docker-compose.yml
new file mode 100644
index 0000000..7caf323
--- /dev/null
+++ b/ApiServices/debian-docker-compose.yml
@@ -0,0 +1,29 @@
+services:
+
+ wag_management_auth_service:
+ container_name: wag_management_auth_service
+ restart: on-failure
+ build:
+ context: .
+ dockerfile: AuthService/Dockerfile
+ ports:
+ - "11:41575"
+
+ wag_management_event_service:
+ container_name: wag_management_event_service
+ restart: on-failure
+ build:
+ context: .
+ dockerfile: EventService/Dockerfile
+ ports:
+ - "12:41575"
+
+ wag_management_validation_service:
+ container_name: wag_management_validation_service
+ restart: on-failure
+ build:
+ context: .
+ dockerfile: ValidationService/Dockerfile
+ ports:
+ - "13:41575"
+
diff --git a/ApiServices/local-docker-compose.yml b/ApiServices/local-docker-compose.yml
new file mode 100644
index 0000000..7caf323
--- /dev/null
+++ b/ApiServices/local-docker-compose.yml
@@ -0,0 +1,29 @@
+services:
+
+ wag_management_auth_service:
+ container_name: wag_management_auth_service
+ restart: on-failure
+ build:
+ context: .
+ dockerfile: AuthService/Dockerfile
+ ports:
+ - "11:41575"
+
+ wag_management_event_service:
+ container_name: wag_management_event_service
+ restart: on-failure
+ build:
+ context: .
+ dockerfile: EventService/Dockerfile
+ ports:
+ - "12:41575"
+
+ wag_management_validation_service:
+ container_name: wag_management_validation_service
+ restart: on-failure
+ build:
+ context: .
+ dockerfile: ValidationService/Dockerfile
+ ports:
+ - "13:41575"
+
diff --git a/README.md b/README.md
index da1fad9..8872145 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,6 @@
+
+Add: uv pip install -r pyproject.toml
+
- Run Store Services
On debian
@@ -15,3 +18,11 @@ http://localhost:11222
postgres_main_commercial:
http://localhost:5444
+Make sure language and timezone are set at login.
+Add BaseMixin / CrudMixin support.
+http_exception = fastapi.HTTPException
+status = fastapi.status
+
+
+
diff --git a/StoreServices/.dockerignore b/StoreServices/.dockerignore
new file mode 100644
index 0000000..1449c90
--- /dev/null
+++ b/StoreServices/.dockerignore
@@ -0,0 +1,93 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+
+# CI
+.codeclimate.yml
+.travis.yml
+.taskcluster.yml
+
+# Docker
+docker-compose.yml
+service_app/Dockerfile
+.docker
+.dockerignore
+
+# Byte-compiled / optimized / DLL files
+**/__pycache__/
+**/*.py[cod]
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+service_app/env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Virtual environment
+service_app/.env
+.venv/
+venv/
+
+# PyCharm
+.idea
+
+# Python mode for VIM
+.ropeproject
+**/.ropeproject
+
+# Vim swap files
+**/*.swp
+
+# VS Code
+.vscode/
+
+test_application/
+
+
diff --git a/api_configs/databaseConfigs.py b/api_configs/databaseConfigs.py
new file mode 100644
index 0000000..c930eb4
--- /dev/null
+++ b/api_configs/databaseConfigs.py
@@ -0,0 +1,27 @@
# Shared host/IP of the backing store services (Postgres, Redis, Mongo).
# NOTE(review): hard-coded private address — presumably meant to come from
# environment configuration; confirm before deploying elsewhere.
storeHost = "10.10.2.36"
+
+
class WagDatabase:
    """Connection settings for the main PostgreSQL database.

    NOTE(review): credentials are hard-coded in source; consider loading
    them from the environment or a secrets store.
    """

    SQL: str = "postgresql+psycopg2"  # SQLAlchemy dialect+driver prefix
    USERNAME: str = "berkay_wag_user"
    PASSWORD: str = "berkay_wag_user_password"
    HOST: str = storeHost
    PORT: str = "5444"
    DATABASE_NAME: str = "wag_database"
    # Fully-formed SQLAlchemy URL assembled from the parts above.
    DATABASE_URL: str = f"{SQL}://{USERNAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}"
+
+
class WagRedis:
    """Connection settings for the Redis session/cache store.

    NOTE(review): password is hard-coded in source; consider loading it
    from the environment or a secrets store.
    """

    REDIS_HOST: str = storeHost
    REDIS_PASSWORD: str = "commercial_redis_password"
    REDIS_PORT: int = 11222
    REDIS_DB: int = 0  # default logical database index
+
+
class MongoConfig:
    """Connection settings for MongoDB.

    NOTE(review): credentials are hard-coded in source; consider loading
    them from the environment or a secrets store.
    """

    password: str = "mongo_password"
    username: str = "mongo_user"
    database_name: str = "mongo_database"
    host: str = storeHost
    port: int = 11777
    # Fully-formed MongoDB URI assembled from the parts above.
    url: str = f"mongodb://{username}:{password}@{host}:{port}/{database_name}?retryWrites=true&w=majority"
diff --git a/api_library/__init__.py b/api_library/__init__.py
new file mode 100644
index 0000000..6e23dee
--- /dev/null
+++ b/api_library/__init__.py
@@ -0,0 +1,8 @@
+from api_library.date_time_actions.date_functions import (
+ client_arrow,
+ system_arrow,
+ DateTimeLocal,
+)
+
+
+__all__ = ["client_arrow", "system_arrow", "DateTimeLocal"]
diff --git a/api_library/date_time_actions/date_functions.py b/api_library/date_time_actions/date_functions.py
new file mode 100644
index 0000000..07ebccc
--- /dev/null
+++ b/api_library/date_time_actions/date_functions.py
@@ -0,0 +1,49 @@
+import arrow
+import calendar
+
+
class DateTimeLocal:
    """Timezone-aware datetime helpers built on :mod:`arrow`.

    Instances render either in the system zone (``GMT+0``) or in a
    client-supplied zone (default ``GMT+3``), chosen at construction.
    """

    # Zone used when the instance represents the system clock.
    __SYSTEM__: str = "GMT+0"

    def __init__(self, timezone: str = "GMT+3", is_client: bool = True):
        self.timezone = self.__SYSTEM__
        if is_client:
            # NOTE(review): maps e.g. "GMT-3" to "GMT+3" — possibly to match
            # arrow/POSIX sign conventions for GMT offsets; confirm intent.
            self.timezone = timezone.replace("-", "+")

    def find_last_day_of_month(self, date_value):
        """Return 23:59:59 on the last day of *date_value*'s month."""
        today = self.get(date_value).date()
        _, last_day = calendar.monthrange(today.year, today.month)
        return self.get(today.year, today.month, last_day, 23, 59, 59).to(self.timezone)

    def find_first_day_of_month(self, date_value):
        """Return midnight on the first day of *date_value*'s month."""
        today = self.get(date_value).date()
        return self.get(today.year, today.month, 1).to(self.timezone)

    def get(self, *args):
        """Parse *args* via ``arrow.get`` and convert to this zone."""
        return arrow.get(*args).to(str(self.timezone))

    def now(self):
        """Current time in this instance's zone."""
        return arrow.now().to(str(self.timezone))

    def shift(self, date, **kwargs):
        """Shift *date* by arrow shift kwargs (e.g. ``days=1``)."""
        return self.get(date).shift(**kwargs)

    def date(self, date):
        """Date part of *date* in this zone."""
        return self.get(date).date()

    def time(self, date):
        """Time part of *date* in this zone."""
        return self.get(date).time()

    def string_date(self, date, splitter: str = "-"):
        """ISO date string with "-" replaced by *splitter*."""
        return str(self.get(date).date()).replace("-", splitter)

    def string_time_only(self, date):
        """Time of *date* formatted as ``HH:mm:ss``."""
        return self.get(date).format("HH:mm:ss")

    def string_date_only(self, date):
        """Date of *date* formatted as ``YYYY-MM-DD``."""
        return self.get(date).format("YYYY-MM-DD")
+
+
# Ready-made converters: client_arrow renders in the client zone (default
# GMT+3), system_arrow in the system zone (GMT+0).
client_arrow = DateTimeLocal(is_client=True)
system_arrow = DateTimeLocal(is_client=False)
diff --git a/api_objects/__init__.py b/api_objects/__init__.py
new file mode 100644
index 0000000..b196ce1
--- /dev/null
+++ b/api_objects/__init__.py
@@ -0,0 +1,17 @@
+from api_objects.auth.token_objects import (
+ OccupantTokenObject,
+ EmployeeTokenObject,
+ UserType,
+ CompanyToken,
+ OccupantToken,
+ ApplicationToken,
+)
+
+__all__ = [
+ "OccupantTokenObject",
+ "EmployeeTokenObject",
+ "UserType",
+ "CompanyToken",
+ "OccupantToken",
+ "ApplicationToken",
+]
diff --git a/api_objects/auth/token_objects.py b/api_objects/auth/token_objects.py
new file mode 100644
index 0000000..eca0b7e
--- /dev/null
+++ b/api_objects/auth/token_objects.py
@@ -0,0 +1,106 @@
+import enum
+from typing import Optional, List, Any
+from pydantic import BaseModel
+
+
+# Company / Priority / Department / Duty / Employee / Occupant / Module / Endpoint are changeable dynamics
+
+
class UserType(enum.Enum):
    """The two account categories a token can represent."""

    employee = 1
    occupant = 2
+
+
class Credentials(BaseModel):
    """Minimal person identity carried inside a token payload."""

    person_id: int  # internal numeric person ID
    person_name: str  # display name
+
class ApplicationToken(BaseModel):
    """Base token payload — the main object describing an authenticated user.

    Subclassed by OccupantTokenObject / EmployeeTokenObject, which add the
    role-specific selection state.
    """

    domain: Optional[str] = "app.evyos.com.tr"
    lang: Optional[str] = "TR"
    timezone: Optional[str] = "Europe/Istanbul"

    # Defaults to occupant; see UserType.
    user_type: int = UserType.occupant.value
    # Bug fix: the field defaults to None, so the annotation must be
    # Optional — the original bare ``dict`` annotation with a None default
    # is type-incorrect and rejected under strict/default validation.
    credentials: Optional[dict] = None

    user_uu_id: str  # outward-facing user UUID
    user_id: int  # internal user ID

    person_id: int  # internal person ID
    person_uu_id: str  # outward-facing person UUID

    request: Optional[dict] = None  # request info of the client
+
+
class OccupantToken(BaseModel):
    """Occupant-type selection for a specific build part, chosen by the user."""

    living_space_id: int  # internal use
    living_space_uu_id: str  # outer use

    occupant_type_id: int
    occupant_type_uu_id: str
    occupant_type: str  # human-readable occupant type name

    build_id: int
    build_uuid: str
    build_part_id: int
    build_part_uuid: str

    # Optional management chain for the selected build part.
    responsible_company_id: Optional[int] = None
    responsible_company_uuid: Optional[str] = None
    responsible_employee_id: Optional[int] = None
    responsible_employee_uuid: Optional[str] = None

    reachable_event_list_id: Optional[list] = None  # ID list of reachable modules
    # reachable_event_list_uu_id: Optional[list] = None  # UUID list of reachable modules
+
+
class CompanyToken(BaseModel):
    """Company context an employee has selected; required for employee tokens."""

    company_id: int
    company_uu_id: str

    # NOTE(review): the original comments said "ID list of departments",
    # but the fields are scalars — a single selected department.
    department_id: int
    department_uu_id: str

    duty_id: int
    duty_uu_id: str

    staff_id: int
    staff_uu_id: str

    employee_id: int
    employee_uu_id: str

    bulk_duties_id: int

    reachable_event_list_id: Optional[list] = None  # ID list of reachable modules
    # reachable_event_list_uu_id: Optional[list] = None  # UUID list of reachable modules
+
+
class OccupantTokenObject(ApplicationToken):
    """Occupant token — requires selecting an occupant type for a build part."""

    # Bug fix: the field defaults to None, so the annotation must be
    # Optional — the original bare ``dict`` annotation with a None default
    # is type-incorrect and rejected under strict/default validation.
    available_occupants: Optional[dict] = None

    selected_occupant: Optional[OccupantToken] = None  # selected occupant type
    available_event: Optional[Any] = None
+
+
class EmployeeTokenObject(ApplicationToken):
    """Employee token.

    Full hierarchy: Employee[staff_id] -> Staff -> Duty -> Department -> Company.
    """

    companies_id_list: List[int]  # IDs of companies the employee belongs to
    companies_uu_id_list: List[str]  # matching company UUIDs

    duty_id_list: List[int]  # IDs of the employee's duties
    duty_uu_id_list: List[str]  # matching duty UUIDs

    selected_company: Optional[CompanyToken] = None  # currently selected company
    available_event: Optional[Any] = None
diff --git a/api_objects/errors/alchemy_errors.py b/api_objects/errors/alchemy_errors.py
new file mode 100644
index 0000000..c5ac9eb
--- /dev/null
+++ b/api_objects/errors/alchemy_errors.py
@@ -0,0 +1,85 @@
+from .errors_dictionary import ErrorMessages
+
+
class AlchemyError:
    """Builds structured error payloads for CRUD responses.

    ``ERRORS_DICT`` maps numeric HTTP status strings to the symbolic
    fastapi.status constant names; ``ERRORS_KEYS`` maps internal CRUD
    outcomes to their public error-case labels.
    """

    # HTTP status code (as string) -> fastapi.status constant name.
    ERRORS_DICT = {
        "100": "HTTP_100_CONTINUE",
        "101": "HTTP_101_SWITCHING_PROTOCOLS",
        "102": "HTTP_102_PROCESSING",
        "103": "HTTP_103_EARLY_HINTS",
        "200": "HTTP_200_OK",
        "201": "HTTP_201_CREATED",
        "202": "HTTP_202_ACCEPTED",
        "203": "HTTP_203_NON_AUTHORITATIVE_INFORMATION",
        "204": "HTTP_204_NO_CONTENT",
        "205": "HTTP_205_RESET_CONTENT",
        "206": "HTTP_206_PARTIAL_CONTENT",
        "207": "HTTP_207_MULTI_STATUS",
        "208": "HTTP_208_ALREADY_REPORTED",
        "226": "HTTP_226_IM_USED",
        "300": "HTTP_300_MULTIPLE_CHOICES",
        "301": "HTTP_301_MOVED_PERMANENTLY",
        "302": "HTTP_302_FOUND",
        "303": "HTTP_303_SEE_OTHER",
        "304": "HTTP_304_NOT_MODIFIED",
        "305": "HTTP_305_USE_PROXY",
        "306": "HTTP_306_RESERVED",
        "307": "HTTP_307_TEMPORARY_REDIRECT",
        "308": "HTTP_308_PERMANENT_REDIRECT",
        "400": "HTTP_400_BAD_REQUEST",
        "401": "HTTP_401_UNAUTHORIZED",
        "402": "HTTP_402_PAYMENT_REQUIRED",
        "403": "HTTP_403_FORBIDDEN",
        "404": "HTTP_404_NOT_FOUND",
        "405": "HTTP_405_METHOD_NOT_ALLOWED",
        "406": "HTTP_406_NOT_ACCEPTABLE",
        "407": "HTTP_407_PROXY_AUTHENTICATION_REQUIRED",
        "408": "HTTP_408_REQUEST_TIMEOUT",
        "409": "HTTP_409_CONFLICT",
        "410": "HTTP_410_GONE",
        "411": "HTTP_411_LENGTH_REQUIRED",
        "412": "HTTP_412_PRECONDITION_FAILED",
        "413": "HTTP_413_REQUEST_ENTITY_TOO_LARGE",
        "414": "HTTP_414_REQUEST_URI_TOO_LONG",
        "415": "HTTP_415_UNSUPPORTED_MEDIA_TYPE",
        "416": "HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE",
        "417": "HTTP_417_EXPECTATION_FAILED",
        "418": "HTTP_418_IM_A_TEAPOT",
        "421": "HTTP_421_MISDIRECTED_REQUEST",
        "422": "HTTP_422_UNPROCESSABLE_ENTITY",
        "423": "HTTP_423_LOCKED",
        "424": "HTTP_424_FAILED_DEPENDENCY",
        "426": "HTTP_426_UPGRADE_REQUIRED",
        "428": "HTTP_428_PRECONDITION_REQUIRED",
        "429": "HTTP_429_TOO_MANY_REQUESTS",
        "431": "HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE",
        "451": "HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS",
        "500": "HTTP_500_INTERNAL_SERVER_ERROR",
    }
    # CRUD outcome key -> public error-case label.
    ERRORS_KEYS = {
        "delete": "DeletedRecord",
        "update": "UpdatedRecord",
        "create": "CreatedRecord",
        "list": "ListedRecords",
        "not_found": "RecordNotFound",
        "already_exist": "AlreadyExists",
        "not_deleted": "RecordNotDeleted",
        "not_updated": "RecordNotUpdated",
        "not_created": "RecordNotCreated",
        "not_listed": "RecordsNotListed",
        "not_confirm": "IsNotConfirmed",
    }

    def __init__(self, lang):
        # Language code used when resolving localized messages.
        self.lang = lang

    def retrieve_error_needs(self, data, status_code, error_case, message_key):
        """Assemble the standard error payload.

        Raises KeyError if *status_code* / *error_case* are not in the
        tables above, or if *message_key* / ``self.lang`` are unknown to
        ErrorMessages.
        """
        return dict(
            status_code=self.ERRORS_DICT[status_code],
            error_case=self.ERRORS_KEYS[error_case],
            data=data,
            message=ErrorMessages.get_message(message_key, self.lang),
        )
+
+
# Module-level shared instance, constructed with an empty language code.
# NOTE(review): ErrorMessages.get_message will KeyError for lang "" unless a
# "" entry is registered or ``lang`` is rebound — confirm intended usage.
alchemy_error = AlchemyError(lang="")
diff --git a/api_objects/errors/errors_dictionary.py b/api_objects/errors/errors_dictionary.py
new file mode 100644
index 0000000..3ac90b2
--- /dev/null
+++ b/api_objects/errors/errors_dictionary.py
@@ -0,0 +1,44 @@
+from json import loads
+
+
class ErrorMessages:
    """Registry of localized error messages, keyed first by language code
    and then by message key."""

    __messages__ = {}

    @classmethod
    def get_message(cls, message_key, lang):
        """Look up *message_key* in the table for *lang*.

        Raises KeyError when either the language or the key is unknown.
        """
        language_table = cls.__messages__[lang]
        return language_table[message_key]
+
+
+
class ErrorHandlers:
    """FastAPI exception handlers, parameterized over framework objects
    injected at construction time.

    NOTE(review): app.py registers these as unbound class attributes
    (``ErrorHandlers.exception_handler_http``) without building an
    instance, which does not match these instance-method signatures —
    confirm the intended wiring.
    """

    def __init__(self, requests, exceptions, response_model, status):
        self.requests = requests # from fastapi.requests import Request
        self.exceptions = exceptions # from fastapi.exceptions import HTTPException
        self.response_model = response_model # from fastapi.responses import JSONResponse
        self.status = status # from fastapi import status

    def exception_handler_http(self, request, exc):
        """Render an HTTPException whose ``detail`` may itself be a JSON
        document with data / error_case / message keys; fall back to the
        raw detail string when it does not parse."""
        exc_detail = getattr(exc, "detail", None)
        try:
            detail = loads(str(exc_detail))
            return self.response_model(
                status_code=exc.status_code,
                content={
                    "Data": detail.get("data", {}),
                    "Error": detail.get("error_case", "UNKNOWN"),
                    "Message": detail.get(
                        "message", "An error occurred while processing the request"
                    ),
                },
            )
        except Exception as e:
            # detail was not JSON (or rendering failed): return it verbatim.
            return self.response_model(
                status_code=exc.status_code,
                content={"Error": str(exc_detail), "Message": f"{str(e)}", "Data": {}},
            )

    def exception_handler_exception(self, request, exc):
        """Catch-all handler: any uncaught exception becomes a 417 with the
        exception text as the message."""
        return self.response_model(
            status_code=self.status.HTTP_417_EXPECTATION_FAILED,
            content={"message": exc.__str__()},
        )
diff --git a/databases/sql_models/__init__.py b/databases/sql_models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/databases/sql_models/account/account.py b/databases/sql_models/account/account.py
new file mode 100644
index 0000000..4a51fe2
--- /dev/null
+++ b/databases/sql_models/account/account.py
@@ -0,0 +1,763 @@
+from databases.sql_models.core_mixin import CrudCollection
+
+from sqlalchemy.orm import mapped_column, Mapped
+from sqlalchemy import (
+ String,
+ Integer,
+ ForeignKey,
+ Index,
+ SmallInteger,
+ Boolean,
+ TIMESTAMP,
+ Numeric,
+ UUID,
+)
+
+
+class AccountBooks(CrudCollection):
+    """Accounting book kept per company (optionally per branch); table ``account_books``."""
+
+    __tablename__ = "account_books"
+    __exclude__fields__ = []
+
+    country: Mapped[str] = mapped_column(String, nullable=False)
+    # Column type is SmallInteger; annotation corrected from ``str`` to ``int``.
+    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
+    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
+    # NOTE(review): a branch is modelled as another ``companies`` row — confirm
+    # this is intentional rather than a copy/paste of the company FK.
+    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+    branch_uu_id: Mapped[str] = mapped_column(String, comment="Branch UU ID")
+
+    # company: Mapped["Companies"] = relationship(
+    #     "Company", back_populates="company_account_books", foreign_keys=[company_id]
+    # )
+    # branch: Mapped["Companies"] = relationship(
+    #     "Company", back_populates="branch_account_books", foreign_keys=[branch_id]
+    # )
+    # account_master: Mapped[List["AccountMaster"]] = relationship(
+    #     "AccountMaster",
+    #     back_populates="account_header",
+    #     foreign_keys="AccountMaster.account_header_id",
+    # )
+    # account_detail: Mapped[List["AccountDetail"]] = relationship(
+    #     "AccountDetail",
+    #     back_populates="account_header",
+    #     foreign_keys="AccountDetail.account_header_id",
+    # )
+
+    __table_args__ = (
+        # "expiry_starts" is referenced by name — presumably a column supplied
+        # by CrudCollection; TODO confirm it exists on the mixin.
+        Index("account_companies_book_ndx_00", company_id, "expiry_starts"),
+        {"comment": "Account Book Information"},
+    )
+
+
+class AccountCodes(CrudCollection):
+    """Chart-of-accounts entry linked to a company, customer and person; table ``account_codes``."""
+
+    __tablename__ = "account_codes"
+    __exclude__fields__ = []
+
+    account_code: Mapped[str] = mapped_column(
+        String(48), nullable=False, comment="Account Code"
+    )
+    comment_line: Mapped[str] = mapped_column(
+        String(128), nullable=False, comment="Comment Line"
+    )
+
+    is_receive_or_debit: Mapped[bool] = mapped_column(Boolean)
+    product_id: Mapped[int] = mapped_column(Integer, server_default="0")
+    nvi_id: Mapped[str] = mapped_column(String(48), server_default="")
+    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    # Separator used by AccountCodeParser.get_account_code when joining the
+    # account_code_1..6 segments.
+    account_code_seperator: Mapped[str] = mapped_column(String(1), server_default=".")
+
+    system_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    # Column type is SmallInteger; annotation corrected from ``bool`` to ``int``.
+    locked: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+    company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Company UU ID"
+    )
+    # NOTE(review): the "customer" also references companies.id while
+    # person_id references people.id — confirm customers are companies here.
+    customer_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+    customer_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Customer UU ID"
+    )
+    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
+    person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Person UU ID"
+    )
+
+    # company: Mapped["Companies"] = relationship(
+    #     "Company", back_populates="account_codes", foreign_keys=[company_id]
+    # )
+    # customer: Mapped["Companies"] = relationship(
+    #     "Company", back_populates="account_codes", foreign_keys=[customer_id]
+    # )
+    # person: Mapped["People"] = relationship(
+    #     "People", back_populates="account_codes", foreign_keys=[person_id]
+    # )
+    # account_detail: Mapped[List["AccountDetail"]] = relationship(
+    #     "AccountDetail",
+    #     back_populates="account_code",
+    #     foreign_keys="AccountDetail.account_code_id",
+    # )
+    #
+    # account_code_parser: Mapped["AccountCodeParser"] = relationship(
+    #     "AccountCodeParser",
+    #     back_populates="account_codes",
+    #     foreign_keys="AccountCodeParser.account_code_id",
+    # )
+
+
+class AccountCodeParser(CrudCollection):
+    """Decomposition of an account code into up to six segments; table ``account_code_parser``."""
+
+    __tablename__ = "account_code_parser"
+    __exclude__fields__ = []
+
+    account_code_1: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
+    account_code_2: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
+    account_code_3: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
+    account_code_4: Mapped[str] = mapped_column(String, server_default="")
+    account_code_5: Mapped[str] = mapped_column(String, server_default="")
+    account_code_6: Mapped[str] = mapped_column(String, server_default="")
+
+    account_code_id: Mapped[int] = mapped_column(
+        ForeignKey("account_codes.id"), nullable=False
+    )
+    account_code_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Account Code UU ID"
+    )
+
+    # account_codes: Mapped["AccountCodes"] = relationship(
+    #     "AccountCodes",
+    #     back_populates="account_code_parser",
+    #     foreign_keys=[account_code_id],
+    # )
+
+    __table_args__ = (
+        Index("_account_code_parser_ndx_00", account_code_id),
+        {"comment": "Account Code Parser Information"},
+    )
+
+    @property
+    def get_account_code(self):
+        """Join the non-empty account_code_1..6 segments with the parent's separator.
+
+        NOTE(review): relies on ``self.account_codes``, i.e. the relationship
+        commented out above — while it stays commented out this property
+        raises AttributeError. Re-enable the relationship before using it.
+        """
+        return f"{self.account_codes.account_code_seperator}".join(
+            [
+                getattr(self, f"account_code_{i}")
+                for i in range(1, 7)
+                if getattr(self, f"account_code_{i}")
+            ]
+        )
+
+
+class AccountMaster(CrudCollection):
+    """
+    AccountMaster: header (master) row of an accounting receipt, holding
+    document identification and per-currency running totals.
+    """
+
+    __tablename__ = "account_master"
+    __exclude__fields__ = []
+
+    # NOTE(review): ``Mapped[TIMESTAMP]`` annotates with the SQLAlchemy type
+    # object rather than ``datetime`` — harmless because the column type is
+    # explicit, but consider ``Mapped[datetime]`` for accuracy.
+    doc_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
+    )
+    plug_type: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type")
+    plug_number: Mapped[int] = mapped_column(
+        Integer, nullable=False, comment="Plug Number"
+    )
+
+    special_code: Mapped[str] = mapped_column(String(12), server_default="")
+    authorization_code: Mapped[str] = mapped_column(String(12), server_default="")
+
+    doc_code: Mapped[str] = mapped_column(String(12), server_default="")
+    doc_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+
+    comment_line1: Mapped[str] = mapped_column(String, server_default="")
+    comment_line2: Mapped[str] = mapped_column(String, server_default="")
+    comment_line3: Mapped[str] = mapped_column(String, server_default="")
+    comment_line4: Mapped[str] = mapped_column(String, server_default="")
+    comment_line5: Mapped[str] = mapped_column(String, server_default="")
+    comment_line6: Mapped[str] = mapped_column(String, server_default="")
+    project_code: Mapped[str] = mapped_column(String(12), server_default="")
+    module_no: Mapped[str] = mapped_column(String, server_default="")
+    journal_no: Mapped[int] = mapped_column(Integer, server_default="0")
+
+    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    canceled: Mapped[bool] = mapped_column(Boolean, server_default="0")
+    print_count: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    # Debit/credit totals; the _1.._4 pairs are per additional currency or
+    # reporting dimension — TODO confirm the exact meaning of each suffix.
+    total_active: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_passive: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_active_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_passive_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_active_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_passive_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_active_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_passive_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_active_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    total_passive_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
+    data_center_id: Mapped[str] = mapped_column(String, server_default="")
+    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")
+
+    account_header_id: Mapped[int] = mapped_column(
+        ForeignKey("account_books.id"), nullable=False
+    )
+    account_header_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Account Header UU ID"
+    )
+    project_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id")
+    )
+    project_item_uu_id: Mapped[str] = mapped_column(
+        String, comment="Project Item UU ID"
+    )
+    department_id: Mapped[int] = mapped_column(ForeignKey("departments.id"))
+    department_uu_id: Mapped[str] = mapped_column(String, comment="Department UU ID")
+
+    # account_header: Mapped["AccountBooks"] = relationship(
+    #     "AccountBooks",
+    #     back_populates="account_master",
+    #     foreign_keys=[account_header_id],
+    # )
+    # project_item: Mapped["BuildDecisionBookProjects"] = relationship(
+    #     "BuildDecisionBookProjects",
+    #     back_populates="account_master",
+    #     foreign_keys=[project_item_id],
+    # )
+    # account_detail: Mapped[List["AccountDetail"]] = relationship(
+    #     "AccountDetail",
+    #     back_populates="account_master",
+    #     foreign_keys="AccountDetail.account_master_id",
+    # )
+
+    __table_args__ = (
+        Index("_account_master_ndx_00", doc_date, account_header_id),
+        {"comment": "Account Master Information"},
+    )
+
+
+class AccountDetail(CrudCollection):
+    """
+    AccountDetail: one line of an accounting receipt, tied to an
+    AccountMaster header, an AccountBooks book and an AccountCodes code.
+    """
+
+    __tablename__ = "account_detail"
+    __exclude__fields__ = []
+    # Enum wiring consumed by CrudCollection — ("field", enum class, default key).
+    __enum_list__ = [("plug_type", "AccountingReceiptTypes", "M")]
+
+    doc_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
+    )
+    line_no: Mapped[int] = mapped_column(
+        SmallInteger, nullable=False, comment="Line Number"
+    )
+    # Single-character flag; presumably 'R'(eceive)/'D'(ebit) — TODO confirm
+    # against the DebitTypes enum used elsewhere.
+    receive_debit: Mapped[str] = mapped_column(
+        String(1), nullable=False, comment="Receive Debit"
+    )
+    debit: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Debit"
+    )
+
+    department: Mapped[str] = mapped_column(String(24), server_default="")
+    special_code: Mapped[str] = mapped_column(String(12), server_default="")
+    account_ref: Mapped[int] = mapped_column(Integer, server_default="0")
+    account_fiche_ref: Mapped[int] = mapped_column(Integer, server_default="0")
+    center_ref: Mapped[int] = mapped_column(Integer, server_default="0")
+    general_code: Mapped[str] = mapped_column(String(32), server_default="")
+    credit: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    currency_type: Mapped[str] = mapped_column(String(4), server_default="TL")
+    exchange_rate: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    debit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    credit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    discount_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    amount: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    # Column type is String(32); annotation corrected from ``float`` to ``str``.
+    cross_account_code: Mapped[str] = mapped_column(String(32), server_default="")
+    inf_index: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    not_inflated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    not_calculated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    comment_line1: Mapped[str] = mapped_column(String(64), server_default="")
+    comment_line2: Mapped[str] = mapped_column(String(64), server_default="")
+    comment_line3: Mapped[str] = mapped_column(String(64), server_default="")
+    comment_line4: Mapped[str] = mapped_column(String(64), server_default="")
+    comment_line5: Mapped[str] = mapped_column(String(64), server_default="")
+    comment_line6: Mapped[str] = mapped_column(String(64), server_default="")
+    owner_acc_ref: Mapped[int] = mapped_column(Integer, server_default="0")
+    from_where: Mapped[int] = mapped_column(Integer, server_default="0")
+    orj_eid: Mapped[int] = mapped_column(Integer, server_default="0")
+    canceled: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
+    data_center_id: Mapped[str] = mapped_column(String, server_default="")
+    # Column type is Integer; annotation corrected from ``str`` to ``int``.
+    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")
+    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+
+    # NOTE(review): plug_type_id is nullable but plug_type_uu_id is NOT NULL —
+    # confirm this asymmetry is intended.
+    plug_type_id: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    plug_type_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type UU ID")
+    account_header_id: Mapped[int] = mapped_column(
+        ForeignKey("account_books.id"), nullable=False
+    )
+    account_header_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Account Header UU ID"
+    )
+    account_code_id: Mapped[int] = mapped_column(
+        ForeignKey("account_codes.id"), nullable=False
+    )
+    account_code_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Account Code UU ID"
+    )
+    account_master_id: Mapped[int] = mapped_column(
+        ForeignKey("account_master.id"), nullable=False
+    )
+    account_master_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Account Master UU ID"
+    )
+    project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id")
+    )
+    project_uu_id: Mapped[str] = mapped_column(String, comment="Project UU ID")
+
+    # account_header: Mapped["AccountBooks"] = relationship(
+    #     "AccountBooks",
+    #     back_populates="account_detail",
+    #     foreign_keys=[account_header_id],
+    # )
+    # account_code: Mapped["AccountCodes"] = relationship(
+    #     "AccountCodes",
+    #     back_populates="account_detail",
+    #     foreign_keys=[account_code_id],
+    # )
+    # account_master: Mapped["AccountMaster"] = relationship(
+    #     "AccountMaster",
+    #     back_populates="account_detail",
+    #     foreign_keys=[account_master_id],
+    # )
+    # project: Mapped["BuildDecisionBookProjects"] = relationship(
+    #     "BuildDecisionBookProjects",
+    #     back_populates="account_detail",
+    #     foreign_keys=[project_id],
+    # )
+    # decision_book_payment_detail: Mapped["BuildDecisionBookPaymentsDetail"] = (
+    #     relationship(
+    #         "BuildDecisionBookPaymentsDetail",
+    #         back_populates="accounting",
+    #         foreign_keys="BuildDecisionBookPaymentsDetail.accounting_id",
+    #     )
+    # )
+    # decision_book_project_payment_detail: Mapped[
+    #     "BuildDecisionBookProjectPaymentsDetail"
+    # ] = relationship(
+    #     "BuildDecisionBookProjectPaymentsDetail",
+    #     back_populates="accounting",
+    #     foreign_keys="BuildDecisionBookProjectPaymentsDetail.accounting_id",
+    # )
+    # decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship(
+    #     "BuildDecisionBookBudget",
+    #     back_populates="accounting",
+    #     foreign_keys="BuildDecisionBookBudget.accounting_id",
+    # )
+
+    __table_args__ = (
+        Index(
+            "_account_detail_ndx_00",
+            account_master_id,
+            doc_date,
+            line_no,
+            account_header_id,
+            unique=True,
+        ),
+        {"comment": "Account Detail Information"},
+    )
+
+
+class AccountRecords(CrudCollection):
+    """Bank transaction records linked to buildings and financial bookkeeping.
+
+    Field roles (translated from the original Turkish docstring):
+    build_decision_book_id = the decision book this record belongs to;
+    send_company_id = the company that sent the record, send_person_id = the
+    sending person; customer_id = the responsible user, company_id = the
+    responsible company.
+    """
+
+    __tablename__ = "account_records"
+    __exclude__fields__ = []
+    # Enum wiring consumed by CrudCollection — ("field", enum class, default key).
+    __enum_list__ = [
+        ("receive_debit", "DebitTypes", "D"),
+        ("budget_type", "BudgetType", "B"),
+    ]
+
+    iban: Mapped[str] = mapped_column(
+        String(64), nullable=False, comment="IBAN Number of Bank"
+    )
+    bank_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Date"
+    )
+
+    currency_value: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Currency Value"
+    )
+    bank_balance: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Bank Balance"
+    )
+    currency: Mapped[str] = mapped_column(
+        String(5), nullable=False, comment="Unit of Currency"
+    )
+    additional_balance: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Additional Balance"
+    )
+    channel_branch: Mapped[str] = mapped_column(
+        String(120), nullable=False, comment="Branch Bank"
+    )
+    process_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Bank Process Type Name"
+    )
+    process_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Bank Process Type"
+    )
+    process_comment: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Transaction Record Comment"
+    )
+    process_garbage: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Transaction Record Garbage"
+    )
+    bank_reference_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Bank Reference Code"
+    )
+
+    add_comment_note: Mapped[str] = mapped_column(String, server_default="")
+    is_receipt_mail_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
+    # Annotation added; column type is String.
+    found_from: Mapped[str] = mapped_column(String, server_default="")
+    similarity: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+    remainder_balance: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
+
+    # Denormalized parts of bank_date (year/month/week/day) for fast grouping.
+    bank_date_y: Mapped[int] = mapped_column(Integer)
+    bank_date_m: Mapped[int] = mapped_column(SmallInteger)
+    bank_date_w: Mapped[int] = mapped_column(SmallInteger)
+    bank_date_d: Mapped[int] = mapped_column(SmallInteger)
+
+    approving_accounting_record: Mapped[bool] = mapped_column(
+        Boolean, server_default="0"
+    )
+    # Sentinel default "1900-01-01" marks "not yet receipted".
+    accounting_receipt_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
+    )
+    accounting_receipt_number: Mapped[int] = mapped_column(Integer, server_default="0")
+    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+
+    approved_record: Mapped[bool] = mapped_column(Boolean, server_default="0")
+    import_file_name: Mapped[str] = mapped_column(
+        String, nullable=True, comment="XLS Key"
+    )
+
+    # NOTE(review): despite the names, receive_debit / budget_type are FK ids
+    # into api_enum_dropdown (see __enum_list__), not enum values themselves.
+    receive_debit: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    receive_debit_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Debit UU ID"
+    )
+    budget_type: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    budget_type_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Budget Type UU ID"
+    )
+
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
+    company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Company UU ID"
+    )
+    send_company_id: Mapped[int] = mapped_column(
+        ForeignKey("companies.id"), nullable=True
+    )
+    # Annotation added; column type is String.
+    send_company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Send Company UU ID"
+    )
+
+    send_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
+    send_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Send Person UU ID"
+    )
+    approving_accounting_person: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=True
+    )
+    approving_accounting_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Approving Accounting Person UU ID"
+    )
+
+    living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=True
+    )
+    living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Living Space UU ID"
+    )
+    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
+    # Annotation added; column type is String.
+    customer_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Customer UU ID")
+
+    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True)
+    build_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Build UU ID"
+    )
+    build_parts_id: Mapped[int] = mapped_column(
+        ForeignKey("build_parts.id"), nullable=True
+    )
+    build_parts_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Build Parts UU ID"
+    )
+    build_decision_book_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book.id"), nullable=True
+    )
+    build_decision_book_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Build Decision Book UU ID"
+    )
+
+    # companies: Mapped[List["Company"]] = relationship(
+    #     "Company", back_populates="budget_records", foreign_keys=[company_id]
+    # )
+    # send_companies: Mapped[List["Company"]] = relationship(
+    #     "Company", back_populates="send_budget_records", foreign_keys=[send_company_id]
+    # )
+    #
+    # parts: Mapped[List["BuildParts"]] = relationship(
+    #     "BuildParts", back_populates="budget_records", foreign_keys=[build_parts_id]
+    # )
+    # people: Mapped["People"] = relationship(
+    #     "People", back_populates="budget_records", foreign_keys=[customer_id]
+    # )
+    # send_person: Mapped["People"] = relationship(
+    #     "People", back_populates="budget_records", foreign_keys=[send_person_id]
+    # )
+    # decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
+    #     "BuildDecisionBook",
+    #     back_populates="budget_records",
+    #     foreign_keys=[build_decision_book_id],
+    # )
+    #
+    # decision_book_payment_detail: Mapped["BuildDecisionBookPaymentsDetail"] = (
+    #     relationship(
+    #         "BuildDecisionBookPaymentsDetail",
+    #         back_populates="budget_records",
+    #         foreign_keys="BuildDecisionBookPaymentsDetail.budget_records_id",
+    #     )
+    # )
+    #
+    # decision_book_project_payments_detail: Mapped[
+    #     List["BuildDecisionBookProjectPaymentsDetail"]
+    # ] = relationship(
+    #     "BuildDecisionBookProjectPaymentsDetail",
+    #     back_populates="budget_records",
+    #     foreign_keys="BuildDecisionBookProjectPaymentsDetail.budget_records_id",
+    # )
+
+    __table_args__ = (
+        Index("_budget_records_ndx_00", is_receipt_mail_send, bank_date),
+        # Natural key of an imported bank row: same IBAN + date + reference +
+        # balance is considered a duplicate.
+        Index(
+            "_budget_records_ndx_01",
+            iban,
+            bank_date,
+            bank_reference_code,
+            bank_balance,
+            unique=True,
+        ),
+        Index("_budget_records_ndx_02", status_id, bank_date),
+        {
+            "comment": "Bank Records that are related to building and financial transactions"
+        },
+    )
+
+    # Legacy payment-closing routine kept for reference while the new flow is
+    # being built — do not delete without checking the payments services.
+    # def payment_budget_record_close(self):
+    #     from database_sql_models import (
+    #         DecisionBookProjectPaymentsMaster,
+    #         ApiEnumDropdown,
+    #         BuildDecisionBook,
+    #         BuildDecisionBookPaymentsMaster,
+    #     )
+    #
+    #     budget_record = self
+    #     if self.receive_debit == ApiEnumDropdown.uuid_of_enum(
+    #         enum_class="DebitTypes", key="R"
+    #     ):
+    #         print(
+    #             "This record is not debit. Debit:",
+    #             self.receive_debit,
+    #             "DebitTypes.R.name",
+    #             # str(DebitTypes.R.name),
+    #         )
+    #         return
+    #     if abs(budget_record.currency_value + budget_record.remainder_balance) > 0:
+    #         payment_dict = {
+    #             "budget_records_id": self.id,
+    #             "build_decision_book_id": budget_record.build_decision_book_id,
+    #             "build_parts_id": budget_record.build_parts_id,
+    #             "start_date": budget_record.bank_date,
+    #             "paid_value": budget_record.currency_value
+    #             - budget_record.remainder_balance,
+    #             "is_all": False,
+    #         }
+    #         (paid_value, start_paid_value, balance) = (
+    #             float(budget_record.currency_value - budget_record.remainder_balance),
+    #             float(budget_record.currency_value - budget_record.remainder_balance),
+    #             float(budget_record.remainder_balance),
+    #         )
+    #         print(
+    #             "self.id",
+    #             self.id,
+    #             "paid_value",
+    #             paid_value,
+    #             "start_paid_value",
+    #             start_paid_value,
+    #             "balance",
+    #             balance,
+    #             self.receive_debit,
+    #         )
+    #
+    #         if not BuildDecisionBook.find_one(
+    #             id=payment_dict["build_decision_book_id"]
+    #         ):
+    #             return paid_value
+    #
+    #         if budget_record.replication_id == 55:
+    #             if paid_value > 0:
+    #                 payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+    #                     enum_class="BuildDuesTypes", key="L"
+    #                 )
+    #                 paid_value = (
+    #                     DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+    #                         **payment_dict
+    #                     )
+    #                 )
+    #                 print("dues_type", payment_dict["dues_type"], paid_value)
+    #             if paid_value > 0:
+    #                 payment_dict.pop("dues_type", None)
+    #                 paid_value = BuildDecisionBookPaymentsMaster.pay_dues_of_build_part(
+    #                     **payment_dict
+    #                 )
+    #                 print("dues_type", None, paid_value)
+    #             if paid_value > 0:
+    #                 payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+    #                     enum_class="BuildDuesTypes", key="R"
+    #                 )
+    #                 paid_value = (
+    #                     DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+    #                         **payment_dict
+    #                     )
+    #                 )
+    #                 print("dues_type", payment_dict["dues_type"], paid_value)
+    #             payment_dict["is_all"] = True
+    #             if paid_value > 0:
+    #                 payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+    #                     enum_class="BuildDuesTypes", key="L"
+    #                 )
+    #                 paid_value = (
+    #                     DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+    #                         **payment_dict
+    #                     )
+    #                 )
+    #                 print("is all dues_type", payment_dict["dues_type"], paid_value)
+    #             if paid_value > 0:
+    #                 payment_dict.pop("dues_type", None)
+    #                 paid_value = BuildDecisionBookPaymentsMaster.pay_dues_of_build_part(
+    #                     **payment_dict
+    #                 )
+    #                 print("is all dues_type", None, paid_value)
+    #             if paid_value > 0:
+    #                 payment_dict["dues_type"] = ApiEnumDropdown.uuid_of_enum(
+    #                     enum_class="BuildDuesTypes", key="R"
+    #                 )
+    #                 paid_value = (
+    #                     DecisionBookProjectPaymentsMaster.pay_law_and_ren_of_build_part(
+    #                         **payment_dict
+    #                     )
+    #                 )
+    #                 print("is all dues_type", payment_dict["dues_type"], paid_value)
+
+
+# class AccountRecordDecisionPaymentClosed(CrudCollection):
+#
+# __tablename__ = "account_record_decision_payment_closed"
+# __exclude__fields__ = []
+#
+# arc_currency: Mapped[str] = mapped_column(
+# String(5), nullable=False, comment="Unit of Currency"
+# )
+# arc_processing_time: Mapped[TIMESTAMP] = mapped_column(
+# TIMESTAMP(timezone=True), nullable=False, comment="Processing Time"
+# )
+# arc_currency_value: Mapped[float] = mapped_column(
+# Numeric(20, 6), nullable=False, comment="Currency Value"
+# )
+#
+# decision_book_budgets_id: Mapped[int] = mapped_column(
+# ForeignKey("decision_book_budgets.id"), nullable=True
+# )
+# decision_book_budgets_uu_id: Mapped[str] = mapped_column(
+# String, nullable=True, comment="Budget UUID"
+# )
+#
+# build_decision_book_payment_id: Mapped[int] = mapped_column(
+# ForeignKey("build_decision_book_payments.id")
+# )
+# build_decision_book_payment_uu_id: Mapped[str] = mapped_column(
+# String, nullable=True, comment="Build Decision Book Payment UU ID"
+# )
+# account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
+# account_records_uu_id: Mapped[str] = mapped_column(
+# String, nullable=True, comment="Account Record UU ID"
+# )
+#
+# __table_args__ = (
+# Index(
+# "_account_record_decision_payment_closed_ndx_00",
+# account_records_id,
+# build_decision_book_payment_id,
+# arc_processing_time,
+# ),
+# Index(
+# "_account_record_decision_payment_closed_ndx_01",
+# build_decision_book_payment_id,
+# account_records_id,
+# arc_processing_time,
+# ),
+# {"comment": "Account Record Decision Payment Closed Information"},
+# )
+#
+
+
+class AccountRecordExchanges(CrudCollection):
+    """Per-record exchange-rate snapshot for an AccountRecords row."""
+
+    __tablename__ = "account_record_exchanges"
+    __exclude__fields__ = []
+
+    are_currency: Mapped[str] = mapped_column(
+        String(5), nullable=False, comment="Unit of Currency"
+    )
+    are_exchange_rate: Mapped[float] = mapped_column(
+        Numeric(18, 6), nullable=False, server_default="1"
+    )
+    usd_exchange_rate_value: Mapped[float] = mapped_column(
+        Numeric(18, 6),
+        nullable=True,
+        server_default="0",
+        comment="It will be written by multiplying the usd exchange rate with the current value result.",
+    )
+    eur_exchange_rate_value: Mapped[float] = mapped_column(
+        Numeric(18, 6),
+        nullable=True,
+        server_default="0",
+        comment="It will be written by multiplying the eur exchange rate with the current value result.",
+    )
+    gbp_exchange_rate_value: Mapped[float] = mapped_column(
+        Numeric(18, 6),
+        nullable=True,
+        server_default="0",
+        comment="It will be written by multiplying the gpd exchange rate with the current value result.",
+    )
+    cny_exchange_rate_value: Mapped[float] = mapped_column(
+        Numeric(18, 6),
+        nullable=True,
+        server_default="0",
+        comment="It will be written by multiplying the cny exchange rate with the current value result.",
+    )
+
+    account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
+    account_records_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Account Record UU ID"
+    )
+
+    __table_args__ = (
+        Index("_account_record_exchanges_ndx_00", account_records_id),
+        {"comment": "Account Record Exchanges Information"},
+    )
diff --git a/databases/sql_models/account/iban.py b/databases/sql_models/account/iban.py
new file mode 100644
index 0000000..e4b338f
--- /dev/null
+++ b/databases/sql_models/account/iban.py
@@ -0,0 +1,113 @@
+from sqlalchemy.orm import mapped_column, Mapped
+from sqlalchemy import String, ForeignKey, Index, TIMESTAMP, SmallInteger, Identity
+
+from databases.sql_models.core_mixin import CrudCollection
+
+
<br>
+class BuildIbans(CrudCollection):
+    """
+    BuildIbans: IBANs attached to a building for money transactions,
+    based on declarative_base and BaseMixin via session.
+    """
+
+    __tablename__ = "build_ibans"
+    __exclude__fields__ = []
+
+    iban: Mapped[str] = mapped_column(
+        String(40), server_default="", nullable=False, comment="IBAN number"
+    )
+    start_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Start Date"
+    )
+
+    # Sentinel "2900-01-01" means the IBAN is still active (no stop date).
+    stop_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="2900-01-01 00:00:00"
+    )
+    bank_code: Mapped[str] = mapped_column(String(24), server_default="TR0000000000000")
+    # NOTE(review): placeholder default "????" — confirm intended.
+    xcomment: Mapped[str] = mapped_column(String(64), server_default="????")
+
+    build_id: Mapped[int] = mapped_column(
+        ForeignKey("build.id"), nullable=True, comment="Building ID"
+    )
+    build_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Building UUID", index=True
+    )
+    # building: Mapped["Build"] = relationship(
+    #     "Build", back_populates="build_ibans", foreign_keys=[build_id]
+    # )
+
+    __table_args__ = (
+        Index("_build_ibans_ndx_01", iban, start_date, unique=True),
+        {"comment": "IBANs related to money transactions due to building objects"},
+    )
+
+    # @property
+    # def enums(self):
+    #     return_dict = {}
+    #     for key, enum in self.__enums_list__.items():
+    #         for enum_item in EnumDropdown.filter_by(enum_class=enum):
+    #             return_dict[key] = {
+    #                 enum_item.get_dict(include=["key", "value", "description"])
+    #             }
+    #     return return_dict
+
+
+class BuildIbanDescription(CrudCollection):
+    """
+    BuildIbanDescription: search keywords that map bank-statement text for an
+    IBAN to a customer/company/build part, based on declarative_base and
+    CrudCollection via session.
+    """
+
+    __tablename__ = "build_iban_description"
+    __exclude__fields__ = []
+
+    iban: Mapped[str] = mapped_column(String, nullable=False, comment="IBAN Number")
+    group_id: Mapped[int] = mapped_column(
+        SmallInteger, nullable=False, comment="Group ID"
+    )
+    search_word: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Search Word", index=True
+    )
+
+    # decision_book_project_id: Mapped[int] = mapped_column(
+    #     ForeignKey("build_decision_book_projects.id")
+    # )
+    # decision_book_project_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=False, comment="Decision Book Project UUID"
+    # )
+    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
+    customer_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Customer UUID"
+    )
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
+    company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Company UUID"
+    )
+    build_parts_id: Mapped[int] = mapped_column(
+        ForeignKey("build_parts.id"), nullable=True
+    )
+    build_parts_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Build Parts UUID"
+    )
+
+    # decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
+    #     "BuildDecisionBookProjects",
+    #     back_populates="search_iban_description",
+    #     foreign_keys=[decision_book_project_id],
+    # )
+    # customer: Mapped["People"] = relationship(
+    #     "People", back_populates="search_iban_description", foreign_keys=[customer_id]
+    # )
+    # company: Mapped["Companies"] = relationship(
+    #     "Company", back_populates="search_iban_description", foreign_keys=[company_id]
+    # )
+    # parts: Mapped["BuildParts"] = relationship(
+    #     "BuildParts",
+    #     back_populates="search_iban_description",
+    #     foreign_keys=[build_parts_id],
+    # )
+
+    __table_args__ = (
+        # A search word is unique per (iban, group).
+        Index(
+            "_search_iban_description_ndx_00", iban, search_word, group_id, unique=True
+        ),
+        {"comment": "Search Iban Description Information"},
+    )
diff --git a/databases/sql_models/api/encrypter.py b/databases/sql_models/api/encrypter.py
new file mode 100644
index 0000000..1924ba0
--- /dev/null
+++ b/databases/sql_models/api/encrypter.py
@@ -0,0 +1,124 @@
+import random
+
+from datetime import datetime, timedelta
+
+from sqlalchemy import String
+from sqlalchemy.orm import mapped_column, Mapped
+
+from cryptography.fernet import Fernet, MultiFernet
+from databases.sql_models.core_mixin import CrudCollection
+
+
class CrypterEngine(CrudCollection):
    """Fernet key-pair store used to encrypt/decrypt fields of ORM rows.

    Key pairs are stored as text and rotated implicitly: only rows younger
    than 29 days are used for new encryptions, and 100 fresh pairs are
    generated when none are valid. Each encrypted row records which pair
    was used via its ``cryp_uu_id`` attribute so it can be decrypted later.
    """

    __tablename__ = "crypter_engine"
    __table_args__ = ()

    keys_error = "Unable to retrieve encrypt keys"
    alchemy_error = "Alchemy object is empty"

    # Kept only for backward compatibility with callers that read results
    # off the class after a bulk call; the bulk methods now accumulate into
    # a local list (the old code reset these inside the loop, losing all
    # but the last processed object).
    encrypt_list = []
    decrypt_list = []

    key_first: Mapped[str] = mapped_column(String, nullable=False)
    key_second: Mapped[str] = mapped_column(String, nullable=False)

    @classmethod
    def get_valid_keys(cls, row=None):
        """Return ``(key_first, key_second, uu_id)`` with keys as bytes.

        When *row* carries a ``cryp_uu_id``, the exact pair that encrypted
        it is returned; otherwise a random still-valid pair is chosen.
        Returns ``(None, None, None)`` when no pair is available.
        """
        # Ensure a pool of valid (< 29 days old) key pairs exists.
        if not cls.filter_all(
            cls.created_at > datetime.now() - timedelta(days=29)
        ).get(1):
            cls.create_encrypt_keys(count=100)
        if decrypt_identifier := getattr(row, "cryp_uu_id", None):
            if decrypt_row := cls.find_one(uu_id=str(decrypt_identifier)):
                # BUG FIX: keys are persisted as str, so they must be
                # .encode()d for Fernet (old code called .decode() on str).
                return (
                    decrypt_row.key_first.encode(),
                    decrypt_row.key_second.encode(),
                    decrypt_row.uu_id,
                )
        if encrypt_rows := cls.filter_all(
            cls.created_at > datetime.now() - timedelta(days=29)
        ).data:
            encrypt_row = random.choice(encrypt_rows)
            return (
                encrypt_row.key_first.encode(),
                encrypt_row.key_second.encode(),
                # BUG FIX: was encrypt_rows.uu_id (the list, not the row).
                encrypt_row.uu_id,
            )
        return None, None, None

    @classmethod
    def create_encrypt_keys(cls, count: int):
        """Generate *count* fresh Fernet key pairs and persist them as text."""
        for _ in range(count):
            cls.find_or_create(
                key_first=Fernet.generate_key().decode(),
                key_second=Fernet.generate_key().decode(),
            )

    @classmethod
    def raise_exception(cls, message=None):
        """Raise a generic error carrying *message* (defaults to keys_error)."""
        raise Exception(message if message else cls.keys_error)

    @classmethod
    def _fernet_for(cls, row=None):
        """Resolve a key pair (optionally for *row*) and build its MultiFernet.

        BUG FIX: validates the keys BEFORE constructing Fernet objects; the
        old code did Fernet(None) first, raising an unhelpful error instead
        of the domain message.
        """
        key_first, key_second, cryp_uu_id = cls.get_valid_keys(row=row)
        if not key_first or not key_second:
            cls.raise_exception()
        return MultiFernet([Fernet(key_first), Fernet(key_second)]), cryp_uu_id

    @classmethod
    def _checked_dict(cls, alchemy_object):
        """Return the object's dict payload or raise alchemy_error."""
        alchemy_dict = alchemy_object.get_dict() if alchemy_object else None
        if not alchemy_dict:
            cls.raise_exception(cls.alchemy_error)
        return alchemy_dict

    @classmethod
    def encrypt_given_alchemy_list(cls, alchemy_object_list: list):
        """Encrypt the ``__encrypt_list__`` fields of every object given.

        Returns the list of updated objects (one per input object).
        """
        encrypted = [
            cls.encrypt_given_alchemy_object(alchemy_object)
            for alchemy_object in alchemy_object_list
        ]
        cls.encrypt_list = encrypted  # kept in sync for legacy readers
        return encrypted

    @classmethod
    def encrypt_given_alchemy_object(cls, alchemy_object_object):
        """Encrypt one object's ``__encrypt_list__`` fields in place.

        Stamps the object with the ``cryp_uu_id`` of the pair used, then
        returns ``obj.update(**payload)``.
        """
        fernet_keys, cryp_uu_id = cls._fernet_for()
        alchemy_dict = cls._checked_dict(alchemy_object_object)
        for key, plain_row in alchemy_dict.items():
            if key in alchemy_object_object.__encrypt_list__:
                # Fernet.encrypt requires bytes; stored values are text.
                token = fernet_keys.encrypt(
                    plain_row.encode() if isinstance(plain_row, str) else plain_row
                )
                alchemy_dict[key] = token.decode()
        alchemy_dict["cryp_uu_id"] = cryp_uu_id
        return alchemy_object_object.update(**alchemy_dict)

    @classmethod
    def decrypt_given_alchemy(cls, alchemy_object_list: list):
        """Decrypt every object's ``__encrypt_list__`` fields.

        Returns one plain dict per input object (objects are not mutated).
        """
        decrypted = [
            cls.decrypt_given_alchemy_object(alchemy_object)
            for alchemy_object in alchemy_object_list
        ]
        cls.decrypt_list = decrypted  # kept in sync for legacy readers
        return decrypted

    @classmethod
    def decrypt_given_alchemy_object(cls, alchemy_object):
        """Return the object's dict with its ``__encrypt_list__`` fields decrypted."""
        fernet_keys, _ = cls._fernet_for(row=alchemy_object)
        alchemy_dict = cls._checked_dict(alchemy_object)
        for key, token in alchemy_dict.items():
            if key in alchemy_object.__encrypt_list__:
                alchemy_dict[key] = fernet_keys.decrypt(token).decode()
        return alchemy_dict
diff --git a/databases/sql_models/building/budget.py b/databases/sql_models/building/budget.py
new file mode 100644
index 0000000..389be28
--- /dev/null
+++ b/databases/sql_models/building/budget.py
@@ -0,0 +1,157 @@
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Index,
+ SmallInteger,
+ Boolean,
+ TIMESTAMP,
+ Text,
+ Numeric,
+ Integer,
+)
+from sqlalchemy.orm import mapped_column, Mapped, relationship
+from databases.sql_models.core_mixin import CrudCollection
+
+
class DecisionBookBudgetBooks(CrudCollection):
    """Budget book header linking a decision book to its managing company.

    One row per (company, decision book) budget ledger; budget codes,
    master records and period entries reference this header.
    """

    __tablename__ = "decision_book_budget_books"
    __exclude__fields__ = []

    # Country of the budget book (format not enforced here).
    country: Mapped[str] = mapped_column(String, nullable=False)
    # Numeric discriminator for the branch kind; 0 = default.
    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # Owning company, with denormalized uu_id copy.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
    # Optional branch; also a row in companies.
    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    branch_uu_id: Mapped[str] = mapped_column(
        String, comment="Branch UU ID", nullable=True
    )
    # Decision book this budget belongs to.
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    __table_args__ = (
        # Query path: company's books ordered/filtered by creation time.
        Index(
            "_decision_book_budget_companies_book_ndx_00",
            company_id,
            CrudCollection.created_at,
        ),
        {"comment": "budget Book Information"},
    )
+
+
class DecisionBookBudgetCodes(CrudCollection):
    """Budget line code (chart-of-accounts style entry) with a description.

    Optionally scoped to a decision book, a build part and/or a company —
    all three foreign keys are nullable.
    """

    __tablename__ = "decision_book_budget_codes"
    __exclude__fields__ = []

    # Short code identifying the budget line.
    budget_code: Mapped[str] = mapped_column(
        String(48), nullable=False, comment="budget Code"
    )
    # Free-text description of the line.
    comment_line: Mapped[str] = mapped_column(
        Text, nullable=False, comment="Comment Line"
    )

    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )

    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )

    __table_args__ = (
        Index("_decision_book_budget_codes_ndx_00", budget_code, "created_at"),
        Index("_decision_book_budget_codes_ndx_01", company_id, "created_at"),
        {"comment": "budget Book Information"},
    )
+
+
class DecisionBookBudgetMaster(CrudCollection):
    """Master budget definition for a budget book.

    Holds the budget type, currency and total, the tracking period and the
    department that owns the budget.
    """

    __tablename__ = "decision_book_budget_master"
    __exclude__fields__ = []

    # Budget type (e.g. operational, investment).
    budget_type: Mapped[str] = mapped_column(String(50), nullable=False)
    # Budget currency code; defaults to Turkish lira.
    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
    # Total allocated budget amount.
    total_budget: Mapped[float] = mapped_column(Numeric(10, 2), nullable=False)

    # Tracking period, chosen from the enum dropdown table.
    tracking_period_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    # BUG FIX: column comment previously read "Part Direction UUID",
    # copy-pasted from an unrelated model.
    tracking_period_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Tracking Period UUID"
    )
    # Owning budget book header.
    budget_books_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_books.id"), nullable=False
    )
    budget_books_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Books UU ID"
    )
    # Department responsible for this budget.
    department_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("departments.id"), nullable=False
    )
    department_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Department UU ID"
    )

    __table_args__ = ({"comment": "budget Book Information"},)
+
+
class DecisionBookBudgets(CrudCollection):
    """Per-period budget ledger row attached to a budget master record."""

    __tablename__ = "decision_book_budgets"
    __exclude__fields__ = []

    # Date of the budgeting process entry.
    process_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False
    )
    # NOTE(review): unlike sibling FKs in this file there is no companion
    # budget_codes_uu_id column — confirm whether that is intentional.
    budget_codes_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_codes.id"), nullable=False
    )
    # Total allocated budget.
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )
    # Budget consumed so far.
    used_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )
    # Budget still available (original comment said "used budget" — copy-paste).
    remaining_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )

    decision_book_budget_master_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_master.id"), nullable=False
    )
    decision_book_budget_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Budget Master UU ID"
    )

    __table_args__ = (
        # Query path: a master's entries filtered by process date.
        Index(
            "_decision_book_budgets_ndx_00",
            decision_book_budget_master_uu_id,
            process_date,
        ),
        {"comment": "budget Book Information"},
    )
diff --git a/databases/sql_models/building/build.py b/databases/sql_models/building/build.py
new file mode 100644
index 0000000..0fc53ba
--- /dev/null
+++ b/databases/sql_models/building/build.py
@@ -0,0 +1,861 @@
+import typing
+from operator import or_
+from datetime import datetime, timedelta
+from platform import system
+from typing import List
+
+from fastapi import HTTPException, status
+
+from sqlalchemy.orm import mapped_column, relationship, Mapped
+from sqlalchemy import (
+ String,
+ Integer,
+ ForeignKey,
+ Index,
+ SmallInteger,
+ Boolean,
+ TIMESTAMP,
+ Text,
+ Numeric,
+)
+
+from api_library.date_time_actions.date_functions import system_arrow
+from databases.sql_models.core_mixin import CrudCollection
+
+from databases.extensions.selector_classes import SelectActionWithEmployee
+from api_validations.validations_request import (
+ InsertBuildParts,
+ InsertBuild,
+ UpdateBuild,
+)
+from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
+
+
class BuildTypes(CrudCollection):
    """Lookup table of building/part types, localized per language.

    A type is identified by (type_code, function_code, lang); ``type_name``
    is the display label for that language.
    """

    __tablename__ = "build_types"
    __exclude__fields__ = []
    __include__fields__ = []

    # Functional grouping code of the type.
    function_code: Mapped[str] = mapped_column(
        String(12), server_default="", nullable=False, comment="Function Code"
    )
    # Structure type code (e.g. used to build BuildParts.part_code).
    type_code: Mapped[str] = mapped_column(
        String(12), server_default="", nullable=False, comment="Structure Type Code"
    )
    # Language of the display name; defaults to Turkish.
    lang: Mapped[str] = mapped_column(
        String(4), server_default="TR", nullable=False, comment="Language"
    )
    # Human-readable name shown to users.
    type_name: Mapped[str] = mapped_column(
        String(48), server_default="", nullable=False, comment="Type Name"
    )

    __table_args__ = (
        # One name per (type, function, language) combination.
        Index("_build_types_ndx_00", type_code, function_code, lang, unique=True),
        {"comment": "Function group of building types with their language information"},
    )
+
+
class Part2Employee(CrudCollection):
    """Assignment of a build part to an employee.

    NOTE(review): the original docstring claims "between start and end
    date, a part can be assigned to only one employee", but the unique
    index below is (employee_id, part_id) with no date component — which
    permits several employees per part. Confirm the intended constraint.
    """

    __tablename__ = "part2employee"
    __exclude__fields__ = []
    __include__fields__ = []

    # Denormalized building id (no FK constraint on this column).
    build_id: Mapped[int] = mapped_column(Integer, comment="Building ID")
    part_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=False, comment="Part ID"
    )
    employee_id: Mapped[int] = mapped_column(
        ForeignKey("employees.id"), nullable=False, comment="Employee ID"
    )

    __table_args__ = (
        # One row per (employee, part) pair.
        Index("_part2employee_ndx_00", employee_id, part_id, unique=True),
        {"comment": "Employee2Parts Information"},
    )
+
+
class RelationshipEmployee2Build(CrudCollection):
    """Many-to-many link granting a company's employee access to a building.

    Used as ``Build.__many__table__``: rows here scope which buildings an
    employee can select/act on.
    """

    __tablename__ = "relationship_employee2build"
    __exclude__fields__ = []

    # Company the employee acts for.
    company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=False
    )  # 1, 2, 3
    # Employee being granted the relationship.
    employee_id: Mapped[int] = mapped_column(
        ForeignKey("employees.id"), nullable=False
    )  # employee -> (n)person Evyos LTD
    # Target building ("member" of the relationship).
    member_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False
    )  # 2, 3, 4

    # Kind of relationship; defaults to plain employment.
    relationship_type: Mapped[str] = mapped_column(
        String, nullable=True, server_default="Employee"
    )  # Commercial
    # When True the link is read-only/visibility-only.
    show_only: Mapped[bool] = mapped_column(Boolean, server_default="False")

    __table_args__ = (
        # One row per (company, employee, building, relationship kind).
        Index(
            "relationship_build_employee_ndx_00",
            company_id,
            employee_id,
            member_id,
            relationship_type,
            unique=True,
        ),
        {"comment": "Build & Employee Relationship Information"},
    )
+
+
class Build(CrudCollection, SelectActionWithEmployee):
    """Building entity: a structure containing parts (flats, shops, ...).

    Employee access is scoped through ``RelationshipEmployee2Build``
    (``__many__table__``); ``SelectActionWithEmployee`` uses it to filter
    which buildings an employee may select.
    """

    __tablename__ = "build"
    __exclude__fields__ = []
    __include__fields__ = []
    __access_by__ = []
    __many__table__ = RelationshipEmployee2Build

    # Government address code; unique per building.
    gov_address_code: Mapped[str] = mapped_column(
        String, server_default="", unique=True
    )
    build_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building Name"
    )
    build_no: Mapped[str] = mapped_column(
        String(8), nullable=False, comment="Building Number"
    )

    max_floor: Mapped[int] = mapped_column(
        SmallInteger, server_default="1", nullable=False, comment="Max Floor"
    )
    underground_floor: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", nullable=False, comment="Underground Floor"
    )
    build_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01"
    )
    decision_period_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default="1900-01-01",
        comment="Building annual ordinary meeting period",
    )
    tax_no: Mapped[str] = mapped_column(String(24), server_default="")
    lift_count: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    heating_system: Mapped[bool] = mapped_column(Boolean, server_default="True")
    cooling_system: Mapped[bool] = mapped_column(Boolean, server_default="False")
    hot_water_system: Mapped[bool] = mapped_column(Boolean, server_default="False")
    block_service_man_count: Mapped[int] = mapped_column(
        SmallInteger, server_default="0"
    )
    security_service_man_count: Mapped[int] = mapped_column(
        SmallInteger, server_default="0"
    )
    garage_count: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Garage Count"
    )
    # Points at a BuildParts row of this building; resolved by the
    # management_room property (no FK constraint on this column).
    management_room_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Management Room ID"
    )

    site_id: Mapped[int] = mapped_column(ForeignKey("build_sites.id"), nullable=True)
    site_uu_id: Mapped[str] = mapped_column(String, comment="Site UUID", nullable=True)
    address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"), nullable=False)
    address_uu_id: Mapped[str] = mapped_column(
        String, comment="Address UUID", nullable=False
    )
    build_types_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=False, comment="Building Type"
    )
    build_types_uu_id: Mapped[str] = mapped_column(String, comment="Building Type UUID")

    parts: Mapped[List["BuildParts"]] = relationship(
        "BuildParts", back_populates="buildings", foreign_keys="BuildParts.build_id"
    )
    decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
        "BuildDecisionBook",
        back_populates="buildings",
        foreign_keys="BuildDecisionBook.build_id",
    )

    __table_args__ = (
        Index("_builds_ndx_00", gov_address_code),
        Index("_builds_ndx_01", build_name, build_no),
        {
            "comment": "Build objects are building that are created for living and store purposes"
        },
    )

    @property
    def management_room(self):
        """The BuildParts row configured as management room, or None."""
        if management_room := BuildParts.filter_by_one(
            system=True, id=self.management_room_id, build_id=self.id
        ).data:
            return management_room
        return None

    @classmethod
    def create_action(cls, data: InsertBuild, token):
        """Create a building from the request payload and bind it to the caller.

        Resolves the address and build type by uu_id, creates the row, then
        links it to the token's (company, employee) through __many__table__
        and confirms both rows.

        Raises:
            HTTPException 404: when the address uu_id cannot be resolved.
        """
        from databases import Addresses

        data_dict = data.excluded_dump()
        data_dict["address_id"] = None
        if data.address_uu_id:
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.address_uu_id,
            ).data
            data_dict["address_id"] = official_address.id
            data_dict["build_no"] = str(official_address.build_number)
        if not data_dict["address_id"]:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Address is not found in database. Re-enter address record then try again.",
            )
        build_type = BuildTypes.filter_by_one(
            system=True, uu_id=str(data.build_types_uu_id)
        ).data
        data_dict["build_types_id"] = build_type.id
        build_created = cls.find_or_create(**data_dict)
        created_build_relation = cls.__many__table__.find_or_create(
            company_id=token.selected_company.company_id,
            employee_id=token.selected_company.employee_id,
            member_id=build_created.id,
        )
        build_created.save()
        build_created.update(is_confirmed=True)
        build_created.save()
        created_build_relation.update(is_confirmed=True)
        created_build_relation.save()
        return build_created

    @classmethod
    def update_action(cls, data: UpdateBuild, build_uu_id: str, token):
        """Update the building identified by *build_uu_id* with the payload.

        Returns the updated row, or None when no building matches
        (original behavior; debug prints removed).
        """
        from databases import Addresses

        data_dict = data.excluded_dump()
        if data.address_uu_id:
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.address_uu_id
            ).data
            data_dict["address_id"] = official_address.id if official_address else None
        if build_to_update := cls.filter_one(cls.uu_id == build_uu_id).data:
            updated_build = build_to_update.update(**data_dict)
            updated_build.save()
            return updated_build
        return None

    @property
    def top_flat(self):
        """Highest part number in the building (0 when it has no parts).

        BUG FIX: previous code compared against the nonexistent attribute
        self.max_flat_no, raising AttributeError on first access.
        """
        return max((part.part_no for part in self.parts), default=0)

    @property
    def bottom_flat(self):
        """Lowest part number in the building (0 when it has no parts).

        BUG FIX: previous code compared against self.max_flat_no and
        initialized the minimum to 0, so it could never find the true
        minimum of positive part numbers.
        """
        return min((part.part_no for part in self.parts), default=0)

    @property
    def human_livable_parts(self) -> tuple:
        """Return (list of livable parts, their count)."""
        parts = list(part for part in self.parts if part.human_livable)
        return parts, len(parts)

    @property
    def livable_part_count(self):
        """Count of livable parts; raises HTTP 404 when there are none."""
        livable_parts = BuildParts.filter_all(
            BuildParts.build_id == self.id,
            BuildParts.human_livable == True,
        )
        if not livable_parts.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="There is no livable part in this building.",
            )
        return livable_parts.count

    @property
    def part_type_count(self):
        """Group this building's part numbers by their type code.

        Returns {type_code: {"list": [part_no, ...]}}, or None when the
        building has no parts (preserves the original empty-case result).

        BUG FIX: the accumulator dict was re-created inside the loop, so
        only the last part survived; it is now initialized once. Also reads
        part.part_type_id — the column actually defined on BuildParts —
        instead of the nonexistent part.build_part_type_id.
        """
        if not self.parts:
            return None
        building_types = {}
        for part in self.parts:
            build_type = BuildTypes.filter_by_one(
                system=True, id=part.part_type_id
            ).data
            if build_type.type_code in building_types:
                building_types[build_type.type_code]["list"].append(part.part_no)
            else:
                building_types[build_type.type_code] = {"list": [part.part_no]}
        return building_types
+
+
class BuildParts(CrudCollection):
    """A single part (flat, shop, management room, ...) of a building.

    part_no is unique within a building; every building is expected to have
    a management section (conventionally part_no 0).
    """

    __tablename__ = "build_parts"
    __exclude__fields__ = []
    __include__fields__ = []
    __enum_list__ = [("part_direction", "Directions", "NN")]

    # Government door code, see https://adres.nvi.gov.tr/VatandasIslemleri/AdresSorgu
    address_gov_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Goverment Door Code"
    )
    part_no: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", nullable=False, comment="Part Number"
    )
    # Floor the part is on (can be negative for underground floors).
    part_level: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Building Part Level"
    )
    # Derived "<type_code>:<zero-padded part_no>" label set by create_action.
    part_code: Mapped[str] = mapped_column(
        String, server_default="", nullable=False, comment="Part Code"
    )
    part_gross_size: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Part Gross Size"
    )
    part_net_size: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Part Net Size"
    )
    default_accessory: Mapped[str] = mapped_column(
        Text, server_default="0", comment="Default Accessory"
    )
    human_livable: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Human Livable"
    )
    # Size bucket key ("<net size rounded up to 5>M2") used to group
    # parts that pay the same constant dues.
    due_part_key: Mapped[str] = mapped_column(
        String, server_default="", nullable=False, comment="Constant Payment Group"
    )

    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building UUID"
    )
    part_direction_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    part_direction_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Part Direction UUID"
    )
    part_type_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=False, comment="Building Part Type"
    )
    part_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building Part Type UUID"
    )

    buildings: Mapped["Build"] = relationship(
        "Build", back_populates="parts", foreign_keys=[build_id]
    )

    __table_args__ = (
        Index("build_parts_ndx_01", build_id, part_no, unique=True),
        {"comment": "Part objects that are belong to building objects"},
    )

    @classmethod
    def create_action(cls, data: InsertBuildParts, token):
        """Create a part in a building the token's employee can access.

        Resolves the building through the employee-scoped select, resolves
        the part type and direction by uu_id, derives part_code and
        due_part_key, and persists via find_or_create.

        Raises:
            HTTPException 406: building not reachable, or a size is empty.
            HTTPException 418: the part type uu_id cannot be resolved.
        """
        from databases import ApiEnumDropdown

        data_dict = data.dump()
        build_from_duty = Build.select_action(
            employee_id=token.selected_company.employee_id,
            filter_expr=[Build.uu_id == data.build_uu_id],
        )
        building = build_from_duty.first()
        if not building:
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail="This Employee can not reach this building or building uu-id not found in database. "
                "Check with your supervisor.",
            )

        if build_types := BuildTypes.filter_one(
            BuildTypes.uu_id == data.build_part_type_uu_id,
        ).data:
            part_direction = ApiEnumDropdown.get_by_uuid(
                uuid=str(data.part_direction_uu_id)
            )

            data_dict["part_gross_size"] = data.part_gross_size
            data_dict["part_net_size"] = data.part_net_size
            data_dict["part_level"] = data.part_level
            data_dict["build_id"] = building.id
            data_dict["part_no"] = data.part_no
            data_dict["part_code"] = (
                f"{build_types.type_code}:{str(data_dict['part_no']).zfill(2)}"
            )
            data_dict["address_gov_code"] = data.address_gov_code
            data_dict["default_accessory"] = data.default_accessory
            data_dict["human_livable"] = bool(data.human_livable)

            data_dict["build_uu_id"] = str(data.build_uu_id)
            # (duplicate part_type_id assignment removed)
            data_dict["part_type_id"] = build_types.id
            data_dict["part_type_uu_id"] = str(build_types.uu_id)
            data_dict["part_direction_id"] = part_direction.id
            data_dict["part_direction_uu_id"] = str(part_direction.uu_id)

            if not data_dict["part_gross_size"]:
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="Part Gross Size can not be empty.",
                )

            if not data_dict["part_net_size"]:
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="Part Net Size can not be empty.",
                )
            # Round net size up to the next multiple of 5 for the dues group.
            pt = int(data_dict["part_net_size"])
            data_dict["due_part_key"] = str(pt + (5 - (pt % 5))) + "M2"
            del data_dict["build_part_type_uu_id"]
            return cls.find_or_create(**data_dict)

        raise HTTPException(
            status_code=status.HTTP_418_IM_A_TEAPOT,
            detail="Build Part can not be created.",
        )

    @property
    def part_name(self):
        """Display name '<TYPE NAME> : <part no>' resolved from BuildTypes."""
        if build_type := BuildTypes.filter_by_one(
            system=True, id=self.part_type_id
        ).data:
            return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
        # BUG FIX: the fallback dereferenced build_type.type_name while
        # build_type is None here; fall back to the part number instead.
        return f"Undefined:{str(self.part_no).upper()}"
+
+
class BuildLivingSpace(CrudCollection):
    """Occupancy record binding a person to a building part.

    The person is the occupant (owner or tenant — see occupant_type) of the
    part; fix_value / fix_percent are deductions applied to their debits.
    """

    __tablename__ = "build_living_space"
    __exclude__fields__ = []
    __include__fields__ = []

    # Fixed amount deducted from debit.
    fix_value: Mapped[float] = mapped_column(
        Numeric(20, 6),
        server_default="0",
        comment="Fixed value is deducted from debit.",
    )
    # Fixed percentage deducted from debit.
    fix_percent: Mapped[float] = mapped_column(
        Numeric(6, 2),
        server_default="0",
        comment="Fixed percent is deducted from debit.",
    )

    agreement_no: Mapped[str] = mapped_column(
        String, server_default="", comment="Agreement No"
    )
    # Marketing flags; semantics not visible here — confirm with callers.
    marketing_process: Mapped[bool] = mapped_column(Boolean, server_default="False")
    marketing_layer: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"),
        nullable=False,
        index=True,
        comment="Build Part ID",
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )
    person_id: Mapped[int] = mapped_column(
        ForeignKey("people.id"),
        nullable=False,
        index=True,
        comment="Responsible People ID",
    )
    person_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Responsible People UUID"
    )
    occupant_type: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"),
        nullable=False,
        comment="Occupant Type",
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Occupant Type UUID"
    )

    __table_args__ = (
        {"comment": "Living Space inside building parts that are related to people"},
    )

    @classmethod
    def create_action(
        cls,
        data: dict,
        token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """Create a living-space row, bind default modules, confirm and return it.

        ``data`` may carry date strings in expiry_starts/expiry_ends; they
        are normalized through system_arrow before persisting.

        Raises:
            HTTPException 418: no service matches the occupant's
                responsibility code.
        """
        from databases import Services, OccupantTypes
        from api_events.events.events.events_bind_modules import (
            ModulesBindOccupantEventMethods,
        )

        # Normalize incoming expiry dates via the system arrow helper.
        if data.get("expiry_starts"):
            data["expiry_starts"] = str(system_arrow.get(data["expiry_starts"]))
        if data.get("expiry_ends"):
            data["expiry_ends"] = str(system_arrow.get(data["expiry_ends"]))
        created_living_space = BuildLivingSpace.find_or_create(**data)
        occupant_type = OccupantTypes.filter_by_one(
            system=True, uu_id=created_living_space.occupant_type_uu_id
        ).data
        # A service must exist for this occupant responsibility code.
        related_service = Services.filter_by_one(
            related_responsibility=occupant_type.occupant_code,
        ).data
        if not related_service:
            raise HTTPException(
                status_code=status.HTTP_418_IM_A_TEAPOT,
                detail="Service is not found in database. Re-enter service record then try again.",
            )
        # Bind the default modules for a freshly initialized occupant.
        ModulesBindOccupantEventMethods.bind_default_module_for_first_init_occupant(
            build_living_space_id=created_living_space.id,
        )
        created_living_space.save_and_confirm()
        return created_living_space

    @classmethod
    def find_living_from_customer_id(
        cls, customer_id, process_date, add_days: int = 32
    ):
        """Return (rows, count) of living spaces for *customer_id* around a date.

        NOTE(review): references cls.owner_person_id, cls.life_person_id,
        cls.start_date and cls.stop_date — none of which are mapped on this
        model (only person_id and, presumably via the mixin, expiry_*).
        Looks stale relative to the current schema; verify before use.
        NOTE(review): or_ is imported from operator at the top of this file;
        it only works here because SQLAlchemy overloads `|` — sqlalchemy.or_
        is the conventional spelling.
        """
        from api_library.date_time_actions.date_functions import system_arrow

        formatted_date = system_arrow.get(str(process_date))
        living_spaces = cls.filter_all(
            or_(
                cls.owner_person_id == customer_id,
                cls.life_person_id == customer_id,
            ),
            cls.start_date < formatted_date - timedelta(days=add_days),
            cls.stop_date > formatted_date + timedelta(days=add_days),
        )
        return living_spaces.data, living_spaces.count
+
+
class BuildManagement(CrudCollection):
    """Pricing/management record of a building part per occupant type."""

    __tablename__ = "build_management"
    __exclude__fields__ = []

    # Discount rate, e.g. 22 (%).
    discounted_percentage: Mapped[float] = mapped_column(
        Numeric(6, 2), server_default="0.00"
    )
    # Price before rounding, e.g. 78.00 TL.
    discounted_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )
    # Final charged price after discount/rounding, e.g. 75.00 TL.
    calculated_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )

    occupant_type: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"),
        nullable=False,
        comment="Occupant Type",
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Occupant Type UUID"
    )
    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building UUID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"),
        nullable=False,
        index=True,
        comment="Build Part ID",
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )

    __table_args__ = (
        # One record per (part, occupant type, validity start).
        # "expiry_starts" is referenced by name — presumably a mixin column.
        Index(
            "build_management_ndx_00",
            build_parts_id,
            occupant_type,
            "expiry_starts",
            unique=True,
        ),
        {"comment": "Management of the building parts that are related to people"},
    )
+
+
class BuildArea(CrudCollection):
    """Common/auxiliary area of a building (e.g. green area, yard)."""

    __tablename__ = "build_area"
    __exclude__fields__ = []

    area_name: Mapped[str] = mapped_column(String, server_default="")
    area_code: Mapped[str] = mapped_column(String, server_default="")
    # Area category; defaults to a green area.
    area_type: Mapped[str] = mapped_column(String, server_default="GREEN")
    # Two-letter compass direction code; "NN" = not set.
    area_direction: Mapped[str] = mapped_column(String(2), server_default="NN")
    area_gross_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    area_net_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    width: Mapped[int] = mapped_column(Integer, server_default="0")
    size: Mapped[int] = mapped_column(Integer, server_default="0")

    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"))
    build_uu_id: Mapped[str] = mapped_column(String, comment="Building UUID")
    part_type_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=True, comment="Building Part Type"
    )
    part_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Building Part Type UUID"
    )

    # BUG FIX: was misspelled `_table_args_` (single underscores), so
    # SQLAlchemy silently ignored it and the unique (build_id, area_code)
    # index was never created.
    __table_args__ = (
        Index("_edm_build_parts_area_ndx_00", build_id, area_code, unique=True),
    )
+
+
class BuildSites(CrudCollection):
    """Site (complex) grouping several buildings under one address."""

    __tablename__ = "build_sites"
    __exclude__fields__ = []
    __include__fields__ = []

    site_name: Mapped[str] = mapped_column(String(24), nullable=False)
    site_no: Mapped[str] = mapped_column(String(8), nullable=False)

    address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"))
    address_uu_id: Mapped[str] = mapped_column(String, comment="Address UUID")

    # addresses: Mapped["Address"] = relationship(
    #     "Address", back_populates="site", foreign_keys=[address_id]
    # )
    # buildings: Mapped["Build"] = relationship(
    #     "Build", back_populates="sites", foreign_keys="Build.site_id"
    # )

    __table_args__ = (
        # Non-unique lookup by site number and name.
        Index("_sites_ndx_01", site_no, site_name),
        {"comment": "Sites that groups building objets"},
    )
+
+
+class BuildCompaniesProviding(CrudCollection):
+    """
+    Link table recording which company provides which service (the "provide"
+    enum) for a building, optionally under a contract.  One row per unique
+    (build, company, provide) combination.
+    """
+
+    __tablename__ = "build_companies_providing"
+    __exclude__fields__ = []
+    __include__fields__ = []
+
+    build_id = mapped_column(
+        ForeignKey("build.id"), nullable=False, comment="Building ID"
+    )
+    # NOTE(review): the column comments below all read "Providing UUID", even
+    # for the build/company columns — presumably copy-paste; left as-is since
+    # they are part of the emitted DDL.
+    build_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Providing UUID"
+    )
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+    company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Providing UUID"
+    )
+    provide_id: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    provide_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Providing UUID"
+    )
+    # NOTE(review): contract_id references companies.id, not contracts.id —
+    # looks like a wrong FK target; confirm before relying on it.
+    contract_id: Mapped[int] = mapped_column(
+        Integer, ForeignKey("companies.id"), nullable=True
+    )
+
+    __table_args__ = (
+        Index(
+            "_build_companies_providing_ndx_00",
+            build_id,
+            company_id,
+            provide_id,
+            unique=True,
+        ),
+        {"comment": "Companies providing services for building"},
+    )
+
+
+class BuildPersonProviding(CrudCollection):
+    """
+    Link table recording which person provides which service (the "provide"
+    enum) for a building.  One row per unique (build, person, provide)
+    combination.
+    """
+
+    __tablename__ = "build_person_providing"
+    __exclude__fields__ = []
+    __include__fields__ = []
+
+    build_id = mapped_column(
+        ForeignKey("build.id"), nullable=False, comment="Building ID"
+    )
+    build_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Providing UUID"
+    )
+    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="People UUID"
+    )
+    provide_id: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    provide_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Providing UUID"
+    )
+    # NOTE(review): contract_id references companies.id, not contracts.id —
+    # same suspicious FK target as in BuildCompaniesProviding; confirm.
+    contract_id: Mapped[int] = mapped_column(
+        Integer, ForeignKey("companies.id"), nullable=True
+    )
+
+    __table_args__ = (
+        Index(
+            "_build_person_providing_ndx_00",
+            build_id,
+            people_id,
+            provide_id,
+            unique=True,
+        ),
+        {"comment": "People providing services for building"},
+    )
+
+
+# owner_people: Mapped["People"] = relationship(
+# "People",
+# back_populates="owner_buildings",
+# foreign_keys=[current_owner_person_id],
+# )
+# tenant_people: Mapped["People"] = relationship(
+# "People",
+# back_populates="tenant_buildings",
+# foreign_keys=[current_tenant_person_id],
+# )
+# decision_book_management: Mapped[List["BuildDecisionBookManagement"]] = (
+# relationship(
+# "BuildDecisionBookManagement",
+# back_populates="buildings",
+# foreign_keys="BuildDecisionBookManagement.build_parts_id",
+# )
+# )
+# budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+# "CompanyBudgetRecords",
+# back_populates="parts",
+# foreign_keys="CompanyBudgetRecords.build_parts_id",
+# )
+# living_spaces: Mapped[List["BuildLivingSpace"]] = relationship(
+# "BuildLivingSpace",
+# back_populates="parts",
+# foreign_keys="BuildLivingSpace.build_parts_id",
+# )
+# decision_book_payment_master: Mapped[List["BuildDecisionBookPaymentsMaster"]] = (
+# relationship(
+# "BuildDecisionBookPaymentsMaster",
+# back_populates="parts",
+# foreign_keys="BuildDecisionBookPaymentsMaster.build_parts_id",
+# )
+# )
+# decision_book_project_payments_master: Mapped[
+# "BuildDecisionBookProjectPaymentsMaster"
+# ] = relationship(
+# "BuildDecisionBookProjectPaymentsMaster",
+# back_populates="parts",
+# foreign_keys="BuildDecisionBookProjectPaymentsMaster.build_parts_id",
+# )
+# search_iban_description: Mapped["BuildIbanDescription"] = relationship(
+# "BuildIbanDescription",
+# back_populates="parts",
+# foreign_keys="BuildIbanDescription.build_parts_id",
+# )
+
+# parts: Mapped[List["BuildParts"]] = relationship(
+# "BuildParts", back_populates="living_spaces", foreign_keys=[build_parts_id]
+# )
+# owner_people: Mapped["People"] = relationship(
+# "People", back_populates="owner_living_spaces", foreign_keys=[owner_person_id]
+# )
+# life_people: Mapped["People"] = relationship(
+# "People", back_populates="life_living_spaces", foreign_keys=[life_person_id]
+# )
+# company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+# response_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+# person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
+
+# companies: Mapped["Companies"] = relationship(
+# "Companies", back_populates="buildings", foreign_keys=[company_id]
+# )
+# @classmethod
+# def select_action(cls, duty_id, token=None):
+# from database_sql_models import Companies
+#
+# related_companies = Companies.select_action(duty_id=duty_id)
+# companies_ids = [company.id for company in related_companies.all()]
+# return cls.filter_all(cls.company_id.in_(companies_ids)).query
diff --git a/databases/sql_models/building/decision_book.py b/databases/sql_models/building/decision_book.py
new file mode 100644
index 0000000..58c3bad
--- /dev/null
+++ b/databases/sql_models/building/decision_book.py
@@ -0,0 +1,1773 @@
+import math
+from datetime import datetime, timedelta
+from decimal import Decimal
+from typing import List
+from fastapi import HTTPException, status
+
+from api_library.date_time_actions.date_functions import system_arrow, client_arrow
+
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Index,
+ SmallInteger,
+ Boolean,
+ TIMESTAMP,
+ Text,
+ Numeric,
+ Integer,
+)
+from sqlalchemy.orm import mapped_column, Mapped, relationship
+
+from api_validations.validations_request import (
+ InsertDecisionBook,
+ InsertBuildDecisionBookItems,
+ InsertBuildDecisionBookItemDebits,
+ InsertBuildDecisionBookProjects,
+)
+from databases.sql_models.core_mixin import CrudCollection
+
+
+class BuildDecisionBook(CrudCollection):
+ """
+ Builds class based on declarative_base and BaseMixin via session
+ The start dates of the decision log periods are determined from the 'decision_period_date' field in the decision log table within the building information.
+ decision_period_date = Her yıl yapılan karar toplantısı + 365 gün her yıl tekrar eden
+ decision_book_pdf_path: Karar defteri pdf dosyasının yolu
+ resp_company_fix_wage: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık ücreti
+ is_out_sourced: Karar defterinin dışardan alınan hizmetle oluşturulup oluşturulmadığı
+ contact_agreement_path: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık anlaşması dosyasının yolu
+ contact_agreement_date: Karar defterinin oluşmasını sağlayan dışardaki danışmanlık anlaşma tarihi
+ meeting_date: Karar defterinin oluşmasını sağlayan toplantı tarihi
+ decision_type: Karar defterinin tipi (Bina Yönetim Toplantısı (BYT), Yıllık Acil Toplantı (YAT)
+ """
+
+ __tablename__ = "build_decision_book"
+ __exclude__fields__ = []
+
+ decision_book_pdf_path: Mapped[str] = mapped_column(
+ String, server_default="", nullable=True
+ )
+ resp_company_fix_wage: Mapped[float] = mapped_column(
+ Numeric(10, 2), server_default="0"
+ ) #
+ is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")
+ meeting_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), server_default="1900-01-01"
+ )
+ decision_type: Mapped[str] = mapped_column(String(3), server_default="RBM")
+ meeting_is_completed: Mapped[bool] = mapped_column(Boolean, server_default="0")
+ meeting_completed_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), nullable=True, comment="Meeting Completed Date"
+ )
+
+ build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=False)
+ build_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Build UUID"
+ )
+ resp_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+ resp_company_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Company UUID"
+ )
+ contact_id: Mapped[int] = mapped_column(
+ ForeignKey("contracts.id"), nullable=True, comment="Contract id"
+ )
+ contact_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Contract UUID"
+ )
+
+ buildings: Mapped["Build"] = relationship(
+ "Build",
+ back_populates="decision_books",
+ foreign_keys=build_id,
+ )
+ decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship(
+ "BuildDecisionBookItems",
+ back_populates="decision_books",
+ foreign_keys="BuildDecisionBookItems.build_decision_book_id",
+ )
+
+ __table_args__ = (
+ Index("build_decision_book_ndx_011", meeting_date, build_id),
+ Index("build_decision_book_ndx_011", build_id, "expiry_starts", "expiry_ends"),
+ {
+ "comment": "Decision Book objects that are related to decision taken at building meetings"
+ },
+ )
+
+ @classmethod
+ def retrieve_active_rbm(cls):
+ from databases import (
+ Build,
+ )
+
+ related_build = Build.find_one(id=cls.build_id)
+ related_date = system_arrow.get(related_build.build_date)
+ date_processed = related_date.replace(
+ year=system_arrow.now().date().year, month=related_date.month, day=1
+ )
+ if system_arrow.now().date() <= date_processed:
+ book = cls.filter_one(
+ cls.expiry_ends <= date_processed,
+ cls.decision_type == "RBM",
+ cls.build_id == related_build.id,
+ ).data
+ if not book:
+ cls.raise_http_exception(
+ status_code="HTTP_404_NOT_FOUND",
+ error_case="NOTFOUND",
+ message=f"Decision Book is not found for {related_build.build_name}-RBM",
+ data=dict(
+ build_id=str(related_build.uu_id),
+ build_name=related_build.build_name,
+ decision_type="RBM",
+ ),
+ )
+ return book
+ return
+
+ @classmethod
+ def select_action(cls, duty_id, token=None):
+ from databases import (
+ Build,
+ Companies,
+ )
+
+ related_companies = Companies.select_action(duty_id_list=[int(duty_id)])
+ related_companies_ids = list(
+ related_.id for related_ in related_companies.all()
+ )
+ related_building = Build.filter_all(Build.company_id.in_(related_companies_ids))
+ related_building_ids = list(related_.id for related_ in related_building.data)
+ return cls.filter_all(cls.build_id.in_(related_building_ids)).query
+
+ @classmethod
+ def create_action(cls, data: InsertDecisionBook, token=None):
+ from databases import (
+ Build,
+ Companies,
+ )
+
+ data_dict = data.model_dump()
+ if building := Build.find_one(uu_id=data.build_uu_id):
+ data_dict["build_id"] = building.id
+ if response_company := Companies.find_one(
+ uu_id=data_dict["resp_company_uu_id"]
+ ):
+ data_dict["resp_company_id"] = response_company.id
+ if not building:
+ raise HTTPException(
+ status_code=status.HTTP_406_NOT_ACCEPTABLE,
+ detail="Building must be given to create decision book.",
+ )
+ expiry_starts = system_arrow.get(str(data_dict.get("expiry_starts"))).format(
+ "%Y-%m-%d"
+ )
+ data_dict["expiry_starts"] = str(expiry_starts)
+ expiry_ends = system_arrow.get(str(data_dict.get("expiry_ends"))).format(
+ "%Y-%m-%d"
+ )
+ data_dict["expiry_ends"] = str(
+ expiry_ends.replace(month=expiry_ends.month + 1, day=1) - timedelta(days=1)
+ )
+
+ if decision_book := BuildDecisionBook.filter_one(
+ BuildDecisionBook.build_id == building.id,
+ BuildDecisionBook.expiry_ends > data_dict["expiry_starts"],
+ BuildDecisionBook.decision_type == data_dict.get("decision_type"),
+ ).data: # Decision book is already exist:
+ cls.raise_http_exception(
+ status_code=status.HTTP_409_CONFLICT,
+ error_case="RECORDEXITS",
+ message="Decision Book is already exist.",
+ data=decision_book.get_dict(),
+ )
+
+ data_dict["expiry_starts"] = str(expiry_starts.replace(day=1))
+ data_dict["expiry_ends"] = str(
+ expiry_ends.replace(month=expiry_ends.month + 1, day=1) - timedelta(days=1)
+ )
+ del data_dict["build_uu_id"], data_dict["resp_company_uu_id"]
+ return cls.find_or_create(**data_dict)
+
+ @property
+ def semester(self):
+ start_format = "".join(
+ [str(self.expiry_starts.year), "-", str(self.expiry_starts.month)]
+ )
+ end_format = "".join(
+ [str(self.expiry_ends.year), "-", str(self.expiry_ends.month)]
+ )
+ return "".join([start_format, " ", end_format])
+
+ def check_book_is_valid(self, bank_date: str):
+ if all(
+ [True if letter in str(bank_date) else False for letter in ["-", " ", ":"]]
+ ):
+ bank_date = datetime.strptime(str(bank_date), "%Y-%m-%d %H:%M:%S")
+ date_valid = (
+ system_arrow.get(self.expiry_starts)
+ < system_arrow.get(bank_date)
+ < system_arrow.get(self.expiry_ends)
+ )
+ return date_valid and self.active and not self.deleted
+
+ @classmethod
+ def retrieve_valid_book(cls, bank_date, iban):
+ from databases import (
+ BuildIbans,
+ )
+
+ if all(
+ [True if letter in str(bank_date) else False for letter in ["-", " ", ":"]]
+ ):
+ bank_date = datetime.strptime(str(bank_date), "%Y-%m-%d %H:%M:%S")
+ build_iban = BuildIbans.find_one(iban=iban)
+ decision_book: cls = cls.filter_one(
+ cls.build_id == build_iban.build_id,
+ cls.expiry_starts < bank_date,
+ cls.expiry_ends > bank_date,
+ cls.active == True,
+ cls.deleted == False,
+ ).data
+ decision_book.check_book_is_valid(bank_date.__str__())
+ return decision_book
+ return
+
+
+class BuildDecisionBookInvitations(CrudCollection):
+    """
+    Invitations sent out for a decision-book meeting of a building, including
+    the quorum parameters (living-part count and required attendance
+    percentage) used to decide whether the meeting may be held.
+    """
+
+    __tablename__ = "build_decision_book_invitations"
+    __exclude__fields__ = []
+
+    # NOTE(review): plain Integer, not ForeignKey("build.id") — confirm.
+    build_id: Mapped[int] = mapped_column(Integer, nullable=False)
+    build_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Build UUID"
+    )
+    decision_book_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book.id"), nullable=False
+    )
+    decision_book_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book UUID"
+    )
+
+    invitation_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Invite Type"
+    )
+    # 1 = first invitation, 2 = second attempt (see quorum logic below).
+    invitation_attempt: Mapped[int] = mapped_column(SmallInteger, server_default="1")
+    living_part_count: Mapped[int] = mapped_column(SmallInteger, server_default="1")
+    # Fraction of living parts that must attend for quorum, e.g. 0.51.
+    living_part_percentage: Mapped[float] = mapped_column(
+        Numeric(10, 2), server_default="0.51"
+    )
+
+    message: Mapped[str] = mapped_column(
+        Text, nullable=True, comment="Invitation Message"
+    )
+    planned_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date"
+    )
+    planned_date_expires: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Planned Meeting Date Expires"
+    )
+
+    __table_args__ = (
+        Index(
+            "_build_decision_book_invitations_ndx_01",
+            invitation_type,
+            planned_date,
+            invitation_attempt,
+            unique=True,
+        ),
+        {"comment": "People that are invited to building meetings."},
+    )
+
+    @classmethod
+    def check_invites_are_ready_for_meeting(cls, selected_decision_book, token_dict):
+        """
+        Validate that the meeting for *selected_decision_book* can be held.
+
+        Returns the first invitation when quorum was reached on it, otherwise
+        the second-attempt invitation.  Raises HTTPException 404 when no first
+        invitation exists, and 400 when quorum is not met and there is no
+        second attempt either.
+        """
+        first_book_invitation = BuildDecisionBookInvitations.filter_one(
+            BuildDecisionBookInvitations.build_id
+            == token_dict.selected_occupant.build_id,
+            BuildDecisionBookInvitations.decision_book_id == selected_decision_book.id,
+            BuildDecisionBookInvitations.invitation_attempt == 1,
+        ).data
+        if not first_book_invitation:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND,
+                detail=f"First Meeting Invitation is not found for Decision Book UUID : {selected_decision_book.uu_id}",
+            )
+        # Required attendance = living-part count * required percentage.
+        need_attend_count = int(first_book_invitation.living_part_count) * Decimal(
+            first_book_invitation.living_part_percentage
+        )
+        # Count distinct persons marked attending on the first invitation.
+        valid_invite_count = (
+            BuildDecisionBookPerson.filter_all(
+                BuildDecisionBookPerson.invite_id == first_book_invitation.id,
+                BuildDecisionBookPerson.build_decision_book_id
+                == selected_decision_book.id,
+                BuildDecisionBookPerson.is_attending == True,
+                system=True,
+            )
+            .query.distinct(BuildDecisionBookPerson.person_id)
+            .count()
+        )
+
+        second_book_invitation = BuildDecisionBookInvitations.filter_one(
+            BuildDecisionBookInvitations.build_id
+            == token_dict.selected_occupant.build_id,
+            BuildDecisionBookInvitations.decision_book_id == selected_decision_book.id,
+            BuildDecisionBookInvitations.invitation_attempt == 2,
+            system=True,
+        ).data
+        # Quorum failure is only fatal when no second attempt exists.
+        if not valid_invite_count >= need_attend_count and not second_book_invitation:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=f"In order meeting to be held, {math.ceil(need_attend_count)} people must attend "
+                f"to the meeting. Only {valid_invite_count} people are attending to the meeting.",
+            )
+        return first_book_invitation or second_book_invitation
+
+
+class BuildDecisionBookPerson(CrudCollection):
+    """
+    People or officials attending decision-book meetings.
+    dues_percent_discount: attendee's percentage discount applied to the dues
+    dues_fix_discount: attendee's fixed discount amount applied to the dues
+    dues_discount_approval_date: date on which this person's discount was approved
+    management_typecode: the person's role at the meeting
+    """
+
+    __tablename__ = "build_decision_book_person"
+    __exclude__fields__ = []
+    __enum_list__ = [("management_typecode", "BuildManagementType", "bm")]
+
+    dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    dues_fix_discount: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+    dues_discount_approval_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
+    )
+    send_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Confirmation Date"
+    )
+    is_attending: Mapped[bool] = mapped_column(
+        Boolean, server_default="0", comment="Occupant is Attending to invitation"
+    )
+    confirmed_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=True, comment="Confirmation Date"
+    )
+    token: Mapped[str] = mapped_column(
+        String, server_default="", comment="Invitation Token"
+    )
+
+    vicarious_person_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=True, comment="Vicarious Person ID"
+    )
+    vicarious_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Vicarious Person UUID"
+    )
+
+    invite_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_invitations.id"), nullable=False
+    )
+    invite_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Invite UUID"
+    )
+
+    build_decision_book_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book.id"), nullable=False
+    )
+    build_decision_book_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Decision Book UUID"
+    )
+    build_living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=False
+    )
+    build_living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Living Space UUID"
+    )
+    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
+    # person_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Person UUID")
+
+    __table_args__ = (
+        Index(
+            "_build_decision_book_person_ndx_01",
+            build_decision_book_id,
+            invite_id,
+            build_living_space_id,
+            unique=True,
+        ),
+        {"comment": "People that are attended to building meetings."},
+    )
+
+    def retrieve_all_occupant_types(self):
+        """Return every occupant-type row tied to this person's invitation."""
+        # NOTE(review): self.filter_all is called with a filter on a different
+        # model (BuildDecisionBookPersonOccupants) and its query is then used
+        # as that model's pre_query — confirm this CrudCollection idiom.
+        all_decision_book_people = self.filter_all(
+            BuildDecisionBookPersonOccupants.invite_id == self.invite_id,
+            system=True,
+        )
+        BuildDecisionBookPersonOccupants.pre_query = all_decision_book_people.query
+        return BuildDecisionBookPersonOccupants.filter_all(system=True).data
+
+    def add_occupant_type(self, occupant_type, build_living_space_id: int = None):
+        """
+        Attach *occupant_type* to this decision-book person and, when
+        *build_living_space_id* is given, create the management-room living
+        space for that person and bind the matching service to it.
+
+        Returns the created/found BuildDecisionBookPersonOccupants row, or
+        None when find_or_create yields nothing.  Raises HTTPException 404
+        when the related service or the building's management room is missing.
+        """
+        from databases import (
+            Build,
+            BuildLivingSpace,
+            Services,
+        )
+        from api_events.events.events.events_bind_services import (
+            ServiceBindOccupantEventMethods,
+        )
+
+        book_dict = dict(
+            build_decision_book_person_id=self.id,
+            build_decision_book_person_uu_id=str(self.uu_id),
+            invite_id=self.invite_id,
+            invite_uu_id=str(self.invite_uu_id),
+            occupant_type_id=occupant_type.id,
+            occupant_type_uu_id=str(occupant_type.uu_id),
+        )
+        if person_occupants := BuildDecisionBookPersonOccupants.find_or_create(
+            **book_dict
+        ):
+            person_occupants.save_and_confirm()
+
+            # Align the occupant row's validity window with the book's period.
+            decision_book = BuildDecisionBook.filter_one(
+                BuildDecisionBook.id == self.build_decision_book_id,
+            ).data
+            person_occupants.update(
+                expiry_starts=decision_book.expiry_starts,
+                expiry_ends=decision_book.expiry_ends,
+            )
+            if build_living_space_id:
+                # Service whose responsibility matches this occupant code.
+                related_service = Services.filter_by_one(
+                    related_responsibility=str(occupant_type.occupant_code),
+                    **Services.valid_record_dict,
+                ).data
+                if not related_service:
+                    raise HTTPException(
+                        status_code=status.HTTP_404_NOT_FOUND,
+                        detail=f"Service is not found for {occupant_type.occupant_code}",
+                    )
+
+                decision_build = Build.filter_one(
+                    Build.id == decision_book.build_id,
+                ).data
+                management_room = decision_build.management_room
+                if not management_room:
+                    raise HTTPException(
+                        status_code=status.HTTP_404_NOT_FOUND,
+                        detail=f"Management Room is not found in {decision_build.build_name}",
+                    )
+
+                # New living space in the management room for the same person,
+                # valid for the meeting day (23 hours from meeting date).
+                living_space = BuildLivingSpace.filter_one(
+                    BuildLivingSpace.id == build_living_space_id,
+                ).data
+                expiry_ends = str(
+                    system_arrow.get(decision_book.meeting_date).shift(hours=23)
+                )
+                expiry_starts = str(system_arrow.get(decision_book.meeting_date))
+                related_living_space = BuildLivingSpace.find_or_create(
+                    build_parts_id=management_room.id,
+                    build_parts_uu_id=str(management_room.uu_id),
+                    occupant_type=occupant_type.id,
+                    occupant_type_uu_id=str(occupant_type.uu_id),
+                    person_id=living_space.person_id,
+                    person_uu_id=str(living_space.person_uu_id),
+                    expiry_starts=expiry_starts,
+                    expiry_ends=expiry_ends,
+                )
+                # Service binding stays valid for 15 days after the meeting.
+                expires_at = str(
+                    system_arrow.get(decision_book.meeting_date).shift(days=15)
+                )
+                related_living_space.save_and_confirm()
+                ServiceBindOccupantEventMethods.bind_services_occupant_system(
+                    build_living_space_id=related_living_space.id,
+                    service_id=related_service.id,
+                    expires_at=expires_at,
+                )
+            return person_occupants
+        return
+
+    def get_occupant_types(self):
+        """Return this person's occupant-type rows, or None when there are none."""
+        if occupants := BuildDecisionBookPersonOccupants.filter_all(
+            BuildDecisionBookPersonOccupants.build_decision_book_person_id == self.id,
+        ).data:
+            return occupants
+        return
+
+    def check_occupant_type(self, occupant_type):
+        """Raise HTTPException 404 unless this person holds an active, confirmed *occupant_type*."""
+        book_person_occupant_type = BuildDecisionBookPersonOccupants.filter_one(
+            BuildDecisionBookPersonOccupants.build_decision_book_person_id == self.id,
+            BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type.id,
+            BuildDecisionBookPersonOccupants.active == True,
+            BuildDecisionBookPersonOccupants.is_confirmed == True,
+        ).data
+        if not book_person_occupant_type:
+            raise HTTPException(
+                status_code=status.HTTP_404_NOT_FOUND,
+                detail=f"Occupant Type : {occupant_type.occupant_code} is not found in "
+                f"Decision Book Person UUID {self.uu_id}",
+            )
+
+
+class BuildDecisionBookPersonOccupants(CrudCollection):
+    """
+    Occupant types held by a decision-book attendee (e.g. president, writer,
+    manager) for a given invitation.  One row per unique
+    (decision-book person, occupant type) pair.
+    """
+
+    __tablename__ = "build_decision_book_person_occupants"
+    __exclude__fields__ = []
+
+    build_decision_book_person_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_person.id"), nullable=False
+    )
+    build_decision_book_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Person UUID"
+    )
+    invite_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_invitations.id"), nullable=True
+    )
+    invite_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Invite UUID"
+    )
+
+    occupant_type_id: Mapped[int] = mapped_column(
+        ForeignKey("occupant_types.id"), nullable=False
+    )
+    occupant_type_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Occupant UUID"
+    )
+
+    __table_args__ = (
+        Index(
+            "_build_decision_book_person_occupants_ndx_01",
+            build_decision_book_person_id,
+            occupant_type_id,
+            unique=True,
+        ),
+        {"comment": "Occupant Types of People that are attended to building meetings."},
+    )
+
+
+class BuildDecisionBookItems(CrudCollection):
+ """
+ Builds class based on declarative_base and BaseMixin via session
+ item_commentary = metine itiraz şerh maddesi için
+ item_order = maddelerin sıralanma numarası
+ item_objection = maddelerin itiraz şerhi Text şeklinde
+ """
+
+ __tablename__ = "build_decision_book_items"
+ __exclude__fields__ = []
+
+ item_order: Mapped[int] = mapped_column(
+ SmallInteger, nullable=False, comment="Order Number of Item"
+ )
+ item_comment: Mapped[str] = mapped_column(
+ Text, nullable=False, comment="Comment Content"
+ )
+ item_objection: Mapped[str] = mapped_column(
+ Text, nullable=True, comment="Objection Content"
+ )
+ info_is_completed: Mapped[bool] = mapped_column(
+ Boolean, server_default="0", comment="Info process is Completed"
+ )
+ is_payment_created: Mapped[bool] = mapped_column(
+ Boolean, server_default="0", comment="Are payment Records Created"
+ )
+
+ info_type_id: Mapped[int] = mapped_column(
+ ForeignKey("api_enum_dropdown.id"), nullable=True
+ )
+ info_type_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Info Type UUID"
+ )
+
+ build_decision_book_id: Mapped[int] = mapped_column(
+ ForeignKey("build_decision_book.id"), nullable=False
+ )
+ build_decision_book_uu_id: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Decision Book UUID"
+ )
+ item_short_comment: Mapped[str] = mapped_column(
+ String(24),
+ nullable=True,
+ comment="This field is reserved for use in grouping data or in the pivot heading.",
+ )
+
+ decision_books: Mapped["BuildDecisionBook"] = relationship(
+ "BuildDecisionBook",
+ back_populates="decision_book_items",
+ foreign_keys=[build_decision_book_id],
+ )
+ decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
+ "BuildDecisionBookProjects",
+ back_populates="build_decision_book_item",
+ foreign_keys="BuildDecisionBookProjects.build_decision_book_item_id",
+ )
+
+ __table_args__ = (
+ Index("_build_decision_book_item_ndx_01", build_decision_book_id),
+ Index(
+ "_build_decision_book_item_ndx_02",
+ build_decision_book_id,
+ item_order,
+ unique=True,
+ ),
+ {
+ "comment": "Decision Book Items that are related to decision taken at building meetings"
+ },
+ )
+
+ @classmethod
+ def select_action(cls, duty_id, token=None):
+ from databases import (
+ Build,
+ Companies,
+ )
+
+ related_companies = Companies.select_action(duty_id_list=[duty_id])
+ related_companies_ids = list(
+ related_.id for related_ in related_companies.all()
+ )
+ related_building = Build.query.filter(
+ Build.company_id.in_(related_companies_ids)
+ )
+ related_building_ids = list(related_.id for related_ in related_building.all())
+ related_decision_books = BuildDecisionBook.query.filter(
+ BuildDecisionBook.build_id.in_(related_building_ids)
+ )
+ related_decision_books_ids = list(
+ related_.id for related_ in related_decision_books.all()
+ )
+ return cls.query.filter(
+ cls.build_decision_book_id.in_(related_decision_books_ids)
+ )
+
+ @classmethod
+ def create_action(cls, data: InsertBuildDecisionBookItems, token):
+ data_dict = data.dump()
+ BuildDecisionBook.pre_query = BuildDecisionBook.select_action(
+ duty_id=token.duty_list["duty_id"]
+ )
+ cls.pre_query = cls.select_action(duty_id=token.duty_list["duty_id"])
+ if decision_book := BuildDecisionBook.filter_one(
+ BuildDecisionBook.uu_id == data.build_decision_book_uu_id
+ ).data:
+ found_dict = dict(
+ item_order=data.item_order, build_decision_book_id=decision_book.id
+ )
+ if decision_book_is_already := cls.find_one(**found_dict):
+ decision_book_is_already.is_found = True
+ return decision_book_is_already.get_dict()
+ data_dict["build_decision_book_id"] = decision_book.id
+ data_dict["is_confirmed"] = True
+ del data_dict["build_decision_book_uu_id"]
+ return BuildDecisionBookItems.find_or_create(**data_dict)
+
+ @classmethod
+ def check_meeting_is_valid_to_start_add_attendance(cls, decision_book, token_dict):
+ from databases import (
+ People,
+ OccupantTypes,
+ )
+
+ active_invite = (
+ BuildDecisionBookInvitations.check_invites_are_ready_for_meeting(
+ selected_decision_book=decision_book,
+ token_dict=token_dict,
+ )
+ )
+ occupant_type_required_list = ("MT-PRS", "MT-WRT", "BU-MNG", "BU-SPV")
+ occupant_type_list = OccupantTypes.filter_all(
+ OccupantTypes.occupant_code.in_(occupant_type_required_list),
+ system=True,
+ ).data
+ # active_invite = invitations[1] if invitations[1] else invitations[0]
+ invitation = BuildDecisionBookInvitations.filter_one(
+ BuildDecisionBookInvitations.id == active_invite.id
+ ).data
+ people_book_attend_count = None
+ if int(invitation.invitation_attempt) == 1:
+ people_book_attend_is_attending = BuildDecisionBookPerson.filter_all(
+ BuildDecisionBookPerson.invite_id == invitation.id,
+ BuildDecisionBookPerson.is_attending == True,
+ )
+ people_book_attend = BuildDecisionBookPersonOccupants.filter_all(
+ BuildDecisionBookPersonOccupants.build_decision_book_person_id.in_(
+ [person.id for person in people_book_attend_is_attending.data]
+ ),
+ BuildDecisionBookPersonOccupants.occupant_type_id.in_(
+ [occupant_type.id for occupant_type in occupant_type_list]
+ ),
+ )
+ people_book_attend_count = people_book_attend.count
+ if not people_book_attend_count == len(occupant_type_required_list) - 1:
+ error_detail = " - ".join(occupant_type_required_list)
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail=f"{error_detail} occupant types must be attend to meeting. "
+ f"Check attendants and try again",
+ )
+
+ comment = (
+ lambda id_, occ_type, full_name: f"{full_name} is nomindated for {occ_type} at Meeting Invite Code : {id_}"
+ )
+ book_items_dict = dict(
+ build_decision_book_id=decision_book.id,
+ build_decision_book_uu_id=str(decision_book.uu_id),
+ is_confirmed=True,
+ active=True,
+ is_payment_created=True,
+ )
+ occupant_type_pre = OccupantTypes.filter_by_one(
+ system=True, occupant_code="MT-PRS", occupant_category_type="MT"
+ ).data
+ occupant_type_wrt = OccupantTypes.filter_by_one(
+ system=True, occupant_code="MT-WRT", occupant_category_type="MT"
+ ).data
+ occupant_type_mng = OccupantTypes.filter_by_one(
+ system=True, occupant_code="BU-MNG", occupant_category_type="BU"
+ ).data
+
+ person_occupants_pre = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.invite_id == invitation.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type_pre.id,
+ ).data
+ person_invite_pret = BuildDecisionBookPerson.filter_one(
+ BuildDecisionBookPerson.id
+ == person_occupants_pre.build_decision_book_person_id
+ ).data
+ person = People.filter_one(People.id == person_invite_pret.person_id).data
+ created_attendance = BuildDecisionBookItems.find_or_create(
+ **book_items_dict,
+ item_order=1,
+ item_comment=comment(
+ id_=person_invite_pret.invite_uu_id,
+ occ_type=occupant_type_pre.occupant_type,
+ full_name=person.full_name,
+ ),
+ )
+ created_attendance.save_and_confirm()
+
+ person_occupants_wrt = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.invite_id == invitation.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type_wrt.id,
+ ).data
+ person_invite_wrt = BuildDecisionBookPerson.filter_one(
+ BuildDecisionBookPerson.id
+ == person_occupants_wrt.build_decision_book_person_id
+ ).data
+ person = People.filter_one(People.id == person_invite_pret.person_id).data
+ created_attendance = BuildDecisionBookItems.find_or_create(
+ **book_items_dict,
+ item_order=2,
+ item_comment=comment(
+ id_=person_invite_wrt.invite_uu_id,
+ occ_type=occupant_type_wrt.occupant_type,
+ full_name=person.full_name,
+ ),
+ )
+ created_attendance.save_and_confirm()
+
+ person_occupants_mng = BuildDecisionBookPersonOccupants.filter_one(
+ BuildDecisionBookPersonOccupants.invite_id == invitation.id,
+ BuildDecisionBookPersonOccupants.occupant_type_id == occupant_type_mng.id,
+ ).data
+ person_invite_mng = BuildDecisionBookPerson.filter_one(
+ BuildDecisionBookPerson.id
+ == person_occupants_mng.build_decision_book_person_id
+ ).data
+ person = People.filter_one(People.id == person_invite_pret.person_id).data
+ created_attendance = BuildDecisionBookItems.find_or_create(
+ **book_items_dict,
+ item_order=3,
+ item_comment=comment(
+ id_=person_invite_mng.invite_uu_id,
+ occ_type=occupant_type_mng.occupant_type,
+ full_name=person.full_name,
+ ),
+ )
+ created_attendance.save_and_confirm()
+ return people_book_attend_count
+
+
+class BuildDecisionBookItemsUnapproved(CrudCollection):
+    """
+    ORM model for objections raised against decision book items.
+
+    Each row records one person's (partial or complete) disapproval of a
+    single decision book item, together with its display order.
+    Builds class based on declarative_base and BaseMixin via session.
+    """
+
+    __tablename__ = "build_decision_book_items_unapproved"
+    __exclude__fields__ = []
+
+    # Free-text content of the objection.
+    item_objection: Mapped[str] = mapped_column(
+        Text, nullable=False, comment="Objection Content"
+    )
+    # Ordering of this objection among the item's objections.
+    item_order: Mapped[int] = mapped_column(
+        SmallInteger, nullable=False, comment="Order Number"
+    )
+
+    # NOTE(review): decision_book_item_id and build_decision_book_item below
+    # both reference build_decision_book_items.id — looks like a duplicated
+    # foreign key; confirm whether one of the pairs can be retired.
+    decision_book_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item"
+    )
+    # Person raising the objection.
+    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
+    person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Person UUID"
+    )
+    build_decision_book_item: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item UUID"
+    )
+
+    __table_args__ = (
+        Index("_build_decision_book_item_unapproved_ndx_01", build_decision_book_item),
+        {
+            "comment": "People that are unapproved partially or completely in decision book items"
+        },
+    )
+
+
+class BuildDecisionBookPayments(CrudCollection):
+    """
+    ORM model for payment rows generated from decision book items.
+
+    One row is a single scheduled payment (amount + due date) for a
+    building part, optionally tied to a project and an account record.
+    Builds class based on declarative_base and BaseMixin via session.
+    period_time = to_char(NEW.process_date, 'YYYY-MM');  -- filled by a DB trigger
+    """
+
+    __tablename__ = "build_decision_book_payments"
+    __exclude__fields__ = []
+    # Enum wiring: column "receive_debit" is backed by enum class DebitTypes,
+    # default member "D".
+    __enum_list__ = [("receive_debit", "DebitTypes", "D")]
+
+    # Periodicity code of the payment plan (e.g. monthly); see producers.
+    payment_plan_time_periods: Mapped[str] = mapped_column(
+        String(10), nullable=False, comment="Payment Plan Time Periods"
+    )
+    process_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Payment Due Date"
+    )
+    payment_amount: Mapped[float] = mapped_column(
+        Numeric(16, 2), nullable=False, comment="Payment Amount"
+    )
+    # ISO-style currency code; defaults to Turkish Lira.
+    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
+
+    payment_types_id: Mapped[int] = mapped_column(
+        ForeignKey("api_enum_dropdown.id"), nullable=True
+    )
+    payment_types_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Dues Type UUID"
+    )
+
+    # Denormalized period fields ("YYYY-MM", year, month) derived from
+    # process_date — see the trigger note in the class docstring.
+    period_time: Mapped[str] = mapped_column(String(12))
+    process_date_y: Mapped[int] = mapped_column(SmallInteger)
+    process_date_m: Mapped[int] = mapped_column(SmallInteger)
+
+    build_decision_book_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"),
+        nullable=False,
+        comment="Build Decision Book Item ID",
+    )
+    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Decision Book Item UUID"
+    )
+    # build_decision_book_id: Mapped[int] = mapped_column(
+    #     ForeignKey("build_decision_book.id"), nullable=True
+    # )
+    # build_decision_book_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=True, comment="Decision Book UUID"
+    # )
+    # The building part this payment is charged to.
+    build_parts_id: Mapped[int] = mapped_column(
+        ForeignKey("build_parts.id"), nullable=False
+    )
+    build_parts_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Build Part UUID"
+    )
+    decision_book_project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id"),
+        nullable=True,
+        comment="Decision Book Project ID",
+    )
+    decision_book_project_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Project UUID"
+    )
+    # Set once the payment is matched to an accounting record.
+    account_records_id: Mapped[int] = mapped_column(
+        ForeignKey("account_records.id"), nullable=True
+    )
+    account_records_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Account Record UU ID"
+    )
+
+    # budget_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"), nullable=True)
+    # budget_records_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=True, comment="Budget UUID"
+    # )
+    # accounting_id: Mapped[int] = mapped_column(ForeignKey("account_detail.id"), nullable=True)
+    # accounting_uu_id: Mapped[str] = mapped_column(
+    #     String, nullable=True, comment="Accounting UUID"
+    # )
+    # receive_debit_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
+    # receive_debit_uu_id: Mapped[str] = mapped_column(String, nullable=True, comment="Debit UUID")
+
+    # accounting: Mapped["AccountDetail"] = relationship(
+    #     "AccountDetail",
+    #     back_populates="decision_book_payment_detail",
+    #     foreign_keys=[accounting_id],
+    # )
+    #
+    # decision_book_master: Mapped["BuildDecisionBookPaymentsMaster"] = relationship(
+    #     "BuildDecisionBookPaymentsMaster",
+    #     back_populates="decision_book_payment_detail",
+    #     foreign_keys=[build_decision_book_payments_master_id],
+    # )
+    # budget_records: Mapped["CompanyBudgetRecords"] = relationship(
+    #     "CompanyBudgetRecords",
+    #     back_populates="decision_book_payment_detail",
+    #     foreign_keys=[budget_records_id],
+    # )
+
+    __table_args__ = (
+        # Uniqueness: one payment row per (item, part, plan period, due
+        # date, payment type, account record).
+        Index(
+            "build_decision_book_payments_detail_ndx_00",
+            build_decision_book_item_id,
+            build_parts_id,
+            payment_plan_time_periods,
+            process_date,
+            payment_types_id,
+            account_records_id,
+            unique=True,
+        ),
+        Index("build_decision_book_payments_detail_ndx_01", account_records_id),
+        {"comment": "Payment Details of Decision Book Payments"},
+    )
+
+
+class BuildDecisionBookLegal(CrudCollection):
+    """
+    ORM model for legal proceedings attached to a decision book item.
+
+    Builds class based on declarative_base and BaseMixin via session.
+    lawsuits_type: "C" = Court (tr: mahkeme), "M" = mediator (tr: arabulucu)
+    """
+
+    __tablename__ = "build_decision_book_legal"
+    __exclude__fields__ = []
+
+    period_start_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Start Date of Legal Period"
+    )
+    lawsuits_decision_number: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Lawsuits Decision Number"
+    )
+    lawsuits_decision_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Lawsuits Decision Date"
+    )
+
+    # Far-future sentinel default means "still open".
+    period_stop_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
+    )
+    decision_book_pdf_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True
+    )
+    resp_company_total_wage: Mapped[float] = mapped_column(
+        Numeric(10, 2), server_default="0", nullable=True
+    )
+    contact_agreement_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True
+    )
+    # Epoch-like sentinel default means "not yet agreed".
+    contact_agreement_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", nullable=True
+    )
+    meeting_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
+    )
+    # "C" = court case, "M" = mediation (see class docstring).
+    lawsuits_type: Mapped[str] = mapped_column(String(1), server_default="C")
+    lawsuits_name: Mapped[str] = mapped_column(String(128))
+    lawsuits_note: Mapped[str] = mapped_column(String(512))
+    # Cost breakdown of the proceeding, all in Numeric(20, 2).
+    lawyer_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    mediator_lawyer_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    other_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    legal_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    approved_cost: Mapped[float] = mapped_column(Numeric(20, 2))
+    total_price: Mapped[float] = mapped_column(Numeric(20, 2))
+
+    build_db_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    build_db_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item UUID"
+    )
+    # Attorney responsible for the case and their firm.
+    resp_attorney_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=False
+    )
+    resp_attorney_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Attorney UUID"
+    )
+    resp_attorney_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
+    resp_attorney_company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Company UUID"
+    )
+    mediator_lawyer_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
+    mediator_lawyer_person_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Mediator Lawyer UUID"
+    )
+
+    __table_args__ = (
+        Index("_build_decision_book_legal_ndx_00", meeting_date),
+        {
+            "comment": "Legal items related to decision book items recoreded at building meetings"
+        },
+    )
+
+
+class BuildDecisionBookProjects(CrudCollection):
+    """
+    ORM model for projects decided on in a building's decision book.
+
+    Builds class based on declarative_base and BaseMixin via session.
+    project_type = "C" / "M" single-letter code (docstring originally noted
+    court vs. mediator, mirroring BuildDecisionBookLegal — confirm semantics
+    for projects).
+    """
+
+    __tablename__ = "build_decision_book_projects"
+    __exclude__fields__ = []
+
+    project_no: Mapped[str] = mapped_column(
+        String(12), nullable=True, comment="Project Number of Decision Book"
+    )
+    project_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Project Name"
+    )
+    project_start_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), nullable=False, comment="Project Start Date"
+    )
+    # Far-future sentinel default means "no planned end".
+    project_stop_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="2099-12-31 23:59:59"
+    )
+    project_type: Mapped[str] = mapped_column(String, server_default="C")
+    project_note: Mapped[str] = mapped_column(Text)
+
+    decision_book_pdf_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True
+    )
+    is_completed: Mapped[bool] = mapped_column(
+        Boolean, server_default="0", comment="Project is Completed"
+    )
+    status_code: Mapped[int] = mapped_column(SmallInteger, nullable=True)
+    resp_company_fix_wage: Mapped[float] = mapped_column(
+        Numeric(10, 2), server_default="0"
+    )
+    is_out_sourced: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+    meeting_date: Mapped[TIMESTAMP] = mapped_column(
+        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00", index=True
+    )
+    currency: Mapped[str] = mapped_column(String(8), server_default="TRY")
+    # Price lifecycle: bid -> approved -> final.
+    bid_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
+    approved_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
+    final_price: Mapped[float] = mapped_column(Numeric(16, 4), server_default="0")
+
+    contact_id: Mapped[int] = mapped_column(
+        ForeignKey("contracts.id"), nullable=True, comment="Contract id"
+    )
+    contact_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Contract UUID"
+    )
+    build_decision_book_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book.id"), nullable=False
+    )
+    build_decision_book_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book UUID"
+    )
+    build_decision_book_item_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_items.id"), nullable=False
+    )
+    build_decision_book_item_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Item UUID"
+    )
+    project_response_living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"),
+        nullable=True,
+        comment="Project Response Person ID",
+    )
+    project_response_living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Project Response Person UUID"
+    )
+    resp_company_id: Mapped[int] = mapped_column(
+        ForeignKey("companies.id"), nullable=True
+    )
+    resp_company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Company UUID"
+    )
+
+    build_decision_book_item: Mapped["BuildDecisionBookItems"] = relationship(
+        "BuildDecisionBookItems",
+        back_populates="decision_book_project",
+        foreign_keys=[build_decision_book_item_id],
+    )
+
+    @classmethod
+    def select_action(cls, duty_id, token=None):
+        """Return a query of projects visible to *duty_id*.
+
+        Walks the access chain duty -> companies -> buildings ->
+        decision books -> decision book items and restricts projects to
+        those hanging off an item in that chain.
+        """
+        from databases import (
+            Build,
+            Companies,
+        )
+
+        # NOTE(review): Companies.select_action is consumed via .all() while
+        # the filter_all results use .data — confirm both APIs are intended.
+        related_companies = Companies.select_action(duty_id_list=[duty_id])
+        related_companies_ids = list(
+            related_.id for related_ in related_companies.all()
+        )
+        related_building = Build.filter_all(Build.company_id.in_(related_companies_ids))
+        related_building_ids = list(related_.id for related_ in related_building.data)
+        related_decision_books = BuildDecisionBook.filter_all(
+            BuildDecisionBook.build_id.in_(related_building_ids),
+        ).data
+        related_decision_books_ids = list(
+            related_.id for related_ in related_decision_books
+        )
+        related_decision_books_items = BuildDecisionBookItems.filter_all(
+            BuildDecisionBookItems.build_decision_book_id.in_(
+                related_decision_books_ids
+            ),
+        ).data
+        related_decision_books_items_ids = list(
+            related_.id for related_ in related_decision_books_items
+        )
+        return cls.filter_all(
+            cls.build_decision_book_item_id.in_(related_decision_books_items_ids),
+        ).query
+
+    @classmethod
+    def create_action(cls, data: InsertBuildDecisionBookProjects, token=None):
+        """Create (or find) a project from a validated insert payload.
+
+        Resolves the UUIDs in *data* to numeric FK ids (aborting via
+        find_one_or_abort when the item/person is outside the caller's
+        duty scope), then strips the UUID keys before persisting.
+        """
+        from databases import (
+            People,
+            Companies,
+        )
+
+        data_dict = data.dump()
+        # Scope subsequent lookups to the caller's duty before resolving.
+        BuildDecisionBookItems.pre_query = BuildDecisionBookItems.select_action(
+            duty_id=token.duty_list["duty_id"]
+        )
+        People.pre_query = People.select_action(
+            duty_id_list=[token.duty_list["duty_id"]]
+        )
+        decision_book_project_item = BuildDecisionBookItems.find_one_or_abort(
+            uu_id=data_dict.get("build_decision_book_item_uu_id")
+        )
+        project_response_person = People.find_one_or_abort(
+            uu_id=data_dict.get("project_response_person_uu_id")
+        )
+        data_dict["build_decision_book_item_id"] = decision_book_project_item.id
+        data_dict["project_response_person_id"] = project_response_person.id
+        if data.resp_company_uu_id:
+            # NOTE(review): find_one (not *_or_abort) — presumably never None
+            # for a valid UUID; a miss would raise AttributeError here.
+            resp_company = Companies.find_one(uu_id=data.resp_company_uu_id)
+            data_dict["resp_company_id"] = resp_company.id
+        # Drop UUID keys: only numeric FK ids are stored on the row.
+        del (
+            data_dict["build_decision_book_item_uu_id"],
+            data_dict["project_response_person_uu_id"],
+        )
+        del data_dict["resp_company_uu_id"]
+        data_dict["is_confirmed"] = True
+        return cls.find_or_create(**data_dict)
+
+    __table_args__ = (
+        # A project number may be reused only with a different start date.
+        Index(
+            "_build_decision_book_project_ndx_00",
+            project_no,
+            project_start_date,
+            unique=True,
+        ),
+        {
+            "comment": "Project related to decision taken at building meetings on book items"
+        },
+    )
+
+    @property
+    def get_project_year(self):
+        """Year of the decision book period this project belongs to."""
+        return self.decision_book_items.decision_books.period_start_date.year
+
+    @property
+    def get_project_no(self):
+        """Display number "<year>-<last 4 digits of id, zero-padded>"."""
+        return f"{self.get_project_year}-{str(self.id)[-4:].zfill(4)}"
+
+
+class BuildDecisionBookProjectPerson(CrudCollection):
+    """
+    ORM model linking a living space (occupant) to a decision book project,
+    with the pricing/discount terms agreed for that participant.
+
+    Builds class based on declarative_base and BaseMixin via session.
+    """
+
+    __tablename__ = "build_decision_book_project_person"
+    __exclude__fields__ = []
+    # __enum_list__ = [("management_typecode", "ProjectTeamTypes", "PTT-EMP")]
+
+    # Percentage discount on dues granted for this project participation.
+    dues_percent_discount: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    job_fix_wage: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+    bid_price: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+    decision_price: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+
+    build_decision_book_project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id"), nullable=False
+    )
+    build_decision_book_project_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Project UUID"
+    )
+    living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=False
+    )
+    living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Living Space UUID"
+    )
+
+    __table_args__ = (
+        {"comment": "People that are attended to building project meetings."},
+    )
+
+
+class BuildDecisionBookProjectItems(CrudCollection):
+    """
+    ORM model for the individual line items of a decision book project
+    (header, body text, optional attachment and estimated cost).
+
+    Builds class based on declarative_base and BaseMixin via session.
+    """
+
+    __tablename__ = "build_decision_book_project_items"
+    __exclude__fields__ = []
+
+    item_header: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Item Header"
+    )
+    item_comment: Mapped[str] = mapped_column(
+        Text, nullable=False, comment="Item Comment"
+    )
+    attachment_pdf_path: Mapped[str] = mapped_column(
+        String, server_default="", nullable=True, comment="Attachment PDF Path"
+    )
+    item_estimated_cost: Mapped[float] = mapped_column(
+        Numeric(16, 2), server_default="0", comment="Estimated Cost"
+    )
+    # Short label intended for grouping / pivot headings.
+    item_short_comment: Mapped[str] = mapped_column(
+        String(24),
+        nullable=True,
+        comment="This field is reserved for use in grouping data or in the pivot heading.",
+    )
+
+    build_decision_book_project_id: Mapped[int] = mapped_column(
+        ForeignKey("build_decision_book_projects.id"), nullable=False
+    )
+    build_decision_book_project_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Decision Book Project UUID"
+    )
+
+    __table_args__ = (
+        {"comment": "Project Items related to decision taken at building meetings"},
+    )
+
+
+#
+# class BuildDecisionBookPaymentsMaster(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_payments_master"
+# __exclude__fields__ = []
+# __enum_list__ = [("dues_types", "BuildDuesTypes", "D")]
+#
+# payment_plan_time_periods = mapped_column(
+# String(8), nullable=False, comment="Payment Plan Time Periods"
+# )
+# default_payment_amount = mapped_column(
+# Numeric(20, 2), nullable=False, comment="Default Payment Amount"
+# )
+#
+# dues_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
+# dues_types_uu_id = mapped_column(String, nullable=True, comment="Dues Type UUID")
+# build_decision_book_item_debits_id = mapped_column(
+# ForeignKey("build_decision_book_item_debits.id"), nullable=False
+# )
+# build_decision_book_item_debits_uu_id = mapped_column(
+# String, nullable=True, comment="Decision Book Item Debit UUID"
+# )
+# build_parts_id: Mapped[int] = mapped_column(ForeignKey("build_parts.id"), nullable=False)
+# build_parts_uu_id = mapped_column(String, nullable=True, comment="Build Part UUID")
+#
+# # decision_books_item_debits: Mapped["BuildDecisionBookItemDebits"] = relationship(
+# # "BuildDecisionBookItemDebits",
+# # back_populates="decision_book_payment_masters",
+# # foreign_keys=[build_decision_book_item_debits_id],
+# # )
+# # parts: Mapped["BuildParts"] = relationship(
+# # "BuildParts",
+# # back_populates="decision_book_payment_master",
+# # foreign_keys=[build_parts_id],
+# # )
+# # decision_book_payment_detail: Mapped[List["BuildDecisionBookPaymentsDetail"]] = (
+# # relationship(
+# # "BuildDecisionBookPaymentsDetail",
+# # back_populates="decision_book_master",
+# # foreign_keys="BuildDecisionBookPaymentsDetail.build_decision_book_payments_master_id",
+# # )
+# # )
+#
+# __table_args__ = (
+# Index(
+# "_build_decision_book_payments_master_ndx_00",
+# build_decision_book_item_debits_id,
+# build_parts_id,
+# dues_types_id,
+# unique=True,
+# ),
+# {
+# "comment": "Master Payment Items related to decision taken at building meetings"
+# },
+# )
+#
+# # @classmethod
+# # def pay_dues_of_build_part(
+# # cls,
+# # budget_records_id,
+# # build_decision_book_id,
+# # build_parts_id,
+# # start_date,
+# # paid_value,
+# # is_all=False,
+# # is_limited=False,
+# # ):
+# #
+# # book_payment_master = cls.find_one(
+# # build_decision_book_id=build_decision_book_id,
+# # build_parts_id=build_parts_id,
+# # dues_types=BuildDuesTypes.D.name,
+# # )
+# # paid_amount = 0
+# # if book_payment_master:
+# # month_start_date = (
+# # find_first_day_of_month(start_date)
+# # if not is_all
+# # else datetime(1900, 1, 1)
+# # )
+# # last_date = (
+# # find_last_day_of_month(start_date) if not is_limited else start_date
+# # )
+# # payment_dues, count = BuildDecisionBookPaymentsDetail.filter(
+# # and_(
+# # BuildDecisionBookPaymentsDetail.build_decision_book_payments_master_id
+# # == book_payment_master.id,
+# # BuildDecisionBookPaymentsDetail.process_date >= month_start_date,
+# # BuildDecisionBookPaymentsDetail.process_date <= last_date,
+# # )
+# # )
+# # period_amount = {}
+# # for payment_due in payment_dues:
+# # if payment_due.period_time not in period_amount:
+# # period_amount[payment_due.period_time] = 0
+# # period_amount[payment_due.period_time] += float(
+# # payment_due.payment_amount
+# # )
+# # paid_amount += payment_due.payment_amount
+# # print(
+# # "period_amount",
+# # period_amount,
+# # "paid_amount",
+# # paid_amount,
+# # "paid_value",
+# # paid_value,
+# # )
+# # if paid_amount > 0:
+# # return float(paid_value)
+# # period_amounts = sorted(
+# # period_amount.items(), key=lambda x: x[0], reverse=False
+# # )
+# # for period_amount in period_amounts:
+# # if period_amount[1] >= 0:
+# # continue
+# # if not paid_value > 0:
+# # break
+# # if budget_record := CompanyBudgetRecords.find_one(id=budget_records_id):
+# # debit_to_pay = abs(float(period_amount[1]))
+# # debit_to_pay = (
+# # paid_value if debit_to_pay > paid_value else debit_to_pay
+# # )
+# # budget_record.remainder_balance = float(debit_to_pay) + float(
+# # budget_record.remainder_balance
+# # )
+# # budget_record.save()
+# # BuildDecisionBookPaymentsDetail.find_or_create(
+# # build_decision_book_payments_master_id=book_payment_master.id,
+# # budget_records_id=budget_records_id,
+# # process_date=str(start_date),
+# # receive_debit=DebitTypes.R.name,
+# # period_time=str(period_amount[0]),
+# # process_date_y=str(period_amount[0]).split("-")[0],
+# # process_date_m=str(period_amount[0]).split("-")[1],
+# # payment_amount=abs(debit_to_pay),
+# # )
+# # paid_value = float(paid_value) - float(debit_to_pay)
+# # return float(paid_value)
+# #
+
+#
+# class BuildDecisionBookItemDebits(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# dues_values = due_key, due_value
+# """
+#
+# __tablename__ = "build_decision_book_item_debits"
+# __exclude__fields__ = []
+# __enum_list__ = [("dues_types", "BuildDuesTypes", "D")]
+#
+# dues_types_id: Mapped[int] = mapped_column(ForeignKey("api_enum_dropdown.id"), nullable=True)
+# dues_types_uu_id = mapped_column(String, nullable=True, comment="Dues Type UUID")
+# # dues_values = mapped_column(
+# # MutableDict.as_mutable(JSONB()),
+# # nullable=False,
+# # comment="Due Part Key Description of inner parts",
+# # )
+# flat_type = mapped_column(
+# String, nullable=True, comment="Flat Type of Building Part"
+# )
+# flat_payment = mapped_column(
+# Numeric(20, 2), nullable=True, comment="Flat Payment Amount"
+# )
+# decision_taken: Mapped[bool] = mapped_column(Boolean, server_default="0")
+#
+# build_decision_book_item_id = mapped_column(
+# ForeignKey("build_decision_book_items.id"), nullable=False
+# )
+# build_decision_book_item_uu_id = mapped_column(
+# String, nullable=True, comment="Decision Book Item UUID"
+# )
+#
+# @classmethod
+# def select_action(cls, duty_id, token=None):
+# from database_sql_models import Companies
+#
+# related_companies = Companies.select_action(duty_id=duty_id)
+# related_companies_ids = list(
+# related_.id for related_ in related_companies.all()
+# )
+# related_building = Build.query.filter(
+# Build.company_id.in_(related_companies_ids)
+# )
+# related_building_ids = list(related_.id for related_ in related_building.all())
+# related_decision_books = BuildDecisionBook.query.filter(
+# BuildDecisionBook.build_id.in_(related_building_ids)
+# )
+# related_decision_books_ids = list(
+# related_.id for related_ in related_decision_books.all()
+# )
+# related_decision_books_items = BuildDecisionBookItems.query.filter(
+# BuildDecisionBookItems.build_decision_book_id.in_(
+# related_decision_books_ids
+# )
+# )
+# related_decision_books_items_ids = list(
+# related_.id for related_ in related_decision_books_items.all()
+# )
+# return cls.query.filter(
+# cls.build_decision_book_item_id.in_(related_decision_books_items_ids)
+# )
+#
+# @classmethod
+# def create_action(cls, data: InsertBuildDecisionBookItemDebits, token):
+# from database_sql_models import ApiEnumDropdown
+# from application.shared_functions import find_last_day_of_month
+#
+# data_dict = data.dump()
+# BuildDecisionBookItems.pre_query = BuildDecisionBookItems.select_action(
+# duty_id=token.duty_list["duty_id"]
+# )
+# cls.pre_query = cls.select_action(duty_id=token.duty_list["duty_id"])
+# if decision_book_item := BuildDecisionBookItems.find_one_or_abort(
+# uu_id=data.build_decision_book_item_uu_id
+# ):
+# data_dict["build_decision_book_item_id"] = decision_book_item.id
+# dues_values, payment_master_list = data_dict["dues_values"], []
+# data_dict["is_confirmed"] = True
+# del data_dict["build_decision_book_item_uu_id"]
+# item_debits = cls.find_or_create(**data_dict)
+# debit_dropdown = ApiEnumDropdown.find_one(
+# enum_class="DebitTypes", value="Debit"
+# )
+# for dues_key, dues_value in dues_values.items():
+# building_parts = decision_book_item.decision_books.buildings.parts
+# decision_book = decision_book_item.decision_books
+# for building_part in building_parts:
+# detail_list = []
+# if str(building_part.due_part_key) == str(dues_key):
+# book_master = BuildDecisionBookPaymentsMaster.create(
+# build_decision_book_item_debits_id=item_debits.id,
+# build_parts_id=building_part.id,
+# dues_types=debit_dropdown.uu_id,
+# payment_plan_time_periods="M",
+# default_payment_amount=dues_value,
+# is_confirmed=True,
+# )
+# if book_master:
+# start_date = decision_book.expiry_starts
+# while start_date <= decision_book.expiry_ends:
+# start_date = find_last_day_of_month(start_date)
+# data_detail = BuildDecisionBookPaymentsDetail.find_or_create(
+# build_decision_book_payments_master_id=book_master.id,
+# budget_records_id=None,
+# process_date=start_date,
+# receive_debit=debit_dropdown.uu_id,
+# period_time=start_date.strftime("%Y-%m"),
+# process_date_y=start_date.year,
+# process_date_m=start_date.month,
+# accounting_id=None,
+# payment_amount=float(dues_value) * -1,
+# is_confirmed=True,
+# )
+# start_date = start_date + timedelta(days=2)
+# detail_list.append(data_detail.get_dict())
+# payment_master_list.append(
+# {**book_master.get_dict(), "detail_list": detail_list}
+# )
+# return_dict = {
+# **item_debits.get_dict(),
+# "debit_lists": payment_master_list,
+# }
+# return return_dict
+#
+# __table_args__ = (
+# {
+# "comment": "Debits of Decision Book Items that are related to decision taken at building meetings"
+# },
+# )
+
+
+#
+# class BuildDecisionBookBudget(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_budget"
+#
+# item_order = mapped_column(SmallInteger, nullable=False, comment="Order Number")
+# budget_type = mapped_column(String, nullable=False, comment="Budget Type")
+# plan_value: Mapped[float] = mapped_column(Numeric(10, 2), nullable=False, comment="Plan Value")
+#
+# line_comment = mapped_column(String(32), server_default="")
+# process_date_y: Mapped[int] = mapped_column(SmallInteger)
+# process_date_m: Mapped[int] = mapped_column(SmallInteger)
+# process_date_w: Mapped[int] = mapped_column(SmallInteger)
+# period_time = mapped_column(String(12), server_default="")
+#
+# build_decision_book_id: Mapped[int] = mapped_column(ForeignKey("build_decision_book.id"))
+# accounting_id = mapped_column(ForeignKey("account_detail.id"))
+#
+# __table_args__ = (
+# Index("_build_decision_book_budget_ndx_01", accounting_id),
+# {"comment": "Budget Items related to decision taken at building meetings"},
+# )
+#
+#
+# class BuildDecisionBookBudgetItem(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_budget_item"
+# __exclude__fields__ = []
+#
+# paid_date = mapped_column(TIMESTAMP, nullable=False, comment="Payment Due Date")
+# period_time = mapped_column(String(12), server_default="")
+# paid_value: Mapped[float] = mapped_column(Numeric(10, 2), server_default="0")
+#
+# build_decision_book_budget_id = mapped_column(
+# ForeignKey("build_decision_book_budget.id"), nullable=False
+# )
+#
+# __table_args__ = (
+# Index(
+# "_build_decision_book_budget_item_ndx_01",
+# build_decision_book_budget_id,
+# paid_date,
+# ),
+# )
+#
+
+
+# buildings: Mapped["Build"] = relationship(
+# "Build", back_populates="decision_books", foreign_keys=[build_id]
+# )
+# companies: Mapped[List["Company"]] = relationship(
+# "Company", back_populates="decision_books", foreign_keys=[resp_company_id]
+# )
+# budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+# "CompanyBudgetRecords",
+# back_populates="decision_books",
+# foreign_keys="CompanyBudgetRecords.build_decision_book_id",
+# )
+# decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="decision_books",
+# foreign_keys="BuildDecisionBookItems.build_decision_book_id",
+# )
+#
+# decision_book_management: Mapped["BuildDecisionBookManagement"] = relationship(
+# "BuildDecisionBookManagement",
+# back_populates="decision_book",
+# foreign_keys="BuildDecisionBookManagement.build_decision_book_id",
+# )
+#
+# decision_book_people: Mapped[List["BuildDecisionBookPerson"]] = relationship(
+# "BuildDecisionBookPerson",
+# back_populates="decision_books",
+# foreign_keys="BuildDecisionBookPerson.build_decision_book_id",
+# )
+#
+# # decision_book_projects: Mapped[List["DecisionBookProjects"]] = relationship(
+# # "DecisionBookProjects",
+# # back_populates="decision_books",
+# # foreign_keys="DecisionBookProjects.build_decision_book_id",
+# # )
+# # decision_book_project_people: Mapped[List["BuildDecisionBookProjectPerson"]] = (
+# # relationship(
+# # "BuildDecisionBookProjectPerson",
+# # back_populates="decision_books",
+# # foreign_keys="BuildDecisionBookProjectPerson.build_decision_book_id",
+# # )
+# # )
+# decision_book_legal_people: Mapped["BuildDecisionBookProjectsLegal"] = relationship(
+# "BuildDecisionBookProjectsLegal",
+# back_populates="decision_books",
+# foreign_keys="BuildDecisionBookProjectsLegal.build_decision_book_id",
+# )
+#
+# decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship(
+# "BuildDecisionBookBudget",
+# back_populates="decision_book",
+# foreign_keys="BuildDecisionBookBudget.build_decision_book_id",
+# )
+
+# decision_book_items: Mapped[List["BuildDecisionBookItems"]] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="decision_book_item_debits",
+# foreign_keys=[build_decision_book_item_id],
+# )
+# decision_book_payment_masters: Mapped[List["BuildDecisionBookPaymentsMaster"]] = relationship(
+# "BuildDecisionBookPaymentsMaster",
+# back_populates="decision_books_item_debits",
+# foreign_keys="BuildDecisionBookPaymentsMaster.build_decision_book_item_debits_id",
+# )
+#
+# decision_books: Mapped["BuildDecisionBook"] = relationship(
+# "BuildDecisionBook",
+# back_populates="decision_book_items",
+# foreign_keys=[build_decision_book_id],
+# )
+# decision_book_item_debits: Mapped[List["BuildDecisionBookItemDebits"]] = (
+# relationship(
+# "BuildDecisionBookItemDebits",
+# back_populates="decision_book_items",
+# foreign_keys="BuildDecisionBookItemDebits.build_decision_book_item_id",
+# )
+# )
+# decision_book_projects: Mapped["DecisionBookProjects"] = relationship(
+# "DecisionBookProjects",
+# back_populates="decision_book_items",
+# foreign_keys="DecisionBookProjects.build_decision_book_item_id",
+# )
+# decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship(
+# "BuildDecisionBookLegal",
+# back_populates="decision_books_items",
+# foreign_keys="BuildDecisionBookLegal.build_db_item_id",
+# )
+#
+# build_decision_book_item_unapproved: Mapped[
+# List["BuildDecisionBookItemsUnapproved"]
+# ] = relationship(
+# "BuildDecisionBookItemsUnapproved",
+# back_populates="decision_book_items",
+# foreign_keys="BuildDecisionBookItemsUnapproved.build_decision_book_item",
+# )
+
+# decision_books_items: Mapped["BuildDecisionBookItems"] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="decision_book_legal",
+# foreign_keys=[build_db_item_id],
+# )
+# attorney_companies: Mapped["Companies"] = relationship(
+# "Company",
+# back_populates="decision_book_legal",
+# foreign_keys=[resp_attorney_company],
+# )
+# attorney_persons: Mapped["People"] = relationship(
+# "People",
+# back_populates="attorney_decision_book_legal",
+# foreign_keys=[resp_attorney_id],
+# )
+# lawyer_persons: Mapped["People"] = relationship(
+# "People",
+# back_populates="lawyer_decision_book_legal",
+# foreign_keys=[mediator_lawyer_person_id],
+# )
+
+# decision_books: Mapped["BuildDecisionBook"] = relationship(
+# "BuildDecisionBook",
+# back_populates="decision_book_people",
+# foreign_keys=[build_decision_book_id],
+# )
+# people: Mapped["People"] = relationship(
+# "People", back_populates="decision_book_people", foreign_keys=[person_id]
+# )
+
+# decision_book_budget: Mapped["BuildDecisionBookBudget"] = relationship(
+# "BuildDecisionBookBudget",
+# back_populates="decision_book_budget_item",
+# foreign_keys=[build_decision_book_budget_id],
+# )
+
+# accounting: Mapped["AccountDetail"] = relationship(
+# "AccountDetail",
+# back_populates="decision_book_budget",
+# foreign_keys=[accounting_id],
+# )
+# decision_book: Mapped["BuildDecisionBook"] = relationship(
+# "BuildDecisionBook",
+# back_populates="decision_book_budget",
+# foreign_keys=[build_decision_book_id],
+# )
+# decision_book_budget_item: Mapped["BuildDecisionBookBudgetItem"] = relationship(
+# "BuildDecisionBookBudgetItem",
+# back_populates="decision_book_budget",
+# foreign_keys="BuildDecisionBookBudgetItem.build_decision_book_budget_id",
+# )
+
+# decision_book_items: Mapped["BuildDecisionBookItems"] = relationship(
+# "BuildDecisionBookItems",
+# back_populates="build_decision_book_item_unapproved",
+# foreign_keys=[build_decision_book_item],
+# )
+#
+# peoples: Mapped["People"] = relationship(
+# "People",
+# back_populates="build_decision_book_item_unapproved",
+# foreign_keys=[person_id],
+# )
+#
+# class BuildDecisionBookInvitationsPerson(CrudCollection):
+# """
+# Builds class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "build_decision_book_invitations_person"
+# __exclude__fields__ = []
+#
+# invite_id = mapped_column(ForeignKey("build_decision_book_invitations.id"), nullable=False)
+# invite_uu_id = mapped_column(String, nullable=True, comment="Invite UUID")
+# person_id = mapped_column(ForeignKey("people.id"), nullable=False)
+# person_uu_id = mapped_column(String, nullable=False, comment="Person UUID")
+#
+# send_date = mapped_column(TIMESTAMP, nullable=False, comment="Confirmation Date")
+# is_confirmed: Mapped[bool] = mapped_column(Boolean, server_default="0", comment="Message is Confirmed")
+# confirmed_date = mapped_column(TIMESTAMP, nullable=True, comment="Confirmation Date")
+# token = mapped_column(String, server_default="", comment="Invitation Token")
+#
+# __table_args__ = (
+# Index(
+# "decision_book_invitations_person_ndx_01",
+# invite_id,
+# person_id,
+# unique=True,
+# ),
+# {"comment": "People that are invited to building meetings."},
+# )
diff --git a/databases/sql_models/company/company.py b/databases/sql_models/company/company.py
new file mode 100644
index 0000000..02eb359
--- /dev/null
+++ b/databases/sql_models/company/company.py
@@ -0,0 +1,569 @@
+from fastapi.exceptions import HTTPException
+
+from databases.sql_models.core_mixin import CrudCollection
+
+from sqlalchemy import String, Integer, Boolean, ForeignKey, Index, Identity
+from sqlalchemy.orm import mapped_column, Mapped
+
+from api_configs import RelationAccess
+from databases.extensions import SelectAction
+from api_validations.validations_request import (
+ InsertCompany,
+ UpdateCompany,
+ MatchCompany2Company,
+)
+from api_objects.auth.token_objects import EmployeeTokenObject
+
+
+class RelationshipDutyCompany(CrudCollection):
+ """
+ CompanyRelationship class based on declarative_base and CrudCollection via session
+ Company -> Sub Company -> Sub-Sub Company
+
+ if owner_id == parent_id: can manipulate data of any record
+ else: Read-Only
+ duty_id = if relationship_type == base An organization / not operational / no responsible person
+
+ relationship = company_id filter -> Action filter(company_id) relationship_type = Organization
+ relationship = company_id filter -> Action filter(company_id) relationship_type = Commercial
+ """
+
+ __tablename__ = "relationship_duty_company"
+ __exclude__fields__ = []
+ __access_by__ = RelationAccess.SuperAccessList
+
+ owner_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=False
+ ) # 1
+ duties_id: Mapped[int] = mapped_column(
+ ForeignKey("duties.id"), nullable=False
+ ) # duty -> (n)employee Evyos LTD
+
+ member_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=False
+ ) # 2, 3, 4
+ parent_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=True
+ ) # None
+
+ relationship_type: Mapped[str] = mapped_column(
+ String, nullable=True, server_default="Commercial"
+ ) # Commercial, Organization # Bulk
+ child_count: Mapped[int] = mapped_column(Integer) # 0
+ show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+ # related_company: Mapped[List["Companies"]] = relationship(
+ # "Companies",
+ # back_populates="related_companies",
+ # foreign_keys=[related_company_id],
+ # )
+
+ @classmethod
+ def match_company_to_company_commercial(cls, data: MatchCompany2Company, token):
+ from databases import (
+ Duties,
+ )
+
+ token_duties_id, token_company_id = token.get("duty_id"), token.get(
+ "company_id"
+ )
+ list_match_company_id = []
+ send_duties = Duties.filter_one(
+ Duties.uu_id == data.duty_uu_id,
+ )
+ send_user_duties = Duties.filter_one(
+ Duties.duties_id == send_duties.id,
+ Duties.company_id == token_duties_id,
+ )
+ if not send_user_duties:
+ raise Exception(
+ "Send Duty is not found in company. Please check duty uuid and try again."
+ )
+
+ for company_uu_id in list(data.match_company_uu_id):
+ company = Companies.filter_one(
+ Companies.uu_id == company_uu_id,
+ )
+ bulk_company = RelationshipDutyCompany.filter_one(
+ RelationshipDutyCompany.owner_id == token_company_id,
+ RelationshipDutyCompany.relationship_type == "Bulk",
+ RelationshipDutyCompany.member_id == company.id,
+ )
+ if not bulk_company:
+ raise Exception(
+ f"Bulk Company is not found in company. "
+ f"Please check company uuid {bulk_company.uu_id} and try again."
+ )
+ list_match_company_id.append(bulk_company)
+
+ for match_company_id in list_match_company_id:
+ RelationshipDutyCompany.find_or_create(
+ owner_id=token_company_id,
+ duties_id=send_user_duties.id,
+ member_id=match_company_id.id,
+ parent_id=match_company_id.parent_id,
+ relationship_type="Commercial",
+ show_only=False,
+ )
+
+ @classmethod
+ def match_company_to_company_organization(cls, data: MatchCompany2Company, token):
+ from databases import (
+ Duties,
+ )
+
+ token_duties_id, token_company_id = token.get("duty_id"), token.get(
+ "company_id"
+ )
+ list_match_company_id = []
+ send_duties = Duties.filter_one(
+ Duties.uu_id == data.duty_uu_id,
+ )
+ send_user_duties = Duties.filter_one(
+ Duties.duties_id == send_duties.id,
+ Duties.company_id == token_duties_id,
+ )
+ if not send_user_duties:
+ raise Exception(
+ "Send Duty is not found in company. Please check duty uuid and try again."
+ )
+
+ for company_uu_id in list(data.match_company_uu_id):
+ company = Companies.filter_one(
+ Companies.uu_id == company_uu_id,
+ )
+ bulk_company = RelationshipDutyCompany.filter_one(
+ RelationshipDutyCompany.owner_id == token_company_id,
+ RelationshipDutyCompany.relationship_type == "Bulk",
+ RelationshipDutyCompany.member_id == company.id,
+ )
+ if not bulk_company:
+ raise Exception(
+ f"Bulk Company is not found in company. "
+ f"Please check company uuid {bulk_company.uu_id} and try again."
+ )
+ list_match_company_id.append(bulk_company)
+
+ for match_company_id in list_match_company_id:
+ Duties.init_a_company_default_duties(
+ company_id=match_company_id.id,
+ company_uu_id=str(match_company_id.uu_id),
+ )
+ RelationshipDutyCompany.find_or_create(
+ owner_id=token_company_id,
+ duties_id=send_user_duties.id,
+ member_id=match_company_id.id,
+ parent_id=match_company_id.parent_id,
+ relationship_type="Organization",
+ show_only=False,
+ )
+
+ __table_args__ = (
+ Index(
+ "_company_relationship_ndx_01",
+ duties_id,
+ owner_id,
+ member_id,
+ relationship_type,
+ unique=True,
+ ),
+ {"comment": "Company Relationship Information"},
+ )
+
+
+class Companies(CrudCollection, SelectAction):
+    """
+    Company class based on declarative_base and CrudCollection via session
+    formal_name = Government register name by offical
+    public_name = Public registered name by User
+    nick_name = Search by nickname, commercial_type = Tüzel veya birey
+    """
+
+    __tablename__ = "companies"
+
+    __exclude__fields__ = ["is_blacklist", "is_commercial"]
+    __access_by__ = []
+    __many__table__ = RelationshipDutyCompany
+    # __explain__ = AbstractCompany()
+
+    # Mandatory identity fields; tax_no is unique per company.
+    formal_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Formal Name"
+    )
+    company_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Company Type"
+    )
+    commercial_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Commercial Type"
+    )
+    tax_no: Mapped[str] = mapped_column(
+        String, index=True, unique=True, nullable=False, comment="Tax No"
+    )
+
+    public_name: Mapped[str] = mapped_column(String, comment="Public Name of a company")
+    company_tag: Mapped[str] = mapped_column(String, comment="Company Tag")
+    default_lang_type: Mapped[str] = mapped_column(String, server_default="TR")
+    default_money_type: Mapped[str] = mapped_column(String, server_default="TL")
+    is_commercial: Mapped[bool] = mapped_column(Boolean, server_default="False")
+    is_blacklist: Mapped[bool] = mapped_column(Boolean, server_default="False")
+    # NOTE(review): plain Integer, not a ForeignKey("companies.id") — set from
+    # the caller's company in create_action; confirm the FK omission is intended.
+    parent_id = mapped_column(Integer, nullable=True)
+    workplace_no: Mapped[str] = mapped_column(String, nullable=True)
+
+    # Optional link to the official address (id + uuid kept side by side).
+    official_address_id: Mapped[int] = mapped_column(
+        ForeignKey("addresses.id"), nullable=True
+    )
+    official_address_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Official Address UUID"
+    )
+    top_responsible_company_id: Mapped[int] = mapped_column(
+        ForeignKey("companies.id"), nullable=True
+    )
+    top_responsible_company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Top Responsible Company UUID"
+    )
+
+    # buildings: Mapped[List["Build"]] = relationship(
+    #     "Build",
+    #     back_populates="companies",
+    #     foreign_keys="Build.company_id",
+    # )
+
+    __table_args__ = (
+        Index("_company_ndx_01", tax_no, unique=True),
+        Index("_company_ndx_02", formal_name, public_name),
+        {"comment": "Company Information"},
+    )
+
+    @classmethod
+    def create_action(cls, data: InsertCompany, token: EmployeeTokenObject):
+        """Create a company owned by the caller's company.
+
+        Rejects duplicate ``tax_no`` with HTTPException(400), links the
+        official address when it can be resolved, parents the new company
+        under the caller's company, and records a confirmed "Bulk"
+        relationship row for it. Returns the created company row.
+        """
+        from databases import Addresses, Duties
+
+        data_dict = data.model_dump()
+        # Duplicate tax numbers are rejected before anything is created.
+        if cls.filter_one(cls.tax_no == str(data.tax_no).strip(), system=True).data:
+            raise HTTPException(
+                status_code=400,
+                detail="Company already exists. Please ask supervisor to make company visible for your duty.",
+            )
+
+        official_address = Addresses.filter_one(
+            Addresses.uu_id == data.official_address_uu_id,
+        ).data
+        # if not official_address:
+        #     raise HTTPException(
+        #         status_code=400,
+        #         detail="Official address is not found. Please check address uuid and try again.",
+        #     )
+
+        bulk_duties = Duties.get_bulk_duties_of_a_company(
+            company_id=token.selected_company.company_id
+        )
+
+        # A missing address is tolerated (see commented-out guard above).
+        if official_address:
+            data_dict["official_address_id"] = official_address.id
+            data_dict["official_address_uu_id"] = str(official_address.uu_id)
+
+        # New company is parented under the caller's company.
+        data_dict["parent_id"] = token.selected_company.company_id
+        data_dict["top_responsible_company_id"] = token.selected_company.company_id
+        data_dict["top_responsible_company_uu_id"] = (
+            token.selected_company.company_uu_id
+        )
+        company_created = cls.find_or_create(**data_dict)
+        company_created.save_and_confirm()
+        company_relationship_created = RelationshipDutyCompany.find_or_create(
+            owner_id=token.selected_company.company_id,
+            duties_id=bulk_duties.id,
+            member_id=company_created.id,
+            parent_id=company_created.parent_id,
+            child_count=0,
+            relationship_type="Bulk",
+            show_only=False,
+        )
+        company_relationship_created.save_and_confirm()
+        return company_created
+
+    @classmethod
+    def update_action(cls, data: UpdateCompany, token):
+        """Update a company reachable through the caller's duty.
+
+        Optionally re-links the official address, strips request-only keys,
+        then updates the row selected via ``select_action`` scoped to the
+        caller's duty and company. Returns the update result.
+        """
+        from databases import (
+            Addresses,
+        )
+
+        data_dict = data.excluded_dump()
+        duty_id = token.get("duty_id")
+        company_id = token.get("company_id")
+        if data.official_address_uu_id:
+            official_address = Addresses.filter_one(
+                Addresses.uu_id == data.official_address_uu_id,
+                *Addresses.valid_record_args(Addresses),
+            ).data
+            data_dict["official_address_id"] = official_address.id
+        # uuid keys are request-only; they are not written to the row.
+        del data_dict["official_address_uu_id"], data_dict["company_uu_id"]
+        company_to_update = cls.select_action(
+            duty_id_list=[duty_id],
+            filter_expr=[
+                cls.uu_id == data.company_uu_id,
+                RelationshipDutyCompany.parent_id == company_id,
+            ],
+        )
+        return company_to_update.update(**data_dict)
+
+ # parent_id = mapped_column(ForeignKey("companies.id"))
+ # if data.parent_uu_id:
+ # company = Companies.find_one(uu_id=data.parent_uu_id)
+ # data_dict["parent_id"] = company.id
+ # def is_access_valid(self, endpoint_ext: str):
+ # try:
+ # if (
+ # not arrow.get(self.stop_date)
+ # > arrow.utcnow()
+ # > arrow.get(self.start_date)
+ # ):
+ # message = f"Kullanıcı yetkileri süresi dolmuştur. {self.endpoint_name} için supervisor ile görüşünüz."
+ # SystemLogs.create_log(
+ # log_type="ERROR",
+ # log_code="ACCESS_EXPIRED",
+ # log_action=self.__tablename__,
+ # log_message=message,
+ # )
+ # return False
+ # except Exception as e:
+ # SystemLogs.create_log(
+ # log_type="ERROR",
+ # log_code="ACCESS_EXPIRED",
+ # log_action=self.__tablename__,
+ # log_message=e,
+ # )
+ # return False
+ #
+ # access_dict = {
+ # "LIST": self.access_read,
+ # "INSERT": self.access_write,
+ # "UPDATE": self.access_update,
+ # "DELETE": self.access_delete,
+ # "ACTIVE": self.access_update,
+ # "PRINT": self.report_print,
+ # "EXPORT": self.report_export,
+ # }
+ # return access_dict.get(endpoint_ext.upper(), False)
+
+ # official_address: Mapped[List["Address"]] = relationship(
+ # "Address",
+ # back_populates="official_companies",
+ # foreign_keys=[official_address_id],
+ # )
+ #
+ # emails: Mapped[List["UsersEmails"]] = relationship(
+ # "UsersEmails", back_populates="companies", foreign_keys="UsersEmails.company_id"
+ # )
+ # phones: Mapped[List["UsersPhones"]] = relationship(
+ # "UsersPhones", back_populates="company", foreign_keys="UsersPhones.company_id"
+ # )
+ # buildings: Mapped[List["Build"]] = relationship(
+ # "Build",
+ # back_populates="companies",
+ # foreign_keys="Build.company_id",
+ # )
+ # response_buildings: Mapped[List["Build"]] = relationship(
+ # "Build",
+ # back_populates="response_companies",
+ # foreign_keys="Build.response_company_id",
+ # )
+ # departments: Mapped[List["CompanyDepartments"]] = relationship(
+ # "CompanyDepartments",
+ # back_populates="company",
+ # foreign_keys="CompanyDepartments.company_id",
+ # )
+ # budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+ # "CompanyBudgetRecords",
+ # back_populates="companies",
+ # foreign_keys="CompanyBudgetRecords.company_id",
+ # )
+ # send_budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
+ # "CompanyBudgetRecords",
+ # back_populates="send_companies",
+ # foreign_keys="CompanyBudgetRecords.send_company_id",
+ # )
+ # decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
+ # "BuildDecisionBook",
+ # back_populates="companies",
+ # foreign_keys="BuildDecisionBook.resp_company_id",
+ # )
+ # decision_book_projects: Mapped[List["BuildDecisionBookProjects"]] = relationship(
+ # "BuildDecisionBookProjects",
+ # back_populates="companies",
+ # foreign_keys="BuildDecisionBookProjects.resp_company_id",
+ # )
+ # decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship(
+ # "BuildDecisionBookLegal",
+ # back_populates="attorney_companies",
+ # foreign_keys="BuildDecisionBookLegal.resp_attorney_company",
+ # )
+ #
+ # company_account_books: Mapped["AccountBooks"] = relationship(
+ # "AccountBooks",
+ # back_populates="company",
+ # foreign_keys="AccountBooks.company_id",
+ # )
+ # branch_account_books: Mapped["AccountBooks"] = relationship(
+ # "AccountBooks",
+ # back_populates="branch",
+ # foreign_keys="AccountBooks.branch_id",
+ # )
+ # account_codes: Mapped["AccountCodes"] = relationship(
+ # "AccountCodes", back_populates="company", foreign_keys="AccountCodes.company_id"
+ # )
+ # search_iban_description: Mapped["BuildIbanDescription"] = relationship(
+ # "BuildIbanDescription",
+ # back_populates="company",
+ # foreign_keys="BuildIbanDescription.company_id",
+ # )
+ # related_companies: Mapped[List["CompanyRelationship"]] = relationship(
+ # "CompanyRelationship",
+ # back_populates="related_company",
+ # foreign_keys="CompanyRelationship.related_company_id",
+ # )
+
+
+#
+# class AbstractCompany:
+# """
+# Abstract and explanation of Company class for end-user guide
+# """
+#
+# formal_name = Explanation(
+# explanation="Devletin resmi kayıtlarında bulunan şirket ünvanıdır.",
+# usage="Devletin resmi kayıtlarında bulunan şirket adı istendiğinde kullanılır.",
+# alias="Resmi Ünvan",
+# example=["X Şirketi LTD", "Y Şirketi A.Ş."],
+# )
+# company_type = Explanation(
+# explanation="Şirketin türüdür.",
+# usage="Şirketin türü istendiğinde kullanılır.",
+# alias="Şirket Türü",
+# example=[
+# "Şahıs",
+# "Limited",
+# "Anonim",
+# "Kolektif",
+# "Komandit",
+# "Kooperatif",
+# "Serbest Meslek",
+# "Adi Ortaklık",
+# ],
+# )
+# commercial_type = Explanation(
+# explanation="Şirketin ticari türüdür.",
+# usage="Şirketin ticari türü istendiğinde kullanılır.",
+# alias="Ticari Tür",
+# example=["Tüzel", "Birey"],
+# )
+# tax_no = Explanation(
+# explanation="Şirketin vergi numarasıdır.",
+# usage="Şirketin vergi numarası istendiğinde kullanılır.",
+# alias="Vergi No",
+# example=["1234567890"],
+# )
+# public_name = Explanation(
+# explanation="Şirketin kamuoyunda bilinen adıdır.",
+# usage="Şirketin kamuoyunda bilinen adı istendiğinde kullanılır.",
+# alias="Piyasada Bilinen Adı",
+# example=["X Şirketi", "Y Şirketi"],
+# )
+# company_tag = Explanation(
+# explanation="Şirketin takma adı veya etiketidir.",
+# usage="Şirketin yöneticisin karar verdiği takma adı veya etiketi istendiğinde kullanılır.",
+# alias="Şirket Etiketi veya Takma Adı",
+# example=["X", "Y"],
+# )
+# default_lang_type = Explanation(
+# explanation="Şirketin varsayılan dil türüdür.",
+# usage="Şirketin varsayılan dil türü istendiğinde kullanılır.",
+# alias="Şirketin Dil Türü",
+# example=["TR", "EN"],
+# )
+# default_money_type = Explanation(
+# explanation="Şirketin varsayılan para birimi türüdür.",
+# usage="Şirketin varsayılan para birimi türü istendiğinde kullanılır.",
+# alias="Şirketin Para Birimi Türü",
+# example=["TL", "USD", "EUR"],
+# )
+# is_commercial = Explanation(
+# explanation="Şirketin ticari olup olmadığını belirtir.",
+# usage="Şirketin ticari olup olmadığını applikasyonun anlaması için kullanılır.",
+# condition=lambda commercial_type: True if commercial_type == "Şahıs" else False,
+# alias="Şirket Ticari mi?",
+# )
+# is_blacklist = Explanation(
+# explanation="Şirketin kara listeye alınıp alınmadığını belirtir.",
+# usage="Şirketin kara listeye alınıp alınmadığını applikasyonun anlaması için kullanılır.",
+# alias="Kara Listeye alınsın mı?",
+# example=[True, False],
+# )
+# parent_id = Explanation(
+# explanation="Şirketin sorumlu olduğu şirketin ID'sidir.",
+# usage="Şirketin sorumlu olduğu şirketin ID'si istendiğinde kullanılır.",
+# alias="Sorumlu Şirket",
+# example=[
+# "Bir şirketin sorumlu şirketi hangisi olduğunu bulmak için kullanılır.",
+# ],
+# )
+# workplace_no = Explanation(
+# explanation="Şirketin iş yeri numarasıdır.",
+# usage="Şirketin iş yeri numarası istendiğinde kullanılır.",
+# alias="İş Yeri No",
+# example=["1234567890"],
+# )
+# official_address_id = Explanation(
+# explanation="Şirketin resmi adresidi.",
+# usage="Şirketin resmi adresinin ne olduğunu bulmak için kullanılır.",
+# alias="Resmi Adres",
+# example=[
+# "Bu şirketin adresi nedir sorusuna cevap vermek için kullanılır.",
+# ],
+# )
+# top_responsible_company_id = Explanation(
+# explanation="Şirketin en üst sorumlu şirketin ID'sidir.",
+# usage="Şirketin en üst sorumlu şirketin hangisi olduğunu bulmak için kullanılır.",
+# alias="Ana Yetkili Şirket",
+# example=[
+# "Bölge veya ülke genelinde en üst sorumlu şirketin hangisi olduğunu belirtmek için kullanılır.",
+# ],
+# )
+# buildings = Explanation(
+# explanation="Şirketin sahip olduğu binaların listesidir.",
+# usage="Şirketin sahip olduğu binaların listesini bulmak için kullanılır.",
+# alias="Sorumlu olduğu binalar Binalar",
+# example=[
+# "Şirketin sahip olduğu binaların listesini bulmak için kullanılır.",
+# ],
+# )
+#
+# def wag_create_company(self):
+# """
+# Er kişiye wag_create_company fonksiyonu = fieldları manipule edebilir?
+# 78 ile oluşturulan bir user için wag_create_company fonksiyonu = fieldları manipule edebilir?
+# """
+# return {
+# "commercial_type": self.commercial_type,
+# "formal_name": self.formal_name,
+# "public_name": self.public_name,
+# "company_type": self.company_type,
+# "tax_no": self.tax_no,
+# "workplace_no": self.workplace_no,
+# "company_tag": self.company_tag,
+# "default_lang_type": self.default_lang_type,
+# "default_money_type": self.default_money_type,
+# "official_address_id": self.official_address_id,
+# }
+#
+# def wag_update_company(self):
+# return {
+# "commercial_type": self.commercial_type,
+# "formal_name": self.formal_name,
+# "public_name": self.public_name,
+# "company_type": self.company_type,
+# "tax_no": self.tax_no,
+# "workplace_no": self.workplace_no,
+# "company_tag": self.company_tag,
+# "default_lang_type": self.default_lang_type,
+# "default_money_type": self.default_money_type,
+# "official_address_id": self.official_address_id,
+# }
diff --git a/databases/sql_models/company/department.py b/databases/sql_models/company/department.py
new file mode 100644
index 0000000..6c3e31d
--- /dev/null
+++ b/databases/sql_models/company/department.py
@@ -0,0 +1,232 @@
+from sqlalchemy import String, Integer, ForeignKey, Index, Boolean, Identity
+from sqlalchemy.orm import mapped_column, Mapped
+
+from databases.sql_models.core_mixin import CrudCollection
+
+
+class Departments(CrudCollection):
+    """Department of a company (e.g. "Execution Office", "IT Department")."""
+
+    __tablename__ = "departments"
+    __exclude__fields__ = []
+
+    # Server default "0" — presumably 0 means "no parent department"; confirm.
+    parent_department_id = mapped_column(Integer, server_default="0")
+    department_code = mapped_column(
+        String(16), nullable=False, index=True, comment="Department Code"
+    )
+    department_name: Mapped[str] = mapped_column(
+        String(128), nullable=False, comment="Department Name"
+    )
+    department_description: Mapped[str] = mapped_column(String, server_default="")
+
+    # Owning company, tracked by both id and uuid.
+    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
+    company_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Company UUID"
+    )
+
+    # @classmethod
+    # def create_action(cls, data: DepartmentsPydantic, token):
+    #     data_dict = data.model_dump()
+    #     data_dict["company_id"] = token.selected_company.company_id
+    #     return cls.find_or_create(**data_dict)
+
+    __table_args__ = {"comment": "Departments Information"}
+
+
+class Duty(CrudCollection):
+    """Catalogue entry for a duty (role) — name, code and description only.
+
+    Company/department assignment of a duty lives in ``Duties``.
+    """
+
+    __tablename__ = "duty"
+    __exclude__fields__ = []
+
+    duty_name: Mapped[str] = mapped_column(
+        String, unique=True, nullable=False, comment="Duty Name"
+    )
+    duty_code: Mapped[str] = mapped_column(String, nullable=False, comment="Duty Code")
+    duty_description: Mapped[str] = mapped_column(String, comment="Duty Description")
+
+    # @classmethod
+    # def create_action(cls, data: InsertCompanyDuty, token):
+    #     # if not cls.__is_super__:
+    #     #     raise HTTPException(
+    #     #         status_code=401, detail="You are not authorized to create a duty."
+    #     #     )
+    #     data_dict = data.model_dump()
+    #
+    #     return cls.find_or_create(**data_dict)
+
+    __table_args__ = ({"comment": "Duty Information"},)
+
+
+class Duties(CrudCollection):
+
+ __tablename__ = "duties"
+ __exclude__fields__ = []
+
+ users_default_duty = mapped_column(
+ ForeignKey("duty.id"), nullable=True, comment="Default Duty for Users"
+ )
+ company_id: Mapped[int] = mapped_column(Integer)
+ company_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Company UUID"
+ )
+ duties_id: Mapped[int] = mapped_column(ForeignKey("duty.id"), nullable=False)
+ duties_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Duty UUID"
+ )
+ department_id = mapped_column(
+ ForeignKey("departments.id"), nullable=False, comment="Department ID"
+ )
+ department_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Department UUID"
+ )
+ # priority_id: Mapped[int] = mapped_column(ForeignKey("priority.id"), nullable=True)
+ management_duty = mapped_column(
+ Boolean, server_default="0"
+ ) # is this a prime Company Duty ???
+
+ @classmethod
+ def init_a_company_default_duties(cls, company_id, company_uu_id):
+ __default_init__ = ["Execution Office", "IT Department"]
+
+ active_row = dict(
+ is_confirmed=True, active=True, deleted=False, is_notification_send=True
+ )
+ list_of_created = []
+
+ execution = Departments.find_or_create(
+ department_name="Execution Office",
+ department_code="EO001",
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ **active_row,
+ )
+ list_of_created.append(execution)
+ it_dept = Departments.find_or_create(
+ department_name="IT Department",
+ department_code="ITD001",
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ **active_row,
+ )
+ list_of_created.append(it_dept)
+ bm_duty = Duty.find_or_create(
+ duty_name="Business Manager",
+ duty_code="BM0001",
+ duty_description="Business Manager",
+ **active_row,
+ )
+ list_of_created.append(bm_duty)
+ it_duty = Duty.find_or_create(
+ duty_name="IT Manager",
+ duty_code="IT0001",
+ duty_description="IT Manager",
+ **active_row,
+ )
+ list_of_created.append(it_duty)
+ bulk_duty = Duty.find_or_create(
+ duty_name="BULK",
+ duty_code="BULK",
+ duty_description="BULK RECORDS OF THE COMPANY",
+ **active_row,
+ )
+ list_of_created.append(bulk_duty)
+ occu_duty = Duty.find_or_create(
+ duty_name="OCCUPANT",
+ duty_code="OCCUPANT",
+ duty_description="OCCUPANT RECORDS OF THE COMPANY",
+ **active_row,
+ )
+ list_of_created.append(occu_duty)
+ duties_created_bm = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=bm_duty.id,
+ duties_uu_id=str(bm_duty.uu_id),
+ department_id=execution.id,
+ department_uu_id=str(execution.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created_bm)
+ duties_created_it = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=it_duty.id,
+ duties_uu_id=str(it_duty.uu_id),
+ department_id=it_dept.id,
+ department_uu_id=str(it_dept.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created_it)
+ duties_created__ex = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=bulk_duty.id,
+ duties_uu_id=str(bulk_duty.uu_id),
+ department_id=execution.id,
+ department_uu_id=str(execution.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created__ex)
+ duties_created_at = cls.find_or_create(
+ company_id=company_id,
+ company_uu_id=str(company_uu_id),
+ duties_id=occu_duty.id,
+ duties_uu_id=str(occu_duty.uu_id),
+ department_id=execution.id,
+ department_uu_id=str(execution.uu_id),
+ **active_row,
+ )
+ list_of_created.append(duties_created_at)
+ return list_of_created
+
+ @classmethod
+ def get_bulk_duties_of_a_company(cls, company_id):
+ duties_id = Duty.filter_by_one(system=True, duty_code="BULK").data
+ if bulk_duties := Duties.filter_by_one(
+ duties_id=getattr(duties_id, "id", None),
+ company_id=company_id,
+ **Duties.valid_record_dict,
+ ).data:
+ return bulk_duties
+ raise Exception("Bulk Duty not found. Please contact with supervisor.")
+
+ # @classmethod
+ # def create_action(cls, data: InsertCompanyDuty):
+ # data_dict = data.model_dump()
+ # if department := Departments.find_one(uu_id=data.department_uu_id):
+ # data_dict["department_id"] = department.id
+ # del data_dict["department_uu_id"]
+ # return cls.find_or_create(**data_dict)
+
+ __table_args__ = (
+ Index("duty_ndx_00", company_id, duties_id, department_id, unique=True),
+ {"comment": "Duty & Company & Department Information"},
+ )
+
+ # department: Mapped[List["CompanyDepartments"]] = relationship(
+ # "CompanyDepartments", back_populates="duties", foreign_keys=[department_id]
+ # )
+ # employees: Mapped[List["CompanyEmployees"]] = relationship(
+ # "CompanyEmployees",
+ # back_populates="duty",
+ # foreign_keys="CompanyEmployees.duty_id",
+ # )
+ # duty_app: Mapped["CompanyDutyApp"] = relationship(
+ # "CompanyDutyApp", back_populates="duties", foreign_keys="CompanyDutyApp.company_duty_id"
+ # )
+
+ # def get_language_of_duty(self, lang):
+ # if erp_text := ErpText.find_one(lang=lang, text_code=self.duty_code):
+ # return erp_text.text_name, erp_text.text_description
+ # return None, None
+
+ # company: Mapped["Companies"] = relationship(
+ # "Company", back_populates="departments", foreign_keys=[company_id]
+ # )
+ # duties: Mapped[List["CompanyDuty"]] = relationship(
+ # "CompanyDuty",
+ # back_populates="department",
+ # foreign_keys="CompanyDuty.department_id",
+ # )
+ # app_item: Mapped["AppItems"] = relationship(
+ # "AppItems", back_populates="department", foreign_keys="AppItems.department_id"
+ # )
diff --git a/databases/sql_models/company/employee.py b/databases/sql_models/company/employee.py
new file mode 100644
index 0000000..82e48cc
--- /dev/null
+++ b/databases/sql_models/company/employee.py
@@ -0,0 +1,142 @@
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Index,
+ Numeric,
+)
+from sqlalchemy.orm import mapped_column, Mapped
+from databases.sql_models.core_mixin import CrudCollection
+
+from api_validations.validations_request import InsertCompanyEmployees
+
+
+class Staff(CrudCollection):
+
+ __tablename__ = "staff"
+ __exclude__fields__ = []
+
+ staff_description: Mapped[str] = mapped_column(
+ String, server_default="", comment="Staff Description"
+ )
+ staff_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Staff Name"
+ )
+ staff_code: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Staff Code"
+ )
+
+ duties_id: Mapped[int] = mapped_column(ForeignKey("duties.id"), nullable=False)
+ duties_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Duty UUID"
+ )
+
+ # people: Mapped["People"] = relationship(
+ # "People", back_populates="employees", foreign_keys=[people_id], uselist=True
+ # )
+ # duty: Mapped["CompanyDuty"] = relationship(
+ # "CompanyDuty", back_populates="employees", foreign_keys=[duty_id]
+ # )
+
+ @classmethod
+ def create_action(cls, data: InsertCompanyEmployees):
+ from databases import Duties
+
+ data_dict = data.model_dump()
+ if duty := Duties.find_one(uu_id=data.duty_uu_id):
+ data_dict["duty_id"] = duty.id
+ # if person := People.find_one(uu_id=data.person_uu_id):
+ # data_dict["people_id"] = person.id
+ if data.start_date:
+ data_dict["expiry_starts"] = data.start_date
+ if data.stop_date:
+ data_dict["expiry_ends"] = data.stop_date
+ # del data_dict["duty_uu_id"], data_dict["person_uu_id"]
+ del data_dict["start_date"], data_dict["stop_date"], data_dict["duty_uu_id"]
+ return cls.find_or_create(**data_dict)
+
+ __table_args__ = ({"comment": "Staff Information"},)
+
+
+class Employees(CrudCollection):
+    """Links a person to a staff position (unique per person/staff pair)."""
+
+    __tablename__ = "employees"
+    __exclude__fields__ = []
+
+    staff_id: Mapped[int] = mapped_column(ForeignKey("staff.id"))
+    staff_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Staff UUID"
+    )
+    # Person is optional here — presumably a vacant position; confirm.
+    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="People UUID"
+    )
+
+    __table_args__ = (
+        Index("employees_ndx_00", people_id, staff_id, unique=True),
+        {"comment": "Employee Person Information"},
+    )
+
+
+class EmployeeHistory(CrudCollection):
+    """History of person-to-staff assignments (non-unique person/staff index)."""
+
+    __tablename__ = "employee_history"
+    __exclude__fields__ = []
+
+    staff_id: Mapped[int] = mapped_column(
+        ForeignKey("staff.id"), nullable=False, comment="Staff ID"
+    )
+    staff_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Staff UUID"
+    )
+    people_id: Mapped[int] = mapped_column(
+        ForeignKey("people.id"), nullable=False, comment="People ID"
+    )
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="People UUID"
+    )
+
+    __table_args__ = (
+        Index("_employee_history_ndx_00", people_id, staff_id),
+        {"comment": "Employee History Information"},
+    )
+
+
+class EmployeesSalaries(CrudCollection):
+    """Salary record for a person (gross and net, Numeric(20, 6))."""
+
+    __tablename__ = "employee_salaries"
+    __exclude__fields__ = []
+
+    gross_salary: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Gross Salary"
+    )
+    net_salary: Mapped[float] = mapped_column(
+        Numeric(20, 6), nullable=False, comment="Net Salary"
+    )
+
+    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
+    people_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="People UUID"
+    )
+
+    # people: Mapped["People"] = relationship(
+    #     "People", back_populates="employee_salaries", foreign_keys=[people_id]
+    # )
+
+    # Indexed with "expiry_starts" (inherited column) — salary validity period.
+    __table_args__ = (
+        Index("_employee_salaries_ndx_00", people_id, "expiry_starts"),
+        {"comment": "Employee Salaries Information"},
+    )
+
+
+# class Events2Employees(CrudCollection):
+#
+# __tablename__ = "events2employees"
+# __exclude__fields__ = []
+#
+# event_id = mapped_column(ForeignKey("events.id"), nullable=False)
+# employees_id = mapped_column(ForeignKey("employees.id"), nullable=False)
+#
+# __table_args__ = (
+# Index("_events2employees_ndx_00", event_id, employees_id),
+# {"comment": "Events2Employees Information"},
+# )
diff --git a/databases/sql_models/core_mixin.py b/databases/sql_models/core_mixin.py
new file mode 100644
index 0000000..c85b8c6
--- /dev/null
+++ b/databases/sql_models/core_mixin.py
@@ -0,0 +1,427 @@
+import datetime
+from decimal import Decimal
+
+from sqlalchemy import (
+ TIMESTAMP,
+ NUMERIC,
+ func,
+ text,
+ UUID,
+ String,
+ Integer,
+ Boolean,
+ SmallInteger,
+)
+from sqlalchemy.orm import (
+ Mapped,
+ mapped_column,
+)
+from sqlalchemy_mixins.session import SessionMixin
+from sqlalchemy_mixins.serialize import SerializeMixin
+from sqlalchemy_mixins.repr import ReprMixin
+from sqlalchemy_mixins.smartquery import SmartQueryMixin
+
+from api_library import DateTimeLocal, client_arrow, system_arrow
+from databases.sql_models.sql_operations import FilterAttributes
+from databases.sql_models.postgres_database import Base
+
+
+class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
+
+ __abstract__ = True # The model is abstract not a database table.
+ __session__ = Base.session # The session to use in the model.
+ __system__fields__create__ = (
+ "created_at",
+ "updated_at",
+ "cryp_uu_id",
+ "created_by",
+ "created_by_id",
+ "updated_by",
+ "updated_by_id",
+ "replication_id",
+ "confirmed_by",
+ "confirmed_by_id",
+ "is_confirmed",
+ "deleted",
+ "active",
+ "is_notification_send",
+ "is_email_send",
+ ) # The system fields to use in the model.
+ __system__fields__update__ = (
+ "cryp_uu_id",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "created_by_id",
+ "confirmed_by",
+ "confirmed_by_id",
+ "updated_by",
+ "updated_by_id",
+ "replication_id",
+ )
+ __system_default_model__ = [
+ "cryp_uu_id",
+ "is_confirmed",
+ "deleted",
+ "is_notification_send",
+ "replication_id",
+ "is_email_send",
+ "confirmed_by_id",
+ "confirmed_by",
+ "updated_by_id",
+ "created_by_id",
+ ]
+
+ creds = None # The credentials to use in the model.
+ client_arrow: DateTimeLocal = None # The arrow to use in the model.
+ valid_record_dict: dict = {"active": True, "deleted": False}
+ valid_record_args = lambda class_: [class_.active == True, class_.deleted == False]
+ metadata: dict = {}
+
+ expiry_starts: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), server_default=func.now(), nullable=False
+ )
+ expiry_ends: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True), default="2099-12-31", server_default="2099-12-31"
+ )
+
+ @classmethod
+ def set_user_define_properties(cls, token):
+ cls.creds = token.credentials
+ cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)
+
+ @classmethod
+ def remove_non_related_inputs(cls, kwargs):
+ """
+ Removes the non-related inputs from the given attributes.
+ """
+ return {
+ key: value
+ for key, value in kwargs.items()
+ if key in cls.columns + cls.hybrid_properties + cls.settable_relations
+ }
+
+ @classmethod
+ def extract_system_fields(cls, filter_kwargs: dict, create: bool = True):
+ """
+ Extracts the system fields from the given attributes.
+ """
+ system_fields = filter_kwargs.copy()
+ extract_fields = (
+ cls.__system__fields__create__ if create else cls.__system__fields__update__
+ )
+ for field in extract_fields:
+ system_fields.pop(field, None)
+ return system_fields
+
+ @classmethod
+ def iterate_over_variables(cls, val, key):
+ key_ = cls.__annotations__.get(key, None)
+ is_primary, value_type = key in cls.primary_keys, type(val)
+ row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))
+ if is_primary or row_attr:
+ return False, None
+ elif val is None:
+ return True, None
+ elif str(key[-5:]).lower() == "uu_id":
+ return True, str(val)
+ elif key_:
+ if key_ == Mapped[int]:
+ return True, int(val)
+ elif key_ == Mapped[bool]:
+ return True, bool(val)
+ elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
+ return True, round(float(val), 3)
+ elif key_ == Mapped[int]:
+ return True, int(val)
+ elif key_ == Mapped[TIMESTAMP]:
+ return True, str(
+ cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss")
+ )
+ elif key_ == Mapped[str]:
+ return True, str(val)
+ else:
+ if isinstance(val, datetime.datetime):
+ return True, str(
+ cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss")
+ )
+ elif isinstance(value_type, bool):
+ return True, bool(val)
+ elif isinstance(value_type, float) or isinstance(value_type, Decimal):
+ return True, round(float(val), 3)
+ elif isinstance(value_type, int):
+ return True, int(val)
+ elif isinstance(value_type, str):
+ return True, str(val)
+ elif isinstance(value_type, type(None)):
+ return True, None
+ return False, None
+
+ @classmethod
+ def find_or_create(cls, **kwargs):
+ """
+ Finds a record with the given attributes or creates it if it doesn't exist.
+ If found, sets is_found to True, otherwise False.
+ is_found can be used to check if the record was found or created.
+ """
+ check_kwargs = cls.extract_system_fields(kwargs)
+ cls.pre_query = cls.query.filter(
+ cls.expiry_ends > str(system_arrow.now()),
+ cls.expiry_starts <= str(system_arrow.now()),
+ )
+ already_record = cls.filter_by_one(system=True, **check_kwargs).data
+ cls.pre_query = None
+ if already_record:
+ if already_record.deleted:
+ already_record.metadata = {
+ "created": False,
+ "error_case": "DeletedRecord",
+ "message": "",
+ }
+ return already_record
+ elif already_record.is_confirmed:
+ already_record.metadata = {
+ "created": False,
+ "error_case": "IsNotConfirmed",
+ "message": "",
+ }
+ return already_record
+ already_record.metadata = {
+ "created": False,
+ "error_case": "AlreadyExists",
+ "message": "",
+ }
+ return already_record
+ check_kwargs = cls.remove_non_related_inputs(check_kwargs)
+ created_record = cls()
+ for key, value in check_kwargs.items():
+ setattr(created_record, key, value)
+ if getattr(cls.creds, "person_id", None) and getattr(
+ cls.creds, "person_name", None
+ ):
+ cls.created_by_id = cls.creds.get("person_id", None)
+ cls.created_by = cls.creds.get("person_name", None)
+ created_record.flush()
+ already_record.metadata = {"created": True, "error_case": None, "message": ""}
+ return created_record
+
+ @classmethod
+ def find_or_abort(cls, **kwargs):
+ """
+ Finds a record with the given attributes or creates it if it doesn't exist.
+ If found, sets is_found to True, otherwise False.
+ is_found can be used to check if the record was found or created.
+ """
+ check_kwargs = cls.extract_system_fields(kwargs)
+ cls.pre_query = cls.query.filter(
+ cls.expiry_ends > str(system_arrow.now()),
+ cls.expiry_starts <= str(system_arrow.now()),
+ )
+ already_record = cls.filter_by_one(system=True, **check_kwargs).data
+ cls.pre_query = None
+ if already_record:
+ if already_record.deleted:
+ cls.raise_http_exception(
+ status_code="HTTP_406_NOT_ACCEPTABLE",
+ error_case="DeletedRecord",
+ data=check_kwargs,
+ message="Record exits but is deleted. Contact with authorized user",
+ )
+ elif not already_record.is_confirmed:
+ cls.raise_http_exception(
+ status_code="HTTP_406_NOT_ACCEPTABLE",
+ error_case="IsNotConfirmed",
+ data=check_kwargs,
+ message="Record exits but is not confirmed. Contact with authorized user",
+ )
+ cls.raise_http_exception(
+ status_code="HTTP_406_NOT_ACCEPTABLE",
+ error_case="AlreadyExists",
+ data=check_kwargs,
+ message="Record already exits. Refresh data and try again",
+ )
+ check_kwargs = cls.remove_non_related_inputs(check_kwargs)
+ created_record = cls()
+ for key, value in check_kwargs.items():
+ setattr(created_record, key, value)
+ if getattr(cls.creds, "person_id", None) and getattr(
+ cls.creds, "person_name", None
+ ):
+ cls.created_by_id = cls.creds.get("person_id", None)
+ cls.created_by = cls.creds.get("person_name", None)
+ created_record.flush()
+ return created_record
+
+ def update(self, **kwargs):
+ check_kwargs = self.remove_non_related_inputs(kwargs)
+ """Updates the record with the given attributes."""
+ is_confirmed_argument = kwargs.get("is_confirmed", None)
+ if is_confirmed_argument and not len(kwargs) == 1:
+ self.raise_http_exception(
+ status_code="HTTP_406_NOT_ACCEPTABLE",
+ error_case="ConfirmError",
+ data=kwargs,
+ message="Confirm field can not be updated with other fields",
+ )
+ check_kwargs = self.extract_system_fields(check_kwargs, create=False)
+ for key, value in check_kwargs.items():
+ setattr(self, key, value)
+
+ if is_confirmed_argument:
+ if getattr(self.creds, "person_id", None) and getattr(
+ self.creds, "person_name", None
+ ):
+ self.confirmed_by_id = self.creds.get("person_id", "Unknown")
+ self.confirmed_by = self.creds.get("person_name", "Unknown")
+ else:
+ if getattr(self.creds, "person_id", None) and getattr(
+ self.creds, "person_name", None
+ ):
+ self.updated_by_id = self.creds.get("person_id", "Unknown")
+ self.updated_by = self.creds.get("person_id", "Unknown")
+ self.flush()
+ return self
+
+ def get_dict(
+ self, exclude: list = None, include: list = None, include_joins: list = None
+ ):
+ return_dict = {}
+ if include:
+ exclude_list = [
+ element
+ for element in self.__system_default_model__
+ if str(element)[-2:] == "id" and str(element)[-5:].lower() == "uu_id"
+ ]
+ columns_include_list = list(set(include).difference(set(exclude_list)))
+ # columns_include_list.extend([column for column in self.columns if str(column)[-5:].lower() == 'uu_id'])
+ columns_include_list.extend(["uu_id"])
+ for key in list(columns_include_list):
+ val = getattr(self, key)
+ correct, value_of_database = self.iterate_over_variables(val, key)
+ if correct:
+ return_dict[key] = value_of_database
+ elif exclude:
+ exclude.extend(
+ list(set(self.__exclude__fields__ or []).difference(exclude))
+ )
+ exclude.extend(
+ [
+ element
+ for element in self.__system_default_model__
+ if str(element)[-2:] == "id"
+ ]
+ )
+ columns_excluded_list = list(set(self.columns).difference(set(exclude)))
+ # columns_excluded_list.extend([column for column in self.columns if str(column)[-5:].lower() == 'uu_id'])
+ columns_excluded_list.extend(["uu_id", "active"])
+
+ for key in list(columns_excluded_list):
+ val = getattr(self, key)
+ correct, value_of_database = self.iterate_over_variables(val, key)
+ if correct:
+ return_dict[key] = value_of_database
+ else:
+ exclude_list = (
+ self.__exclude__fields__ or [] + self.__system_default_model__
+ )
+ columns_list = list(set(self.columns).difference(set(exclude_list)))
+ columns_list = [
+ columns for columns in columns_list if str(columns)[-2:] != "id"
+ ]
+ columns_list.extend(
+ [
+ column
+ for column in self.columns
+ if str(column)[-5:].lower() == "uu_id"
+ ]
+ )
+ for remove_field in self.__system_default_model__:
+ if remove_field in columns_list:
+ columns_list.remove(remove_field)
+ for key in list(columns_list):
+ val = getattr(self, key)
+ correct, value_of_database = self.iterate_over_variables(val, key)
+ if correct:
+ return_dict[key] = value_of_database
+ return return_dict
+
+
+class BaseMixin(CrudMixin, ReprMixin, SerializeMixin, FilterAttributes):
+    """Abstract base combining CRUD behaviour with repr/serialize helpers."""
+
+    __abstract__ = True
+
+
+class BaseCollection(BaseMixin):
+    """Abstract collection with only a plain integer primary key."""
+
+    __abstract__ = True
+    __repr__ = ReprMixin.__repr__
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+
+
+class CrudCollection(BaseMixin, SmartQueryMixin):
+    """Abstract base for concrete tables: surrogate key, public UUID and
+    full audit/soft-delete columns."""
+
+    __abstract__ = True
+    __repr__ = ReprMixin.__repr__
+
+    id: Mapped[int] = mapped_column(primary_key=True)
+    # Public identifier generated by PostgreSQL's gen_random_uuid().
+    uu_id: Mapped[str] = mapped_column(
+        UUID, server_default=text("gen_random_uuid()"), index=True, unique=True
+    )
+
+    # Optional external/reference identifier.
+    ref_id: Mapped[str] = mapped_column(String(100), nullable=True, index=True)
+    created_at: Mapped[TIMESTAMP] = mapped_column(
+        "created_at",
+        TIMESTAMP(timezone=True),
+        server_default=func.now(),
+        nullable=False,
+        index=True,
+    )
+
+    # Stamped with now() on every UPDATE via onupdate.
+    updated_at: Mapped[TIMESTAMP] = mapped_column(
+        "updated_at",
+        TIMESTAMP(timezone=True),
+        server_default=func.now(),
+        onupdate=func.now(),
+        nullable=False,
+        index=True,
+    )
+    cryp_uu_id: Mapped[str] = mapped_column(String, nullable=True, index=True)
+
+    # Audit trail: who created/updated/confirmed the row.
+    created_by: Mapped[str] = mapped_column(String, nullable=True)
+    created_by_id: Mapped[int] = mapped_column(Integer, nullable=True)
+    updated_by: Mapped[str] = mapped_column(String, nullable=True)
+    updated_by_id: Mapped[int] = mapped_column(Integer, nullable=True)
+
+    confirmed_by: Mapped[str] = mapped_column(String, nullable=True)
+    confirmed_by_id: Mapped[int] = mapped_column(Integer, nullable=True)
+    is_confirmed: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+    # Flags: replication bookkeeping, soft delete, activation, notifications.
+    replication_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
+    deleted: Mapped[bool] = mapped_column(Boolean, server_default="0")
+    active: Mapped[bool] = mapped_column(Boolean, server_default="1")
+    is_notification_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
+    is_email_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+# all_arguments = [
+# record
+# for record in self.__class__.__dict__
+# if "_" not in record[0] and "id" not in record[-2:]
+# ]
+#
+# for all_argument in all_arguments:
+# column = getattr(self.__class__, all_argument)
+# is_populate = isinstance(column, InstrumentedAttribute) and not hasattr(
+# column, "foreign_keys"
+# )
+# if is_populate and all_argument in include_joins or []:
+# populate_arg = getattr(self, all_argument, None)
+# if isinstance(populate_arg, list):
+# return_dict[all_argument] = [
+# arg.get_dict() if arg else [] for arg in populate_arg
+# ]
+# elif getattr(populate_arg, "get_dict", None):
+# return_dict[all_argument] = (
+# populate_arg.get_dict() if populate_arg else []
+# )
+# return dict(sorted(return_dict.items(), reverse=False))
\ No newline at end of file
diff --git a/databases/sql_models/event/event.py b/databases/sql_models/event/event.py
new file mode 100644
index 0000000..5e85e84
--- /dev/null
+++ b/databases/sql_models/event/event.py
@@ -0,0 +1,408 @@
+from databases.sql_models.core_mixin import CrudCollection
+
+from sqlalchemy import (
+ String,
+ ForeignKey,
+ Numeric,
+ SmallInteger,
+ Boolean,
+ Integer,
+ Index,
+)
+from sqlalchemy.orm import mapped_column, Mapped
+
+
+class Events(CrudCollection):
+    """
+    Events class based on declarative_base and BaseMixin via session.
+    If Events2Occupants and Events2Employees are not found for a user
+    request, the response is 401 Unauthorized.
+    """
+
+    __tablename__ = "events"
+    __exclude__fields__ = []
+
+    event_type: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event Type"
+    )
+    # Code and class name of the handler this event dispatches to.
+    function_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="function code"
+    )
+    function_class: Mapped[str] = mapped_column(
+        String, nullable=False, comment="class name"
+    )
+
+    # name: Mapped[str] = mapped_column(String, nullable=True)  # form or page title
+    description: Mapped[str] = mapped_column(
+        String, server_default=""
+    )  # form or page description
+    property_description: Mapped[str] = mapped_column(String, server_default="")
+
+    # NOTE(review): meaning of marketing layers (default "3") is not
+    # documented here -- confirm against the marketing module.
+    marketing_layer = mapped_column(SmallInteger, server_default="3")
+    cost: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00")
+    unit_price: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00")
+
+    # Optional link to an endpoint-restriction row.
+    endpoint_id: Mapped[int] = mapped_column(
+        ForeignKey("endpoint_restriction.id"), nullable=True
+    )
+    endpoint_uu_id: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Endpoint UUID"
+    )
+
+    __table_args__ = ({"comment": "Events Information"},)
+
+
+class Modules(CrudCollection):
+    """
+    Modules class based on declarative_base and BaseMixin via session.
+    A module groups Services rows (see retrieve_services).
+    """
+
+    __tablename__ = "modules"
+    __exclude__fields__ = []
+
+    module_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module Name"
+    )
+    module_description: Mapped[str] = mapped_column(String, server_default="")
+    module_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module Code"
+    )
+    module_layer = mapped_column(Integer, nullable=False, comment="Module Layer")
+    is_default_module = mapped_column(Boolean, server_default="0")
+
+    def retrieve_services(self):
+        """Return all Services rows of this module; abort with HTTP 404
+        if the module has none."""
+        services = Services.filter_all(Services.module_id == self.id).data
+        if not services:
+            self.raise_http_exception(
+                status_code="HTTP_404_NOT_FOUND",
+                error_case="RECORD_NOT_FOUND",
+                message=f"No services found for this module : {str(self.uu_id)}",
+                data={
+                    "module_uu_id": str(self.uu_id),
+                },
+            )
+        return services
+
+    __table_args__ = ({"comment": "Modules Information"},)
+
+
+class Services(CrudCollection):
+    """
+    Services class based on declarative_base and BaseMixin via session.
+    A service belongs to a module and may be tied to an occupant
+    responsibility code via ``related_responsibility``.
+    """
+
+    __tablename__ = "services"
+    __exclude__fields__ = []
+
+    # Owning module (FK plus its UUID).
+    module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
+    module_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module UUID"
+    )
+    service_name: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Service Name"
+    )
+    service_description: Mapped[str] = mapped_column(String, server_default="")
+    service_code: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Service Code"
+    )
+    # Matched against OccupantTypes.occupant_code in
+    # retrieve_service_via_occupant_code.
+    related_responsibility: Mapped[str] = mapped_column(String, server_default="")
+
+    @classmethod
+    def retrieve_service_via_occupant_code(cls, occupant_code):
+        """Return the service whose ``related_responsibility`` equals the
+        occupant type's code; abort with HTTP 404 if the occupant type
+        is unknown."""
+        from databases import OccupantTypes
+
+        occupant_type = OccupantTypes.filter_by_one(
+            system=True,
+            occupant_code=occupant_code,
+        ).data
+        if not occupant_type:
+            cls.raise_http_exception(
+                status_code="HTTP_404_NOT_FOUND",
+                error_case="RECORD_NOT_FOUND",
+                message=f"No occupant type found for this code : {occupant_code}",
+                data={
+                    "occupant_code": occupant_code,
+                },
+            )
+        return cls.filter_one(
+            cls.related_responsibility == occupant_type.occupant_code
+        ).data
+
+    __table_args__ = ({"comment": "Services Information"},)
+
+
+class Service2Events(CrudCollection):
+    """
+    Link table binding services to events (many-to-many), based on
+    declarative_base and BaseMixin via session.
+    """
+
+    __tablename__ = "services2events"
+    __exclude__fields__ = []
+
+    # FK + UUID of each side of the association.
+    service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
+    service_uu_id = mapped_column(String, nullable=False, comment="Service UUID")
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
+
+    __table_args__ = ({"comment": "Service2Events Information"},)
+
+
+class Event2OccupantExtra(CrudCollection):
+    """Direct (living space, event) bindings, merged into a space's active
+    events by Event2Occupant.get_event_id_by_build_living_space_id."""
+
+    __tablename__ = "event2occupant_extra"
+    __exclude__fields__ = []
+
+    build_living_space_id: Mapped[int] = mapped_column(
+        ForeignKey("build_living_space.id"), nullable=False
+    )
+    build_living_space_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Build Living Space UUID"
+    )
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event UUID"
+    )
+
+    # Each (living space, event) pair may be bound only once.
+    __table_args__ = (
+        Index(
+            "event2occupant_extra_bind_event_to_occupant",
+            build_living_space_id,
+            event_id,
+            unique=True,
+        ),
+        {"comment": "Occupant2Event Information"},
+    )
+
+
+class Event2EmployeeExtra(CrudCollection):
+    """
+    Direct (employee, event) bindings, merged into an employee's active
+    events by Event2Employee.get_event_id_by_employee_id.
+    """
+
+    __tablename__ = "event2employee_extra"
+    __exclude__fields__ = []
+
+    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
+    employee_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Employee UUID"
+    )
+
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event UUID"
+    )
+
+    # Each (employee, event) pair may be bound only once.
+    __table_args__ = (
+        Index(
+            "event2employee_extra_employee_to_event",
+            employee_id,
+            event_id,
+            unique=True,
+        ),
+        {"comment": "Employee to Event Information"},
+    )
+
+
+class Event2Employee(CrudCollection):
+    """
+    Employee2Event class based on declarative_base and BaseMixin via session.
+    Binds an employee to an event service (Services row).
+    """
+
+    __tablename__ = "event2employee"
+    __exclude__fields__ = []
+
+    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
+    employee_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Employee UUID"
+    )
+    event_service_id: Mapped[int] = mapped_column(
+        ForeignKey("services.id"), nullable=False
+    )
+    event_service_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event Cluster UUID"
+    )
+
+    # Each (employee, service) pair may be bound only once.
+    __table_args__ = (
+        Index(
+            "event2employee_employee_to_event",
+            employee_id,
+            event_service_id,
+            unique=True,
+        ),
+        {"comment": "Employee to Event Information"},
+    )
+
+    @classmethod
+    def get_event_id_by_employee_id(cls, employee_id) -> list:
+        """Collect event ids granted to an employee.
+
+        Resolves events through the employee's services (Service2Events)
+        and appends any direct extras from Event2EmployeeExtra.
+        """
+        occupant_events = cls.filter_all(
+            cls.employee_id == employee_id,
+        ).data
+        active_events = Service2Events.filter_all(
+            Service2Events.service_id.in_(
+                [event.event_service_id for event in occupant_events]
+            ),
+            system=True,
+        ).data
+        active_events_id = [event.event_id for event in active_events]
+        if extra_events := Event2EmployeeExtra.filter_all(
+            Event2EmployeeExtra.employee_id == employee_id
+        ).data:
+            active_events_id.extend([event.event_id for event in extra_events])
+        return active_events_id
+
+
+class Event2Occupant(CrudCollection):
+ """
+ Occupant2Event class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "event2occupant"
+ __exclude__fields__ = []
+
+ build_living_space_id: Mapped[str] = mapped_column(
+ ForeignKey("build_living_space.id"), nullable=False
+ )
+ build_living_space_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Build Living Space UUID"
+ )
+ event_service_id: Mapped[int] = mapped_column(
+ ForeignKey("services.id"), nullable=False
+ )
+ event_service_uu_id: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Event Cluster UUID"
+ )
+ # event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+ # event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")
+
+ __table_args__ = (
+ Index(
+ "event2occupant_bind_event_to_occupant",
+ build_living_space_id,
+ event_service_id,
+ unique=True,
+ ),
+ {"comment": "Occupant2Event Information"},
+ )
+
+ @classmethod
+ def get_event_id_by_build_living_space_id(cls, build_living_space_id) -> list:
+ occupant_events = cls.filter_all(
+ cls.build_living_space_id == build_living_space_id,
+ ).data
+ active_events = Service2Events.filter_all(
+ Service2Events.service_id.in_(
+ [event.event_service_id for event in occupant_events]
+ ),
+ system=True,
+ ).data
+ active_events_id = [event.event_id for event in active_events]
+ if extra_events := Event2OccupantExtra.filter_all(
+ Event2OccupantExtra.build_living_space_id == build_living_space_id
+ ).data:
+ active_events_id.extend([event.event_id for event in extra_events])
+ return active_events_id
+
+
+class ModulePrice(CrudCollection):
+    """
+    ModulePrice class based on declarative_base and BaseMixin via session.
+    Campaign pricing per (module, service, event) combination.
+    """
+
+    __tablename__ = "module_price"
+    __exclude__fields__ = []
+
+    campaign_code: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Campaign Code"
+    )
+    module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
+    module_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Module UUID"
+    )
+    service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
+    service_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Service UUID"
+    )
+    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
+    event_uu_id: Mapped[str] = mapped_column(
+        String, nullable=False, comment="Event UUID"
+    )
+    # NOTE(review): the name suggests a boolean flag but the type is a
+    # percentage (Numeric(6, 2)); presumably the discount rate -- confirm.
+    is_counted_percentage: Mapped[float] = mapped_column(
+        Numeric(6, 2), server_default="0.00"
+    )  # e.g. 22%
+    discounted_price: Mapped[float] = mapped_column(
+        Numeric(20, 2), server_default="0.00"
+    )  # e.g. normally 78.00 TL
+    calculated_price: Mapped[float] = mapped_column(
+        Numeric(20, 2), server_default="0.00"
+    )  # e.g. nets out to a flat 75.00 TL
+
+    __table_args__ = ({"comment": "ModulePrice Information"},)
+
+
+# class Modules2Occupant(CrudCollection):
+# """
+# ModulesOccupantPrices class based on declarative_base and BaseMixin via session
+# discounted_price - calculated_price = Pazarlamaya gider yazılır 3 TL
+# """
+#
+# __tablename__ = "modules2_occupant"
+#
+#
+# discounted_percentage: Mapped[float] = mapped_column(Numeric(6, 2), server_default="0.00") # %22
+# discounted_price = mapped_column(
+# Numeric(20, 2), server_default="0.00"
+# ) # Normal: 78.00 TL
+# calculated_price = mapped_column(
+# Numeric(20, 2), server_default="0.00"
+# ) # sana düz 75.00 TL yapar
+#
+# service_id = mapped_column(ForeignKey("services.id"), nullable=False)
+# build_living_space_id = mapped_column(
+# ForeignKey("build_living_space.id"), nullable=False, index=True
+# )
+#
+# __table_args__ = ({"comment": "ModulesOccupantPrices Information"},)
+#
+#
+# class Modules2Employee(CrudCollection):
+# """
+# Modules2EmployeeServices class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "modules2_employee"
+#
+# discounted_percentage: Mapped[float] = mapped_column(Numeric(6, 2), server_default="0.00") # %22
+# discounted_price = mapped_column(
+# Numeric(20, 2), server_default="0.00"
+# ) # Normal: 78.00 TL
+# calculated_price = mapped_column(
+# Numeric(20, 2), server_default="0.00"
+# ) # sana düz 75.00 TL yapar
+#
+# service_id = mapped_column(ForeignKey("services.id"), nullable=False)
+# employee_id = mapped_column(ForeignKey("employees.id"), nullable=False)
+#
+# __table_args__ = ({"comment": "Modules2EmployeeServices Information"},)
+# class Actions(CrudCollection):
+# """
+# Actions class based on declarative_base and BaseMixin via session
+# """
+#
+# __tablename__ = "actions"
+# __exclude__fields__ = []
+#
+# action_table = mapped_column(String, nullable=False, comment="Action Table")
+# action_type = mapped_column(String, nullable=False, comment="Action Type")
+# action_description = mapped_column(String, server_default="")
+# action_code = mapped_column(String, nullable=False, comment="Action Code")
+# endpoint_id = mapped_column(ForeignKey("endpoint_restriction.id"), nullable=True)
+# endpoint_uu_id = mapped_column(String, nullable=True, comment="Endpoint UUID")
+#
+# @property
+# def action_name(self):
+# return f"{self.action_table} {self.action_type}"
+#
+# @property
+# def total_cost(self):
+# return self.cost * self.unit_price
+#
+# __table_args__ = ({"comment": "Actions Information"},)
diff --git a/databases/sql_models/identity/identity.py b/databases/sql_models/identity/identity.py
new file mode 100644
index 0000000..44e9ec9
--- /dev/null
+++ b/databases/sql_models/identity/identity.py
@@ -0,0 +1,1069 @@
+from api_library.date_time_actions.date_functions import system_arrow
+from api_configs import Auth, ApiStatic, RelationAccess
+
+from datetime import timedelta
+from fastapi import HTTPException
+
+from databases.sql_models.core_mixin import CrudCollection
+from databases.extensions import SelectAction, SelectActionWithEmployee
+from databases.extensions.auth import UserLoginModule
+
+from sqlalchemy import (
+ String,
+ Boolean,
+ TIMESTAMP,
+ ForeignKey,
+ func,
+ Numeric,
+ Index,
+ BigInteger,
+ Integer,
+ Text,
+ or_,
+)
+from sqlalchemy.orm import mapped_column, relationship, Mapped
+
+from api_validations.validations_request import InsertUsers, InsertPerson
+
+
+class UsersTokens(CrudCollection):
+
+ __tablename__ = "users_tokens"
+ __exclude__fields__ = []
+
+ user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False)
+
+ token_type: Mapped[str] = mapped_column(String(16), server_default="RememberMe")
+ token: Mapped[str] = mapped_column(String, server_default="")
+ domain: Mapped[str] = mapped_column(String, server_default="")
+ expires_at: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True),
+ default=str(system_arrow.shift(date=system_arrow.now(), days=3)),
+ )
+
+ # users = relationship("Users", back_populates="tokens", foreign_keys=[user_id])
+
+
+class Users(CrudCollection, UserLoginModule, SelectAction):
+ """
+ Application User frame to connect to API with assigned token-based HTTP connection
+ """
+
+ __tablename__ = "users"
+ __exclude__fields__ = [
+ "hash_password",
+ "password_token",
+ "expiry_begins",
+ "related_company",
+ ]
+
+ user_tag: Mapped[str] = mapped_column(
+ String(64), server_default="", comment="Unique tag for the user", index=True
+ )
+ email: Mapped[str] = mapped_column(
+ String(128), server_default="", comment="Email address of the user", index=True
+ )
+ phone_number: Mapped[str] = mapped_column(
+ String, server_default="", comment="Phone number of the user", index=True
+ )
+ via: Mapped[str] = mapped_column(
+ String,
+ server_default="111",
+ comment="Email 1/ Phone 2/ User Tag 3 All 111 Only 100",
+ )
+
+ avatar: Mapped[str] = mapped_column(
+ String, server_default="", comment="Avatar URL for the user"
+ )
+ hash_password: Mapped[str] = mapped_column(
+ String(256), server_default="", comment="Hashed password for security"
+ )
+ password_token: Mapped[str] = mapped_column(
+ String(256), server_default="", comment="Token for password reset"
+ )
+ remember_me: Mapped[bool] = mapped_column(
+ Boolean, server_default="0", comment="Flag to remember user login"
+ )
+
+ password_expires_day: Mapped[str] = mapped_column(
+ "expires_day",
+ String,
+ server_default=str(Auth.PASSWORD_EXPIRE_DAY),
+ comment="Password expires in days",
+ )
+ password_expiry_begins: Mapped[TIMESTAMP] = mapped_column(
+ "expiry_begins",
+ TIMESTAMP(timezone=True),
+ server_default=func.now(),
+ comment="Timestamp when password expiry begins",
+ )
+ related_company: Mapped[str] = mapped_column(String, comment="Related Company UUID")
+
+ person_id: Mapped[int] = mapped_column(
+ ForeignKey("people.id"), nullable=False, comment="Foreign key to person table"
+ )
+ person_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Person UUID", index=True
+ )
+ person = relationship("People", back_populates="user", foreign_keys=[person_id])
+
+ @property
+ def is_occupant(self):
+ return not str(self.email).split("@")[1] == Auth.ACCESS_EMAIL_EXT
+
+ @property
+ def password_expiry_ends(self):
+ """Calculates the expiry end date based on expiry begins and expires day"""
+ return self.password_expiry_begins + timedelta(
+ days=int(
+ "".join(
+ [
+ _
+ for _ in str(self.password_expires_day).split(",")[0]
+ if _.isdigit()
+ ]
+ )
+ )
+ )
+
+ @property
+ def is_super_user(self):
+ """Checks if the user is a superuser based on priority code"""
+ return getattr(self.priority, "priority_code", 0) == 78
+
+ @property
+ def is_user(self):
+ """Checks if the user is a regular user based on priority code"""
+ return getattr(self.priority, "priority_code", 0) == 0
+
+ @classmethod
+ def create_action(cls, create_user: InsertUsers, token_dict):
+ found_person = People.filter_one(
+ People.uu_id == create_user.people_uu_id,
+ ).data
+
+ if not found_person:
+ raise HTTPException(status_code=400, detail="Person not found.")
+ if (
+ not any(i in str(create_user.email) for i in ["@", "."])
+ and not len(str(create_user.phone_number)) >= 10
+ ):
+ raise HTTPException(
+ status_code=400,
+ detail="Please enter at least one valid email or phone number.",
+ )
+ if not create_user.avatar:
+ create_user.avatar = ApiStatic.PLACEHOLDER
+ create_dict = create_user.model_dump()
+ del create_dict["people_uu_id"]
+ create_dict["person_id"] = found_person.id
+ create_dict["person_uu_id"] = str(found_person.uu_id)
+ create_dict["related_company"] = token_dict.selected_company.company_uu_id
+ created_user = cls.find_or_create(**create_dict)
+ created_user.reset_password_token(found_user=created_user)
+ return created_user
+
+ @classmethod
+ def credentials(cls):
+ person_object = People.filter_by_one(system=True, id=cls.person_id).data
+ # if not person_object:
+ # raise HTTPException(
+ # status_code=401,
+ # detail="Person not found. Please contact the admin.",
+ # )
+ if person_object:
+ return {
+ "person_id": person_object.id,
+ "person_uu_id": str(person_object.uu_id),
+ }
+ return {
+ "person_id": None,
+ "person_uu_id": None,
+ }
+
+ def get_employee_and_duty_details(self):
+ from databases import Employees, Duties
+
+ found_person = People.filter_one(
+ People.id == self.person_id,
+ )
+ found_employees = Employees.filter_by_active(
+ people_id=found_person.id, is_confirmed=True
+ )
+ found_duties = Duties.filter_all(
+ Duties.is_confirmed == True,
+ Duties.id.in_(
+ list(found_employee.duty_id for found_employee in found_employees.data)
+ ),
+ )
+ if not found_employees.count:
+ raise HTTPException(
+ status_code=401,
+ detail={
+                "message": "Person has no confirmed duty. No employee match; please register "
+ "your super admin",
+ "completed": False,
+ },
+ )
+ return {
+ "duty_list": [
+ {
+ "duty_id": duty.id,
+ "duty_uu_id": duty.uu_id.__str__(),
+ "duty_code": duty.duty_code,
+ "duty_name": duty.duty_name,
+ "duty_description": duty.duty_description,
+ }
+ for duty in found_duties.data
+ ],
+ }
+
+ def get_main_domain_and_other_domains(self, get_main_domain: bool = True):
+ from databases import MongoQueryIdentity
+
+ query_engine = MongoQueryIdentity(company_uuid=self.related_company)
+ domain_via_user = query_engine.get_domain_via_user(user_uu_id=str(self.uu_id))
+ if get_main_domain:
+ return domain_via_user.get("main_domain")
+ return domain_via_user.get("other_domains_list")
+
+
+class RelationshipDutyPeople(CrudCollection):
+
+ __tablename__ = "relationship_duty_people"
+ __exclude__fields__ = []
+ __access_by__ = RelationAccess.SuperAccessList
+
+ company_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=False
+ ) # 1, 2, 3
+ duties_id: Mapped[int] = mapped_column(
+ ForeignKey("duties.id"), nullable=False
+ ) # duty -> (n)person Evyos LTD
+ member_id: Mapped[int] = mapped_column(
+ ForeignKey("people.id"), nullable=False
+ ) # 2, 3, 4
+
+ relationship_type: Mapped[str] = mapped_column(
+ String, nullable=True, server_default="Employee"
+ ) # Commercial
+ show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+ # related_company: Mapped[List["Company"]] = relationship(
+ # "Company",
+ # back_populates="related_companies",
+ # foreign_keys=[related_company_id],
+ # )
+
+ __table_args__ = (
+ Index(
+ "person_relationship_ndx_01",
+ company_id,
+ duties_id,
+ member_id,
+ relationship_type,
+ unique=True,
+ ),
+ {"comment": "Person Relationship Information"},
+ )
+
+
+class People(CrudCollection, SelectAction):
+ """
+ People that are related to users in the application
+ """
+
+ __tablename__ = "people"
+ __exclude__fields__ = []
+ __many__table__ = RelationshipDutyPeople
+ __encrypt_list__ = [
+ "father_name",
+ "mother_name",
+ "country_code",
+ "national_identity_id",
+ "birth_place",
+ "birth_date",
+ "tax_no",
+ ]
+
+ firstname: Mapped[str] = mapped_column(
+ String, nullable=False, comment="First name of the person"
+ )
+ surname: Mapped[str] = mapped_column(
+ String(24), nullable=False, comment="Surname of the person"
+ )
+ middle_name: Mapped[str] = mapped_column(
+ String, server_default="", comment="Middle name of the person"
+ )
+ sex_code: Mapped[str] = mapped_column(
+ String(1), nullable=False, comment="Sex code of the person (e.g., M/F)"
+ )
+ person_ref: Mapped[str] = mapped_column(
+ String, server_default="", comment="Reference ID for the person"
+ )
+ person_tag: Mapped[str] = mapped_column(
+ String, server_default="", comment="Unique tag for the person"
+ )
+
+ # ENCRYPT DATA
+ father_name: Mapped[str] = mapped_column(
+ String, server_default="", comment="Father's name of the person"
+ )
+ mother_name: Mapped[str] = mapped_column(
+ String, server_default="", comment="Mother's name of the person"
+ )
+ country_code: Mapped[str] = mapped_column(
+ String(4), server_default="TR", comment="Country code of the person"
+ )
+ national_identity_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="National identity ID of the person"
+ )
+ birth_place: Mapped[str] = mapped_column(
+ String, server_default="", comment="Birth place of the person"
+ )
+ birth_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True),
+ server_default="1900-01-01",
+ comment="Birth date of the person",
+ )
+ tax_no: Mapped[str] = mapped_column(
+ String, server_default="", comment="Tax number of the person"
+ )
+ # ENCRYPT DATA
+ user = relationship(
+ "Users", back_populates="person", foreign_keys="Users.person_id"
+ )
+
+ __table_args__ = (
+ Index(
+ "person_ndx_001",
+ national_identity_id,
+ unique=True,
+ ),
+ {"comment": "Person Information"},
+ )
+
+ @property
+ def full_name(self):
+ if self.middle_name:
+ return f"{self.firstname} {self.middle_name} {self.surname}"
+ return f"{self.firstname} {self.surname}"
+
+ @classmethod
+ def create_action(cls, data: InsertPerson, token):
+ from databases import Duties
+
+ token_duties_id, token_company_id = (
+ token.selected_company.duty_id,
+ token.selected_company.company_id,
+ )
+ bulk_duty = Duties.get_bulk_duties_of_a_company(company_id=token_company_id)
+
+ if str(data.country_code) == "TR":
+ if not len(data.national_identity_id) == 11:
+ raise HTTPException(
+ status_code=400,
+ detail="Please enter a valid national identity number.",
+ )
+ if data.tax_no and not len(str(data.tax_no)) == 10:
+ raise HTTPException(
+ status_code=400,
+ detail="Please enter a valid tax number.",
+ )
+
+ create_dict = data.model_dump()
+ create_dict["firstname"] = str(create_dict["firstname"]).capitalize()
+ create_dict["middle_name"] = str(create_dict["middle_name"]).capitalize()
+ create_dict["surname"] = str(create_dict["surname"]).upper()
+ create_dict["birth_place"] = str(create_dict["birth_place"]).upper()
+ created_people = cls.find_or_create(**create_dict)
+ created_people.update(is_confirmed=True)
+ duty_people = RelationshipDutyPeople.find_or_create(
+ company_id=token.selected_company.company_id,
+ duties_id=bulk_duty.id,
+ member_id=created_people.id,
+ )
+ duty_people.update(is_confirmed=True)
+ return created_people
+
+
+class RelationshipEmployee2PostCode(CrudCollection):
+ """
+    RelationshipEmployee2PostCode class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "relationship_employee2postcode"
+ __exclude__fields__ = []
+ __include__fields__ = []
+
+ company_id: Mapped[int] = mapped_column(
+ ForeignKey("companies.id"), nullable=True
+ ) # 1, 2, 3
+ employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
+ member_id: Mapped[int] = mapped_column(
+ ForeignKey("address_postcode.id"), nullable=False
+ )
+
+ relationship_type: Mapped[str] = mapped_column(
+ String, nullable=True, server_default="Employee"
+ ) # Commercial
+ show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+    __table_args__ = ({"comment": "Employee2PostCode Relationship Information"},)
+
+
+class AddressPostcode(CrudCollection, SelectActionWithEmployee):
+ """
+ Postcode class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_postcode"
+ __exclude__fields__ = []
+ __access_by__ = []
+ __many__table__ = RelationshipEmployee2PostCode
+
+ street_id: Mapped[int] = mapped_column(ForeignKey("address_street.id"))
+ street_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Street UUID"
+ )
+ postcode: Mapped[str] = mapped_column(
+ String(32), nullable=False, comment="Postcode"
+ )
+
+ __table_args__ = ({"comment": "Postcode Information"},)
+
+
+class Addresses(CrudCollection):
+ """
+ Address class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "addresses"
+ __exclude__fields__ = []
+
+ build_number: Mapped[str] = mapped_column(
+ String(24), nullable=False, comment="Build Number"
+ )
+ door_number: Mapped[str] = mapped_column(
+ String(24), nullable=True, comment="Door Number"
+ )
+ floor_number: Mapped[str] = mapped_column(
+ String(24), nullable=True, comment="Floor Number"
+ )
+
+ comment_address: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Address"
+ )
+ letter_address: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Address"
+ )
+ short_letter_address: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Address"
+ )
+
+ latitude: Mapped[float] = mapped_column(Numeric(20, 12), server_default="0")
+ longitude: Mapped[float] = mapped_column(Numeric(20, 12), server_default="0")
+
+ street_id: Mapped[int] = mapped_column(
+ ForeignKey("address_street.id"), nullable=False
+ )
+ street_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Street UUID"
+ )
+
+ @classmethod
+ def list_via_employee(cls, token_dict, filter_expr=None):
+ post_code_list = RelationshipEmployee2PostCode.filter_all(
+ RelationshipEmployee2PostCode.employee_id
+ == token_dict.selected_company.employee_id,
+ ).data
+ post_code_id_list = [post_code.member_id for post_code in post_code_list]
+ if not post_code_id_list:
+ raise HTTPException(
+ status_code=404,
+ detail="User has no post code registered. User can not list addresses.",
+ )
+ cls.pre_query = cls.filter_all(cls.post_code_id.in_(post_code_id_list)).query
+ filter_cls = cls.filter_all(*filter_expr or [])
+ cls.pre_query = None
+ return filter_cls.data
+
+ # buildings: Mapped["Build"] = relationship(
+ # "Build", back_populates="addresses", foreign_keys="Build.address_id"
+ # )
+ # site: Mapped["BuildSites"] = relationship(
+ # "BuildSites", back_populates="addresses", foreign_keys="BuildSites.address_id"
+ # )
+ # official_companies: Mapped["Companies"] = relationship(
+ # "Company",
+ # back_populates="official_address",
+ # foreign_keys="Company.official_address_id",
+ # )
+
+ # @classmethod
+ # def create_action(cls, request, create_address: InsertAddress):
+ # from services.redis.auth_actions.token import parse_token_object_to_dict
+ #
+ # token_dict = parse_token_object_to_dict(request=request)
+ # data_dict = create_address.model_dump()
+ # post_code = AddressPostcode.find_one(uu_id=create_address.post_code_uu_id)
+ # if not post_code:
+ # raise HTTPException(
+ # status_code=404,
+ # detail="Post code not found.",
+ # )
+ # if Employee2AddressRelationship.post_code_id.find_one(
+ # employee_id=token_dict.selected_company.employee_id,
+ # post_code_id=post_code.id,
+ # ):
+ # data_dict["post_code_id"] = post_code.id
+ # del data_dict["post_code_uu_id"]
+ # return cls.find_or_create(**create_address.model_dump())
+ # raise HTTPException(
+ # status_code=401,
+ # detail=f"User is not qualified to create address at this post code {post_code.postcode}",
+ # )
+
+ # __table_args__ = (
+ # Index("_address_ndx_00", country_code, b_state, city, district),
+ # {"comment": "Address Information"},
+ # )
+
+
+class AddressGeographicLocations(CrudCollection):
+ """
+    Geographic locations class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_geographic_locations"
+ __exclude__fields__ = []
+
+ geo_table: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Address Table Name"
+ )
+ geo_id: Mapped[int] = mapped_column(
+ Integer, nullable=False, comment="Address Table ID"
+ )
+ geo_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Geographic Location Name"
+ )
+ geo_latitude: Mapped[float] = mapped_column(
+        Numeric(20, 6), server_default="0", comment="Geographic Location Latitude"
+ )
+ geo_longitude: Mapped[float] = mapped_column(
+        Numeric(20, 6), server_default="0", comment="Geographic Location Longitude"
+ )
+ geo_altitude: Mapped[float] = mapped_column(
+        Numeric(20, 6), server_default="0", comment="Geographic Location Altitude"
+ )
+ geo_description: Mapped[str] = mapped_column(
+ Text, nullable=False, comment="Geographic Location Description"
+ )
+ geo_area_size: Mapped[float] = mapped_column(
+ Numeric(20, 2),
+ nullable=True,
+ server_default="0",
+ comment="Geographic Location Area Size",
+ )
+ geo_population: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Geographic Location Population"
+ )
+ # geo_geom_point = mapped_column(Geometry('POINT', srid=4326), nullable=True, comment="Geographic Location Points")
+ # geo_geom_polygon = mapped_column(Geometry('POLYGON', srid=4326), nullable=True,
+ # comment="Geographic Location Vector geographic information (polygon)")
+ # geo_centroid = mapped_column( GEOMETRY(POINT, 4326), nullable=True,
+ # comment="Geographic Location center of gravity of the region(points)")
+
+ __table_args__ = (
+ Index("_address_geographic_locations_ndx_00", geo_table, geo_id),
+ Index("_address_geographic_locations_ndx_01", geo_latitude, geo_longitude),
+ {"comment": "Geographic Location Information"},
+ )
+
+
+class AddressCountry(CrudCollection):
+ """
+ Country class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_country"
+ __exclude__fields__ = []
+
+ country_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="Country Code"
+ )
+ country_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Country Name"
+ )
+ money_code: Mapped[str] = mapped_column(
+ String(12), nullable=True, comment="Money Code"
+ )
+ language: Mapped[str] = mapped_column(
+ String, nullable=True, comment="Language Code"
+ )
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+
+ __table_args__ = (
+ Index("_address_country_ndx_00", money_code),
+ Index("_address_country_ndx_01", country_code, unique=True),
+ {"comment": "Country Information"},
+ )
+
+
+class AddressState(CrudCollection):
+ """
+ State class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_state"
+ __exclude__fields__ = []
+
+ state_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="State Code"
+ )
+ state_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="State Name"
+ )
+ licence_plate: Mapped[str] = mapped_column(
+ String(24), nullable=True, comment="Sign Code"
+ )
+ phone_code: Mapped[str] = mapped_column(
+ String(36), nullable=True, comment="Phone Code"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+
+ country_id: Mapped[int] = mapped_column(ForeignKey("address_country.id"))
+ country_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Country UUID"
+ )
+
+ __table_args__ = (
+ Index(
+ "_address_state_ndx_01",
+ country_id,
+ state_code,
+ unique=True,
+ ),
+ {"comment": "State Information"},
+ )
+
+
+class AddressCity(CrudCollection):
+ """
+ City class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_city"
+ __exclude__fields__ = []
+
+ city_code: Mapped[str] = mapped_column(
+ String(24), nullable=False, comment="City Code"
+ )
+ city_name: Mapped[str] = mapped_column(String, nullable=False, comment="City Name")
+ licence_plate: Mapped[str] = mapped_column(
+ String(24), nullable=True, comment="Sign Code"
+ )
+ phone_code: Mapped[str] = mapped_column(
+ String(36), nullable=True, comment="Phone Code"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+
+ state_id: Mapped[int] = mapped_column(ForeignKey("address_state.id"))
+ state_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="State UUID"
+ )
+
+ __table_args__ = (
+ Index(
+ "_address_city_ndx_01",
+ state_id,
+ city_code,
+ unique=True,
+ ),
+ {"comment": "City Information"},
+ )
+
+
+class AddressDistrict(CrudCollection):
+ """
+ District class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_district"
+ __exclude__fields__ = []
+
+ district_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="District Code"
+ )
+ district_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="District Name"
+ )
+ phone_code: Mapped[str] = mapped_column(
+ String(36), nullable=True, comment="Phone Code"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+
+ city_id: Mapped[int] = mapped_column(
+ ForeignKey("address_city.id"), nullable=False, comment="City ID"
+ )
+ city_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="City UUID"
+ )
+
+ __table_args__ = (
+ Index(
+ "_address_district_ndx_01",
+ city_id,
+ district_code,
+ unique=True,
+ ),
+ {"comment": "District Information"},
+ )
+
+
+class AddressLocality(CrudCollection):
+ """
+ Locality class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_locality"
+ __exclude__fields__ = []
+
+ locality_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="Locality Code"
+ )
+ locality_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Locality Name"
+ )
+ type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name")
+ type_description: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Type Description"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+ address_show: Mapped[bool] = mapped_column(Boolean, server_default="1")
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+
+ district_id: Mapped[int] = mapped_column(
+ ForeignKey("address_district.id"), nullable=False, comment="District ID"
+ )
+ district_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="District UUID"
+ )
+
+ __table_args__ = (
+ Index(
+ "_address_locality_ndx_01",
+ district_id,
+ locality_code,
+ unique=True,
+ ),
+ {"comment": "Locality Information"},
+ )
+
+
+class AddressNeighborhood(CrudCollection):
+ """
+ Neighborhood class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_neighborhood"
+ __exclude__fields__ = []
+
+ neighborhood_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="Neighborhood Code"
+ )
+ neighborhood_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Neighborhood Name"
+ )
+ type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name")
+ type_description: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Type Description"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+ address_show: Mapped[bool] = mapped_column(Boolean, server_default="1")
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+
+ district_id: Mapped[int] = mapped_column(
+ ForeignKey("address_district.id"), nullable=True, comment="District ID"
+ )
+ district_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="District UUID"
+ )
+ locality_id: Mapped[int] = mapped_column(
+ ForeignKey("address_locality.id"), nullable=True, comment="Locality ID"
+ )
+ locality_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Locality UUID"
+ )
+
+ __table_args__ = (
+ Index(
+ "_address_neighborhood_ndx_01",
+ locality_id,
+ neighborhood_code,
+ unique=True,
+ ),
+ {"comment": "Neighborhood Information"},
+ )
+
+
+class AddressStreet(CrudCollection):
+ """
+ Street class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "address_street"
+ __exclude__fields__ = []
+
+ street_code: Mapped[str] = mapped_column(
+ String(16), nullable=False, comment="Street Code"
+ )
+ street_name: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Street Name"
+ )
+ type_code: Mapped[str] = mapped_column(String, nullable=True, comment="Type Name")
+ type_description: Mapped[str] = mapped_column(
+        String, nullable=True, comment="Type Description"
+ )
+ gov_code: Mapped[str] = mapped_column(
+ String(128), nullable=True, comment="Government Code"
+ )
+
+ address_geographic_id: Mapped[int] = mapped_column(
+ BigInteger, nullable=True, comment="Address Geographic Id"
+ )
+ neighborhood_id: Mapped[int] = mapped_column(
+ ForeignKey("address_neighborhood.id"), nullable=False, comment="Neighborhood ID"
+ )
+ neighborhood_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Neighborhood UUID"
+ )
+
+ __table_args__ = (
+ Index("_address_street_ndx_01", neighborhood_id, street_code, unique=True),
+ {"comment": "Street Information"},
+ )
+
+ @classmethod
+ def search_address_text(cls, search_text, token_dict=None):
+ field_dict = {
+ "AddressStreet.uu_id": cls.uu_id,
+ "AddressCountry.uu_id": AddressCountry.uu_id,
+ "AddressState.uu_id": AddressState.uu_id,
+ "AddressCity.uu_id": AddressCity.uu_id,
+ "AddressDistrict.uu_id": AddressDistrict.uu_id,
+ "AddressLocality.uu_id": AddressLocality.uu_id,
+ "AddressNeighborhood.uu_id": AddressNeighborhood.uu_id,
+ "AddressCountry.country_name": AddressCountry.country_name,
+ "AddressState.state_name": AddressState.state_name,
+ "AddressCity.city_name": AddressCity.city_name,
+ "AddressDistrict.district_name": AddressDistrict.district_name,
+ "AddressLocality.locality_name": AddressLocality.locality_name,
+ "AddressNeighborhood.neighborhood_name": AddressNeighborhood.neighborhood_name,
+ "AddressStreet.street_name": cls.street_name,
+ }
+ joined_data = (
+ cls.session.query(*list(field_dict.values()))
+ .select_from(cls)
+ .join(AddressNeighborhood, AddressNeighborhood.id == cls.neighborhood_id)
+ .join(
+ AddressLocality, AddressLocality.id == AddressNeighborhood.locality_id
+ )
+ .join(AddressDistrict, AddressDistrict.id == AddressLocality.district_id)
+ .join(AddressCity, AddressCity.id == AddressDistrict.city_id)
+ .join(AddressState, AddressState.id == AddressCity.state_id)
+ .join(AddressCountry, AddressCountry.id == AddressState.country_id)
+ .filter(
+ or_(
+ AddressNeighborhood.neighborhood_name.ilike(
+ f"%{str(search_text).upper()}%"
+ ),
+ AddressLocality.locality_name.ilike(
+ f"%{str(search_text).upper()}%"
+ ),
+ AddressDistrict.district_name.ilike(
+ f"%{str(search_text).upper()}%"
+ ),
+ # AddressCity.city_name.ilike(f"%{str(search_text).upper()}%"),
+ # AddressState.state_name.ilike(f"%{str(search_text).upper()}%"),
+ # AddressCountry.country_name.ilike(f"%{str(search_text).upper()}%"),
+ cls.street_name.ilike(f"%{str(search_text).upper()}%"),
+ ),
+ )
+ )
+ # select([mytable.c.id]).where(
+ # func.to_tsvector('english', mytable.c.title) \
+ # .match('somestring', postgresql_regconfig='english')
+ # )
+ joined_statement = joined_data
+ joined_data = joined_data.first()
+ if not joined_data:
+ raise HTTPException(
+ status_code=404,
+ detail="No address found with the given search text.",
+ )
+ return dict(
+ query=joined_statement,
+ schema=list(field_dict.keys()),
+ )
+
+
+class OccupantTypes(CrudCollection):
+ """
+ Occupant Types class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "occupant_types"
+ __exclude__fields__ = []
+
+ occupant_type: Mapped[str] = mapped_column(
+ String, nullable=False, comment="Occupant Type"
+ )
+ occupant_description: Mapped[str] = mapped_column(String, server_default="")
+ occupant_code: Mapped[str] = mapped_column(String, server_default="")
+ occupant_category: Mapped[str] = mapped_column(String, server_default="")
+ occupant_category_type: Mapped[str] = mapped_column(String, server_default="")
+ occupant_is_unique: Mapped[bool] = mapped_column(Boolean, server_default="0")
+
+ __table_args__ = ({"comment": "Occupant Types Information"},)
+
+ @classmethod
+ def get_manager_occupant_type(cls):
+ if occupant_types := cls.filter_all(
+ cls.occupant_is_unique == True, cls.occupant_category_type == "MT"
+ ).data:
+ return [occupant.uu_id.__str__() for occupant in occupant_types]
+ raise HTTPException(
+ status_code=404,
+ detail="No manager type found.",
+ )
+
+
+class Contracts(CrudCollection):
+ """
+ Contract class based on declarative_base and BaseMixin via session
+ """
+
+ __tablename__ = "contracts"
+ __exclude__fields__ = []
+
+ contract_type: Mapped[str] = mapped_column(
+ String(5),
+ nullable=False,
+ comment="The code for personnel is P and the code for companies is C.",
+ )
+ contract_title: Mapped[str] = mapped_column(String(255))
+ contract_details: Mapped[str] = mapped_column(Text)
+ contract_terms: Mapped[str] = mapped_column(Text)
+
+ contract_code: Mapped[str] = mapped_column(
+ String(100),
+ nullable=False,
+ comment="contract_code is the unique code given by the system.",
+ )
+ contract_date: Mapped[TIMESTAMP] = mapped_column(
+ TIMESTAMP(timezone=True),
+ server_default="2099-12-31 23:59:59",
+ comment="contract date is the date the contract is made. "
+        "expire start is the start date of the contract, expire end is the end date of the contract.",
+ )
+
+ company_id: Mapped[int] = mapped_column(
+ Integer, ForeignKey("companies.id"), nullable=True
+ )
+ company_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Company UUID"
+ )
+
+ person_id: Mapped[int] = mapped_column(
+ Integer, ForeignKey("people.id"), nullable=True
+ )
+ person_uu_id: Mapped[str] = mapped_column(
+ String, server_default="", comment="Person UUID"
+ )
+
+ @classmethod
+ def retrieve_contact_no(cls):
+ # from api_library.date_time_actions.date_functions import system_arrow
+
+ # todo When create record contract_code == below string
+ related_date, counter = Contracts.client_arrow.now(), 1
+ return (
+ f"{related_date.date().year}{str(cls.contract_type)}{str(counter).zfill(6)}"
+ )
+
+ __table_args__ = (
+ Index("_contract_ndx_01", contract_code, unique=True),
+ {"comment": "Contract Information"},
+ )
+
+
+# def selected_employee_and_duty_details(self, selected_duty_uu_id):
+# from database_sql_models import (
+# Companies,
+# Employees,
+# Duties,
+# Departments,
+# )
+#
+# found_person = People.find_one(id=self.person_id)
+# found_employee = Employees.find_one(
+# people_id=found_person.id, is_confirmed=True
+# )
+# found_duty = Duties.find_one(uu_id=selected_duty_uu_id)
+# found_department = Departments.find_one(id=found_duty.department_id)
+# found_company = Companies.find_one(id=found_department.company_id)
+# return {
+# "duty_list": {
+# "duty_id": found_duty.id,
+# "duty_uu_id": found_duty.uu_id.__str__(),
+# "duty_code": found_duty.duty_code,
+# "duty_name": found_duty.duty_name,
+# "duty_description": found_duty.duty_description,
+# },
+# "employee_list": {
+# "employee_id": found_employee.id,
+# "employee_uu_id": found_employee.uu_id.__str__(),
+# "people_id": found_employee.people_id,
+# "duty_id": found_employee.duty_id,
+# "employee_description": found_employee.employee_description,
+# },
+# "department_list": {
+# "department_id": found_department.id,
+# "department_uu_id": found_department.uu_id.__str__(),
+# "company_id": found_department.company_id,
+# "department_name": found_department.department_name,
+# "department_description": found_department.department_description,
+# },
+# "companies_list": {
+# "company_id": found_company.id,
+# "company_uu_id": found_company.uu_id.__str__(),
+# "formal_name": found_company.formal_name,
+# "company_tag": found_company.company_tag,
+# },
+# }
diff --git a/databases/sql_models/others/enums.py b/databases/sql_models/others/enums.py
new file mode 100644
index 0000000..f8dd442
--- /dev/null
+++ b/databases/sql_models/others/enums.py
@@ -0,0 +1,105 @@
+from fastapi.exceptions import HTTPException
+
+from sqlalchemy import (
+ UUID,
+ String,
+ text,
+)
+from sqlalchemy.orm import (
+ Mapped,
+ mapped_column,
+)
+from databases.sql_models.core_mixin import CrudCollection
+
+
class ApiEnumDropdown(CrudCollection):
    """Dropdown/enum lookup table.

    Each row is one ``(enum_class, key, value)`` entry; other tables reference
    rows through their ``uu_id``.
    """

    __tablename__ = "api_enum_dropdown"
    __exclude__fields__ = ["enum_class"]

    id: Mapped[int] = mapped_column(primary_key=True)
    uu_id: Mapped[str] = mapped_column(
        UUID, server_default=text("gen_random_uuid()"), index=True, unique=True
    )
    enum_class: Mapped[str] = mapped_column(
        String, nullable=False, comment="Enum Constant Name"
    )
    key: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Key")
    value: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Value")
    description: Mapped[str] = mapped_column(String, nullable=True)

    # Typo fixed in the persisted table-level DDL comment: "objets" -> "objects".
    __table_args__ = ({"comment": "Enum objects that are linked to tables"},)

    @classmethod
    def get_by_uuid(cls, uuid: str):
        """Return the enum row matching ``uuid`` (system scope), or None."""
        return cls.filter_by_one(system=True, uu_id=str(uuid)).data

    @classmethod
    def _search_enum(
        cls, enum_class: str, search_key: str = None, search_uu_id: str = None
    ):
        """Shared search helper for a single enum class.

        Precedence: a single row by ``search_uu_id``, else a single row by
        ``search_key``; when neither matches (or neither is given), fall back
        to listing every row of ``enum_class``.
        """
        if search_uu_id:
            if found := cls.filter_one(
                cls.enum_class.in_([enum_class]),
                cls.uu_id == search_uu_id,
                system=True,
            ).data:
                return found
        elif search_key:
            if found := cls.filter_one(
                cls.enum_class.in_([enum_class]), cls.key == search_key, system=True
            ).data:
                return found
        return cls.filter_all(cls.enum_class.in_([enum_class]), system=True).data

    @classmethod
    def get_debit_search(cls, search_debit: str = None, search_uu_id: str = None):
        """Search "DebitTypes" rows; see :meth:`_search_enum` for precedence."""
        return cls._search_enum(
            "DebitTypes", search_key=search_debit, search_uu_id=search_uu_id
        )

    @classmethod
    def get_due_types(cls):
        """Return the uu_ids (as str) of the "BDT-A"/"BDT-D" dues types.

        Raises:
            HTTPException: 404 when neither key exists.
        """
        if due_list := cls.filter_all(
            cls.enum_class == "BuildDuesTypes",
            cls.key.in_(["BDT-A", "BDT-D"]),
            system=True,
        ).data:
            return [str(due.uu_id) for due in due_list]
        raise HTTPException(
            status_code=404,
            detail="No dues types found",
        )

    @classmethod
    def due_type_search(cls, search_management: str = None, search_uu_id: str = None):
        """Search "BuildDuesTypes" rows; see :meth:`_search_enum` for precedence."""
        return cls._search_enum(
            "BuildDuesTypes", search_key=search_management, search_uu_id=search_uu_id
        )

    def get_enum_dict(self):
        """Serialize this row to a plain dict (``uu_id`` rendered as str)."""
        return {
            "uu_id": str(self.uu_id),
            "enum_class": self.enum_class,
            "key": self.key,
            "value": self.value,
            "description": self.description,
        }

    @classmethod
    def uuid_of_enum(cls, enum_class: str, key: str):
        """Return the uu_id (as str) of the row matching ``enum_class``/``key``.

        NOTE(review): when no row matches, this returns the literal string
        "None" (``str(None)``), not ``None`` -- callers comparing against
        ``None`` will not see a miss. Confirm whether that is intended.
        """
        return str(
            getattr(
                cls.filter_one(
                    cls.enum_class == enum_class, cls.key == key, system=True
                ).data,
                "uu_id",
                None,
            )
        )
+
+
+ApiEnumDropdown.set_session(ApiEnumDropdown.__session__)
diff --git a/databases/sql_models/postgres_database.py b/databases/sql_models/postgres_database.py
new file mode 100644
index 0000000..ed817ba
--- /dev/null
+++ b/databases/sql_models/postgres_database.py
@@ -0,0 +1,21 @@
from api_configs import WagDatabase
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker


# Engine settings: pool_pre_ping validates pooled connections before use;
# "READ COMMITTED" matches PostgreSQL's default isolation level.
engine_config = {
    "url": WagDatabase.DATABASE_URL,
    "pool_size": 20,
    "max_overflow": 10,
    "echo": False,
    "isolation_level": "READ COMMITTED",
    "pool_pre_ping": True,
}

engine = create_engine(**engine_config)

# Fix: ``echo`` is an Engine option, not a Session one -- passing it to
# sessionmaker raised TypeError as soon as a session was created. ``autocommit``
# was removed from Session in SQLAlchemy 2.0 (non-autocommit is the default).
SessionLocal = sessionmaker(bind=engine, autoflush=False)
session = scoped_session(sessionmaker(bind=engine))

# ``declarative_base`` now lives in sqlalchemy.orm; the old
# sqlalchemy.ext.declarative location is deprecated.
Base = declarative_base()
# Expose the scoped session on the declarative base so models can query via it.
Base.session = session
diff --git a/databases/sql_models/response_model.py b/databases/sql_models/response_model.py
new file mode 100644
index 0000000..9fba8cd
--- /dev/null
+++ b/databases/sql_models/response_model.py
@@ -0,0 +1,43 @@
class AlchemyResponse:
    """
    Thin wrapper around a SQLAlchemy query with safe accessors.

    alchemy_object = [AlchemyObject].filter_non_deleted() -> AlchemyResponse
    alchemy_object.get(1) -> Get the first object in the list (1-based)
    alchemy_object.data -> Get the list of objects
    alchemy_object.count -> Get the count of objects
    """

    def __init__(self, query, first: bool = False):
        # first=True: ``data`` resolves via query.first(); otherwise query.all().
        self.first = first
        self.__query = query

    def get(self, index: int):
        """Return the 1-based ``index``-th object, or None when out of range.

        Fix: the original accepted ``index <= 0``, which silently wrapped
        around via Python's negative list indexing (get(0) returned the
        *last* element).
        """
        count = self.count
        if count and 1 <= index <= count:
            return self.data[index - 1]
        return None

    @property
    def data(self):
        """Resolve the query.

        On a database error the session is cleaned up (expunge + rollback) and
        an empty result is returned (None for ``first``, [] otherwise) instead
        of propagating -- deliberate best-effort behavior, preserved here.
        """
        if self.first:
            try:
                return self.__query.first()
            except Exception:
                self.__query.session.expunge_all()
                self.__query.session.rollback()
                return None
        try:
            return self.__query.all()
        except Exception:
            self.__query.session.expunge_all()
            self.__query.session.rollback()
            return []

    @property
    def count(self):
        # Delegates to the query; issues a COUNT each access (not cached).
        return self.__query.count()

    @property
    def query(self):
        """Expose the wrapped query for further chaining."""
        return self.__query
diff --git a/databases/sql_models/rules/rules.py b/databases/sql_models/rules/rules.py
new file mode 100644
index 0000000..6619d5e
--- /dev/null
+++ b/databases/sql_models/rules/rules.py
@@ -0,0 +1,28 @@
+from sqlalchemy import String, Boolean
+from databases.sql_models.core_mixin import CrudCollection
+from sqlalchemy.orm import mapped_column, Mapped
+
+
class EndpointRestriction(CrudCollection):
    """
    Declarative model describing an API endpoint subject to restriction.

    Stores the endpoint's implementing function name, route name, HTTP method,
    description and a unique code. Rows are presumably matched against
    incoming requests by an authorization layer elsewhere -- confirm with
    callers; nothing in this module enforces the restriction itself.
    """

    __tablename__ = "endpoint_restriction"
    __exclude__fields__ = []

    # Python function name that implements the endpoint handler.
    endpoint_function: Mapped[str] = mapped_column(
        String, server_default="", comment="Function name of the API endpoint"
    )
    # Route / display name of the endpoint.
    endpoint_name: Mapped[str] = mapped_column(
        String, server_default="", comment="Name of the API endpoint"
    )
    # HTTP verb (e.g. GET/POST); stored as free text, not validated here.
    endpoint_method: Mapped[str] = mapped_column(
        String, server_default="", comment="HTTP method used by the endpoint"
    )
    endpoint_desc: Mapped[str] = mapped_column(
        String, server_default="", comment="Description of the endpoint"
    )
    # Unique business key identifying the endpoint across environments.
    endpoint_code: Mapped[str] = mapped_column(
        String, server_default="", unique=True, comment="Unique code for the endpoint"
    )
diff --git a/databases/sql_models/sql_operations.py b/databases/sql_models/sql_operations.py
new file mode 100644
index 0000000..a2a760e
--- /dev/null
+++ b/databases/sql_models/sql_operations.py
@@ -0,0 +1,239 @@
+from json import dumps
+
+from sqlalchemy import BinaryExpression
+from sqlalchemy.exc import SQLAlchemyError
+
+from databases.sql_models.response_model import AlchemyResponse
+from databases.sql_models.postgres_database import Base
+
+
class FilterAttributes:
    """
    Query/filter mixin for SQLAlchemy models.

    Usage:
        alchemy_objects = Model.filter_by_all(name="Something").data [, ]
        alchemy_object = Model.filter_by_one(name="Something").data
        alchemy_objects = Model.filter_all(Model.name == "Something").data [, ]
        alchemy_object = Model.filter_one(Model.name == "Something").data

    All filter_* methods return an AlchemyResponse wrapper; pass system=True
    to bypass the implicit active/confirmed/not-expired criteria.
    """

    __abstract__ = True
    __session__ = Base.session  # The session to use in the model.

    # NOTE(review): these are class-level attributes mutated from classmethods
    # (cls.total_count = ..., cls.filter_attr = None), i.e. shared across every
    # caller of the model class -- not safe under concurrent requests; confirm.
    pre_query = None  # Optional pre-built query used instead of cls.query.
    total_count = None  # Count of the last filtered query (set as a side effect).
    filter_attr = None  # Pagination/ordering attributes consumed by filter_* calls.
    http_exception = None  # Exception class injected elsewhere (e.g. HTTPException).
    status = None  # Status-code namespace injected elsewhere (e.g. fastapi.status).

    def flush(self):
        """Flush the current session."""
        try:
            self.__session__.add(self)
            self.__session__.flush()
            return self
        except SQLAlchemyError as e:
            # Rolls back and raises through the injected http_exception class;
            # only the first line of the underlying DB error is exposed.
            self.raise_http_exception(
                status_code="HTTP_400_BAD_REQUEST",
                error_case=e.__class__.__name__,
                data={},
                message=str(e.__context__).split("\n")[0],
            )

    def destroy(self):
        """Delete the record from the database."""
        self.__session__.delete(self)
        self.__session__.commit()

    @classmethod
    def save(cls):
        """Saves the updated model to the current entity db.

        NOTE(review): this is a classmethod committing the shared session, so
        it persists *all* pending changes, not just one instance.
        """
        try:
            cls.__session__.commit()
        except SQLAlchemyError as e:
            cls.raise_http_exception(
                status_code="HTTP_400_BAD_REQUEST",
                error_case=e.__class__.__name__,
                data={},
                message=str(e.__context__).split("\n")[0],
            )

    def save_and_confirm(self):
        """Saves the updated model to the current entity db and marks it confirmed.

        Relies on ``update`` being provided by another mixin (not visible here).
        """
        try:
            self.save()
            self.update(is_confirmed=True)
            self.save()
        except SQLAlchemyError as e:
            self.raise_http_exception(
                status_code="HTTP_400_BAD_REQUEST",
                error_case=e.__class__.__name__,
                data={},
                message=str(e.__context__).split("\n")[0],
            )

    @classmethod
    def _query(cls):
        """Returns the query to use in the model."""
        return cls.pre_query if cls.pre_query else cls.query

    @classmethod
    def add_query_to_filter(cls, filter_query, filter_list):
        """Apply ordering and page/size pagination from ``filter_list``."""
        return (
            filter_query.order_by(
                # An order_type starting with "d" (e.g. "desc") selects
                # descending order; anything else ascending.
                getattr(cls, filter_list.get("order_field")).desc()
                if str(filter_list.get("order_type"))[0] == "d"
                else getattr(cls, filter_list.get("order_field")).asc()
            )
            .limit(filter_list.get("size"))
            .offset(int((filter_list.get("page")) - 1) * int(filter_list.get("size")))
            .populate_existing()
        )

    @classmethod
    def get_filter_attributes(cls):
        """
        Returns the filter to use pagination and ordering.
        page is the current page number.
        size is the number of records per page.
        order_field is the field to order by.
        order_type is the order type (asc or desc).
        include_joins returns the joined tables when related field names are given as a list.
        """
        return {
            "page": getattr(cls.filter_attr, "page", 1),
            "size": getattr(cls.filter_attr, "size", 10),
            "order_field": getattr(cls.filter_attr, "order_field", "id"),
            "order_type": getattr(cls.filter_attr, "order_type", "asc"),
            "include_joins": getattr(cls.filter_attr, "include_joins", []),
            "query": getattr(cls.filter_attr, "query", {}),
        }

    @classmethod
    def add_new_arg_to_args(cls, args_list, argument, value):
        """Append ``value`` unless a criterion on column ``argument`` exists.

        NOTE(review): only BinaryExpression criteria survive the set() below;
        composite clauses (and_/or_) in ``args_list`` are silently dropped --
        confirm that is intended.
        """
        new_arg_list = list(
            set(
                args_
                for args_ in list(args_list)
                if isinstance(args_, BinaryExpression)
            )
        )
        # Left side of a BinaryExpression is the column; .key is its name.
        arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
        # arg_right = lambda arg_obj: getattr(getattr(arg_obj, "right", None), "value", None)
        if not any(True for arg in new_arg_list if arg_left(arg_obj=arg) == argument):
            new_arg_list.append(value)
        return tuple(new_arg_list)

    @classmethod
    def get_not_expired_query_arg(cls, arg):
        """Add expiry_starts and expiry_ends to the query."""
        from api_library.date_time_actions.date_functions import system_arrow

        # Row is current when expiry_starts <= now < expiry_ends (string compare
        # against the DB column -- assumes ISO-formatted timestamps; confirm).
        arg = cls.add_new_arg_to_args(
            arg, "expiry_ends", cls.expiry_ends > str(system_arrow.now())
        )
        arg = cls.add_new_arg_to_args(
            arg, "expiry_starts", cls.expiry_starts <= str(system_arrow.now())
        )
        return arg

    @classmethod
    def get_active_and_confirmed_query_arg(cls, arg):
        """Add active and confirmed to the query."""
        arg = cls.add_new_arg_to_args(arg, "is_confirmed", cls.is_confirmed == True)
        arg = cls.add_new_arg_to_args(arg, "active", cls.active == True)
        arg = cls.add_new_arg_to_args(arg, "deleted", cls.deleted == False)
        return arg

    @classmethod
    def select_only(
        cls, *args, select_args: list, order_by=None, limit=None, system=False
    ):
        """Filter and project only ``select_args`` columns; sets cls.total_count."""
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)
        query = cls._query().filter(*args).with_entities(*select_args)
        cls.total_count = query.count()
        if order_by is not None:
            query = query.order_by(order_by)
        if limit:
            query = query.limit(limit)
        return AlchemyResponse(query=query, first=False)

    @classmethod
    def filter_by_all(cls, system=False, **kwargs):
        """
        Filters all the records regardless of is_deleted, is_confirmed.

        Unless system=True, is_confirmed=True is forced into the criteria.
        Applies pagination/ordering when cls.filter_attr is set (and clears it).
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)
        query = cls._query().filter_by(**kwargs)
        cls.total_count = query.count()
        if cls.filter_attr:
            filter_list = cls.get_filter_attributes()
            data_query = cls.add_query_to_filter(query, filter_list)
            cls.filter_attr = None
            return AlchemyResponse(query=data_query, first=False)
        return AlchemyResponse(query=query, first=False)

    @classmethod
    def filter_by_one(cls, system=False, **kwargs):
        """
        Filters one record regardless of is_deleted, is_confirmed.
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)
        query = cls._query().filter_by(**kwargs)
        # Count is assumed, not measured, for single-record lookups.
        cls.total_count = 1
        return AlchemyResponse(query=query, first=True)

    @classmethod
    def filter_all(cls, *args, system=False):
        """
        Filters all the records regardless of is_deleted, is_confirmed.

        Merges "smart" criteria built from filter_attr.query (via filter_expr,
        provided by another mixin) into ``args`` before querying.
        """
        arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)
        filter_list = cls.get_filter_attributes()
        if filter_list.get("query", None):
            for smart_iter in cls.filter_expr(**filter_list.get("query", {})):
                if key := arg_left(smart_iter):
                    args = cls.add_new_arg_to_args(args, key, smart_iter)
        query = cls._query().filter(*args)
        cls.total_count = query.count()
        if cls.filter_attr:
            data_query = cls.add_query_to_filter(query, filter_list)
            cls.filter_attr = None
            return AlchemyResponse(query=data_query, first=False)
        cls.filter_attr = None
        return AlchemyResponse(query=query, first=False)

    @classmethod
    def filter_one(cls, *args, system=False, expired: bool = False):
        """
        Filters one record regardless of is_deleted, is_confirmed.
        """
        # NOTE(review): ``expired`` is currently unused.
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)
        query = cls._query().filter(*args)
        cls.total_count = 1
        return AlchemyResponse(query=query, first=True)

    @classmethod
    def raise_http_exception(cls, status_code, error_case, data, message):
        """Roll back the session and raise the injected HTTP exception.

        ``status_code`` is a name looked up on the injected ``cls.status``
        namespace; falls back to "HTTP_404_NOT_FOUND" (as a plain string) when
        the name is missing.
        """
        cls.__session__.rollback()
        raise cls.http_exception(
            status_code=getattr(cls.status, status_code, "HTTP_404_NOT_FOUND"),
            detail=dumps(
                {
                    "data": data,
                    "error": error_case,
                    "message": message,
                }
            ),
        )
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..f105854
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,25 @@
+[project]
+name = "wag-managment-api-service-version-3"
+version = "0.1.0"
+description = "Wag Python API Service"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "alembic>=1.14.0",
+ "arrow>=1.3.0",
+ "cryptography>=43.0.3",
+ "faker>=30.8.2",
+ "fastapi>=0.115.4",
+ "pandas>=2.2.3",
+ "prometheus-fastapi-instrumentator>=7.0.0",
+ "psycopg2-binary>=2.9.10",
+ "pymongo>=4.10.1",
+ "redis>=5.2.0",
+ "redmail>=0.6.0",
+ "requests>=2.32.3",
+ "rsa>=4.9",
+ "sqlalchemy-mixins>=2.0.5",
+ "textdistance>=4.6.3",
+ "unidecode>=1.3.8",
+ "uvicorn>=0.32.0",
+]