app apis and service stroe updated
This commit is contained in:
parent
b29a229dd3
commit
364a4df4b1
|
|
@ -3,5 +3,5 @@
|
||||||
<component name="Black">
|
<component name="Black">
|
||||||
<option name="sdkName" value="Python 3.10 (api_backend_hag)" />
|
<option name="sdkName" value="Python 3.10 (api_backend_hag)" />
|
||||||
</component>
|
</component>
|
||||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (api_backend_hag)" project-jdk-type="Python SDK" />
|
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 virtualenv at ~/git-gitea-evyos/wag-managment-api-service-version-3/.venv" project-jdk-type="Python SDK" />
|
||||||
</project>
|
</project>
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
<module type="PYTHON_MODULE" version="4">
|
<module type="PYTHON_MODULE" version="4">
|
||||||
<component name="NewModuleRootManager">
|
<component name="NewModuleRootManager">
|
||||||
<content url="file://$MODULE_DIR$" />
|
<content url="file://$MODULE_DIR$" />
|
||||||
<orderEntry type="jdk" jdkName="Python 3.10 (api_backend_hag)" jdkType="Python SDK" />
|
<orderEntry type="jdk" jdkName="Python 3.12 virtualenv at ~/git-gitea-evyos/wag-managment-api-service-version-3/.venv" jdkType="Python SDK" />
|
||||||
<orderEntry type="sourceFolder" forTests="false" />
|
<orderEntry type="sourceFolder" forTests="false" />
|
||||||
</component>
|
</component>
|
||||||
</module>
|
</module>
|
||||||
|
|
@ -0,0 +1,93 @@
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitattributes
|
||||||
|
|
||||||
|
|
||||||
|
# CI
|
||||||
|
.codeclimate.yml
|
||||||
|
.travis.yml
|
||||||
|
.taskcluster.yml
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-compose.yml
|
||||||
|
service_app/Dockerfile
|
||||||
|
.docker
|
||||||
|
.dockerignore
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
**/__pycache__/
|
||||||
|
**/*.py[cod]
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
service_app/env/
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Virtual environment
|
||||||
|
service_app/.env
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# Python mode for VIM
|
||||||
|
.ropeproject
|
||||||
|
**/.ropeproject
|
||||||
|
|
||||||
|
# Vim swap files
|
||||||
|
**/*.swp
|
||||||
|
|
||||||
|
# VS Code
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
test_application/
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -0,0 +1,93 @@
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitattributes
|
||||||
|
|
||||||
|
|
||||||
|
# CI
|
||||||
|
.codeclimate.yml
|
||||||
|
.travis.yml
|
||||||
|
.taskcluster.yml
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-compose.yml
|
||||||
|
service_app/Dockerfile
|
||||||
|
.docker
|
||||||
|
.dockerignore
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
**/__pycache__/
|
||||||
|
**/*.py[cod]
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
service_app/env/
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Virtual environment
|
||||||
|
service_app/.env
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# Python mode for VIM
|
||||||
|
.ropeproject
|
||||||
|
**/.ropeproject
|
||||||
|
|
||||||
|
# Vim swap files
|
||||||
|
**/*.swp
|
||||||
|
|
||||||
|
# VS Code
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
test_application/
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -0,0 +1,162 @@
|
||||||
|
# ---> Python
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.idea/
|
||||||
|
.Python
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
service_app/.env
|
||||||
|
.venv
|
||||||
|
service_app/env/
|
||||||
|
venv/
|
||||||
|
service_app/env/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
|
@ -0,0 +1,32 @@
|
||||||
|
FROM python:3.12-slim-bookworm
|
||||||
|
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
ENV PYTHONUNBUFFERED 1
|
||||||
|
|
||||||
|
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||||
|
|
||||||
|
COPY ../service_app/requirements.txt .
|
||||||
|
|
||||||
|
RUN uv venv
|
||||||
|
RUN uv pip install -r requirements.txt
|
||||||
|
|
||||||
|
COPY ../service_app ./service_app
|
||||||
|
|
||||||
|
COPY ../databases ./service_app/databases
|
||||||
|
COPY ../api_services ./service_app/api_services
|
||||||
|
COPY ../api_objects ./service_app/api_objects
|
||||||
|
COPY ../api_configs ./service_app/api_configs
|
||||||
|
COPY ../api_events ./service_app/api_events
|
||||||
|
COPY ../api_library ./service_app/api_library
|
||||||
|
COPY ../api_validations ./service_app/api_validations
|
||||||
|
|
||||||
|
WORKDIR /service_app
|
||||||
|
|
||||||
|
CMD ["uv", "run", "app.py"]
|
||||||
|
|
||||||
|
# Old File
|
||||||
|
#FROM python:3.10
|
||||||
|
|
||||||
|
#RUN pip install --upgrade pip
|
||||||
|
#RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
||||||
|
#CMD ["python", "-m", "app"]
|
||||||
|
|
@ -0,0 +1,37 @@
|
||||||
|
import uvicorn
|
||||||
|
import routers
|
||||||
|
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
|
||||||
|
from middlewares.token_middleware import AuthHeaderMiddleware
|
||||||
|
from application.create_file import create_app
|
||||||
|
from api_objects.errors.errors_dictionary import ErrorHandlers
|
||||||
|
from prometheus_fastapi_instrumentator import Instrumentator
|
||||||
|
|
||||||
|
app = create_app(routers=routers)
|
||||||
|
Instrumentator().instrument(app=app).expose(app=app)
|
||||||
|
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
**{
|
||||||
|
"allow_origins": ["*"],
|
||||||
|
"allow_credentials": True,
|
||||||
|
"allow_methods": ["*"],
|
||||||
|
"allow_headers": ["*"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
app.add_middleware(AuthHeaderMiddleware)
|
||||||
|
|
||||||
|
app.add_exception_handler(HTTPException, ErrorHandlers.exception_handler_http)
|
||||||
|
app.add_exception_handler(Exception, ErrorHandlers.exception_handler_exception)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
uvicorn_config = {
|
||||||
|
"app": "app:app",
|
||||||
|
"host": "0.0.0.0",
|
||||||
|
"port": 41575,
|
||||||
|
"log_level": "info",
|
||||||
|
"reload": True,
|
||||||
|
}
|
||||||
|
uvicorn.Server(uvicorn.Config(**uvicorn_config)).run()
|
||||||
|
|
@ -0,0 +1,64 @@
|
||||||
|
def create_app(routers):
|
||||||
|
from fastapi import FastAPI
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
from fastapi.openapi.utils import get_openapi
|
||||||
|
from fastapi.responses import RedirectResponse
|
||||||
|
|
||||||
|
from api_configs import Config
|
||||||
|
|
||||||
|
api_app = FastAPI(title=str(Config.TITLE), default_response_class=JSONResponse)
|
||||||
|
|
||||||
|
@api_app.get("/", include_in_schema=False, summary=str(Config.DESCRIPTION))
|
||||||
|
async def home():
|
||||||
|
return RedirectResponse(url="/docs")
|
||||||
|
|
||||||
|
for router in list(
|
||||||
|
[
|
||||||
|
getattr(routers, router)
|
||||||
|
for router in routers.__all__
|
||||||
|
if getattr(routers, router)
|
||||||
|
]
|
||||||
|
):
|
||||||
|
api_app.include_router(router)
|
||||||
|
|
||||||
|
openapi_schema = get_openapi(
|
||||||
|
title=Config.TITLE,
|
||||||
|
description=Config.DESCRIPTION,
|
||||||
|
version="0.0.1",
|
||||||
|
routes=api_app.routes,
|
||||||
|
)
|
||||||
|
|
||||||
|
if "components" in openapi_schema:
|
||||||
|
openapi_schema["components"]["securitySchemes"] = {
|
||||||
|
"Bearer Auth": {
|
||||||
|
"type": "apiKey",
|
||||||
|
"in": "header",
|
||||||
|
"name": "evyos-session-key",
|
||||||
|
"description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for route in api_app.routes:
|
||||||
|
path = str(getattr(route, "path"))
|
||||||
|
if route.include_in_schema:
|
||||||
|
methods = [method.lower() for method in getattr(route, "methods")]
|
||||||
|
for method in methods:
|
||||||
|
insecure_paths = Config.INSECURE_PATHS.copy()
|
||||||
|
insecure_paths.remove("/authentication/select")
|
||||||
|
if path not in insecure_paths:
|
||||||
|
openapi_schema["paths"][path][method]["security"] = [
|
||||||
|
{"Bearer Auth": []}
|
||||||
|
]
|
||||||
|
openapi_schema["paths"][path][method]["responses"]["403"] = {
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/HTTPValidationError"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Returned if user is unauthorized.",
|
||||||
|
}
|
||||||
|
|
||||||
|
api_app.openapi_schema = openapi_schema
|
||||||
|
return api_app
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
from .token_middleware import AuthHeaderMiddleware
|
||||||
|
|
@ -0,0 +1,116 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
from time import perf_counter
|
||||||
|
from api_configs import Config
|
||||||
|
from fastapi import status
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware
|
||||||
|
|
||||||
|
|
||||||
|
class MiddlewareLogs:
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def log_error(self, log_message):
|
||||||
|
print(log_message)
|
||||||
|
|
||||||
|
|
||||||
|
def log_middlewares_exception(endpoint, token_user, message, request):
|
||||||
|
MiddlewareLogs.log_error(
|
||||||
|
str(
|
||||||
|
{
|
||||||
|
"log_type": "Authentication",
|
||||||
|
"log_message": message,
|
||||||
|
"log_action": "User",
|
||||||
|
"log_data": json.dumps(
|
||||||
|
{
|
||||||
|
"endpoint": endpoint,
|
||||||
|
"user": str(token_user),
|
||||||
|
"request": str(request.headers),
|
||||||
|
}
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthHeaderMiddleware(BaseHTTPMiddleware):
|
||||||
|
|
||||||
|
async def dispatch(self, request, call_next):
|
||||||
|
start_time, token_user, endpoint = perf_counter(), None, None
|
||||||
|
|
||||||
|
if check_if_path_secure(request=request, insecure_paths=Config.INSECURE_PATHS):
|
||||||
|
endpoint = str(getattr(getattr(request, "url", None), "path", None))
|
||||||
|
if un_auth := check_if_token_is_not_valid(
|
||||||
|
request=request, endpoint_name=endpoint
|
||||||
|
):
|
||||||
|
auth, token_user = un_auth
|
||||||
|
if not auth == "valid":
|
||||||
|
# log_middlewares_exception(endpoint, token_user, "auth", request)
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED, detail=auth
|
||||||
|
)
|
||||||
|
|
||||||
|
response = await call_next(request)
|
||||||
|
prepare_response_needs(response, start_time)
|
||||||
|
# if endpoint and token_user:
|
||||||
|
# log_middlewares_exception(endpoint, token_user, "Request is completed", request)
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
def prepare_response_needs(response, start_time):
|
||||||
|
end_time = perf_counter()
|
||||||
|
response.headers["request-starts"], response.headers["request-ends"] = str(
|
||||||
|
start_time
|
||||||
|
), str(end_time)
|
||||||
|
response.headers["elapsed-Time"] = str(float(end_time) - float(start_time)) + " ms"
|
||||||
|
|
||||||
|
|
||||||
|
def check_if_path_secure(request, insecure_paths) -> bool:
|
||||||
|
return (
|
||||||
|
str(getattr(getattr(request, "url", None), "path", None)) not in insecure_paths
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def check_if_token_is_not_valid(request, endpoint_name):
|
||||||
|
from api_services.redis.functions import get_object_via_access_key
|
||||||
|
|
||||||
|
token_user = get_object_via_access_key(request)
|
||||||
|
if not token_user:
|
||||||
|
return "Session geçerli değil. Lütfen tekrar giriş yapınız.", token_user
|
||||||
|
|
||||||
|
return "valid", token_user
|
||||||
|
|
||||||
|
# on_token_user: Users = Users.find_one(uu_id=token_user["uu_id"])
|
||||||
|
# on_token_people: People = on_token_user.person
|
||||||
|
# #
|
||||||
|
# # if on_token_people.priority == 78:
|
||||||
|
# # return "valid", token_user
|
||||||
|
#
|
||||||
|
# if not token_user.get("duty_id", None):
|
||||||
|
# return (
|
||||||
|
# "Kullanıcı hiçbir yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
|
||||||
|
# token_user,
|
||||||
|
# )
|
||||||
|
|
||||||
|
#
|
||||||
|
# if endpoint_name in release_endpoint:
|
||||||
|
# return "valid", token_user
|
||||||
|
#
|
||||||
|
# if company_duty_app := CompanyDutyApp.find_one(
|
||||||
|
# endpoint_name=str("".join(endpoint_name.split("/")[:-1])),
|
||||||
|
# company_duty_id=int(token_user.get("duty_id")),
|
||||||
|
# ):
|
||||||
|
# if not company_duty_app.is_access_valid(
|
||||||
|
# endpoint_ext=endpoint_name.split("/")[-1]
|
||||||
|
# ):
|
||||||
|
# return (
|
||||||
|
# "Kullanıcı yetkili değildir. Supervisor ile iletişime geçiniz.",
|
||||||
|
# token_user,
|
||||||
|
# )
|
||||||
|
# else:
|
||||||
|
# return (
|
||||||
|
# "Kullanıcıya yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
|
||||||
|
# token_user,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# return "valid", token_user
|
||||||
|
|
@ -0,0 +1,25 @@
|
||||||
|
[project]
|
||||||
|
name = "wag-managment-api-service-version-3"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Wag Python API Service"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"alembic>=1.14.0",
|
||||||
|
"arrow>=1.3.0",
|
||||||
|
"cryptography>=43.0.3",
|
||||||
|
"faker>=30.8.2",
|
||||||
|
"fastapi>=0.115.4",
|
||||||
|
"pandas>=2.2.3",
|
||||||
|
"prometheus-fastapi-instrumentator>=7.0.0",
|
||||||
|
"psycopg2-binary>=2.9.10",
|
||||||
|
"pymongo>=4.10.1",
|
||||||
|
"redis>=5.2.0",
|
||||||
|
"redmail>=0.6.0",
|
||||||
|
"requests>=2.32.3",
|
||||||
|
"rsa>=4.9",
|
||||||
|
"sqlalchemy-mixins>=2.0.5",
|
||||||
|
"textdistance>=4.6.3",
|
||||||
|
"unidecode>=1.3.8",
|
||||||
|
"uvicorn>=0.32.0",
|
||||||
|
]
|
||||||
|
|
@ -0,0 +1,93 @@
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitattributes
|
||||||
|
|
||||||
|
|
||||||
|
# CI
|
||||||
|
.codeclimate.yml
|
||||||
|
.travis.yml
|
||||||
|
.taskcluster.yml
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-compose.yml
|
||||||
|
service_app/Dockerfile
|
||||||
|
.docker
|
||||||
|
.dockerignore
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
**/__pycache__/
|
||||||
|
**/*.py[cod]
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
service_app/env/
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Virtual environment
|
||||||
|
service_app/.env
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# Python mode for VIM
|
||||||
|
.ropeproject
|
||||||
|
**/.ropeproject
|
||||||
|
|
||||||
|
# Vim swap files
|
||||||
|
**/*.swp
|
||||||
|
|
||||||
|
# VS Code
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
test_application/
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -0,0 +1,162 @@
|
||||||
|
# ---> Python
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.idea/
|
||||||
|
.Python
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
service_app/.env
|
||||||
|
.venv
|
||||||
|
service_app/env/
|
||||||
|
venv/
|
||||||
|
service_app/env/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
|
@ -0,0 +1,32 @@
|
||||||
|
FROM python:3.12-slim-bookworm
|
||||||
|
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
ENV PYTHONUNBUFFERED 1
|
||||||
|
|
||||||
|
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||||
|
|
||||||
|
COPY ../service_app/requirements.txt .
|
||||||
|
|
||||||
|
RUN uv venv
|
||||||
|
RUN uv pip install -r requirements.txt
|
||||||
|
|
||||||
|
COPY ../service_app ./service_app
|
||||||
|
|
||||||
|
COPY ../databases ./service_app/databases
|
||||||
|
COPY ../api_services ./service_app/api_services
|
||||||
|
COPY ../api_objects ./service_app/api_objects
|
||||||
|
COPY ../api_configs ./service_app/api_configs
|
||||||
|
COPY ../api_events ./service_app/api_events
|
||||||
|
COPY ../api_library ./service_app/api_library
|
||||||
|
COPY ../api_validations ./service_app/api_validations
|
||||||
|
|
||||||
|
WORKDIR /service_app
|
||||||
|
|
||||||
|
CMD ["uv", "run", "app.py"]
|
||||||
|
|
||||||
|
# Old File
|
||||||
|
#FROM python:3.10
|
||||||
|
|
||||||
|
#RUN pip install --upgrade pip
|
||||||
|
#RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
||||||
|
#CMD ["python", "-m", "app"]
|
||||||
|
|
@ -0,0 +1,37 @@
|
||||||
|
import uvicorn
|
||||||
|
import routers
|
||||||
|
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
|
||||||
|
from middlewares.token_middleware import AuthHeaderMiddleware
|
||||||
|
from application.create_file import create_app
|
||||||
|
from api_objects.errors.errors_dictionary import ErrorHandlers
|
||||||
|
from prometheus_fastapi_instrumentator import Instrumentator
|
||||||
|
|
||||||
|
app = create_app(routers=routers)
|
||||||
|
Instrumentator().instrument(app=app).expose(app=app)
|
||||||
|
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
**{
|
||||||
|
"allow_origins": ["*"],
|
||||||
|
"allow_credentials": True,
|
||||||
|
"allow_methods": ["*"],
|
||||||
|
"allow_headers": ["*"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
app.add_middleware(AuthHeaderMiddleware)
|
||||||
|
|
||||||
|
app.add_exception_handler(HTTPException, ErrorHandlers.exception_handler_http)
|
||||||
|
app.add_exception_handler(Exception, ErrorHandlers.exception_handler_exception)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
uvicorn_config = {
|
||||||
|
"app": "app:app",
|
||||||
|
"host": "0.0.0.0",
|
||||||
|
"port": 41575,
|
||||||
|
"log_level": "info",
|
||||||
|
"reload": True,
|
||||||
|
}
|
||||||
|
uvicorn.Server(uvicorn.Config(**uvicorn_config)).run()
|
||||||
|
|
@ -0,0 +1,64 @@
|
||||||
|
def create_app(routers):
|
||||||
|
from fastapi import FastAPI
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
from fastapi.openapi.utils import get_openapi
|
||||||
|
from fastapi.responses import RedirectResponse
|
||||||
|
|
||||||
|
from api_configs import Config
|
||||||
|
|
||||||
|
api_app = FastAPI(title=str(Config.TITLE), default_response_class=JSONResponse)
|
||||||
|
|
||||||
|
@api_app.get("/", include_in_schema=False, summary=str(Config.DESCRIPTION))
|
||||||
|
async def home():
|
||||||
|
return RedirectResponse(url="/docs")
|
||||||
|
|
||||||
|
for router in list(
|
||||||
|
[
|
||||||
|
getattr(routers, router)
|
||||||
|
for router in routers.__all__
|
||||||
|
if getattr(routers, router)
|
||||||
|
]
|
||||||
|
):
|
||||||
|
api_app.include_router(router)
|
||||||
|
|
||||||
|
openapi_schema = get_openapi(
|
||||||
|
title=Config.TITLE,
|
||||||
|
description=Config.DESCRIPTION,
|
||||||
|
version="0.0.1",
|
||||||
|
routes=api_app.routes,
|
||||||
|
)
|
||||||
|
|
||||||
|
if "components" in openapi_schema:
|
||||||
|
openapi_schema["components"]["securitySchemes"] = {
|
||||||
|
"Bearer Auth": {
|
||||||
|
"type": "apiKey",
|
||||||
|
"in": "header",
|
||||||
|
"name": "evyos-session-key",
|
||||||
|
"description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for route in api_app.routes:
|
||||||
|
path = str(getattr(route, "path"))
|
||||||
|
if route.include_in_schema:
|
||||||
|
methods = [method.lower() for method in getattr(route, "methods")]
|
||||||
|
for method in methods:
|
||||||
|
insecure_paths = Config.INSECURE_PATHS.copy()
|
||||||
|
insecure_paths.remove("/authentication/select")
|
||||||
|
if path not in insecure_paths:
|
||||||
|
openapi_schema["paths"][path][method]["security"] = [
|
||||||
|
{"Bearer Auth": []}
|
||||||
|
]
|
||||||
|
openapi_schema["paths"][path][method]["responses"]["403"] = {
|
||||||
|
"content": {
|
||||||
|
"application/json": {
|
||||||
|
"schema": {
|
||||||
|
"$ref": "#/components/schemas/HTTPValidationError"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"description": "Returned if user is unauthorized.",
|
||||||
|
}
|
||||||
|
|
||||||
|
api_app.openapi_schema = openapi_schema
|
||||||
|
return api_app
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
from .token_middleware import AuthHeaderMiddleware
|
||||||
|
|
@ -0,0 +1,116 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
from time import perf_counter
|
||||||
|
from api_configs import Config
|
||||||
|
from fastapi import status
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware
|
||||||
|
|
||||||
|
|
||||||
|
class MiddlewareLogs:
    """Minimal logging facade for middleware-level events.

    Currently writes to stdout; replace the body of ``log_error`` with a
    real logger without touching any caller.
    """

    @staticmethod
    def log_error(log_message):
        """Emit a single middleware log record.

        Bug fix: the original signature was ``log_error(self, log_message)``
        on a ``@staticmethod``, so the existing one-argument call sites
        (e.g. ``MiddlewareLogs.log_error(str(record))``) raised TypeError.
        The stray ``self`` parameter has been removed.
        """
        print(log_message)
|
||||||
|
|
||||||
|
|
||||||
|
def log_middlewares_exception(endpoint, token_user, message, request):
    """Record an authentication-related middleware event.

    Serializes the endpoint, the (possibly None) token user and the raw
    request headers into one log record and forwards its string form to
    ``MiddlewareLogs.log_error``.
    """
    payload = json.dumps(
        {
            "endpoint": endpoint,
            "user": str(token_user),
            "request": str(request.headers),
        }
    )
    record = {
        "log_type": "Authentication",
        "log_message": message,
        "log_action": "User",
        "log_data": payload,
    }
    MiddlewareLogs.log_error(str(record))
|
||||||
|
|
||||||
|
|
||||||
|
class AuthHeaderMiddleware(BaseHTTPMiddleware):
    """Session-token gate applied to every incoming request.

    Paths listed in ``Config.INSECURE_PATHS`` pass through untouched; any
    other path must carry a session key that resolves to a user, or the
    request is rejected with 401 before it reaches the route handler.
    Timing headers are attached to every response on the way out.
    """

    async def dispatch(self, request, call_next):
        started = perf_counter()
        token_user = None
        endpoint = None

        if check_if_path_secure(request=request, insecure_paths=Config.INSECURE_PATHS):
            url = getattr(request, "url", None)
            endpoint = str(getattr(url, "path", None))
            verdict = check_if_token_is_not_valid(
                request=request, endpoint_name=endpoint
            )
            if verdict:
                auth, token_user = verdict
                if auth != "valid":
                    # `auth` carries a human-readable rejection reason.
                    raise HTTPException(
                        status_code=status.HTTP_401_UNAUTHORIZED, detail=auth
                    )

        response = await call_next(request)
        prepare_response_needs(response, started)
        return response
|
||||||
|
|
||||||
|
|
||||||
|
def prepare_response_needs(response, start_time):
    """Attach timing headers to *response*.

    Sets ``request-starts`` / ``request-ends`` to the raw ``perf_counter()``
    readings and ``elapsed-Time`` to the elapsed duration in milliseconds.

    Bug fix: ``perf_counter()`` returns seconds, but the original code
    labelled the raw difference ``" ms"``; the value is now multiplied by
    1000 so it matches its unit suffix.
    """
    end_time = perf_counter()
    response.headers["request-starts"] = str(start_time)
    response.headers["request-ends"] = str(end_time)
    elapsed_ms = (end_time - start_time) * 1000.0
    response.headers["elapsed-Time"] = f"{elapsed_ms} ms"
|
||||||
|
|
||||||
|
|
||||||
|
def check_if_path_secure(request, insecure_paths) -> bool:
    """Return True when the request path requires authentication.

    A path is "secure" when it is absent from *insecure_paths* — the
    allow-list of endpoints that may be called anonymously.
    """
    url = getattr(request, "url", None)
    current_path = str(getattr(url, "path", None))
    return current_path not in insecure_paths
|
||||||
|
|
||||||
|
|
||||||
|
def check_if_token_is_not_valid(request, endpoint_name):
    """Validate the session token carried by *request*.

    Returns a ``(status, token_user)`` tuple: ``status`` is the literal
    string ``"valid"`` on success, otherwise a human-readable rejection
    message (Turkish), and ``token_user`` is the session object resolved
    from Redis (falsy when the session key is unknown or expired).

    NOTE(review): *endpoint_name* is unused by the live code path; it fed
    the per-endpoint authorization checks that are kept below as
    commented-out legacy logic — confirm whether it should be removed.
    """
    # Imported lazily to avoid a hard Redis dependency at module import time.
    from api_services.redis.functions import get_object_via_access_key

    token_user = get_object_via_access_key(request)
    if not token_user:
        # Session key did not resolve to a user: session expired or invalid.
        return "Session geçerli değil. Lütfen tekrar giriş yapınız.", token_user

    return "valid", token_user


# Disabled legacy authorization logic (duty / per-endpoint permission
# checks), retained for reference until reinstated or removed for good.
# on_token_user: Users = Users.find_one(uu_id=token_user["uu_id"])
# on_token_people: People = on_token_user.person
# #
# # if on_token_people.priority == 78:
# #     return "valid", token_user
#
# if not token_user.get("duty_id", None):
#     return (
#         "Kullanıcı hiçbir yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
#         token_user,
#     )

#
# if endpoint_name in release_endpoint:
#     return "valid", token_user
#
# if company_duty_app := CompanyDutyApp.find_one(
#     endpoint_name=str("".join(endpoint_name.split("/")[:-1])),
#     company_duty_id=int(token_user.get("duty_id")),
# ):
#     if not company_duty_app.is_access_valid(
#         endpoint_ext=endpoint_name.split("/")[-1]
#     ):
#         return (
#             "Kullanıcı yetkili değildir. Supervisor ile iletişime geçiniz.",
#             token_user,
#         )
#     else:
#         return (
#             "Kullanıcıya yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
#             token_user,
#         )

# return "valid", token_user
|
||||||
|
|
@ -0,0 +1,25 @@
|
||||||
|
[project]
|
||||||
|
name = "wag-managment-api-service-version-3"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Wag Python API Service"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"alembic>=1.14.0",
|
||||||
|
"arrow>=1.3.0",
|
||||||
|
"cryptography>=43.0.3",
|
||||||
|
"faker>=30.8.2",
|
||||||
|
"fastapi>=0.115.4",
|
||||||
|
"pandas>=2.2.3",
|
||||||
|
"prometheus-fastapi-instrumentator>=7.0.0",
|
||||||
|
"psycopg2-binary>=2.9.10",
|
||||||
|
"pymongo>=4.10.1",
|
||||||
|
"redis>=5.2.0",
|
||||||
|
"redmail>=0.6.0",
|
||||||
|
"requests>=2.32.3",
|
||||||
|
"rsa>=4.9",
|
||||||
|
"sqlalchemy-mixins>=2.0.5",
|
||||||
|
"textdistance>=4.6.3",
|
||||||
|
"unidecode>=1.3.8",
|
||||||
|
"uvicorn>=0.32.0",
|
||||||
|
]
|
||||||
|
|
@ -0,0 +1,93 @@
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitattributes
|
||||||
|
|
||||||
|
|
||||||
|
# CI
|
||||||
|
.codeclimate.yml
|
||||||
|
.travis.yml
|
||||||
|
.taskcluster.yml
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-compose.yml
|
||||||
|
service_app/Dockerfile
|
||||||
|
.docker
|
||||||
|
.dockerignore
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
**/__pycache__/
|
||||||
|
**/*.py[cod]
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
service_app/env/
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Virtual environment
|
||||||
|
service_app/.env
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# Python mode for VIM
|
||||||
|
.ropeproject
|
||||||
|
**/.ropeproject
|
||||||
|
|
||||||
|
# Vim swap files
|
||||||
|
**/*.swp
|
||||||
|
|
||||||
|
# VS Code
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
test_application/
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -0,0 +1,162 @@
|
||||||
|
# ---> Python
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.idea/
|
||||||
|
.Python
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
service_app/.env
|
||||||
|
.venv
|
||||||
|
service_app/env/
|
||||||
|
venv/
|
||||||
|
service_app/env/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
|
@ -0,0 +1,32 @@
|
||||||
|
FROM python:3.12-slim-bookworm
|
||||||
|
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
|
||||||
|
|
||||||
|
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||||
|
|
||||||
|
COPY ../service_app/requirements.txt .
|
||||||
|
|
||||||
|
RUN uv venv
|
||||||
|
RUN uv pip install -r requirements.txt
|
||||||
|
|
||||||
|
COPY ../service_app ./service_app
|
||||||
|
|
||||||
|
COPY ../databases ./service_app/databases
|
||||||
|
COPY ../api_services ./service_app/api_services
|
||||||
|
COPY ../api_objects ./service_app/api_objects
|
||||||
|
COPY ../api_configs ./service_app/api_configs
|
||||||
|
COPY ../api_events ./service_app/api_events
|
||||||
|
COPY ../api_library ./service_app/api_library
|
||||||
|
COPY ../api_validations ./service_app/api_validations
|
||||||
|
|
||||||
|
WORKDIR /service_app
|
||||||
|
|
||||||
|
CMD ["uv", "run", "app.py"]
|
||||||
|
|
||||||
|
# Old File
|
||||||
|
#FROM python:3.10
|
||||||
|
|
||||||
|
#RUN pip install --upgrade pip
|
||||||
|
#RUN pip install --no-cache-dir --upgrade -r requirements.txt
|
||||||
|
#CMD ["python", "-m", "app"]
|
||||||
|
|
@ -0,0 +1,37 @@
|
||||||
|
import uvicorn
|
||||||
|
import routers
|
||||||
|
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
|
||||||
|
from middlewares.token_middleware import AuthHeaderMiddleware
|
||||||
|
from application.create_file import create_app
|
||||||
|
from api_objects.errors.errors_dictionary import ErrorHandlers
|
||||||
|
from prometheus_fastapi_instrumentator import Instrumentator
|
||||||
|
|
||||||
|
# Build the FastAPI application from every router exported by the
# ``routers`` package and expose Prometheus metrics via the instrumentator.
app = create_app(routers=routers)
Instrumentator().instrument(app=app).expose(app=app)

# CORS is fully open. NOTE(review): "*" origins combined with
# allow_credentials=True is rejected by browsers per the CORS spec and is
# unsafe for production — confirm whether origins should be restricted.
app.add_middleware(
    CORSMiddleware,
    **{
        "allow_origins": ["*"],
        "allow_credentials": True,
        "allow_methods": ["*"],
        "allow_headers": ["*"],
    },
)
# Session-token authentication gate (see middlewares.token_middleware).
app.add_middleware(AuthHeaderMiddleware)

# Route HTTPException and any uncaught Exception through the project's
# unified error formatters.
app.add_exception_handler(HTTPException, ErrorHandlers.exception_handler_http)
app.add_exception_handler(Exception, ErrorHandlers.exception_handler_exception)

if __name__ == "__main__":
    # Development entry point: binds on all interfaces with auto-reload.
    uvicorn_config = {
        "app": "app:app",
        "host": "0.0.0.0",
        "port": 41575,
        "log_level": "info",
        "reload": True,
    }
    uvicorn.Server(uvicorn.Config(**uvicorn_config)).run()
|
||||||
|
|
@ -0,0 +1,64 @@
|
||||||
|
def create_app(routers):
    """Build and configure the FastAPI application.

    Includes every router exported via ``routers.__all__``, then rewrites
    the generated OpenAPI schema so that all secured endpoints advertise
    the ``evyos-session-key`` API-key scheme plus a 403 response.

    Improvements over the original (behavior-compatible):
    - the schema allow-list is computed once instead of once per method;
    - the fragile ``copy()`` + ``remove()`` (which raises ValueError if
      "/authentication/select" is ever absent) is replaced by a filter;
    - the redundant ``list([...])`` wrapper is dropped.
    """
    from fastapi import FastAPI
    from fastapi.responses import JSONResponse
    from fastapi.openapi.utils import get_openapi
    from fastapi.responses import RedirectResponse

    from api_configs import Config

    api_app = FastAPI(title=str(Config.TITLE), default_response_class=JSONResponse)

    @api_app.get("/", include_in_schema=False, summary=str(Config.DESCRIPTION))
    async def home():
        # Bare root redirects to the interactive Swagger UI.
        return RedirectResponse(url="/docs")

    for router in [
        getattr(routers, name) for name in routers.__all__ if getattr(routers, name)
    ]:
        api_app.include_router(router)

    openapi_schema = get_openapi(
        title=Config.TITLE,
        description=Config.DESCRIPTION,
        version="0.0.1",
        routes=api_app.routes,
    )

    if "components" in openapi_schema:
        openapi_schema["components"]["securitySchemes"] = {
            "Bearer Auth": {
                "type": "apiKey",
                "in": "header",
                "name": "evyos-session-key",
                "description": "Enter: **'Bearer <JWT>'**, where JWT is the access token",
            }
        }

    # Paths that stay anonymous in the schema: everything in
    # Config.INSECURE_PATHS except "/authentication/select", which must
    # still advertise the security scheme.
    insecure_paths = [p for p in Config.INSECURE_PATHS if p != "/authentication/select"]

    for route in api_app.routes:
        path = str(getattr(route, "path"))
        if route.include_in_schema and path not in insecure_paths:
            for method in [m.lower() for m in getattr(route, "methods")]:
                operation = openapi_schema["paths"][path][method]
                operation["security"] = [{"Bearer Auth": []}]
                operation["responses"]["403"] = {
                    "content": {
                        "application/json": {
                            "schema": {
                                "$ref": "#/components/schemas/HTTPValidationError"
                            }
                        }
                    },
                    "description": "Returned if user is unauthorized.",
                }

    api_app.openapi_schema = openapi_schema
    return api_app
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
from .token_middleware import AuthHeaderMiddleware
|
||||||
|
|
@ -0,0 +1,116 @@
|
||||||
|
import json
|
||||||
|
|
||||||
|
from time import perf_counter
|
||||||
|
from api_configs import Config
|
||||||
|
from fastapi import status
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
from starlette.middleware.base import BaseHTTPMiddleware
|
||||||
|
|
||||||
|
|
||||||
|
class MiddlewareLogs:
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def log_error(self, log_message):
|
||||||
|
print(log_message)
|
||||||
|
|
||||||
|
|
||||||
|
def log_middlewares_exception(endpoint, token_user, message, request):
|
||||||
|
MiddlewareLogs.log_error(
|
||||||
|
str(
|
||||||
|
{
|
||||||
|
"log_type": "Authentication",
|
||||||
|
"log_message": message,
|
||||||
|
"log_action": "User",
|
||||||
|
"log_data": json.dumps(
|
||||||
|
{
|
||||||
|
"endpoint": endpoint,
|
||||||
|
"user": str(token_user),
|
||||||
|
"request": str(request.headers),
|
||||||
|
}
|
||||||
|
),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthHeaderMiddleware(BaseHTTPMiddleware):
|
||||||
|
|
||||||
|
async def dispatch(self, request, call_next):
|
||||||
|
start_time, token_user, endpoint = perf_counter(), None, None
|
||||||
|
|
||||||
|
if check_if_path_secure(request=request, insecure_paths=Config.INSECURE_PATHS):
|
||||||
|
endpoint = str(getattr(getattr(request, "url", None), "path", None))
|
||||||
|
if un_auth := check_if_token_is_not_valid(
|
||||||
|
request=request, endpoint_name=endpoint
|
||||||
|
):
|
||||||
|
auth, token_user = un_auth
|
||||||
|
if not auth == "valid":
|
||||||
|
# log_middlewares_exception(endpoint, token_user, "auth", request)
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=status.HTTP_401_UNAUTHORIZED, detail=auth
|
||||||
|
)
|
||||||
|
|
||||||
|
response = await call_next(request)
|
||||||
|
prepare_response_needs(response, start_time)
|
||||||
|
# if endpoint and token_user:
|
||||||
|
# log_middlewares_exception(endpoint, token_user, "Request is completed", request)
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
def prepare_response_needs(response, start_time):
|
||||||
|
end_time = perf_counter()
|
||||||
|
response.headers["request-starts"], response.headers["request-ends"] = str(
|
||||||
|
start_time
|
||||||
|
), str(end_time)
|
||||||
|
response.headers["elapsed-Time"] = str(float(end_time) - float(start_time)) + " ms"
|
||||||
|
|
||||||
|
|
||||||
|
def check_if_path_secure(request, insecure_paths) -> bool:
|
||||||
|
return (
|
||||||
|
str(getattr(getattr(request, "url", None), "path", None)) not in insecure_paths
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def check_if_token_is_not_valid(request, endpoint_name):
|
||||||
|
from api_services.redis.functions import get_object_via_access_key
|
||||||
|
|
||||||
|
token_user = get_object_via_access_key(request)
|
||||||
|
if not token_user:
|
||||||
|
return "Session geçerli değil. Lütfen tekrar giriş yapınız.", token_user
|
||||||
|
|
||||||
|
return "valid", token_user
|
||||||
|
|
||||||
|
# on_token_user: Users = Users.find_one(uu_id=token_user["uu_id"])
|
||||||
|
# on_token_people: People = on_token_user.person
|
||||||
|
# #
|
||||||
|
# # if on_token_people.priority == 78:
|
||||||
|
# # return "valid", token_user
|
||||||
|
#
|
||||||
|
# if not token_user.get("duty_id", None):
|
||||||
|
# return (
|
||||||
|
# "Kullanıcı hiçbir yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
|
||||||
|
# token_user,
|
||||||
|
# )
|
||||||
|
|
||||||
|
#
|
||||||
|
# if endpoint_name in release_endpoint:
|
||||||
|
# return "valid", token_user
|
||||||
|
#
|
||||||
|
# if company_duty_app := CompanyDutyApp.find_one(
|
||||||
|
# endpoint_name=str("".join(endpoint_name.split("/")[:-1])),
|
||||||
|
# company_duty_id=int(token_user.get("duty_id")),
|
||||||
|
# ):
|
||||||
|
# if not company_duty_app.is_access_valid(
|
||||||
|
# endpoint_ext=endpoint_name.split("/")[-1]
|
||||||
|
# ):
|
||||||
|
# return (
|
||||||
|
# "Kullanıcı yetkili değildir. Supervisor ile iletişime geçiniz.",
|
||||||
|
# token_user,
|
||||||
|
# )
|
||||||
|
# else:
|
||||||
|
# return (
|
||||||
|
# "Kullanıcıya yetki tanımlanmamıştır. Supervisor ile iletişime geçiniz.",
|
||||||
|
# token_user,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# return "valid", token_user
|
||||||
|
|
@ -0,0 +1,25 @@
|
||||||
|
[project]
|
||||||
|
name = "wag-managment-api-service-version-3"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Wag Python API Service"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"alembic>=1.14.0",
|
||||||
|
"arrow>=1.3.0",
|
||||||
|
"cryptography>=43.0.3",
|
||||||
|
"faker>=30.8.2",
|
||||||
|
"fastapi>=0.115.4",
|
||||||
|
"pandas>=2.2.3",
|
||||||
|
"prometheus-fastapi-instrumentator>=7.0.0",
|
||||||
|
"psycopg2-binary>=2.9.10",
|
||||||
|
"pymongo>=4.10.1",
|
||||||
|
"redis>=5.2.0",
|
||||||
|
"redmail>=0.6.0",
|
||||||
|
"requests>=2.32.3",
|
||||||
|
"rsa>=4.9",
|
||||||
|
"sqlalchemy-mixins>=2.0.5",
|
||||||
|
"textdistance>=4.6.3",
|
||||||
|
"unidecode>=1.3.8",
|
||||||
|
"uvicorn>=0.32.0",
|
||||||
|
]
|
||||||
|
|
@ -0,0 +1,29 @@
|
||||||
|
services:
|
||||||
|
|
||||||
|
wag_management_auth_service:
|
||||||
|
container_name: wag_management_auth_service
|
||||||
|
restart: on-failure
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: AuthService/Dockerfile
|
||||||
|
ports:
|
||||||
|
- "11:41575"
|
||||||
|
|
||||||
|
wag_management_event_service:
|
||||||
|
container_name: wag_management_event_service
|
||||||
|
restart: on-failure
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: EventService/Dockerfile
|
||||||
|
ports:
|
||||||
|
- "12:41575"
|
||||||
|
|
||||||
|
wag_management_validation_service:
|
||||||
|
container_name: wag_management_validation_service
|
||||||
|
restart: on-failure
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ValidationService/Dockerfile
|
||||||
|
ports:
|
||||||
|
- "13:41575"
|
||||||
|
|
||||||
|
|
@ -0,0 +1,29 @@
|
||||||
|
services:
|
||||||
|
|
||||||
|
wag_management_auth_service:
|
||||||
|
container_name: wag_management_auth_service
|
||||||
|
restart: on-failure
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: AuthService/Dockerfile
|
||||||
|
ports:
|
||||||
|
- "11:41575"
|
||||||
|
|
||||||
|
wag_management_event_service:
|
||||||
|
container_name: wag_management_event_service
|
||||||
|
restart: on-failure
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: EventService/Dockerfile
|
||||||
|
ports:
|
||||||
|
- "12:41575"
|
||||||
|
|
||||||
|
wag_management_validation_service:
|
||||||
|
container_name: wag_management_validation_service
|
||||||
|
restart: on-failure
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: ValidationService/Dockerfile
|
||||||
|
ports:
|
||||||
|
- "13:41575"
|
||||||
|
|
||||||
11
README.md
11
README.md
|
|
@ -1,3 +1,6 @@
|
||||||
|
|
||||||
|
Add: uv pip install -r pyproject.toml
|
||||||
|
|
||||||
- Run Store Services
|
- Run Store Services
|
||||||
|
|
||||||
On debian
|
On debian
|
||||||
|
|
@ -15,3 +18,11 @@ http://localhost:11222
|
||||||
postgres_main_commercial:
|
postgres_main_commercial:
|
||||||
http://localhost:5444
|
http://localhost:5444
|
||||||
|
|
||||||
|
make sure
|
||||||
|
set lang and timezone on login
|
||||||
|
BaseMixin || CrudMixin add
|
||||||
|
http_exception = fastapi.HTTPException
|
||||||
|
status = fastapi.status
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,93 @@
|
||||||
|
# Git
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.gitattributes
|
||||||
|
|
||||||
|
|
||||||
|
# CI
|
||||||
|
.codeclimate.yml
|
||||||
|
.travis.yml
|
||||||
|
.taskcluster.yml
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-compose.yml
|
||||||
|
service_app/Dockerfile
|
||||||
|
.docker
|
||||||
|
.dockerignore
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
**/__pycache__/
|
||||||
|
**/*.py[cod]
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
service_app/env/
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Virtual environment
|
||||||
|
service_app/.env
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# Python mode for VIM
|
||||||
|
.ropeproject
|
||||||
|
**/.ropeproject
|
||||||
|
|
||||||
|
# Vim swap files
|
||||||
|
**/*.swp
|
||||||
|
|
||||||
|
# VS Code
|
||||||
|
.vscode/
|
||||||
|
|
||||||
|
test_application/
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -0,0 +1,27 @@
|
||||||
|
# Shared host address for all backing stores (Postgres, Redis, Mongo).
storeHost = "10.10.2.36"


class WagDatabase:
    """Connection settings for the main PostgreSQL database.

    NOTE(review): credentials are hard-coded in source; move them to
    environment variables or a secrets store before production use.
    """

    SQL: str = "postgresql+psycopg2"  # SQLAlchemy dialect+driver prefix
    USERNAME: str = "berkay_wag_user"
    PASSWORD: str = "berkay_wag_user_password"
    HOST: str = storeHost
    PORT: str = "5444"
    DATABASE_NAME: str = "wag_database"
    # Full SQLAlchemy URL assembled from the parts above.
    DATABASE_URL: str = f"{SQL}://{USERNAME}:{PASSWORD}@{HOST}:{PORT}/{DATABASE_NAME}"


class WagRedis:
    """Connection settings for the Redis session/cache store."""

    REDIS_HOST = storeHost
    REDIS_PASSWORD: str = "commercial_redis_password"  # NOTE(review): hard-coded secret
    REDIS_PORT: int = 11222
    REDIS_DB: int = 0


class MongoConfig:
    """Connection settings for MongoDB."""

    password = "mongo_password"  # NOTE(review): hard-coded secret
    username = "mongo_user"
    database_name = "mongo_database"
    host = storeHost
    port = 11777
    # retryWrites and majority write concern are baked into the URI.
    url = f"mongodb://{username}:{password}@{host}:{port}/{database_name}?retryWrites=true&w=majority"
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
from api_library.date_time_actions.date_functions import (
|
||||||
|
client_arrow,
|
||||||
|
system_arrow,
|
||||||
|
DateTimeLocal,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["client_arrow", "system_arrow", "DateTimeLocal"]
|
||||||
|
|
@ -0,0 +1,49 @@
|
||||||
|
import arrow
import calendar


class DateTimeLocal:
    """Thin wrapper around ``arrow`` that pins every result to one timezone.

    Two module-level instances are provided: ``client_arrow`` (client
    timezone, default "GMT+3") and ``system_arrow`` (fixed "GMT+0").
    """

    __SYSTEM__: str = "GMT+0"  # timezone used for non-client instances

    def __init__(self, timezone: str = "GMT+3", is_client: bool = True):
        # Client instances use the supplied zone with any "-" normalized
        # to "+"; system instances always operate in GMT+0.
        # NOTE(review): the "-"→"+" replacement turns e.g. "GMT-3" into
        # "GMT+3" — confirm this offset inversion is intentional.
        self.timezone = timezone.replace("-", "+") if is_client else self.__SYSTEM__

    def find_last_day_of_month(self, date_value):
        """Return the last instant (23:59:59) of *date_value*'s month."""
        anchor = self.get(date_value).date()
        _, final_day = calendar.monthrange(anchor.year, anchor.month)
        last_moment = self.get(anchor.year, anchor.month, final_day, 23, 59, 59)
        return last_moment.to(self.timezone)

    def find_first_day_of_month(self, date_value):
        """Return midnight on the first day of *date_value*'s month."""
        anchor = self.get(date_value).date()
        return self.get(anchor.year, anchor.month, 1).to(self.timezone)

    def get(self, *args):
        """Parse *args* with ``arrow.get`` and shift into this timezone."""
        return arrow.get(*args).to(str(self.timezone))

    def now(self):
        """Return the current moment in this timezone."""
        return arrow.now().to(str(self.timezone))

    def shift(self, date, **kwargs):
        """Return *date* shifted by the given arrow offsets (days=, months=, ...)."""
        return self.get(date).shift(**kwargs)

    def date(self, date):
        """Return the date component of *date* in this timezone."""
        return self.get(date).date()

    def time(self, date):
        """Return the time component of *date* in this timezone."""
        return self.get(date).time()

    def string_date(self, date, splitter: str = "-"):
        """Return the ISO date string with "-" swapped for *splitter*."""
        iso_date = str(self.get(date).date())
        return iso_date.replace("-", splitter)

    def string_time_only(self, date):
        """Return *date* formatted as HH:mm:ss."""
        return self.get(date).format("HH:mm:ss")

    def string_date_only(self, date):
        """Return *date* formatted as YYYY-MM-DD."""
        return self.get(date).format("YYYY-MM-DD")


client_arrow = DateTimeLocal(is_client=True)
system_arrow = DateTimeLocal(is_client=False)
|
||||||
|
|
@ -0,0 +1,17 @@
|
||||||
|
from api_objects.auth.token_objects import (
|
||||||
|
OccupantTokenObject,
|
||||||
|
EmployeeTokenObject,
|
||||||
|
UserType,
|
||||||
|
CompanyToken,
|
||||||
|
OccupantToken,
|
||||||
|
ApplicationToken,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"OccupantTokenObject",
|
||||||
|
"EmployeeTokenObject",
|
||||||
|
"UserType",
|
||||||
|
"CompanyToken",
|
||||||
|
"OccupantToken",
|
||||||
|
"ApplicationToken",
|
||||||
|
]
|
||||||
|
|
@ -0,0 +1,106 @@
|
||||||
|
import enum
|
||||||
|
from typing import Optional, List, Any
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
# Company / Priority / Department / Duty / Employee / Occupant / Module / Endpoint are changeable dynamics
|
||||||
|
|
||||||
|
|
||||||
|
class UserType(enum.Enum):
|
||||||
|
|
||||||
|
employee = 1
|
||||||
|
occupant = 2
|
||||||
|
|
||||||
|
|
||||||
|
class Credentials(BaseModel):
|
||||||
|
|
||||||
|
person_id: int
|
||||||
|
person_name: str
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationToken(BaseModel):
|
||||||
|
# Application Token Object -> is the main object for the user
|
||||||
|
|
||||||
|
domain: Optional[str] = "app.evyos.com.tr"
|
||||||
|
lang: Optional[str] = "TR"
|
||||||
|
timezone: Optional[str] = "Europe/Istanbul"
|
||||||
|
|
||||||
|
user_type: int = UserType.occupant.value
|
||||||
|
credentials: dict = None
|
||||||
|
|
||||||
|
user_uu_id: str
|
||||||
|
user_id: int
|
||||||
|
|
||||||
|
person_id: int
|
||||||
|
person_uu_id: str
|
||||||
|
|
||||||
|
request: Optional[dict] = None # Request Info of Client
|
||||||
|
|
||||||
|
|
||||||
|
class OccupantToken(BaseModel):
|
||||||
|
# Selection of the occupant type for a build part is made by the user
|
||||||
|
|
||||||
|
living_space_id: int # Internal use
|
||||||
|
living_space_uu_id: str # Outer use
|
||||||
|
|
||||||
|
occupant_type_id: int
|
||||||
|
occupant_type_uu_id: str
|
||||||
|
occupant_type: str
|
||||||
|
|
||||||
|
build_id: int
|
||||||
|
build_uuid: str
|
||||||
|
build_part_id: int
|
||||||
|
build_part_uuid: str
|
||||||
|
|
||||||
|
responsible_company_id: Optional[int] = None
|
||||||
|
responsible_company_uuid: Optional[str] = None
|
||||||
|
responsible_employee_id: Optional[int] = None
|
||||||
|
responsible_employee_uuid: Optional[str] = None
|
||||||
|
|
||||||
|
reachable_event_list_id: Optional[list] = None # ID list of reachable modules
|
||||||
|
# reachable_event_list_uu_id: Optional[list] = None # UUID list of reachable modules
|
||||||
|
|
||||||
|
|
||||||
|
class CompanyToken(BaseModel):  # Required Company Object for an employee
    """Company context (company/department/duty/staff/employee) for an employee token."""

    company_id: int
    company_uu_id: str

    department_id: int  # id of the department (single id, not a list)
    department_uu_id: str  # uuid of the department

    duty_id: int
    duty_uu_id: str

    staff_id: int
    staff_uu_id: str

    employee_id: int
    employee_uu_id: str

    bulk_duties_id: int

    reachable_event_list_id: Optional[list] = None  # ID list of reachable modules
    # reachable_event_list_uu_id: Optional[list] = None  # UUID list of reachable modules
|
||||||
|
|
||||||
|
|
||||||
|
class OccupantTokenObject(ApplicationToken):
    """Occupant token: requires selection of the occupant type for a build part."""

    # Fix: was `dict = None`, whose None default contradicts the annotation
    # (rejected by pydantic v2 strict/default validation and type checkers).
    available_occupants: Optional[dict] = None

    selected_occupant: Optional[OccupantToken] = None  # Selected Occupant Type
    available_event: Optional[Any] = None
|
||||||
|
|
||||||
|
|
||||||
|
class EmployeeTokenObject(ApplicationToken):
    """Employee token over the full hierarchy:
    Employee[staff_id] -> Staff -> Duty -> Department -> Company.
    """

    companies_id_list: List[int]  # ids of companies the employee belongs to
    companies_uu_id_list: List[str]  # uuids of the same companies

    duty_id_list: List[int]  # ids of the employee's duties
    duty_uu_id_list: List[str]  # uuids of the same duties

    selected_company: Optional[CompanyToken] = None  # Selected Company Object
    available_event: Optional[Any] = None
|
||||||
|
|
@ -0,0 +1,85 @@
|
||||||
|
from .errors_dictionary import ErrorMessages
|
||||||
|
|
||||||
|
|
||||||
|
class AlchemyError:
    """Maps HTTP status codes and CRUD outcomes to the shared error payload.

    `retrieve_error_needs` builds the dict consumed by the API error
    handlers; message localisation is delegated to ErrorMessages.
    """

    # HTTP status code (string key) -> symbolic constant name.
    ERRORS_DICT = {
        "100": "HTTP_100_CONTINUE",
        "101": "HTTP_101_SWITCHING_PROTOCOLS",
        "102": "HTTP_102_PROCESSING",
        "103": "HTTP_103_EARLY_HINTS",
        "200": "HTTP_200_OK",
        "201": "HTTP_201_CREATED",
        "202": "HTTP_202_ACCEPTED",
        "203": "HTTP_203_NON_AUTHORITATIVE_INFORMATION",
        "204": "HTTP_204_NO_CONTENT",
        "205": "HTTP_205_RESET_CONTENT",
        "206": "HTTP_206_PARTIAL_CONTENT",
        "207": "HTTP_207_MULTI_STATUS",
        "208": "HTTP_208_ALREADY_REPORTED",
        "226": "HTTP_226_IM_USED",
        "300": "HTTP_300_MULTIPLE_CHOICES",
        "301": "HTTP_301_MOVED_PERMANENTLY",
        "302": "HTTP_302_FOUND",
        "303": "HTTP_303_SEE_OTHER",
        "304": "HTTP_304_NOT_MODIFIED",
        "305": "HTTP_305_USE_PROXY",
        "306": "HTTP_306_RESERVED",
        "307": "HTTP_307_TEMPORARY_REDIRECT",
        "308": "HTTP_308_PERMANENT_REDIRECT",
        "400": "HTTP_400_BAD_REQUEST",
        "401": "HTTP_401_UNAUTHORIZED",
        "402": "HTTP_402_PAYMENT_REQUIRED",
        "403": "HTTP_403_FORBIDDEN",
        "404": "HTTP_404_NOT_FOUND",
        "405": "HTTP_405_METHOD_NOT_ALLOWED",
        "406": "HTTP_406_NOT_ACCEPTABLE",
        "407": "HTTP_407_PROXY_AUTHENTICATION_REQUIRED",
        "408": "HTTP_408_REQUEST_TIMEOUT",
        "409": "HTTP_409_CONFLICT",
        "410": "HTTP_410_GONE",
        "411": "HTTP_411_LENGTH_REQUIRED",
        "412": "HTTP_412_PRECONDITION_FAILED",
        "413": "HTTP_413_REQUEST_ENTITY_TOO_LARGE",
        "414": "HTTP_414_REQUEST_URI_TOO_LONG",
        "415": "HTTP_415_UNSUPPORTED_MEDIA_TYPE",
        "416": "HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE",
        "417": "HTTP_417_EXPECTATION_FAILED",
        "418": "HTTP_418_IM_A_TEAPOT",
        "421": "HTTP_421_MISDIRECTED_REQUEST",
        "422": "HTTP_422_UNPROCESSABLE_ENTITY",
        "423": "HTTP_423_LOCKED",
        "424": "HTTP_424_FAILED_DEPENDENCY",
        "426": "HTTP_426_UPGRADE_REQUIRED",
        "428": "HTTP_428_PRECONDITION_REQUIRED",
        "429": "HTTP_429_TOO_MANY_REQUESTS",
        "431": "HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE",
        "451": "HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS",
        "500": "HTTP_500_INTERNAL_SERVER_ERROR",
    }
    # CRUD outcome key -> canonical error-case label used in responses.
    ERRORS_KEYS = {
        "delete": "DeletedRecord",
        "update": "UpdatedRecord",
        "create": "CreatedRecord",
        "list": "ListedRecords",
        "not_found": "RecordNotFound",
        "already_exist": "AlreadyExists",
        "not_deleted": "RecordNotDeleted",
        "not_updated": "RecordNotUpdated",
        "not_created": "RecordNotCreated",
        "not_listed": "RecordsNotListed",
        "not_confirm": "IsNotConfirmed",
    }

    def __init__(self, lang):
        # Language code forwarded to ErrorMessages for localisation.
        self.lang = lang

    def retrieve_error_needs(self, data, status_code, error_case, message_key):
        """Return the standard error payload dict.

        Fix: unknown status codes (e.g. "502"/"503", absent from
        ERRORS_DICT, which stops at 500) and unknown error cases used to
        raise KeyError inside the error path; they now fall back to a
        generated constant name / the raw case key. Int status codes are
        also accepted by stringifying the lookup key.
        """
        code_key = str(status_code)
        return dict(
            status_code=self.ERRORS_DICT.get(code_key, f"HTTP_{code_key}"),
            error_case=self.ERRORS_KEYS.get(error_case, error_case),
            data=data,
            message=ErrorMessages.get_message(message_key, self.lang),
        )


# Module-level instance; lang is empty here — presumably set/overridden by
# callers before use, TODO confirm.
alchemy_error = AlchemyError(lang="")
|
||||||
|
|
@ -0,0 +1,44 @@
|
||||||
|
from json import loads
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorMessages:
    """Registry of localized error messages, keyed by language then message key."""

    # Starts empty; presumably populated elsewhere (the module imports
    # json.loads, suggesting a JSON catalogue) — TODO confirm.
    __messages__ = {}

    @classmethod
    def get_message(cls, message_key, lang):
        """Return the localized text for *message_key* in *lang*.

        Fix: an unknown language or key used to raise KeyError (guaranteed
        while the registry is empty, e.g. for AlchemyError(lang="")); the
        error path should degrade gracefully, so fall back to the key.
        """
        return cls.__messages__.get(lang, {}).get(message_key, message_key)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorHandlers:
    """Translates exceptions into JSON responses.

    FastAPI collaborators are injected so this module stays framework-free.
    """

    def __init__(self, requests, exceptions, response_model, status):
        self.requests = requests  # from fastapi.requests import Request
        self.exceptions = exceptions  # from fastapi.exceptions import HTTPException
        self.response_model = response_model  # from fastapi.responses import JSONResponse
        self.status = status  # from fastapi import status

    def exception_handler_http(self, request, exc):
        """Render an HTTPException whose `detail` is expected to be JSON text.

        Any parse/shape failure falls back to a best-effort payload built
        from the raw detail.
        """
        raw_detail = getattr(exc, "detail", None)
        try:
            parsed = loads(str(raw_detail))
            body = {
                "Data": parsed.get("data", {}),
                "Error": parsed.get("error_case", "UNKNOWN"),
                "Message": parsed.get(
                    "message", "An error occurred while processing the request"
                ),
            }
            return self.response_model(status_code=exc.status_code, content=body)
        except Exception as parse_error:
            # Detail was not a JSON object — deliberate catch-all fallback.
            fallback = {
                "Error": str(raw_detail),
                "Message": str(parse_error),
                "Data": {},
            }
            return self.response_model(status_code=exc.status_code, content=fallback)

    def exception_handler_exception(self, request, exc):
        """Catch-all handler: wrap any unhandled exception as a 417 response."""
        return self.response_model(
            status_code=self.status.HTTP_417_EXPECTATION_FAILED,
            content={"message": str(exc)},
        )
|
||||||
|
|
@ -0,0 +1,763 @@
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
from sqlalchemy import (
|
||||||
|
String,
|
||||||
|
Integer,
|
||||||
|
ForeignKey,
|
||||||
|
Index,
|
||||||
|
SmallInteger,
|
||||||
|
Boolean,
|
||||||
|
TIMESTAMP,
|
||||||
|
Numeric,
|
||||||
|
UUID,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AccountBooks(CrudCollection):
    """Account book (ledger) header owned by a company, optionally per branch."""

    __tablename__ = "account_books"
    __exclude__fields__ = []

    country: Mapped[str] = mapped_column(String, nullable=False)
    # Annotation corrected from Mapped[str]: the column is SmallInteger.
    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # Owning company and optional branch; both reference companies.id.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    branch_uu_id: Mapped[str] = mapped_column(String, comment="Branch UU ID")

    # NOTE(review): relationships to Companies / AccountMaster / AccountDetail
    # are currently disabled (commented out) in the original source.

    __table_args__ = (
        # "expiry_starts" is not declared in this class — presumably a
        # CrudCollection mixin column; confirm before touching this index.
        Index("account_companies_book_ndx_00", company_id, "expiry_starts"),
        {"comment": "Account Book Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class AccountCodes(CrudCollection):
    """Chart-of-accounts entry tied to a company, customer, and person."""

    __tablename__ = "account_codes"
    __exclude__fields__ = []

    account_code: Mapped[str] = mapped_column(
        String(48), nullable=False, comment="Account Code"
    )
    comment_line: Mapped[str] = mapped_column(
        String(128), nullable=False, comment="Comment Line"
    )

    is_receive_or_debit: Mapped[bool] = mapped_column(Boolean)
    product_id: Mapped[int] = mapped_column(Integer, server_default="0")
    nvi_id: Mapped[str] = mapped_column(String(48), server_default="")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    # Separator used when joining segmented codes (see AccountCodeParser).
    account_code_seperator: Mapped[str] = mapped_column(String(1), server_default=".")

    system_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    # Annotation corrected from Mapped[bool]: stored as SmallInteger 0/1.
    locked: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # Owning company, customer (also a companies row), and person.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Company UU ID"
    )
    customer_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    customer_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Customer UU ID"
    )
    person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
    person_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Person UU ID"
    )

    # NOTE(review): relationships to Companies / People / AccountDetail /
    # AccountCodeParser are currently disabled (commented out) in the
    # original source.
|
||||||
|
|
||||||
|
|
||||||
|
class AccountCodeParser(CrudCollection):
    """Segmented (up to 6-part) breakdown of one AccountCodes row."""

    __tablename__ = "account_code_parser"
    __exclude__fields__ = []

    # Code segments in join order; 1-3 are required, 4-6 optional.
    account_code_1: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
    account_code_2: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
    account_code_3: Mapped[str] = mapped_column(String, nullable=False, comment="Order")
    account_code_4: Mapped[str] = mapped_column(String, server_default="")
    account_code_5: Mapped[str] = mapped_column(String, server_default="")
    account_code_6: Mapped[str] = mapped_column(String, server_default="")

    account_code_id: Mapped[int] = mapped_column(
        ForeignKey("account_codes.id"), nullable=False
    )
    account_code_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Code UU ID"
    )

    # NOTE(review): the `account_codes` relationship is currently disabled
    # (commented out) in the original source, yet `get_account_code` below
    # dereferences self.account_codes — accessing the property will raise
    # AttributeError until that relationship is restored. Confirm.

    __table_args__ = (
        Index("_account_code_parser_ndx_00", account_code_id),
        {"comment": "Account Code Parser Information"},
    )

    @property
    def get_account_code(self):
        # Join the non-empty segments with the parent row's separator char.
        return f"{self.account_codes.account_code_seperator}".join(
            [
                getattr(self, f"account_code_{i}")
                for i in range(1, 7)
                if getattr(self, f"account_code_{i}")
            ]
        )
|
||||||
|
|
||||||
|
|
||||||
|
class AccountMaster(CrudCollection):
    """
    AccountMaster: accounting voucher (fiche) header; one row per document,
    detailed by AccountDetail lines. Based on CrudCollection via session.
    """

    __tablename__ = "account_master"
    __exclude__fields__ = []

    doc_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
    )
    plug_type: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type")
    plug_number: Mapped[int] = mapped_column(
        Integer, nullable=False, comment="Plug Number"
    )

    special_code: Mapped[str] = mapped_column(String(12), server_default="")
    authorization_code: Mapped[str] = mapped_column(String(12), server_default="")

    doc_code: Mapped[str] = mapped_column(String(12), server_default="")
    doc_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    comment_line1: Mapped[str] = mapped_column(String, server_default="")
    comment_line2: Mapped[str] = mapped_column(String, server_default="")
    comment_line3: Mapped[str] = mapped_column(String, server_default="")
    comment_line4: Mapped[str] = mapped_column(String, server_default="")
    comment_line5: Mapped[str] = mapped_column(String, server_default="")
    comment_line6: Mapped[str] = mapped_column(String, server_default="")
    project_code: Mapped[str] = mapped_column(String(12), server_default="")
    module_no: Mapped[str] = mapped_column(String, server_default="")
    journal_no: Mapped[int] = mapped_column(Integer, server_default="0")

    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    canceled: Mapped[bool] = mapped_column(Boolean, server_default="0")
    print_count: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    # Debit/credit totals plus four auxiliary total pairs (semantics of the
    # numbered pairs are not visible here — confirm against business docs).
    total_active: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_1: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_2: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_3: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_active_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    total_passive_4: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    data_center_id: Mapped[str] = mapped_column(String, server_default="")
    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")

    # Parent account book plus optional project item and department links.
    account_header_id: Mapped[int] = mapped_column(
        ForeignKey("account_books.id"), nullable=False
    )
    account_header_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Header UU ID"
    )
    project_item_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_projects.id")
    )
    project_item_uu_id: Mapped[str] = mapped_column(
        String, comment="Project Item UU ID"
    )
    department_id: Mapped[int] = mapped_column(ForeignKey("departments.id"))
    department_uu_id: Mapped[str] = mapped_column(String, comment="Department UU ID")

    # NOTE(review): relationships to AccountBooks / BuildDecisionBookProjects /
    # AccountDetail are currently disabled (commented out) in the original source.

    __table_args__ = (
        Index("_account_master_ndx_00", doc_date, account_header_id),
        {"comment": "Account Master Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class AccountDetail(CrudCollection):
    """
    AccountDetail: one voucher line of an AccountMaster document, pointing at
    an AccountCodes row. Based on CrudCollection via session.
    """

    __tablename__ = "account_detail"
    __exclude__fields__ = []
    # (column, enum dropdown name, default code) — consumed by CrudCollection.
    __enum_list__ = [("plug_type", "AccountingReceiptTypes", "M")]

    doc_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Document Date"
    )
    line_no: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Line Number"
    )
    receive_debit: Mapped[str] = mapped_column(
        String(1), nullable=False, comment="Receive Debit"
    )
    debit: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Debit"
    )

    department: Mapped[str] = mapped_column(String(24), server_default="")
    special_code: Mapped[str] = mapped_column(String(12), server_default="")
    account_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    account_fiche_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    center_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    general_code: Mapped[str] = mapped_column(String(32), server_default="")
    credit: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    currency_type: Mapped[str] = mapped_column(String(4), server_default="TL")
    exchange_rate: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    debit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    credit_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    discount_cur: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    amount: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    # Annotation corrected from Mapped[float]: the column is String(32).
    cross_account_code: Mapped[str] = mapped_column(String(32), server_default="")
    inf_index: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    not_inflated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    not_calculated: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    comment_line1: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line2: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line3: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line4: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line5: Mapped[str] = mapped_column(String(64), server_default="")
    comment_line6: Mapped[str] = mapped_column(String(64), server_default="")
    owner_acc_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    from_where: Mapped[int] = mapped_column(Integer, server_default="0")
    orj_eid: Mapped[int] = mapped_column(Integer, server_default="0")
    canceled: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    cross_ref: Mapped[int] = mapped_column(Integer, server_default="0")
    data_center_id: Mapped[str] = mapped_column(String, server_default="")
    # Annotation corrected from Mapped[str]: the column is Integer.
    data_center_rec_num: Mapped[int] = mapped_column(Integer, server_default="0")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    # NOTE(review): plug_type_id is nullable yet plug_type_uu_id is
    # nullable=False — looks inconsistent; confirm intended contract.
    plug_type_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    # Mapped[str] annotation added; the column was declared without one.
    plug_type_uu_id: Mapped[str] = mapped_column(String, nullable=False, comment="Plug Type UU ID")
    account_header_id: Mapped[int] = mapped_column(
        ForeignKey("account_books.id"), nullable=False
    )
    account_header_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Header UU ID"
    )
    account_code_id: Mapped[int] = mapped_column(
        ForeignKey("account_codes.id"), nullable=False
    )
    account_code_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Code UU ID"
    )
    account_master_id: Mapped[int] = mapped_column(
        ForeignKey("account_master.id"), nullable=False
    )
    account_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Account Master UU ID"
    )
    project_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book_projects.id")
    )
    project_uu_id: Mapped[str] = mapped_column(String, comment="Project UU ID")

    # NOTE(review): relationships to AccountBooks / AccountCodes /
    # AccountMaster / BuildDecisionBookProjects and the decision-book
    # payment/budget back-references are currently disabled (commented out)
    # in the original source.

    __table_args__ = (
        Index(
            "_account_detail_ndx_00",
            account_master_id,
            doc_date,
            line_no,
            account_header_id,
            unique=True,
        ),
        {"comment": "Account Detail Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class AccountRecords(CrudCollection):
    """
    One imported bank-statement line, matched to a building and its
    bookkeeping objects.

    Responsibility notes (translated from the original Turkish):
    - build_decision_book_id: decision book responsible for this record
    - send_company_id / send_person_id: sending company / sending person
    - customer_id: responsible user, company_id: responsible company
    """

    __tablename__ = "account_records"
    __exclude__fields__ = []
    # (column, enum class, default key) tuples -- presumably resolved to
    # api_enum_dropdown rows by CrudCollection; confirm in core_mixin.
    __enum_list__ = [
        ("receive_debit", "DebitTypes", "D"),
        ("budget_type", "BudgetType", "B"),
    ]

    # --- raw statement fields imported from the bank -------------------
    iban: Mapped[str] = mapped_column(
        String(64), nullable=False, comment="IBAN Number of Bank"
    )
    bank_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Date"
    )

    currency_value: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Currency Value"
    )
    bank_balance: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Bank Balance"
    )
    currency: Mapped[str] = mapped_column(
        String(5), nullable=False, comment="Unit of Currency"
    )
    additional_balance: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Additional Balance"
    )
    channel_branch: Mapped[str] = mapped_column(
        String(120), nullable=False, comment="Branch Bank"
    )
    process_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Process Type Name"
    )
    process_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Process Type"
    )
    process_comment: Mapped[str] = mapped_column(
        String, nullable=False, comment="Transaction Record Comment"
    )
    process_garbage: Mapped[str] = mapped_column(
        String, nullable=True, comment="Transaction Record Garbage"
    )
    bank_reference_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Bank Reference Code"
    )

    # --- matching / bookkeeping state ----------------------------------
    add_comment_note: Mapped[str] = mapped_column(String, server_default="")
    is_receipt_mail_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
    # annotation added for consistency with sibling columns
    found_from: Mapped[str] = mapped_column(String, server_default="")
    similarity: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    remainder_balance: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")

    # denormalized components of bank_date (year/month/week/day),
    # presumably for reporting queries -- confirm against the writers
    bank_date_y: Mapped[int] = mapped_column(Integer)
    bank_date_m: Mapped[int] = mapped_column(SmallInteger)
    bank_date_w: Mapped[int] = mapped_column(SmallInteger)
    bank_date_d: Mapped[int] = mapped_column(SmallInteger)

    approving_accounting_record: Mapped[bool] = mapped_column(
        Boolean, server_default="0"
    )
    # epoch-like sentinel default marks "not receipted yet"
    accounting_receipt_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01 00:00:00"
    )
    accounting_receipt_number: Mapped[int] = mapped_column(Integer, server_default="0")
    status_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    approved_record: Mapped[bool] = mapped_column(Boolean, server_default="0")
    import_file_name: Mapped[str] = mapped_column(
        String, nullable=True, comment="XLS Key"
    )

    # --- enum foreign keys ---------------------------------------------
    receive_debit: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    receive_debit_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Debit UU ID"
    )
    budget_type: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    budget_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Type UU ID"
    )

    # --- responsible / sending parties ---------------------------------
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )
    send_company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=True
    )
    # annotation added for consistency with sibling columns
    send_company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Send Company UU ID"
    )

    send_person_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    send_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Send Person UU ID"
    )
    approving_accounting_person: Mapped[int] = mapped_column(
        ForeignKey("people.id"), nullable=True
    )
    approving_accounting_person_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Approving Accounting Person UU ID"
    )

    # --- building objects the record was matched to --------------------
    living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"), nullable=True
    )
    living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Living Space UU ID"
    )
    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    # annotation added for consistency with sibling columns
    customer_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Customer UU ID"
    )

    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"), nullable=True)
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build UU ID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    # NOTE: the commented-out relationship declarations and the draft
    # payment_budget_record_close() helper that used to live here were
    # removed as dead code; recover them from VCS history if needed.

    # NOTE(review): the index names say "_budget_records_" although the
    # table is account_records -- kept unchanged because renaming an
    # index is a schema migration, not a code edit.
    __table_args__ = (
        Index("_budget_records_ndx_00", is_receipt_mail_send, bank_date),
        Index(
            "_budget_records_ndx_01",
            iban,
            bank_date,
            bank_reference_code,
            bank_balance,
            unique=True,
        ),
        Index("_budget_records_ndx_02", status_id, bank_date),
        {
            "comment": "Bank Records that are related to building and financial transactions"
        },
    )
|
|
||||||
|
|
||||||
|
# class AccountRecordDecisionPaymentClosed(CrudCollection):
|
||||||
|
#
|
||||||
|
# __tablename__ = "account_record_decision_payment_closed"
|
||||||
|
# __exclude__fields__ = []
|
||||||
|
#
|
||||||
|
# arc_currency: Mapped[str] = mapped_column(
|
||||||
|
# String(5), nullable=False, comment="Unit of Currency"
|
||||||
|
# )
|
||||||
|
# arc_processing_time: Mapped[TIMESTAMP] = mapped_column(
|
||||||
|
# TIMESTAMP(timezone=True), nullable=False, comment="Processing Time"
|
||||||
|
# )
|
||||||
|
# arc_currency_value: Mapped[float] = mapped_column(
|
||||||
|
# Numeric(20, 6), nullable=False, comment="Currency Value"
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# decision_book_budgets_id: Mapped[int] = mapped_column(
|
||||||
|
# ForeignKey("decision_book_budgets.id"), nullable=True
|
||||||
|
# )
|
||||||
|
# decision_book_budgets_uu_id: Mapped[str] = mapped_column(
|
||||||
|
# String, nullable=True, comment="Budget UUID"
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# build_decision_book_payment_id: Mapped[int] = mapped_column(
|
||||||
|
# ForeignKey("build_decision_book_payments.id")
|
||||||
|
# )
|
||||||
|
# build_decision_book_payment_uu_id: Mapped[str] = mapped_column(
|
||||||
|
# String, nullable=True, comment="Build Decision Book Payment UU ID"
|
||||||
|
# )
|
||||||
|
# account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
|
||||||
|
# account_records_uu_id: Mapped[str] = mapped_column(
|
||||||
|
# String, nullable=True, comment="Account Record UU ID"
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# __table_args__ = (
|
||||||
|
# Index(
|
||||||
|
# "_account_record_decision_payment_closed_ndx_00",
|
||||||
|
# account_records_id,
|
||||||
|
# build_decision_book_payment_id,
|
||||||
|
# arc_processing_time,
|
||||||
|
# ),
|
||||||
|
# Index(
|
||||||
|
# "_account_record_decision_payment_closed_ndx_01",
|
||||||
|
# build_decision_book_payment_id,
|
||||||
|
# account_records_id,
|
||||||
|
# arc_processing_time,
|
||||||
|
# ),
|
||||||
|
# {"comment": "Account Record Decision Payment Closed Information"},
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
class AccountRecordExchanges(CrudCollection):
    """
    Exchange-rate snapshot for a single account record: the record's
    value converted into USD/EUR/GBP/CNY at the rates in effect when the
    record was processed.
    """

    __tablename__ = "account_record_exchanges"
    __exclude__fields__ = []

    are_currency: Mapped[str] = mapped_column(
        String(5), nullable=False, comment="Unit of Currency"
    )
    # rate applied to the source record; defaults to 1 (same currency)
    are_exchange_rate: Mapped[float] = mapped_column(
        Numeric(18, 6), nullable=False, server_default="1"
    )
    usd_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the usd exchange rate with the current value result.",
    )
    eur_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the eur exchange rate with the current value result.",
    )
    gbp_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        # fixed typo in the column comment: "gpd" -> "gbp"
        comment="It will be written by multiplying the gbp exchange rate with the current value result.",
    )
    cny_exchange_rate_value: Mapped[float] = mapped_column(
        Numeric(18, 6),
        nullable=True,
        server_default="0",
        comment="It will be written by multiplying the cny exchange rate with the current value result.",
    )

    account_records_id: Mapped[int] = mapped_column(ForeignKey("account_records.id"))
    account_records_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Account Record UU ID"
    )

    __table_args__ = (
        Index("_account_record_exchanges_ndx_00", account_records_id),
        {"comment": "Account Record Exchanges Information"},
    )
|
||||||
|
|
@ -0,0 +1,113 @@
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
from sqlalchemy import String, ForeignKey, Index, TIMESTAMP, SmallInteger, Identity
|
||||||
|
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
|
||||||
|
class BuildIbans(CrudCollection):
    """
    IBAN registered for a building, used to tie incoming bank
    transactions to that building.

    Based on declarative_base and BaseMixin via session.
    (Original docstring said "BuildParts class" -- copy-paste artifact.)
    """

    __tablename__ = "build_ibans"
    __exclude__fields__ = []

    iban: Mapped[str] = mapped_column(
        String(40), server_default="", nullable=False, comment="IBAN number"
    )
    start_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False, comment="Bank Transaction Start Date"
    )

    # far-future sentinel default -- presumably marks an IBAN that is
    # still active; confirm against the code that closes IBANs
    stop_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="2900-01-01 00:00:00"
    )
    bank_code: Mapped[str] = mapped_column(String(24), server_default="TR0000000000000")
    # free-form note; "????" placeholder default
    xcomment: Mapped[str] = mapped_column(String(64), server_default="????")

    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=True, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Building UUID", index=True
    )
    # building: Mapped["Build"] = relationship(
    #     "Build", back_populates="build_ibans", foreign_keys=[build_id]
    # )

    # an IBAN may appear only once per start_date
    __table_args__ = (
        Index("_build_ibans_ndx_01", iban, start_date, unique=True),
        {"comment": "IBANs related to money transactions due to building objects"},
    )
|
||||||
|
|
||||||
|
# @property
|
||||||
|
# def enums(self):
|
||||||
|
# return_dict = {}
|
||||||
|
# for key, enum in self.__enums_list__.items():
|
||||||
|
# for enum_item in EnumDropdown.filter_by(enum_class=enum):
|
||||||
|
# return_dict[key] = {
|
||||||
|
# enum_item.get_dict(include=["key", "value", "description"])
|
||||||
|
# }
|
||||||
|
# return return_dict
|
||||||
|
|
||||||
|
|
||||||
|
class BuildIbanDescription(CrudCollection):
    """
    Search keywords attached to an IBAN, used to match free-text bank
    statement descriptions to a customer / company / building part.

    Based on declarative_base and CrudCollection via session.
    (Original docstring said "SearchComments class" -- copy-paste
    artifact.)
    """

    __tablename__ = "build_iban_description"
    __exclude__fields__ = []

    iban: Mapped[str] = mapped_column(String, nullable=False, comment="IBAN Number")
    # groups alternative keywords that refer to the same target
    group_id: Mapped[int] = mapped_column(
        SmallInteger, nullable=False, comment="Group ID"
    )
    search_word: Mapped[str] = mapped_column(
        String, nullable=False, comment="Search Word", index=True
    )

    # decision_book_project_id: Mapped[int] = mapped_column(
    #     ForeignKey("build_decision_book_projects.id")
    # )
    # decision_book_project_uu_id: Mapped[str] = mapped_column(
    #     String, nullable=False, comment="Decision Book Project UUID"
    # )
    customer_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    customer_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Customer UUID"
    )
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UUID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UUID"
    )

    # decision_book_project: Mapped["BuildDecisionBookProjects"] = relationship(
    #     "BuildDecisionBookProjects",
    #     back_populates="search_iban_description",
    #     foreign_keys=[decision_book_project_id],
    # )
    # customer: Mapped["People"] = relationship(
    #     "People", back_populates="search_iban_description", foreign_keys=[customer_id]
    # )
    # company: Mapped["Companies"] = relationship(
    #     "Company", back_populates="search_iban_description", foreign_keys=[company_id]
    # )
    # parts: Mapped["BuildParts"] = relationship(
    #     "BuildParts",
    #     back_populates="search_iban_description",
    #     foreign_keys=[build_parts_id],
    # )

    # each keyword may appear only once per (iban, group)
    __table_args__ = (
        Index(
            "_search_iban_description_ndx_00", iban, search_word, group_id, unique=True
        ),
        {"comment": "Search Iban Description Information"},
    )
|
||||||
|
|
@ -0,0 +1,124 @@
|
||||||
|
import random
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
from sqlalchemy import String
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
|
from cryptography.fernet import Fernet, MultiFernet
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
|
||||||
|
class CrypterEngine(CrudCollection):
    """Fernet key store plus helpers that encrypt/decrypt ORM rows.

    Each row persists a pair of Fernet keys (stored base64-text, i.e.
    ``str``). Encryption picks a random key pair younger than ~29 days
    and stamps the encrypted object with that pair's ``uu_id`` (as
    ``cryp_uu_id``); decryption resolves the pair through the same
    pointer on the row being decrypted.
    """

    __tablename__ = "crypter_engine"
    __table_args__ = ()
    # class-level accumulators returned by the *_list helpers
    encrypt_list = []
    decrypt_list = []
    keys_error = "Unable to retrieve encrypt keys"
    alchemy_error = "Alchemy object is empty"

    key_first: Mapped[str] = mapped_column(String, nullable=False)
    key_second: Mapped[str] = mapped_column(String, nullable=False)

    @classmethod
    def get_valid_keys(cls, row=None):
        """Return ``(key_first, key_second, key_row_uuid)`` for *row*.

        When *row* carries a ``cryp_uu_id``, the matching stored pair is
        returned (decrypt path); otherwise a random recent pair is
        picked (encrypt path). Keys are returned as ``bytes`` ready for
        ``Fernet``. Returns ``(None, None, None)`` when no usable pair
        exists.
        """
        # top up the pool when no key row is younger than 29 days
        if not cls.filter_all(cls.created_at > datetime.now() - timedelta(days=29)).get(
            1
        ):
            cls.create_encrypt_keys(count=100)
        if decrypt_identifier := getattr(row, "cryp_uu_id", None):
            if decrypt_row := cls.find_one(uu_id=str(decrypt_identifier)):
                # keys are persisted as str, so encode() them for Fernet
                # (the original called .decode() here, which raises
                # AttributeError on str)
                return (
                    decrypt_row.key_first.encode(),
                    decrypt_row.key_second.encode(),
                    decrypt_row.uu_id,
                )
        if encrypt_rows := cls.filter_all(
            cls.created_at > datetime.now() - timedelta(days=29)
        ).data:
            encrypt_row = random.choice(encrypt_rows)
            # bugfix: uu_id must come from the chosen row, not from
            # encrypt_rows (the whole result list)
            return (
                encrypt_row.key_first.encode(),
                encrypt_row.key_second.encode(),
                encrypt_row.uu_id,
            )
        return None, None, None

    @classmethod
    def create_encrypt_keys(cls, count: int):
        """Generate *count* fresh Fernet key pairs and persist them."""
        for _ in range(count):
            key_first = Fernet.generate_key()
            key_second = Fernet.generate_key()
            cls.find_or_create(
                key_first=key_first.decode(), key_second=key_second.decode()
            )

    @classmethod
    def raise_exception(cls, message=None):
        """Raise ``Exception`` with *message* (default: ``keys_error``)."""
        raise Exception(message if message else cls.keys_error)

    # --- internal helpers ---------------------------------------------

    @classmethod
    def _fernet_for(cls, row=None):
        """Return ``(MultiFernet, cryp_uu_id)``; raise when keys are missing."""
        key_first, key_second, cryp_uu_id = cls.get_valid_keys(row=row)
        # guard BEFORE constructing the Fernets: Fernet(None) raises its
        # own error and would mask the intended keys_error message
        if not key_first or not key_second:
            cls.raise_exception()
        return MultiFernet([Fernet(key_first), Fernet(key_second)]), cryp_uu_id

    @staticmethod
    def _as_bytes(value):
        """Coerce *value* to bytes -- Fernet tokens must be bytes."""
        return value if isinstance(value, bytes) else str(value).encode()

    @classmethod
    def _dict_of(cls, alchemy_object):
        """Return the object's dict form or raise ``alchemy_error``."""
        alchemy_dict = alchemy_object.get_dict() if alchemy_object else None
        if not alchemy_dict:
            cls.raise_exception(cls.alchemy_error)
        return alchemy_dict

    # --- public API ---------------------------------------------------

    @classmethod
    def encrypt_given_alchemy_list(cls, alchemy_object_list: list):
        """Encrypt every object in *alchemy_object_list*; return updated rows.

        Bugfix: the accumulator is reset here, once per call. The
        original cleared it inside get_valid_keys -- i.e. on every loop
        iteration -- so only the last object was ever returned.
        """
        cls.encrypt_list = []
        for alchemy_object in alchemy_object_list:
            cls.encrypt_list.append(cls.encrypt_given_alchemy_object(alchemy_object))
        return cls.encrypt_list

    @classmethod
    def encrypt_given_alchemy_object(cls, alchemy_object_object):
        """Encrypt the object's ``__encrypt_list__`` fields and stamp cryp_uu_id."""
        fernet_keys, cryp_uu_id = cls._fernet_for()
        alchemy_dict = cls._dict_of(alchemy_object_object)
        for key, plain_row in alchemy_dict.items():
            if key in alchemy_object_object.__encrypt_list__:
                alchemy_dict[key] = fernet_keys.encrypt(
                    cls._as_bytes(plain_row)
                ).decode()
        alchemy_dict["cryp_uu_id"] = cryp_uu_id
        return alchemy_object_object.update(**alchemy_dict)

    @classmethod
    def decrypt_given_alchemy(cls, alchemy_object_list: list):
        """Decrypt every object; return a list of plain dicts."""
        cls.decrypt_list = []
        for alchemy_object in alchemy_object_list:
            cls.decrypt_list.append(cls.decrypt_given_alchemy_object(alchemy_object))
        return cls.decrypt_list

    @classmethod
    def decrypt_given_alchemy_object(cls, alchemy_object):
        """Return the object's dict with ``__encrypt_list__`` fields decrypted."""
        fernet_keys, _ = cls._fernet_for(row=alchemy_object)
        alchemy_dict = cls._dict_of(alchemy_object)
        for key, plain_row in alchemy_dict.items():
            if key in alchemy_object.__encrypt_list__:
                alchemy_dict[key] = fernet_keys.decrypt(
                    cls._as_bytes(plain_row)
                ).decode()
        return alchemy_dict
|
||||||
|
|
@ -0,0 +1,157 @@
|
||||||
|
from sqlalchemy import (
|
||||||
|
String,
|
||||||
|
ForeignKey,
|
||||||
|
Index,
|
||||||
|
SmallInteger,
|
||||||
|
Boolean,
|
||||||
|
TIMESTAMP,
|
||||||
|
Text,
|
||||||
|
Numeric,
|
||||||
|
Integer,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped, relationship
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
|
||||||
|
class DecisionBookBudgetBooks(CrudCollection):
    """Budget book header: links a company (and optional branch) to a
    decision book for budget tracking."""

    __tablename__ = "decision_book_budget_books"
    __exclude__fields__ = []

    country: Mapped[str] = mapped_column(String, nullable=False)
    branch_type: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(String, nullable=False)
    # optional branch; also a companies row
    branch_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    branch_uu_id: Mapped[str] = mapped_column(
        String, comment="Branch UU ID", nullable=True
    )
    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=False
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    __table_args__ = (
        Index(
            "_decision_book_budget_companies_book_ndx_00",
            company_id,
            CrudCollection.created_at,
        ),
        {"comment": "budget Book Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class DecisionBookBudgetCodes(CrudCollection):
    """Budget code dictionary: a short code plus description, optionally
    scoped to a decision book, building part, or company."""

    __tablename__ = "decision_book_budget_codes"
    __exclude__fields__ = []

    budget_code: Mapped[str] = mapped_column(
        String(48), nullable=False, comment="budget Code"
    )
    comment_line: Mapped[str] = mapped_column(
        Text, nullable=False, comment="Comment Line"
    )

    build_decision_book_id: Mapped[int] = mapped_column(
        ForeignKey("build_decision_book.id"), nullable=True
    )
    build_decision_book_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Decision Book UU ID"
    )

    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=True
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Build Parts UU ID"
    )

    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=True)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Company UU ID"
    )

    # "created_at" referenced by name -- the column lives on CrudCollection
    __table_args__ = (
        Index("_decision_book_budget_codes_ndx_00", budget_code, "created_at"),
        Index("_decision_book_budget_codes_ndx_01", company_id, "created_at"),
        {"comment": "budget Book Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class DecisionBookBudgetMaster(CrudCollection):
    """Master budget definition: total amount, currency, tracking period
    and owning department for a budget book."""

    __tablename__ = "decision_book_budget_master"
    __exclude__fields__ = []

    budget_type: Mapped[str] = mapped_column(
        String(50), nullable=False
    )  # Budget type (e.g. Operational, Investment)
    currency: Mapped[str] = mapped_column(
        String(8), server_default="TRY"
    )  # Budget currency
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )  # Total budget

    tracking_period_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    # NOTE(review): DB comment "Part Direction UUID" looks copy-pasted
    # from another column; kept unchanged (it is persisted schema text)
    tracking_period_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Part Direction UUID"
    )
    budget_books_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_books.id"), nullable=False
    )
    budget_books_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Budget Books UU ID"
    )
    department_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("departments.id"), nullable=False
    )  # Relation to the department
    department_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Department UU ID"
    )

    __table_args__ = ({"comment": "budget Book Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class DecisionBookBudgets(CrudCollection):
    """Per-period budget movement rows under a budget master.

    Tracks total, used and remaining amounts for a budget code at a given
    process date.
    """

    __tablename__ = "decision_book_budgets"
    __exclude__fields__ = []

    # Start/processing date of this budget row.
    process_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), nullable=False
    )
    budget_codes_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_codes.id"), nullable=False
    )
    # Total budget amount.
    total_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False
    )
    # Amount consumed so far.
    used_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )
    # Amount still available.
    # NOTE(review): nothing in this file enforces
    # remaining_budget == total_budget - used_budget; presumably maintained by
    # application code — confirm.
    remaining_budget: Mapped[float] = mapped_column(
        Numeric(10, 2), nullable=False, default=0.0
    )

    decision_book_budget_master_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("decision_book_budget_master.id"), nullable=False
    )
    decision_book_budget_master_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Decision Book Budget Master UU ID"
    )

    __table_args__ = (
        Index(
            "_decision_book_budgets_ndx_00",
            decision_book_budget_master_uu_id,
            process_date,
        ),
        {"comment": "budget Book Information"},
    )
|
||||||
|
|
@ -0,0 +1,861 @@
|
||||||
|
import typing
|
||||||
|
from operator import or_
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from platform import system
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from fastapi import HTTPException, status
|
||||||
|
|
||||||
|
from sqlalchemy.orm import mapped_column, relationship, Mapped
|
||||||
|
from sqlalchemy import (
|
||||||
|
String,
|
||||||
|
Integer,
|
||||||
|
ForeignKey,
|
||||||
|
Index,
|
||||||
|
SmallInteger,
|
||||||
|
Boolean,
|
||||||
|
TIMESTAMP,
|
||||||
|
Text,
|
||||||
|
Numeric,
|
||||||
|
)
|
||||||
|
|
||||||
|
from api_library.date_time_actions.date_functions import system_arrow
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
from databases.extensions.selector_classes import SelectActionWithEmployee
|
||||||
|
from api_validations.validations_request import (
|
||||||
|
InsertBuildParts,
|
||||||
|
InsertBuild,
|
||||||
|
UpdateBuild,
|
||||||
|
)
|
||||||
|
from api_objects.auth.token_objects import EmployeeTokenObject, OccupantTokenObject
|
||||||
|
|
||||||
|
|
||||||
|
class BuildTypes(CrudCollection):
    """
    BuildTypes class based on declarative_base and BaseMixin via session.

    Lookup of building/part type codes, grouped by function code, with one row
    per language (``lang``) for localized ``type_name``.
    """

    __tablename__ = "build_types"
    __exclude__fields__ = []
    __include__fields__ = []

    # Functional grouping code for the type (e.g. residential vs commercial).
    function_code: Mapped[str] = mapped_column(
        String(12), server_default="", nullable=False, comment="Function Code"
    )
    # Short code for the structure type; unique together with function_code
    # and lang (see index below).
    type_code: Mapped[str] = mapped_column(
        String(12), server_default="", nullable=False, comment="Structure Type Code"
    )
    # Language of type_name; defaults to Turkish.
    lang: Mapped[str] = mapped_column(
        String(4), server_default="TR", nullable=False, comment="Language"
    )
    # Human-readable, localized type name.
    type_name: Mapped[str] = mapped_column(
        String(48), server_default="", nullable=False, comment="Type Name"
    )

    __table_args__ = (
        Index("_build_types_ndx_00", type_code, function_code, lang, unique=True),
        {"comment": "Function group of building types with their language information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class Part2Employee(CrudCollection):
    """
    Employee2Parts class based on declarative_base and BaseMixin via session.
    In between start and end date, a part can be assigned to only one employee.

    Association table linking a building part to the employee responsible for it.
    """

    __tablename__ = "part2employee"
    __exclude__fields__ = []
    __include__fields__ = []

    # NOTE(review): plain Integer, not a ForeignKey to build.id like the other
    # models in this file — presumably intentional denormalization; confirm.
    build_id: Mapped[int] = mapped_column(Integer, comment="Building ID")
    part_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"), nullable=False, comment="Part ID"
    )
    employee_id: Mapped[int] = mapped_column(
        ForeignKey("employees.id"), nullable=False, comment="Employee ID"
    )

    __table_args__ = (
        # One row per (employee, part) pair.
        Index("_part2employee_ndx_00", employee_id, part_id, unique=True),
        {"comment": "Employee2Parts Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class RelationshipEmployee2Build(CrudCollection):
    """
    CompanyRelationship class based on declarative_base and CrudCollection via
    session. Company -> Sub Company -> Sub-Sub Company.

    Links an employee (of a company) to a building (``member_id``), optionally
    flagged as display-only. Used by ``Build`` as its ``__many__table__`` to
    scope building queries per employee.
    """

    __tablename__ = "relationship_employee2build"
    __exclude__fields__ = []

    company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=False
    )  # 1, 2, 3
    employee_id: Mapped[int] = mapped_column(
        ForeignKey("employees.id"), nullable=False
    )  # employee -> (n)person Evyos LTD
    # The related building ("member" of the relationship).
    member_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False
    )  # 2, 3, 4
    # Kind of relationship, e.g. "Employee" (default) or "Commercial".
    relationship_type: Mapped[str] = mapped_column(
        String, nullable=True, server_default="Employee"
    )  # Commercial
    # When true, the employee can see but not act on the building.
    show_only: Mapped[bool] = mapped_column(Boolean, server_default="False")

    __table_args__ = (
        Index(
            "relationship_build_employee_ndx_00",
            company_id,
            employee_id,
            member_id,
            relationship_type,
            unique=True,
        ),
        {"comment": "Build & Employee Relationship Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class Build(CrudCollection, SelectActionWithEmployee):
    """
    Builds class based on declarative_base and BaseMixin via session.

    A physical building (residential/commercial) with its parts, decision
    books and the employee relationships that control access to it.
    """

    __tablename__ = "build"
    __exclude__fields__ = []
    __include__fields__ = []
    __access_by__ = []
    # Association table used by SelectActionWithEmployee to scope queries to
    # buildings the current employee is related to.
    __many__table__ = RelationshipEmployee2Build

    # Government address code (nvi.gov.tr); unique per building.
    gov_address_code: Mapped[str] = mapped_column(
        String, server_default="", unique=True
    )
    build_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building Name"
    )
    build_no: Mapped[str] = mapped_column(
        String(8), nullable=False, comment="Building Number"
    )

    max_floor: Mapped[int] = mapped_column(
        SmallInteger, server_default="1", nullable=False, comment="Max Floor"
    )
    underground_floor: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", nullable=False, comment="Underground Floor"
    )
    build_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default="1900-01-01"
    )
    decision_period_date: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True),
        server_default="1900-01-01",
        comment="Building annual ordinary meeting period",
    )
    tax_no: Mapped[str] = mapped_column(String(24), server_default="")
    lift_count: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    heating_system: Mapped[bool] = mapped_column(Boolean, server_default="True")
    cooling_system: Mapped[bool] = mapped_column(Boolean, server_default="False")
    hot_water_system: Mapped[bool] = mapped_column(Boolean, server_default="False")
    block_service_man_count: Mapped[int] = mapped_column(
        SmallInteger, server_default="0"
    )
    security_service_man_count: Mapped[int] = mapped_column(
        SmallInteger, server_default="0"
    )
    garage_count: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Garage Count"
    )
    # Points at the BuildParts row used as the management office (see
    # the management_room property); plain Integer, resolved manually.
    management_room_id: Mapped[int] = mapped_column(
        Integer, nullable=True, comment="Management Room ID"
    )

    site_id: Mapped[int] = mapped_column(ForeignKey("build_sites.id"), nullable=True)
    site_uu_id: Mapped[str] = mapped_column(String, comment="Site UUID", nullable=True)
    address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"), nullable=False)
    address_uu_id: Mapped[str] = mapped_column(
        String, comment="Address UUID", nullable=False
    )
    build_types_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=False, comment="Building Type"
    )
    build_types_uu_id: Mapped[str] = mapped_column(String, comment="Building Type UUID")

    parts: Mapped[List["BuildParts"]] = relationship(
        "BuildParts", back_populates="buildings", foreign_keys="BuildParts.build_id"
    )
    decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
        "BuildDecisionBook",
        back_populates="buildings",
        foreign_keys="BuildDecisionBook.build_id",
    )

    __table_args__ = (
        Index("_builds_ndx_00", gov_address_code),
        Index("_builds_ndx_01", build_name, build_no),
        {
            "comment": "Build objects are building that are created for living and store purposes"
        },
    )

    @property
    def management_room(self):
        """Return the BuildParts row acting as the management office, or None."""
        if management_room := BuildParts.filter_by_one(
            system=True, id=self.management_room_id, build_id=self.id
        ).data:
            return management_room
        return None

    @classmethod
    def create_action(cls, data: InsertBuild, token):
        """Create (or find) a building from an InsertBuild payload.

        Resolves the address and build-type UUIDs to ids, creates the
        building, links it to the acting employee/company via
        ``__many__table__`` and confirms both rows.

        Raises:
            HTTPException 404: if the address UUID is missing or unknown.
        """
        from databases import Addresses

        data_dict = data.excluded_dump()
        data_dict["address_id"] = None
        if data.address_uu_id:
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.address_uu_id,
            ).data
            # Guard: an unknown UUID previously raised AttributeError here;
            # fall through to the explicit 404 below instead.
            if official_address:
                data_dict["address_id"] = official_address.id
                data_dict["build_no"] = str(official_address.build_number)
        if not data_dict["address_id"]:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Address is not found in database. Re-enter address record then try again.",
            )
        build_type = BuildTypes.filter_by_one(
            system=True, uu_id=str(data.build_types_uu_id)
        ).data
        data_dict["build_types_id"] = build_type.id
        build_created = cls.find_or_create(**data_dict)
        created_build_relation = cls.__many__table__.find_or_create(
            company_id=token.selected_company.company_id,
            employee_id=token.selected_company.employee_id,
            member_id=build_created.id,
        )
        build_created.save()
        build_created.update(is_confirmed=True)
        build_created.save()
        created_build_relation.update(is_confirmed=True)
        created_build_relation.save()
        return build_created

    @classmethod
    def update_action(cls, data: UpdateBuild, build_uu_id: str, token):
        """Update the building identified by ``build_uu_id``.

        Returns the updated row, or None when the UUID does not match any
        building (kept for backward compatibility with existing callers).
        """
        from databases import Addresses

        data_dict = data.excluded_dump()
        if data.address_uu_id:
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.address_uu_id
            ).data
            data_dict["address_id"] = official_address.id if official_address else None
        if build_to_update := cls.filter_one(cls.uu_id == build_uu_id).data:
            updated_build = build_to_update.update(**data_dict)
            updated_build.save()
            return updated_build
        return None

    @property
    def top_flat(self):
        """Highest part number among this building's parts (0 if none)."""
        max_flat_no = 0
        for part in self.parts:
            # Fix: compare against the local accumulator, not the
            # non-existent attribute self.max_flat_no.
            if part.part_no > max_flat_no:
                max_flat_no = part.part_no
        return max_flat_no

    @property
    def bottom_flat(self):
        """Lowest part number among this building's parts (at most 0).

        Negative part numbers are used for underground levels; 0 is the
        management/ground default.
        """
        min_flat_no = 0
        for part in self.parts:
            # Fix: track the minimum with the local accumulator; the original
            # compared against self.max_flat_no and never narrowed correctly.
            if part.part_no < min_flat_no:
                min_flat_no = part.part_no
        return min_flat_no

    @property
    def human_livable_parts(self) -> tuple:
        """Return (list of livable parts, their count)."""
        parts = list(part for part in self.parts if part.human_livable)
        return parts, len(parts)

    @property
    def livable_part_count(self):
        """Count of livable parts; raises 404 when the building has none."""
        livable_parts = BuildParts.filter_all(
            BuildParts.build_id == self.id,
            BuildParts.human_livable == True,
        )
        if not livable_parts.data:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="There is no livable part in this building.",
            )
        return livable_parts.count

    @property
    def part_type_count(self):
        """Group part numbers by build-type code.

        Returns a dict mapping type_code -> {"list": [part_no, ...]}.
        """
        # Fix: initialize once before the loop; the original re-created the
        # dict on every iteration, discarding all but the last part.
        building_types = {}
        for part in self.parts:
            build_type = BuildTypes.filter_by_one(
                system=True, id=part.build_part_type_id
            ).data
            if build_type.type_code in building_types:
                building_types[build_type.type_code]["list"].append(part.part_no)
            else:
                building_types[build_type.type_code] = {"list": [part.part_no]}
        return building_types
|
||||||
|
|
||||||
|
|
||||||
|
class BuildParts(CrudCollection):
    """
    BuildParts class based on declarative_base and BaseMixin via session.
    Attentions: Part_no is unique for each building and Every building must
    have a management section.!!! default no 0
    """

    __tablename__ = "build_parts"
    __exclude__fields__ = []
    __include__fields__ = []
    __enum_list__ = [("part_direction", "Directions", "NN")]

    # https://adres.nvi.gov.tr/VatandasIslemleri/AdresSorgu
    address_gov_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Goverment Door Code"
    )
    # Part number within the building; unique per build (see index below).
    part_no: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", nullable=False, comment="Part Number"
    )
    # Floor level; negative values are underground.
    part_level: Mapped[int] = mapped_column(
        SmallInteger, server_default="0", comment="Building Part Level"
    )
    # Derived code "<type_code>:<part_no zero-padded>", built in create_action.
    part_code: Mapped[str] = mapped_column(
        String, server_default="", nullable=False, comment="Part Code"
    )
    part_gross_size: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Part Gross Size"
    )
    part_net_size: Mapped[int] = mapped_column(
        Integer, server_default="0", comment="Part Net Size"
    )
    default_accessory: Mapped[str] = mapped_column(
        Text, server_default="0", comment="Default Accessory"
    )
    human_livable: Mapped[bool] = mapped_column(
        Boolean, server_default="1", comment="Human Livable"
    )
    # Constant-payment grouping key, derived from net size ("<m2>M2").
    due_part_key: Mapped[str] = mapped_column(
        String, server_default="", nullable=False, comment="Constant Payment Group"
    )

    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building UUID"
    )
    part_direction_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    part_direction_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Part Direction UUID"
    )
    part_type_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=False, comment="Building Part Type"
    )
    part_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building Part Type UUID"
    )

    buildings: Mapped["Build"] = relationship(
        "Build", back_populates="parts", foreign_keys=[build_id]
    )

    __table_args__ = (
        Index("build_parts_ndx_01", build_id, part_no, unique=True),
        {"comment": "Part objects that are belong to building objects"},
    )

    @classmethod
    def create_action(cls, data: InsertBuildParts, token):
        """Create (or find) a building part from an InsertBuildParts payload.

        Verifies the employee can access the target building, resolves the
        part-type and part-direction UUIDs, derives part_code and
        due_part_key, and creates the row.

        Raises:
            HTTPException 406: employee cannot access the building, or
                gross/net size is empty.
            HTTPException 418: part-type UUID did not resolve.
        """
        from databases import ApiEnumDropdown

        data_dict = data.dump()
        build_from_duty = Build.select_action(
            employee_id=token.selected_company.employee_id,
            filter_expr=[Build.uu_id == data.build_uu_id],
        )
        building = build_from_duty.first()
        if not building:
            raise HTTPException(
                status_code=status.HTTP_406_NOT_ACCEPTABLE,
                detail="This Employee can not reach this building or building uu-id not found in database. "
                "Check with your supervisor.",
            )

        if build_types := BuildTypes.filter_one(
            BuildTypes.uu_id == data.build_part_type_uu_id,
        ).data:
            part_direction = ApiEnumDropdown.get_by_uuid(
                uuid=str(data.part_direction_uu_id)
            )

            data_dict["part_gross_size"] = data.part_gross_size
            data_dict["part_net_size"] = data.part_net_size
            data_dict["part_level"] = data.part_level
            data_dict["build_id"] = building.id
            data_dict["part_no"] = data.part_no
            data_dict["part_code"] = (
                f"{build_types.type_code}:{str(data_dict['part_no']).zfill(2)}"
            )
            data_dict["address_gov_code"] = data.address_gov_code
            data_dict["default_accessory"] = data.default_accessory
            data_dict["human_livable"] = bool(data.human_livable)

            data_dict["build_uu_id"] = str(data.build_uu_id)
            # Note: original assigned part_type_id twice; once is enough.
            data_dict["part_type_id"] = build_types.id
            data_dict["part_type_uu_id"] = str(build_types.uu_id)
            data_dict["part_direction_id"] = part_direction.id
            data_dict["part_direction_uu_id"] = str(part_direction.uu_id)

            if not data_dict["part_gross_size"]:
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="Part Gross Size can not be empty.",
                )

            if not data_dict["part_net_size"]:
                raise HTTPException(
                    status_code=status.HTTP_406_NOT_ACCEPTABLE,
                    detail="Part Net Size can not be empty.",
                )
            # Round net size up to the next multiple of 5 for the dues group.
            pt = int(data_dict["part_net_size"])
            data_dict["due_part_key"] = str(pt + (5 - (pt % 5))) + "M2"
            del data_dict["build_part_type_uu_id"]
            return cls.find_or_create(**data_dict)

        raise HTTPException(
            status_code=status.HTTP_418_IM_A_TEAPOT,
            detail="Build Part can not be created.",
        )

    @property
    def part_name(self):
        """Human-readable name: "<TYPE NAME> : <part_no>"."""
        if build_type := BuildTypes.filter_by_one(
            system=True, id=self.part_type_id
        ).data:
            return f"{str(build_type.type_name).upper()} : {str(self.part_no).upper()}"
        # Fix: the original fallback dereferenced build_type, which is
        # None/falsy on this branch, and raised AttributeError.
        return f"Undefined:{str(self.part_no).upper()}"
|
||||||
|
|
||||||
|
|
||||||
|
class BuildLivingSpace(CrudCollection):
    """
    LivingSpace class based on declarative_base and BaseMixin via session.
    Owner or live person = Occupant of the build part.
    + Query OR(owner_person_id == person_id, life_person_id == person_id) AND (now(date))
    """

    __tablename__ = "build_living_space"
    __exclude__fields__ = []
    __include__fields__ = []

    # Fixed amount deducted from the occupant's debit.
    fix_value: Mapped[float] = mapped_column(
        Numeric(20, 6),
        server_default="0",
        comment="Fixed value is deducted from debit.",
    )
    # Fixed percentage deducted from the occupant's debit.
    fix_percent: Mapped[float] = mapped_column(
        Numeric(6, 2),
        server_default="0",
        comment="Fixed percent is deducted from debit.",
    )

    agreement_no: Mapped[str] = mapped_column(
        String, server_default="", comment="Agreement No"
    )
    marketing_process: Mapped[bool] = mapped_column(Boolean, server_default="False")
    marketing_layer: Mapped[int] = mapped_column(SmallInteger, server_default="0")

    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"),
        nullable=False,
        index=True,
        comment="Build Part ID",
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )
    person_id: Mapped[int] = mapped_column(
        ForeignKey("people.id"),
        nullable=False,
        index=True,
        comment="Responsible People ID",
    )
    person_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Responsible People UUID"
    )
    occupant_type: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"),
        nullable=False,
        comment="Occupant Type",
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Occupant Type UUID"
    )

    __table_args__ = (
        {"comment": "Living Space inside building parts that are related to people"},
    )

    @classmethod
    def create_action(
        cls,
        data: dict,
        token_dict: typing.Union[EmployeeTokenObject, OccupantTokenObject],
    ):
        """Create (or find) a living-space row and bind default modules.

        Normalizes expiry dates via system_arrow, requires a Services row
        matching the occupant type's code, triggers first-init module binding
        for the occupant, then saves and confirms the row.

        Raises:
            HTTPException 418: no Services row matches the occupant type.
        """
        from databases import Services, OccupantTypes
        from api_events.events.events.events_bind_modules import (
            ModulesBindOccupantEventMethods,
        )

        # Normalize incoming expiry bounds to the system's arrow/ISO format.
        if data.get("expiry_starts"):
            data["expiry_starts"] = str(system_arrow.get(data["expiry_starts"]))
        if data.get("expiry_ends"):
            data["expiry_ends"] = str(system_arrow.get(data["expiry_ends"]))
        created_living_space = BuildLivingSpace.find_or_create(**data)
        occupant_type = OccupantTypes.filter_by_one(
            system=True, uu_id=created_living_space.occupant_type_uu_id
        ).data
        # The service itself is not used further — this is an existence check.
        related_service = Services.filter_by_one(
            related_responsibility=occupant_type.occupant_code,
        ).data
        if not related_service:
            raise HTTPException(
                status_code=status.HTTP_418_IM_A_TEAPOT,
                detail="Service is not found in database. Re-enter service record then try again.",
            )
        ModulesBindOccupantEventMethods.bind_default_module_for_first_init_occupant(
            build_living_space_id=created_living_space.id,
        )
        created_living_space.save_and_confirm()
        return created_living_space

    @classmethod
    def find_living_from_customer_id(
        cls, customer_id, process_date, add_days: int = 32
    ):
        """Find living spaces for a person around a process date.

        Returns (rows, count) where the person is owner or resident and the
        tenancy window (widened by ``add_days`` on both sides) covers the date.

        NOTE(review): cls.owner_person_id, cls.life_person_id, cls.start_date
        and cls.stop_date are not defined on this model in this file —
        presumably inherited from CrudCollection or stale after a rename to
        person_id/expiry_* ; confirm before relying on this method.
        """
        from api_library.date_time_actions.date_functions import system_arrow

        formatted_date = system_arrow.get(str(process_date))
        living_spaces = cls.filter_all(
            or_(
                cls.owner_person_id == customer_id,
                cls.life_person_id == customer_id,
            ),
            cls.start_date < formatted_date - timedelta(days=add_days),
            cls.stop_date > formatted_date + timedelta(days=add_days),
        )
        return living_spaces.data, living_spaces.count
|
||||||
|
|
||||||
|
|
||||||
|
class BuildManagement(CrudCollection):
    """Management pricing per building part and occupant type.

    Stores the discount percentage/price and the final calculated price for
    managing a given part, keyed by occupant type and expiry start.
    """

    __tablename__ = "build_management"
    __exclude__fields__ = []

    # Discount rate, e.g. 22 for 22%.
    discounted_percentage: Mapped[float] = mapped_column(
        Numeric(6, 2), server_default="0.00"
    )  # %22
    # List price before rounding, e.g. 78.00 TL.
    discounted_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )  # e.g. normal price: 78.00 TL
    # Final (rounded) price actually charged, e.g. 75.00 TL.
    calculated_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )  # e.g. rounded down to a flat 75.00 TL

    occupant_type: Mapped[int] = mapped_column(
        ForeignKey("occupant_types.id"),
        nullable=False,
        comment="Occupant Type",
    )
    occupant_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Occupant Type UUID"
    )
    build_id: Mapped[int] = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Building UUID"
    )
    build_parts_id: Mapped[int] = mapped_column(
        ForeignKey("build_parts.id"),
        nullable=False,
        index=True,
        comment="Build Part ID",
    )
    build_parts_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Part UUID"
    )

    __table_args__ = (
        # One management row per (part, occupant type, expiry start).
        # "expiry_starts" is referenced by name — presumably a column from
        # CrudCollection; it is not defined in this file.
        Index(
            "build_management_ndx_00",
            build_parts_id,
            occupant_type,
            "expiry_starts",
            unique=True,
        ),
        {"comment": "Management of the building parts that are related to people"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class BuildArea(CrudCollection):
    """
    Builds class based on declarative_base and BaseMixin via session.

    Common/shared area of a building (e.g. green space), with sizes,
    direction and an optional part type.
    """

    __tablename__ = "build_area"
    __exclude__fields__ = []

    area_name: Mapped[str] = mapped_column(String, server_default="")
    # Area code; unique per building (see index below).
    area_code: Mapped[str] = mapped_column(String, server_default="")
    # Kind of area, defaults to "GREEN".
    area_type: Mapped[str] = mapped_column(String, server_default="GREEN")
    # Compass direction code, e.g. "NN".
    area_direction: Mapped[str] = mapped_column(String(2), server_default="NN")
    area_gross_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    area_net_size: Mapped[float] = mapped_column(Numeric(20, 6), server_default="0")
    width = mapped_column(Integer, server_default="0")
    size = mapped_column(Integer, server_default="0")

    build_id: Mapped[int] = mapped_column(ForeignKey("build.id"))
    build_uu_id: Mapped[str] = mapped_column(String, comment="Building UUID")
    part_type_id: Mapped[int] = mapped_column(
        ForeignKey("build_types.id"), nullable=True, comment="Building Part Type"
    )
    part_type_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Building Part Type UUID"
    )

    # Fix: was `_table_args_` (single underscores), which SQLAlchemy ignores —
    # the unique index was silently never created.
    __table_args__ = (
        Index("_edm_build_parts_area_ndx_00", build_id, area_code, unique=True),
    )
|
||||||
|
|
||||||
|
|
||||||
|
class BuildSites(CrudCollection):
    """
    Builds class based on declarative_base and BaseMixin via session.

    A site (compound) that groups multiple buildings under one address.
    """

    __tablename__ = "build_sites"
    __exclude__fields__ = []
    __include__fields__ = []

    site_name: Mapped[str] = mapped_column(String(24), nullable=False)
    site_no: Mapped[str] = mapped_column(String(8), nullable=False)

    address_id: Mapped[int] = mapped_column(ForeignKey("addresses.id"))
    address_uu_id: Mapped[str] = mapped_column(String, comment="Address UUID")

    __table_args__ = (
        Index("_sites_ndx_01", site_no, site_name),
        {"comment": "Sites that groups building objets"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class BuildCompaniesProviding(CrudCollection):
    """Companies that provide a service to a building.

    Links a building to a provider company and the kind of provision (an
    ``api_enum_dropdown`` entry), optionally under a contract.
    """

    __tablename__ = "build_companies_providing"
    __exclude__fields__ = []
    __include__fields__ = []

    build_id = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    # NOTE(review): column comment says "Providing UUID" but this is the
    # building UUID — likely copy-paste; confirm before changing.
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # What is being provided (enum-dropdown entry).
    provide_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    provide_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # NOTE(review): FK points at companies.id, not a contracts table —
    # presumably intentional or stale; confirm.
    contract_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("companies.id"), nullable=True
    )

    __table_args__ = (
        Index(
            "_build_companies_providing_ndx_00",
            build_id,
            company_id,
            provide_id,
            unique=True,
        ),
        {"comment": "Companies providing services for building"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class BuildPersonProviding(CrudCollection):
    """Link table: which people provide which kind of service for a building.

    One row per (building, person, provide-type) combination; uniqueness is
    enforced by the ``_build_person_providing_ndx_00`` index below.
    """

    __tablename__ = "build_person_providing"
    __exclude__fields__ = []
    __include__fields__ = []

    # Serviced building (FK) plus a denormalised copy of its UUID.
    build_id = mapped_column(
        ForeignKey("build.id"), nullable=False, comment="Building ID"
    )
    # NOTE(review): DB comment "Providing UUID" looks like a copy-paste slip —
    # this column mirrors the build row's UUID; confirm before trusting it.
    build_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # Providing person (FK) plus denormalised UUID.
    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
    people_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="People UUID"
    )
    # Kind of service provided, drawn from the shared enum-dropdown table.
    provide_id: Mapped[int] = mapped_column(
        ForeignKey("api_enum_dropdown.id"), nullable=True
    )
    provide_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Providing UUID"
    )
    # NOTE(review): contract_id references companies.id, not a contracts
    # table — confirm whether this FK target is intended.
    contract_id: Mapped[int] = mapped_column(
        Integer, ForeignKey("companies.id"), nullable=True
    )

    __table_args__ = (
        Index(
            "_build_person_providing_ndx_00",
            build_id,
            people_id,
            provide_id,
            unique=True,
        ),
        {"comment": "People providing services for building"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
# owner_people: Mapped["People"] = relationship(
|
||||||
|
# "People",
|
||||||
|
# back_populates="owner_buildings",
|
||||||
|
# foreign_keys=[current_owner_person_id],
|
||||||
|
# )
|
||||||
|
# tenant_people: Mapped["People"] = relationship(
|
||||||
|
# "People",
|
||||||
|
# back_populates="tenant_buildings",
|
||||||
|
# foreign_keys=[current_tenant_person_id],
|
||||||
|
# )
|
||||||
|
# decision_book_management: Mapped[List["BuildDecisionBookManagement"]] = (
|
||||||
|
# relationship(
|
||||||
|
# "BuildDecisionBookManagement",
|
||||||
|
# back_populates="buildings",
|
||||||
|
# foreign_keys="BuildDecisionBookManagement.build_parts_id",
|
||||||
|
# )
|
||||||
|
# )
|
||||||
|
# budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
|
||||||
|
# "CompanyBudgetRecords",
|
||||||
|
# back_populates="parts",
|
||||||
|
# foreign_keys="CompanyBudgetRecords.build_parts_id",
|
||||||
|
# )
|
||||||
|
# living_spaces: Mapped[List["BuildLivingSpace"]] = relationship(
|
||||||
|
# "BuildLivingSpace",
|
||||||
|
# back_populates="parts",
|
||||||
|
# foreign_keys="BuildLivingSpace.build_parts_id",
|
||||||
|
# )
|
||||||
|
# decision_book_payment_master: Mapped[List["BuildDecisionBookPaymentsMaster"]] = (
|
||||||
|
# relationship(
|
||||||
|
# "BuildDecisionBookPaymentsMaster",
|
||||||
|
# back_populates="parts",
|
||||||
|
# foreign_keys="BuildDecisionBookPaymentsMaster.build_parts_id",
|
||||||
|
# )
|
||||||
|
# )
|
||||||
|
# decision_book_project_payments_master: Mapped[
|
||||||
|
# "BuildDecisionBookProjectPaymentsMaster"
|
||||||
|
# ] = relationship(
|
||||||
|
# "BuildDecisionBookProjectPaymentsMaster",
|
||||||
|
# back_populates="parts",
|
||||||
|
# foreign_keys="BuildDecisionBookProjectPaymentsMaster.build_parts_id",
|
||||||
|
# )
|
||||||
|
# search_iban_description: Mapped["BuildIbanDescription"] = relationship(
|
||||||
|
# "BuildIbanDescription",
|
||||||
|
# back_populates="parts",
|
||||||
|
# foreign_keys="BuildIbanDescription.build_parts_id",
|
||||||
|
# )
|
||||||
|
|
||||||
|
# parts: Mapped[List["BuildParts"]] = relationship(
|
||||||
|
# "BuildParts", back_populates="living_spaces", foreign_keys=[build_parts_id]
|
||||||
|
# )
|
||||||
|
# owner_people: Mapped["People"] = relationship(
|
||||||
|
# "People", back_populates="owner_living_spaces", foreign_keys=[owner_person_id]
|
||||||
|
# )
|
||||||
|
# life_people: Mapped["People"] = relationship(
|
||||||
|
# "People", back_populates="life_living_spaces", foreign_keys=[life_person_id]
|
||||||
|
# )
|
||||||
|
# company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
|
||||||
|
# response_company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"))
|
||||||
|
# person_id: Mapped[int] = mapped_column(ForeignKey("people.id"))
|
||||||
|
|
||||||
|
# companies: Mapped["Companies"] = relationship(
|
||||||
|
# "Companies", back_populates="buildings", foreign_keys=[company_id]
|
||||||
|
# )
|
||||||
|
# @classmethod
|
||||||
|
# def select_action(cls, duty_id, token=None):
|
||||||
|
# from database_sql_models import Companies
|
||||||
|
#
|
||||||
|
# related_companies = Companies.select_action(duty_id=duty_id)
|
||||||
|
# companies_ids = [company.id for company in related_companies.all()]
|
||||||
|
# return cls.filter_all(cls.company_id.in_(companies_ids)).query
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,569 @@
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
from sqlalchemy import String, Integer, Boolean, ForeignKey, Index, Identity
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
|
from api_configs import RelationAccess
|
||||||
|
from databases.extensions import SelectAction
|
||||||
|
from api_validations.validations_request import (
|
||||||
|
InsertCompany,
|
||||||
|
UpdateCompany,
|
||||||
|
MatchCompany2Company,
|
||||||
|
)
|
||||||
|
from api_objects.auth.token_objects import EmployeeTokenObject
|
||||||
|
|
||||||
|
|
||||||
|
class RelationshipDutyCompany(CrudCollection):
    """
    CompanyRelationship class based on declarative_base and CrudCollection via session
    Company -> Sub Company -> Sub-Sub Company

    if owner_id == parent_id: can manipulate data of any record
    else: Read-Only
    duty_id = if relationship_type == base An organization / not operational / no responsible person

    relationship = company_id filter -> Action filter(company_id) relationship_type = Organization
    relationship = company_id filter -> Action filter(company_id) relationship_type = Commercial
    """

    __tablename__ = "relationship_duty_company"
    __exclude__fields__ = []
    __access_by__ = RelationAccess.SuperAccessList

    # Company that owns/created the relationship row.
    owner_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=False
    )  # 1
    # Duty under which the relationship was established.
    duties_id: Mapped[int] = mapped_column(
        ForeignKey("duties.id"), nullable=False
    )  # duty -> (n)employee Evyos LTD

    # Company on the other side of the relationship.
    member_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=False
    )  # 2, 3, 4
    parent_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=True
    )  # None

    # One of "Commercial", "Organization", "Bulk".
    relationship_type: Mapped[str] = mapped_column(
        String, nullable=True, server_default="Commercial"
    )  # Commercial, Organization # Bulk
    child_count: Mapped[int] = mapped_column(Integer)  # 0
    show_only: Mapped[bool] = mapped_column(Boolean, server_default="0")

    @classmethod
    def _resolve_match_targets(cls, data: MatchCompany2Company, token):
        """Shared lookup for the ``match_company_to_company_*`` helpers.

        Validates the sending duty from ``data.duty_uu_id`` and resolves every
        UUID in ``data.match_company_uu_id`` to its existing "Bulk"
        relationship row owned by the token's company.

        Returns:
            tuple: ``(token_company_id, send_user_duties, bulk_rows)``.

        Raises:
            Exception: if the duty cannot be resolved, or any requested
                company has no matching "Bulk" relationship row.
        """
        from databases import (
            Duties,
        )

        token_duties_id, token_company_id = token.get("duty_id"), token.get(
            "company_id"
        )
        send_duties = Duties.filter_one(
            Duties.uu_id == data.duty_uu_id,
        )
        # NOTE(review): Duties.company_id is compared against the token's
        # *duty* id here — token_company_id looks intended; confirm.
        send_user_duties = Duties.filter_one(
            Duties.duties_id == send_duties.id,
            Duties.company_id == token_duties_id,
        )
        if not send_user_duties:
            raise Exception(
                "Send Duty is not found in company. Please check duty uuid and try again."
            )

        bulk_rows = []
        for company_uu_id in list(data.match_company_uu_id):
            company = Companies.filter_one(
                Companies.uu_id == company_uu_id,
            )
            bulk_company = cls.filter_one(
                cls.owner_id == token_company_id,
                cls.relationship_type == "Bulk",
                cls.member_id == company.id,
            )
            if not bulk_company:
                # BUG FIX: the original message interpolated bulk_company.uu_id,
                # which raises AttributeError because bulk_company is falsy/None
                # on this branch. Report the UUID that failed to match instead.
                raise Exception(
                    f"Bulk Company is not found in company. "
                    f"Please check company uuid {company_uu_id} and try again."
                )
            bulk_rows.append(bulk_company)
        return token_company_id, send_user_duties, bulk_rows

    @classmethod
    def match_company_to_company_commercial(cls, data: MatchCompany2Company, token):
        """Create "Commercial" relationship rows for each matched company.

        Each requested company must already have a "Bulk" relationship row
        owned by the token's company; a Commercial row is then created (or
        found) per match.
        """
        token_company_id, send_user_duties, bulk_rows = cls._resolve_match_targets(
            data, token
        )
        for bulk_row in bulk_rows:
            # NOTE(review): member_id/parent_id are taken from the *relationship*
            # row (not the matched company row) — preserved from the original;
            # confirm this is intended.
            cls.find_or_create(
                owner_id=token_company_id,
                duties_id=send_user_duties.id,
                member_id=bulk_row.id,
                parent_id=bulk_row.parent_id,
                relationship_type="Commercial",
                show_only=False,
            )

    @classmethod
    def match_company_to_company_organization(cls, data: MatchCompany2Company, token):
        """Create "Organization" relationship rows for each matched company.

        Same matching rules as the Commercial variant, but each matched
        company also gets its default duties initialised first.
        """
        from databases import (
            Duties,
        )

        token_company_id, send_user_duties, bulk_rows = cls._resolve_match_targets(
            data, token
        )
        for bulk_row in bulk_rows:
            Duties.init_a_company_default_duties(
                company_id=bulk_row.id,
                company_uu_id=str(bulk_row.uu_id),
            )
            cls.find_or_create(
                owner_id=token_company_id,
                duties_id=send_user_duties.id,
                member_id=bulk_row.id,
                parent_id=bulk_row.parent_id,
                relationship_type="Organization",
                show_only=False,
            )

    __table_args__ = (
        Index(
            "_company_relationship_ndx_01",
            duties_id,
            owner_id,
            member_id,
            relationship_type,
            unique=True,
        ),
        {"comment": "Company Relationship Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class Companies(CrudCollection, SelectAction):
    """Company record (CrudCollection + SelectAction).

    formal_name: official name in the government registry.
    public_name: publicly known name registered by the user.
    company_tag: nickname/tag used for search.
    commercial_type: legal entity vs. individual ("Tüzel" / "Birey").
    """

    __tablename__ = "companies"

    __exclude__fields__ = ["is_blacklist", "is_commercial"]
    __access_by__ = []
    # Association table used by the select_action machinery.
    __many__table__ = RelationshipDutyCompany
    # __explain__ = AbstractCompany()

    formal_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Formal Name"
    )
    company_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Company Type"
    )
    commercial_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Commercial Type"
    )
    # Unique tax number; also indexed again by _company_ndx_01 below.
    tax_no: Mapped[str] = mapped_column(
        String, index=True, unique=True, nullable=False, comment="Tax No"
    )

    public_name: Mapped[str] = mapped_column(String, comment="Public Name of a company")
    company_tag: Mapped[str] = mapped_column(String, comment="Company Tag")
    default_lang_type: Mapped[str] = mapped_column(String, server_default="TR")
    default_money_type: Mapped[str] = mapped_column(String, server_default="TL")
    is_commercial: Mapped[bool] = mapped_column(Boolean, server_default="False")
    is_blacklist: Mapped[bool] = mapped_column(Boolean, server_default="False")
    # Responsible (parent) company id; plain Integer, not an FK.
    parent_id = mapped_column(Integer, nullable=True)
    workplace_no: Mapped[str] = mapped_column(String, nullable=True)

    official_address_id: Mapped[int] = mapped_column(
        ForeignKey("addresses.id"), nullable=True
    )
    official_address_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Official Address UUID"
    )
    top_responsible_company_id: Mapped[int] = mapped_column(
        ForeignKey("companies.id"), nullable=True
    )
    top_responsible_company_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Top Responsible Company UUID"
    )

    # buildings: Mapped[List["Build"]] = relationship(
    #     "Build",
    #     back_populates="companies",
    #     foreign_keys="Build.company_id",
    # )

    __table_args__ = (
        Index("_company_ndx_01", tax_no, unique=True),
        Index("_company_ndx_02", formal_name, public_name),
        {"comment": "Company Information"},
    )

    @classmethod
    def create_action(cls, data: InsertCompany, token: EmployeeTokenObject):
        """Create a company owned by the token's company.

        Rejects duplicate tax numbers, optionally attaches an official
        address, then creates the company plus its "Bulk" relationship row.

        Raises:
            HTTPException: 400 if a company with the same tax_no exists.
        """
        from databases import Addresses, Duties

        data_dict = data.model_dump()
        # system=True: duplicate check must also see records hidden from this duty.
        if cls.filter_one(cls.tax_no == str(data.tax_no).strip(), system=True).data:
            raise HTTPException(
                status_code=400,
                detail="Company already exists. Please ask supervisor to make company visible for your duty.",
            )

        # Address is best-effort: a missing address is tolerated (see the
        # commented-out strict check below).
        official_address = Addresses.filter_one(
            Addresses.uu_id == data.official_address_uu_id,
        ).data
        # if not official_address:
        #     raise HTTPException(
        #         status_code=400,
        #         detail="Official address is not found. Please check address uuid and try again.",
        #     )

        # NOTE(review): bulk_duties is dereferenced (.id) below without a None
        # check — confirm get_bulk_duties_of_a_company always returns a row.
        bulk_duties = Duties.get_bulk_duties_of_a_company(
            company_id=token.selected_company.company_id
        )

        if official_address:
            data_dict["official_address_id"] = official_address.id
            data_dict["official_address_uu_id"] = str(official_address.uu_id)

        # The creator's company becomes both parent and top responsible.
        data_dict["parent_id"] = token.selected_company.company_id
        data_dict["top_responsible_company_id"] = token.selected_company.company_id
        data_dict["top_responsible_company_uu_id"] = (
            token.selected_company.company_uu_id
        )
        company_created = cls.find_or_create(**data_dict)
        company_created.save_and_confirm()
        # Every company gets a "Bulk" relationship row under the creator.
        company_relationship_created = RelationshipDutyCompany.find_or_create(
            owner_id=token.selected_company.company_id,
            duties_id=bulk_duties.id,
            member_id=company_created.id,
            parent_id=company_created.parent_id,
            child_count=0,
            relationship_type="Bulk",
            show_only=False,
        )
        company_relationship_created.save_and_confirm()
        return company_created

    @classmethod
    def update_action(cls, data: UpdateCompany, token):
        """Update a company the token's duty is allowed to manage.

        Resolves the (optional) new official address, strips UUID helper keys
        from the payload, then updates the row selected via select_action.
        """
        from databases import (
            Addresses,
        )

        data_dict = data.excluded_dump()
        duty_id = token.get("duty_id")
        company_id = token.get("company_id")
        if data.official_address_uu_id:
            # NOTE(review): official_address is dereferenced (.id) without a
            # None check — a bad UUID would raise AttributeError; confirm
            # upstream validation guarantees existence.
            official_address = Addresses.filter_one(
                Addresses.uu_id == data.official_address_uu_id,
                *Addresses.valid_record_args(Addresses),
            ).data
            data_dict["official_address_id"] = official_address.id
        # NOTE(review): unconditional del raises KeyError if either key is
        # absent from excluded_dump() — assumed always present; confirm.
        del data_dict["official_address_uu_id"], data_dict["company_uu_id"]
        # Only companies whose relationship row is parented by the token's
        # company are eligible for update.
        company_to_update = cls.select_action(
            duty_id_list=[duty_id],
            filter_expr=[
                cls.uu_id == data.company_uu_id,
                RelationshipDutyCompany.parent_id == company_id,
            ],
        )
        return company_to_update.update(**data_dict)
|
||||||
|
|
||||||
|
# parent_id = mapped_column(ForeignKey("companies.id"))
|
||||||
|
# if data.parent_uu_id:
|
||||||
|
# company = Companies.find_one(uu_id=data.parent_uu_id)
|
||||||
|
# data_dict["parent_id"] = company.id
|
||||||
|
# def is_access_valid(self, endpoint_ext: str):
|
||||||
|
# try:
|
||||||
|
# if (
|
||||||
|
# not arrow.get(self.stop_date)
|
||||||
|
# > arrow.utcnow()
|
||||||
|
# > arrow.get(self.start_date)
|
||||||
|
# ):
|
||||||
|
# message = f"Kullanıcı yetkileri süresi dolmuştur. {self.endpoint_name} için supervisor ile görüşünüz."
|
||||||
|
# SystemLogs.create_log(
|
||||||
|
# log_type="ERROR",
|
||||||
|
# log_code="ACCESS_EXPIRED",
|
||||||
|
# log_action=self.__tablename__,
|
||||||
|
# log_message=message,
|
||||||
|
# )
|
||||||
|
# return False
|
||||||
|
# except Exception as e:
|
||||||
|
# SystemLogs.create_log(
|
||||||
|
# log_type="ERROR",
|
||||||
|
# log_code="ACCESS_EXPIRED",
|
||||||
|
# log_action=self.__tablename__,
|
||||||
|
# log_message=e,
|
||||||
|
# )
|
||||||
|
# return False
|
||||||
|
#
|
||||||
|
# access_dict = {
|
||||||
|
# "LIST": self.access_read,
|
||||||
|
# "INSERT": self.access_write,
|
||||||
|
# "UPDATE": self.access_update,
|
||||||
|
# "DELETE": self.access_delete,
|
||||||
|
# "ACTIVE": self.access_update,
|
||||||
|
# "PRINT": self.report_print,
|
||||||
|
# "EXPORT": self.report_export,
|
||||||
|
# }
|
||||||
|
# return access_dict.get(endpoint_ext.upper(), False)
|
||||||
|
|
||||||
|
# official_address: Mapped[List["Address"]] = relationship(
|
||||||
|
# "Address",
|
||||||
|
# back_populates="official_companies",
|
||||||
|
# foreign_keys=[official_address_id],
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# emails: Mapped[List["UsersEmails"]] = relationship(
|
||||||
|
# "UsersEmails", back_populates="companies", foreign_keys="UsersEmails.company_id"
|
||||||
|
# )
|
||||||
|
# phones: Mapped[List["UsersPhones"]] = relationship(
|
||||||
|
# "UsersPhones", back_populates="company", foreign_keys="UsersPhones.company_id"
|
||||||
|
# )
|
||||||
|
# buildings: Mapped[List["Build"]] = relationship(
|
||||||
|
# "Build",
|
||||||
|
# back_populates="companies",
|
||||||
|
# foreign_keys="Build.company_id",
|
||||||
|
# )
|
||||||
|
# response_buildings: Mapped[List["Build"]] = relationship(
|
||||||
|
# "Build",
|
||||||
|
# back_populates="response_companies",
|
||||||
|
# foreign_keys="Build.response_company_id",
|
||||||
|
# )
|
||||||
|
# departments: Mapped[List["CompanyDepartments"]] = relationship(
|
||||||
|
# "CompanyDepartments",
|
||||||
|
# back_populates="company",
|
||||||
|
# foreign_keys="CompanyDepartments.company_id",
|
||||||
|
# )
|
||||||
|
# budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
|
||||||
|
# "CompanyBudgetRecords",
|
||||||
|
# back_populates="companies",
|
||||||
|
# foreign_keys="CompanyBudgetRecords.company_id",
|
||||||
|
# )
|
||||||
|
# send_budget_records: Mapped[List["CompanyBudgetRecords"]] = relationship(
|
||||||
|
# "CompanyBudgetRecords",
|
||||||
|
# back_populates="send_companies",
|
||||||
|
# foreign_keys="CompanyBudgetRecords.send_company_id",
|
||||||
|
# )
|
||||||
|
# decision_books: Mapped[List["BuildDecisionBook"]] = relationship(
|
||||||
|
# "BuildDecisionBook",
|
||||||
|
# back_populates="companies",
|
||||||
|
# foreign_keys="BuildDecisionBook.resp_company_id",
|
||||||
|
# )
|
||||||
|
# decision_book_projects: Mapped[List["BuildDecisionBookProjects"]] = relationship(
|
||||||
|
# "BuildDecisionBookProjects",
|
||||||
|
# back_populates="companies",
|
||||||
|
# foreign_keys="BuildDecisionBookProjects.resp_company_id",
|
||||||
|
# )
|
||||||
|
# decision_book_legal: Mapped["BuildDecisionBookLegal"] = relationship(
|
||||||
|
# "BuildDecisionBookLegal",
|
||||||
|
# back_populates="attorney_companies",
|
||||||
|
# foreign_keys="BuildDecisionBookLegal.resp_attorney_company",
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# company_account_books: Mapped["AccountBooks"] = relationship(
|
||||||
|
# "AccountBooks",
|
||||||
|
# back_populates="company",
|
||||||
|
# foreign_keys="AccountBooks.company_id",
|
||||||
|
# )
|
||||||
|
# branch_account_books: Mapped["AccountBooks"] = relationship(
|
||||||
|
# "AccountBooks",
|
||||||
|
# back_populates="branch",
|
||||||
|
# foreign_keys="AccountBooks.branch_id",
|
||||||
|
# )
|
||||||
|
# account_codes: Mapped["AccountCodes"] = relationship(
|
||||||
|
# "AccountCodes", back_populates="company", foreign_keys="AccountCodes.company_id"
|
||||||
|
# )
|
||||||
|
# search_iban_description: Mapped["BuildIbanDescription"] = relationship(
|
||||||
|
# "BuildIbanDescription",
|
||||||
|
# back_populates="company",
|
||||||
|
# foreign_keys="BuildIbanDescription.company_id",
|
||||||
|
# )
|
||||||
|
# related_companies: Mapped[List["CompanyRelationship"]] = relationship(
|
||||||
|
# "CompanyRelationship",
|
||||||
|
# back_populates="related_company",
|
||||||
|
# foreign_keys="CompanyRelationship.related_company_id",
|
||||||
|
# )
|
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
|
# class AbstractCompany:
|
||||||
|
# """
|
||||||
|
# Abstract and explanation of Company class for end-user guide
|
||||||
|
# """
|
||||||
|
#
|
||||||
|
# formal_name = Explanation(
|
||||||
|
# explanation="Devletin resmi kayıtlarında bulunan şirket ünvanıdır.",
|
||||||
|
# usage="Devletin resmi kayıtlarında bulunan şirket adı istendiğinde kullanılır.",
|
||||||
|
# alias="Resmi Ünvan",
|
||||||
|
# example=["X Şirketi LTD", "Y Şirketi A.Ş."],
|
||||||
|
# )
|
||||||
|
# company_type = Explanation(
|
||||||
|
# explanation="Şirketin türüdür.",
|
||||||
|
# usage="Şirketin türü istendiğinde kullanılır.",
|
||||||
|
# alias="Şirket Türü",
|
||||||
|
# example=[
|
||||||
|
# "Şahıs",
|
||||||
|
# "Limited",
|
||||||
|
# "Anonim",
|
||||||
|
# "Kolektif",
|
||||||
|
# "Komandit",
|
||||||
|
# "Kooperatif",
|
||||||
|
# "Serbest Meslek",
|
||||||
|
# "Adi Ortaklık",
|
||||||
|
# ],
|
||||||
|
# )
|
||||||
|
# commercial_type = Explanation(
|
||||||
|
# explanation="Şirketin ticari türüdür.",
|
||||||
|
# usage="Şirketin ticari türü istendiğinde kullanılır.",
|
||||||
|
# alias="Ticari Tür",
|
||||||
|
# example=["Tüzel", "Birey"],
|
||||||
|
# )
|
||||||
|
# tax_no = Explanation(
|
||||||
|
# explanation="Şirketin vergi numarasıdır.",
|
||||||
|
# usage="Şirketin vergi numarası istendiğinde kullanılır.",
|
||||||
|
# alias="Vergi No",
|
||||||
|
# example=["1234567890"],
|
||||||
|
# )
|
||||||
|
# public_name = Explanation(
|
||||||
|
# explanation="Şirketin kamuoyunda bilinen adıdır.",
|
||||||
|
# usage="Şirketin kamuoyunda bilinen adı istendiğinde kullanılır.",
|
||||||
|
# alias="Piyasada Bilinen Adı",
|
||||||
|
# example=["X Şirketi", "Y Şirketi"],
|
||||||
|
# )
|
||||||
|
# company_tag = Explanation(
|
||||||
|
# explanation="Şirketin takma adı veya etiketidir.",
|
||||||
|
# usage="Şirketin yöneticisin karar verdiği takma adı veya etiketi istendiğinde kullanılır.",
|
||||||
|
# alias="Şirket Etiketi veya Takma Adı",
|
||||||
|
# example=["X", "Y"],
|
||||||
|
# )
|
||||||
|
# default_lang_type = Explanation(
|
||||||
|
# explanation="Şirketin varsayılan dil türüdür.",
|
||||||
|
# usage="Şirketin varsayılan dil türü istendiğinde kullanılır.",
|
||||||
|
# alias="Şirketin Dil Türü",
|
||||||
|
# example=["TR", "EN"],
|
||||||
|
# )
|
||||||
|
# default_money_type = Explanation(
|
||||||
|
# explanation="Şirketin varsayılan para birimi türüdür.",
|
||||||
|
# usage="Şirketin varsayılan para birimi türü istendiğinde kullanılır.",
|
||||||
|
# alias="Şirketin Para Birimi Türü",
|
||||||
|
# example=["TL", "USD", "EUR"],
|
||||||
|
# )
|
||||||
|
# is_commercial = Explanation(
|
||||||
|
# explanation="Şirketin ticari olup olmadığını belirtir.",
|
||||||
|
# usage="Şirketin ticari olup olmadığını applikasyonun anlaması için kullanılır.",
|
||||||
|
# condition=lambda commercial_type: True if commercial_type == "Şahıs" else False,
|
||||||
|
# alias="Şirket Ticari mi?",
|
||||||
|
# )
|
||||||
|
# is_blacklist = Explanation(
|
||||||
|
# explanation="Şirketin kara listeye alınıp alınmadığını belirtir.",
|
||||||
|
# usage="Şirketin kara listeye alınıp alınmadığını applikasyonun anlaması için kullanılır.",
|
||||||
|
# alias="Kara Listeye alınsın mı?",
|
||||||
|
# example=[True, False],
|
||||||
|
# )
|
||||||
|
# parent_id = Explanation(
|
||||||
|
# explanation="Şirketin sorumlu olduğu şirketin ID'sidir.",
|
||||||
|
# usage="Şirketin sorumlu olduğu şirketin ID'si istendiğinde kullanılır.",
|
||||||
|
# alias="Sorumlu Şirket",
|
||||||
|
# example=[
|
||||||
|
# "Bir şirketin sorumlu şirketi hangisi olduğunu bulmak için kullanılır.",
|
||||||
|
# ],
|
||||||
|
# )
|
||||||
|
# workplace_no = Explanation(
|
||||||
|
# explanation="Şirketin iş yeri numarasıdır.",
|
||||||
|
# usage="Şirketin iş yeri numarası istendiğinde kullanılır.",
|
||||||
|
# alias="İş Yeri No",
|
||||||
|
# example=["1234567890"],
|
||||||
|
# )
|
||||||
|
# official_address_id = Explanation(
|
||||||
|
# explanation="Şirketin resmi adresidi.",
|
||||||
|
# usage="Şirketin resmi adresinin ne olduğunu bulmak için kullanılır.",
|
||||||
|
# alias="Resmi Adres",
|
||||||
|
# example=[
|
||||||
|
# "Bu şirketin adresi nedir sorusuna cevap vermek için kullanılır.",
|
||||||
|
# ],
|
||||||
|
# )
|
||||||
|
# top_responsible_company_id = Explanation(
|
||||||
|
# explanation="Şirketin en üst sorumlu şirketin ID'sidir.",
|
||||||
|
# usage="Şirketin en üst sorumlu şirketin hangisi olduğunu bulmak için kullanılır.",
|
||||||
|
# alias="Ana Yetkili Şirket",
|
||||||
|
# example=[
|
||||||
|
# "Bölge veya ülke genelinde en üst sorumlu şirketin hangisi olduğunu belirtmek için kullanılır.",
|
||||||
|
# ],
|
||||||
|
# )
|
||||||
|
# buildings = Explanation(
|
||||||
|
# explanation="Şirketin sahip olduğu binaların listesidir.",
|
||||||
|
# usage="Şirketin sahip olduğu binaların listesini bulmak için kullanılır.",
|
||||||
|
# alias="Sorumlu olduğu binalar Binalar",
|
||||||
|
# example=[
|
||||||
|
# "Şirketin sahip olduğu binaların listesini bulmak için kullanılır.",
|
||||||
|
# ],
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# def wag_create_company(self):
|
||||||
|
# """
|
||||||
|
# Er kişiye wag_create_company fonksiyonu = fieldları manipule edebilir?
|
||||||
|
# 78 ile oluşturulan bir user için wag_create_company fonksiyonu = fieldları manipule edebilir?
|
||||||
|
# """
|
||||||
|
# return {
|
||||||
|
# "commercial_type": self.commercial_type,
|
||||||
|
# "formal_name": self.formal_name,
|
||||||
|
# "public_name": self.public_name,
|
||||||
|
# "company_type": self.company_type,
|
||||||
|
# "tax_no": self.tax_no,
|
||||||
|
# "workplace_no": self.workplace_no,
|
||||||
|
# "company_tag": self.company_tag,
|
||||||
|
# "default_lang_type": self.default_lang_type,
|
||||||
|
# "default_money_type": self.default_money_type,
|
||||||
|
# "official_address_id": self.official_address_id,
|
||||||
|
# }
|
||||||
|
#
|
||||||
|
# def wag_update_company(self):
|
||||||
|
# return {
|
||||||
|
# "commercial_type": self.commercial_type,
|
||||||
|
# "formal_name": self.formal_name,
|
||||||
|
# "public_name": self.public_name,
|
||||||
|
# "company_type": self.company_type,
|
||||||
|
# "tax_no": self.tax_no,
|
||||||
|
# "workplace_no": self.workplace_no,
|
||||||
|
# "company_tag": self.company_tag,
|
||||||
|
# "default_lang_type": self.default_lang_type,
|
||||||
|
# "default_money_type": self.default_money_type,
|
||||||
|
# "official_address_id": self.official_address_id,
|
||||||
|
# }
|
||||||
|
|
@ -0,0 +1,232 @@
|
||||||
|
from sqlalchemy import String, Integer, ForeignKey, Index, Boolean, Identity
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
|
||||||
|
class Departments(CrudCollection):
    """Department inside a company (e.g. "Execution Office", "IT Department")."""

    __tablename__ = "departments"
    __exclude__fields__ = []

    # Parent department id; plain Integer (not a self-FK), defaulting to 0.
    parent_department_id = mapped_column(Integer, server_default="0")
    department_code = mapped_column(
        String(16), nullable=False, index=True, comment="Department Code"
    )
    department_name: Mapped[str] = mapped_column(
        String(128), nullable=False, comment="Department Name"
    )
    department_description: Mapped[str] = mapped_column(String, server_default="")

    # Owning company (FK) plus denormalised UUID copy.
    company_id: Mapped[int] = mapped_column(ForeignKey("companies.id"), nullable=False)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Company UUID"
    )

    # @classmethod
    # def create_action(cls, data: DepartmentsPydantic, token):
    #     data_dict = data.model_dump()
    #     data_dict["company_id"] = token.selected_company.company_id
    #     return cls.find_or_create(**data_dict)

    __table_args__ = {"comment": "Departments Information"}
|
||||||
|
|
||||||
|
|
||||||
|
class Duty(CrudCollection):
    """Catalogue of duty definitions (name/code), independent of any company.

    Company-specific assignments live in the ``Duties`` table, which FKs here.
    """

    __tablename__ = "duty"
    __exclude__fields__ = []

    duty_name: Mapped[str] = mapped_column(
        String, unique=True, nullable=False, comment="Duty Name"
    )
    duty_code: Mapped[str] = mapped_column(String, nullable=False, comment="Duty Code")
    duty_description: Mapped[str] = mapped_column(String, comment="Duty Description")

    # @classmethod
    # def create_action(cls, data: InsertCompanyDuty, token):
    #     # if not cls.__is_super__:
    #     #     raise HTTPException(
    #     #         status_code=401, detail="You are not authorized to create a duty."
    #     #     )
    #     data_dict = data.model_dump()
    #
    #     return cls.find_or_create(**data_dict)

    __table_args__ = ({"comment": "Duty Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class Duties(CrudCollection):
    """Association model: binds a generic duty (``duty`` row) to one company
    and one department, forming a concrete position in that company's
    organisation chart.
    """

    __tablename__ = "duties"
    __exclude__fields__ = []

    # Optional duty assigned by default to newly created users of the company.
    users_default_duty = mapped_column(
        ForeignKey("duty.id"), nullable=True, comment="Default Duty for Users"
    )
    # NOTE(review): plain Integer, not a ForeignKey — presumably a cross-service
    # reference to the companies table; confirm this is intentional.
    company_id: Mapped[int] = mapped_column(Integer)
    company_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Company UUID"
    )
    duties_id: Mapped[int] = mapped_column(ForeignKey("duty.id"), nullable=False)
    duties_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Duty UUID"
    )
    department_id = mapped_column(
        ForeignKey("departments.id"), nullable=False, comment="Department ID"
    )
    department_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Department UUID"
    )
    # priority_id: Mapped[int] = mapped_column(ForeignKey("priority.id"), nullable=True)
    # True when this assignment is a management-level duty for the company.
    management_duty = mapped_column(
        Boolean, server_default="0"
    )  # is this a prime Company Duty ???

    @classmethod
    def init_a_company_default_duties(cls, company_id, company_uu_id):
        """Bootstrap the default org structure for a freshly created company.

        Find-or-creates the "Execution Office" and "IT Department" departments,
        the Business Manager / IT Manager / BULK / OCCUPANT duties, and the
        company-level duty assignments linking them.

        :param company_id: integer id of the company being initialised.
        :param company_uu_id: the company UUID (stringified before storage).
        :return: list of every record touched, in creation order.
        """
        # NOTE(review): never referenced below — appears to be dead code.
        __default_init__ = ["Execution Office", "IT Department"]

        # Common flags so bootstrap rows are immediately live and confirmed.
        active_row = dict(
            is_confirmed=True, active=True, deleted=False, is_notification_send=True
        )
        list_of_created = []

        execution = Departments.find_or_create(
            department_name="Execution Office",
            department_code="EO001",
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        list_of_created.append(execution)
        it_dept = Departments.find_or_create(
            department_name="IT Department",
            department_code="ITD001",
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            **active_row,
        )
        list_of_created.append(it_dept)
        bm_duty = Duty.find_or_create(
            duty_name="Business Manager",
            duty_code="BM0001",
            duty_description="Business Manager",
            **active_row,
        )
        list_of_created.append(bm_duty)
        it_duty = Duty.find_or_create(
            duty_name="IT Manager",
            duty_code="IT0001",
            duty_description="IT Manager",
            **active_row,
        )
        list_of_created.append(it_duty)
        bulk_duty = Duty.find_or_create(
            duty_name="BULK",
            duty_code="BULK",
            duty_description="BULK RECORDS OF THE COMPANY",
            **active_row,
        )
        list_of_created.append(bulk_duty)
        occu_duty = Duty.find_or_create(
            duty_name="OCCUPANT",
            duty_code="OCCUPANT",
            duty_description="OCCUPANT RECORDS OF THE COMPANY",
            **active_row,
        )
        list_of_created.append(occu_duty)
        # Assign Business Manager to the Execution Office.
        duties_created_bm = cls.find_or_create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=bm_duty.id,
            duties_uu_id=str(bm_duty.uu_id),
            department_id=execution.id,
            department_uu_id=str(execution.uu_id),
            **active_row,
        )
        list_of_created.append(duties_created_bm)
        # Assign IT Manager to the IT Department.
        duties_created_it = cls.find_or_create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=it_duty.id,
            duties_uu_id=str(it_duty.uu_id),
            department_id=it_dept.id,
            department_uu_id=str(it_dept.uu_id),
            **active_row,
        )
        list_of_created.append(duties_created_it)
        # BULK and OCCUPANT bookkeeping duties both live under Execution Office.
        duties_created__ex = cls.find_or_create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=bulk_duty.id,
            duties_uu_id=str(bulk_duty.uu_id),
            department_id=execution.id,
            department_uu_id=str(execution.uu_id),
            **active_row,
        )
        list_of_created.append(duties_created__ex)
        duties_created_at = cls.find_or_create(
            company_id=company_id,
            company_uu_id=str(company_uu_id),
            duties_id=occu_duty.id,
            duties_uu_id=str(occu_duty.uu_id),
            department_id=execution.id,
            department_uu_id=str(execution.uu_id),
            **active_row,
        )
        list_of_created.append(duties_created_at)
        return list_of_created

    @classmethod
    def get_bulk_duties_of_a_company(cls, company_id):
        """Return the company's assignment row for the system "BULK" duty.

        :param company_id: integer id of the company.
        :raises Exception: if no live BULK duty assignment exists.
        """
        duties_id = Duty.filter_by_one(system=True, duty_code="BULK").data
        if bulk_duties := Duties.filter_by_one(
            duties_id=getattr(duties_id, "id", None),
            company_id=company_id,
            **Duties.valid_record_dict,
        ).data:
            return bulk_duties
        raise Exception("Bulk Duty not found. Please contact with supervisor.")

    # @classmethod
    # def create_action(cls, data: InsertCompanyDuty):
    #     data_dict = data.model_dump()
    #     if department := Departments.find_one(uu_id=data.department_uu_id):
    #         data_dict["department_id"] = department.id
    #     del data_dict["department_uu_id"]
    #     return cls.find_or_create(**data_dict)

    # A duty can be assigned to a given company/department pair only once.
    __table_args__ = (
        Index("duty_ndx_00", company_id, duties_id, department_id, unique=True),
        {"comment": "Duty & Company & Department Information"},
    )

    # department: Mapped[List["CompanyDepartments"]] = relationship(
    #     "CompanyDepartments", back_populates="duties", foreign_keys=[department_id]
    # )
    # employees: Mapped[List["CompanyEmployees"]] = relationship(
    #     "CompanyEmployees",
    #     back_populates="duty",
    #     foreign_keys="CompanyEmployees.duty_id",
    # )
    # duty_app: Mapped["CompanyDutyApp"] = relationship(
    #     "CompanyDutyApp", back_populates="duties", foreign_keys="CompanyDutyApp.company_duty_id"
    # )

    # def get_language_of_duty(self, lang):
    #     if erp_text := ErpText.find_one(lang=lang, text_code=self.duty_code):
    #         return erp_text.text_name, erp_text.text_description
    #     return None, None

    # company: Mapped["Companies"] = relationship(
    #     "Company", back_populates="departments", foreign_keys=[company_id]
    # )
    # duties: Mapped[List["CompanyDuty"]] = relationship(
    #     "CompanyDuty",
    #     back_populates="department",
    #     foreign_keys="CompanyDuty.department_id",
    # )
    # app_item: Mapped["AppItems"] = relationship(
    #     "AppItems", back_populates="department", foreign_keys="AppItems.department_id"
    # )
|
@ -0,0 +1,142 @@
|
||||||
|
from sqlalchemy import (
|
||||||
|
String,
|
||||||
|
ForeignKey,
|
||||||
|
Index,
|
||||||
|
Numeric,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
from api_validations.validations_request import InsertCompanyEmployees
|
||||||
|
|
||||||
|
|
||||||
|
class Staff(CrudCollection):
    """Staff position record tied to a company duty assignment (``duties`` row)."""

    __tablename__ = "staff"
    __exclude__fields__ = []

    staff_description: Mapped[str] = mapped_column(
        String, server_default="", comment="Staff Description"
    )
    staff_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Staff Name"
    )
    staff_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Staff Code"
    )

    duties_id: Mapped[int] = mapped_column(ForeignKey("duties.id"), nullable=False)
    duties_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Duty UUID"
    )

    @classmethod
    def create_action(cls, data: InsertCompanyEmployees):
        """Find-or-create a Staff row from an API payload.

        Resolves the duty UUID to the ``duties`` association row and maps the
        optional start/stop dates onto the validity-window columns.

        :param data: validated request body (``InsertCompanyEmployees``).
        :return: the found or newly created Staff record.
        """
        from databases import Duties

        data_dict = data.model_dump()
        if duty := Duties.find_one(uu_id=data.duty_uu_id):
            # BUG FIX: the original assigned data_dict["duty_id"], but this
            # model's columns are duties_id / duties_uu_id. "duty_id" was
            # silently discarded by remove_non_related_inputs, leaving the
            # NOT NULL duties_id / duties_uu_id columns unpopulated.
            data_dict["duties_id"] = duty.id
            data_dict["duties_uu_id"] = str(duty.uu_id)
        if data.start_date:
            data_dict["expiry_starts"] = data.start_date
        if data.stop_date:
            data_dict["expiry_ends"] = data.stop_date
        # Drop request-only keys that have no matching column.
        del data_dict["start_date"], data_dict["stop_date"], data_dict["duty_uu_id"]
        return cls.find_or_create(**data_dict)

    __table_args__ = ({"comment": "Staff Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class Employees(CrudCollection):
    """Links a person (``people`` row) to a staff position (``staff`` row)."""

    __tablename__ = "employees"
    __exclude__fields__ = []

    staff_id: Mapped[int] = mapped_column(ForeignKey("staff.id"))
    staff_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Staff UUID"
    )
    # Nullable: a staff position may exist before anyone is assigned to it.
    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=True)
    people_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="People UUID"
    )

    # A person may hold a given staff position only once.
    __table_args__ = (
        Index("employees_ndx_00", people_id, staff_id, unique=True),
        {"comment": "Employee Person Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class EmployeeHistory(CrudCollection):
    """Historical record of person-to-staff assignments (non-unique index,
    so the same pairing can appear multiple times over time).
    """

    __tablename__ = "employee_history"
    __exclude__fields__ = []

    staff_id: Mapped[int] = mapped_column(
        ForeignKey("staff.id"), nullable=False, comment="Staff ID"
    )
    staff_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Staff UUID"
    )
    people_id: Mapped[int] = mapped_column(
        ForeignKey("people.id"), nullable=False, comment="People ID"
    )
    people_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="People UUID"
    )

    __table_args__ = (
        Index("_employee_history_ndx_00", people_id, staff_id),
        {"comment": "Employee History Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class EmployeesSalaries(CrudCollection):
    """Salary record for a person; the validity window (``expiry_starts``)
    inherited from CrudCollection versions salaries over time.
    """

    __tablename__ = "employee_salaries"
    __exclude__fields__ = []

    gross_salary: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Gross Salary"
    )
    net_salary: Mapped[float] = mapped_column(
        Numeric(20, 6), nullable=False, comment="Net Salary"
    )

    people_id: Mapped[int] = mapped_column(ForeignKey("people.id"), nullable=False)
    people_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="People UUID"
    )

    # people: Mapped["People"] = relationship(
    #     "People", back_populates="employee_salaries", foreign_keys=[people_id]
    # )

    __table_args__ = (
        Index("_employee_salaries_ndx_00", people_id, "expiry_starts"),
        {"comment": "Employee Salaries Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
# class Events2Employees(CrudCollection):
|
||||||
|
#
|
||||||
|
# __tablename__ = "events2employees"
|
||||||
|
# __exclude__fields__ = []
|
||||||
|
#
|
||||||
|
# event_id = mapped_column(ForeignKey("events.id"), nullable=False)
|
||||||
|
# employees_id = mapped_column(ForeignKey("employees.id"), nullable=False)
|
||||||
|
#
|
||||||
|
# __table_args__ = (
|
||||||
|
# Index("_events2employees_ndx_00", event_id, employees_id),
|
||||||
|
# {"comment": "Events2Employees Information"},
|
||||||
|
# )
|
||||||
|
|
@ -0,0 +1,427 @@
|
||||||
|
import datetime
|
||||||
|
from decimal import Decimal
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
TIMESTAMP,
|
||||||
|
NUMERIC,
|
||||||
|
func,
|
||||||
|
text,
|
||||||
|
UUID,
|
||||||
|
String,
|
||||||
|
Integer,
|
||||||
|
Boolean,
|
||||||
|
SmallInteger,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import (
|
||||||
|
Mapped,
|
||||||
|
mapped_column,
|
||||||
|
)
|
||||||
|
from sqlalchemy_mixins.session import SessionMixin
|
||||||
|
from sqlalchemy_mixins.serialize import SerializeMixin
|
||||||
|
from sqlalchemy_mixins.repr import ReprMixin
|
||||||
|
from sqlalchemy_mixins.smartquery import SmartQueryMixin
|
||||||
|
|
||||||
|
from api_library import DateTimeLocal, client_arrow, system_arrow
|
||||||
|
from databases.sql_models.sql_operations import FilterAttributes
|
||||||
|
from databases.sql_models.postgres_database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class CrudMixin(Base, SmartQueryMixin, SessionMixin, FilterAttributes):
    """Abstract CRUD base mixed into every declarative model.

    Provides validity-window columns (``expiry_starts``/``expiry_ends``),
    find-or-create / find-or-abort semantics, an audited ``update`` and the
    ``get_dict`` serializer.
    """

    __abstract__ = True  # The model is abstract not a database table.
    __session__ = Base.session  # The session to use in the model.
    # Audit/system columns stripped from caller input on CREATE.
    __system__fields__create__ = (
        "created_at",
        "updated_at",
        "cryp_uu_id",
        "created_by",
        "created_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
        "confirmed_by",
        "confirmed_by_id",
        "is_confirmed",
        "deleted",
        "active",
        "is_notification_send",
        "is_email_send",
    )  # The system fields to use in the model.
    # Audit/system columns stripped from caller input on UPDATE.
    __system__fields__update__ = (
        "cryp_uu_id",
        "created_at",
        "updated_at",
        "created_by",
        "created_by_id",
        "confirmed_by",
        "confirmed_by_id",
        "updated_by",
        "updated_by_id",
        "replication_id",
    )
    # Columns hidden by default when serializing via get_dict().
    __system_default_model__ = [
        "cryp_uu_id",
        "is_confirmed",
        "deleted",
        "is_notification_send",
        "replication_id",
        "is_email_send",
        "confirmed_by_id",
        "confirmed_by",
        "updated_by_id",
        "created_by_id",
    ]

    creds = None  # Per-request credentials, set via set_user_define_properties().
    client_arrow: DateTimeLocal = None  # Client-timezone datetime helper.
    valid_record_dict: dict = {"active": True, "deleted": False}
    valid_record_args = lambda class_: [class_.active == True, class_.deleted == False]
    # NOTE(review): shadows SQLAlchemy's declarative ``metadata`` attribute;
    # instances later store per-call status dicts here — confirm intentional.
    metadata: dict = {}

    # Record validity window; a record is "live" while now() is inside it.
    expiry_starts: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), server_default=func.now(), nullable=False
    )
    expiry_ends: Mapped[TIMESTAMP] = mapped_column(
        TIMESTAMP(timezone=True), default="2099-12-31", server_default="2099-12-31"
    )

    @classmethod
    def set_user_define_properties(cls, token):
        """Bind request credentials and the client-timezone helper to the model."""
        cls.creds = token.credentials
        cls.client_arrow = DateTimeLocal(is_client=True, timezone=token.timezone)

    @classmethod
    def remove_non_related_inputs(cls, kwargs):
        """Drop keys that are not columns, hybrid properties or settable relations."""
        return {
            key: value
            for key, value in kwargs.items()
            if key in cls.columns + cls.hybrid_properties + cls.settable_relations
        }

    @classmethod
    def extract_system_fields(cls, filter_kwargs: dict, create: bool = True):
        """Return a copy of ``filter_kwargs`` with the system fields removed.

        :param create: select the create-time or the update-time field list.
        """
        system_fields = filter_kwargs.copy()
        extract_fields = (
            cls.__system__fields__create__ if create else cls.__system__fields__update__
        )
        for field in extract_fields:
            system_fields.pop(field, None)
        return system_fields

    @classmethod
    def iterate_over_variables(cls, val, key):
        """Coerce one column value for serialization.

        :return: ``(include, value)`` — ``include`` is False for primary keys
            and foreign-key columns, which are excluded from output.
        """
        key_ = cls.__annotations__.get(key, None)
        is_primary = key in cls.primary_keys
        row_attr = bool(getattr(getattr(cls, key), "foreign_keys", None))
        if is_primary or row_attr:
            return False, None
        elif val is None:
            return True, None
        elif str(key[-5:]).lower() == "uu_id":
            return True, str(val)
        elif key_:
            # Dispatch on the declared Mapped[...] annotation.
            # (A duplicate, unreachable Mapped[int] branch was removed.)
            if key_ == Mapped[int]:
                return True, int(val)
            elif key_ == Mapped[bool]:
                return True, bool(val)
            elif key_ == Mapped[float] or key_ == Mapped[NUMERIC]:
                return True, round(float(val), 3)
            elif key_ == Mapped[TIMESTAMP]:
                # Timestamps are rendered in the client's timezone.
                return True, str(
                    cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss")
                )
            elif key_ == Mapped[str]:
                return True, str(val)
        else:
            # No annotation: fall back to the runtime type of the value.
            # BUG FIX: the original tested isinstance(type(val), X), which is
            # always False; test the value itself. bool stays before int
            # because bool is a subclass of int.
            if isinstance(val, datetime.datetime):
                return True, str(
                    cls.client_arrow.get(str(val)).format("DD-MM-YYYY HH:mm:ss")
                )
            elif isinstance(val, bool):
                return True, bool(val)
            elif isinstance(val, (float, Decimal)):
                return True, round(float(val), 3)
            elif isinstance(val, int):
                return True, int(val)
            elif isinstance(val, str):
                return True, str(val)
        return False, None

    @classmethod
    def find_or_create(cls, **kwargs):
        """
        Finds a record with the given attributes or creates it if it doesn't exist.
        The outcome is reported on ``record.metadata`` as a dict with keys
        ``created`` / ``error_case`` / ``message``.
        """
        check_kwargs = cls.extract_system_fields(kwargs)
        # Restrict the lookup to records whose validity window covers "now".
        cls.pre_query = cls.query.filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )
        already_record = cls.filter_by_one(system=True, **check_kwargs).data
        cls.pre_query = None
        if already_record:
            if already_record.deleted:
                already_record.metadata = {
                    "created": False,
                    "error_case": "DeletedRecord",
                    "message": "",
                }
                return already_record
            # BUG FIX: the original tested ``already_record.is_confirmed``,
            # flagging *confirmed* records as "IsNotConfirmed". find_or_abort
            # uses the negated test, which is the intended logic.
            elif not already_record.is_confirmed:
                already_record.metadata = {
                    "created": False,
                    "error_case": "IsNotConfirmed",
                    "message": "",
                }
                return already_record
            already_record.metadata = {
                "created": False,
                "error_case": "AlreadyExists",
                "message": "",
            }
            return already_record
        check_kwargs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()
        for key, value in check_kwargs.items():
            setattr(created_record, key, value)
        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            # BUG FIX: audit fields were assigned on ``cls`` (leaking state
            # between requests); assign them on the new record instead.
            created_record.created_by_id = cls.creds.get("person_id", None)
            created_record.created_by = cls.creds.get("person_name", None)
        created_record.flush()
        # BUG FIX: the original set ``already_record.metadata`` here, but
        # already_record is falsy on this path (AttributeError on None).
        created_record.metadata = {"created": True, "error_case": None, "message": ""}
        return created_record

    @classmethod
    def find_or_abort(cls, **kwargs):
        """
        Finds a record with the given attributes or creates it if it doesn't exist.
        Unlike find_or_create, an existing record raises an HTTP 406 error
        describing why the create is rejected.
        """
        check_kwargs = cls.extract_system_fields(kwargs)
        # Restrict the lookup to records whose validity window covers "now".
        cls.pre_query = cls.query.filter(
            cls.expiry_ends > str(system_arrow.now()),
            cls.expiry_starts <= str(system_arrow.now()),
        )
        already_record = cls.filter_by_one(system=True, **check_kwargs).data
        cls.pre_query = None
        if already_record:
            if already_record.deleted:
                cls.raise_http_exception(
                    status_code="HTTP_406_NOT_ACCEPTABLE",
                    error_case="DeletedRecord",
                    data=check_kwargs,
                    message="Record exits but is deleted. Contact with authorized user",
                )
            elif not already_record.is_confirmed:
                cls.raise_http_exception(
                    status_code="HTTP_406_NOT_ACCEPTABLE",
                    error_case="IsNotConfirmed",
                    data=check_kwargs,
                    message="Record exits but is not confirmed. Contact with authorized user",
                )
            cls.raise_http_exception(
                status_code="HTTP_406_NOT_ACCEPTABLE",
                error_case="AlreadyExists",
                data=check_kwargs,
                message="Record already exits. Refresh data and try again",
            )
        check_kwargs = cls.remove_non_related_inputs(check_kwargs)
        created_record = cls()
        for key, value in check_kwargs.items():
            setattr(created_record, key, value)
        if getattr(cls.creds, "person_id", None) and getattr(
            cls.creds, "person_name", None
        ):
            # BUG FIX: assign audit fields on the record, not on ``cls``.
            created_record.created_by_id = cls.creds.get("person_id", None)
            created_record.created_by = cls.creds.get("person_name", None)
        created_record.flush()
        return created_record

    def update(self, **kwargs):
        """Update the record with the given attributes.

        ``is_confirmed`` must be sent alone; combining it with other fields
        raises HTTP 406. Audit columns are filled from the request credentials.
        """
        check_kwargs = self.remove_non_related_inputs(kwargs)
        is_confirmed_argument = kwargs.get("is_confirmed", None)
        if is_confirmed_argument and not len(kwargs) == 1:
            self.raise_http_exception(
                status_code="HTTP_406_NOT_ACCEPTABLE",
                error_case="ConfirmError",
                data=kwargs,
                message="Confirm field can not be updated with other fields",
            )
        check_kwargs = self.extract_system_fields(check_kwargs, create=False)
        for key, value in check_kwargs.items():
            setattr(self, key, value)

        if is_confirmed_argument:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.confirmed_by_id = self.creds.get("person_id", "Unknown")
                self.confirmed_by = self.creds.get("person_name", "Unknown")
        else:
            if getattr(self.creds, "person_id", None) and getattr(
                self.creds, "person_name", None
            ):
                self.updated_by_id = self.creds.get("person_id", "Unknown")
                # BUG FIX: the original stored the person *id* into
                # updated_by; store the person name as elsewhere.
                self.updated_by = self.creds.get("person_name", "Unknown")
        self.flush()
        return self

    def get_dict(
        self, exclude: list = None, include: list = None, include_joins: list = None
    ):
        """Serialize the record to a plain dict.

        Exactly one of three modes applies: explicit ``include`` list,
        explicit ``exclude`` list, or the default column set with system
        fields and ``*id`` columns removed. ``uu_id`` is always emitted.
        """
        return_dict = {}
        if include:
            # NOTE(review): this filter keeps only elements ending in both
            # "id" AND "uu_id" (i.e. only uu_id fields) — the exclude branch
            # below uses just the "id" suffix; confirm which is intended.
            exclude_list = [
                element
                for element in self.__system_default_model__
                if str(element)[-2:] == "id" and str(element)[-5:].lower() == "uu_id"
            ]
            columns_include_list = list(set(include).difference(set(exclude_list)))
            columns_include_list.extend(["uu_id"])
            for key in list(columns_include_list):
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database
        elif exclude:
            # Merge the model's own excluded fields into the caller's list.
            exclude.extend(
                list(set(self.__exclude__fields__ or []).difference(exclude))
            )
            exclude.extend(
                [
                    element
                    for element in self.__system_default_model__
                    if str(element)[-2:] == "id"
                ]
            )
            columns_excluded_list = list(set(self.columns).difference(set(exclude)))
            columns_excluded_list.extend(["uu_id", "active"])

            for key in list(columns_excluded_list):
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database
        else:
            # BUG FIX: the original read ``a or [] + b`` — the list
            # concatenation bound tighter than ``or``, so the system
            # defaults were dropped whenever __exclude__fields__ was truthy.
            exclude_list = (
                self.__exclude__fields__ or []
            ) + self.__system_default_model__
            columns_list = list(set(self.columns).difference(set(exclude_list)))
            columns_list = [
                columns for columns in columns_list if str(columns)[-2:] != "id"
            ]
            columns_list.extend(
                [
                    column
                    for column in self.columns
                    if str(column)[-5:].lower() == "uu_id"
                ]
            )
            for remove_field in self.__system_default_model__:
                if remove_field in columns_list:
                    columns_list.remove(remove_field)
            for key in list(columns_list):
                val = getattr(self, key)
                correct, value_of_database = self.iterate_over_variables(val, key)
                if correct:
                    return_dict[key] = value_of_database
        return return_dict
|
||||||
|
|
||||||
|
|
||||||
|
class BaseMixin(CrudMixin, ReprMixin, SerializeMixin, FilterAttributes):
    """Abstract aggregation of the CRUD, repr, serialize and filter mixins."""

    __abstract__ = True
|
||||||
|
|
||||||
|
|
||||||
|
class BaseCollection(BaseMixin):
    """Minimal abstract table base: integer primary key only (no audit columns)."""

    __abstract__ = True
    __repr__ = ReprMixin.__repr__

    id: Mapped[int] = mapped_column(primary_key=True)
|
||||||
|
|
||||||
|
|
||||||
|
class CrudCollection(BaseMixin, SmartQueryMixin):
    """Abstract table base with the full audit column set: surrogate id,
    UUID, created/updated/confirmed bookkeeping and soft-delete flags.
    """

    __abstract__ = True
    __repr__ = ReprMixin.__repr__

    id: Mapped[int] = mapped_column(primary_key=True)
    # Public identifier; generated by PostgreSQL's gen_random_uuid().
    uu_id: Mapped[str] = mapped_column(
        UUID, server_default=text("gen_random_uuid()"), index=True, unique=True
    )

    # Free-form external reference id.
    ref_id: Mapped[str] = mapped_column(String(100), nullable=True, index=True)
    created_at: Mapped[TIMESTAMP] = mapped_column(
        "created_at",
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
        index=True,
    )

    updated_at: Mapped[TIMESTAMP] = mapped_column(
        "updated_at",
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
        index=True,
    )
    cryp_uu_id: Mapped[str] = mapped_column(String, nullable=True, index=True)

    # Audit trail: who created/updated/confirmed the record.
    created_by: Mapped[str] = mapped_column(String, nullable=True)
    created_by_id: Mapped[int] = mapped_column(Integer, nullable=True)
    updated_by: Mapped[str] = mapped_column(String, nullable=True)
    updated_by_id: Mapped[int] = mapped_column(Integer, nullable=True)

    confirmed_by: Mapped[str] = mapped_column(String, nullable=True)
    confirmed_by_id: Mapped[int] = mapped_column(Integer, nullable=True)
    is_confirmed: Mapped[bool] = mapped_column(Boolean, server_default="0")

    replication_id: Mapped[int] = mapped_column(SmallInteger, server_default="0")
    # Soft-delete / activation flags (see valid_record_dict on CrudMixin).
    deleted: Mapped[bool] = mapped_column(Boolean, server_default="0")
    active: Mapped[bool] = mapped_column(Boolean, server_default="1")
    is_notification_send: Mapped[bool] = mapped_column(Boolean, server_default="0")
    is_email_send: Mapped[bool] = mapped_column(Boolean, server_default="0")

    # all_arguments = [
    #     record
    #     for record in self.__class__.__dict__
    #     if "_" not in record[0] and "id" not in record[-2:]
    # ]
    #
    # for all_argument in all_arguments:
    #     column = getattr(self.__class__, all_argument)
    #     is_populate = isinstance(column, InstrumentedAttribute) and not hasattr(
    #         column, "foreign_keys"
    #     )
    #     if is_populate and all_argument in include_joins or []:
    #         populate_arg = getattr(self, all_argument, None)
    #         if isinstance(populate_arg, list):
    #             return_dict[all_argument] = [
    #                 arg.get_dict() if arg else [] for arg in populate_arg
    #             ]
    #         elif getattr(populate_arg, "get_dict", None):
    #             return_dict[all_argument] = (
    #                 populate_arg.get_dict() if populate_arg else []
    #             )
    # return dict(sorted(return_dict.items(), reverse=False))
|
||||||
|
|
@ -0,0 +1,408 @@
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
String,
|
||||||
|
ForeignKey,
|
||||||
|
Numeric,
|
||||||
|
SmallInteger,
|
||||||
|
Boolean,
|
||||||
|
Integer,
|
||||||
|
Index,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
|
|
||||||
|
class Events(CrudCollection):
    """
    Events class based on declarative_base and BaseMixin via session
    If Events2Occupants and Events2Employees are not found for user request, response 401 Unauthorized
    """

    __tablename__ = "events"
    __exclude__fields__ = []

    event_type: Mapped[str] = mapped_column(
        String, nullable=False, comment="Event Type"
    )
    # Code and class name of the handler this event dispatches to.
    function_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="function code"
    )
    function_class: Mapped[str] = mapped_column(
        String, nullable=False, comment="class name"
    )

    # name: Mapped[str] = mapped_column(String, nullable=True) # form or page title
    description: Mapped[str] = mapped_column(
        String, server_default=""
    )  # form or page description
    property_description: Mapped[str] = mapped_column(String, server_default="")

    # Commercial attributes of the event.
    marketing_layer = mapped_column(SmallInteger, server_default="3")
    cost: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00")
    unit_price: Mapped[float] = mapped_column(Numeric(20, 2), server_default="0.00")

    # Optional link to an endpoint restriction rule.
    endpoint_id: Mapped[int] = mapped_column(
        ForeignKey("endpoint_restriction.id"), nullable=True
    )
    endpoint_uu_id: Mapped[str] = mapped_column(
        String, nullable=True, comment="Endpoint UUID"
    )

    __table_args__ = ({"comment": "Events Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class Modules(CrudCollection):
    """
    Modules class based on declarative_base and BaseMixin via session
    """

    __tablename__ = "modules"
    __exclude__fields__ = []

    module_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Module Name"
    )
    module_description: Mapped[str] = mapped_column(String, server_default="")
    module_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Module Code"
    )
    module_layer = mapped_column(Integer, nullable=False, comment="Module Layer")
    is_default_module = mapped_column(Boolean, server_default="0")

    def retrieve_services(self):
        """Return all Services rows belonging to this module.

        Raises an HTTP 404 (via raise_http_exception) when the module has no
        services.
        """
        services = Services.filter_all(Services.module_id == self.id).data
        if not services:
            self.raise_http_exception(
                status_code="HTTP_404_NOT_FOUND",
                error_case="RECORD_NOT_FOUND",
                message=f"No services found for this module : {str(self.uu_id)}",
                data={
                    "module_uu_id": str(self.uu_id),
                },
            )
        return services

    __table_args__ = ({"comment": "Modules Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class Services(CrudCollection):
    """
    Services class based on declarative_base and BaseMixin via session.

    A service belonging to a ``Modules`` row; ``related_responsibility``
    links a service to an occupant-type code (see
    ``retrieve_service_via_occupant_code``).
    """

    __tablename__ = "services"
    __exclude__fields__ = []

    # Parent module: integer FK plus a denormalized UUID copy.
    module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
    module_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Module UUID"
    )
    service_name: Mapped[str] = mapped_column(
        String, nullable=False, comment="Service Name"
    )
    service_description: Mapped[str] = mapped_column(String, server_default="")
    service_code: Mapped[str] = mapped_column(
        String, nullable=True, comment="Service Code"
    )
    # Occupant-type code this service is responsible for; matched against
    # OccupantTypes.occupant_code below.
    related_responsibility: Mapped[str] = mapped_column(String, server_default="")

    @classmethod
    def retrieve_service_via_occupant_code(cls, occupant_code):
        """Return the service whose related_responsibility matches *occupant_code*.

        Raises HTTP 404 when no OccupantTypes row exists for the code.
        """
        # Local import avoids a circular import at module load time.
        from databases import OccupantTypes

        occupant_type = OccupantTypes.filter_by_one(
            system=True,
            occupant_code=occupant_code,
        ).data
        if not occupant_type:
            cls.raise_http_exception(
                status_code="HTTP_404_NOT_FOUND",
                error_case="RECORD_NOT_FOUND",
                message=f"No occupant type found for this code : {occupant_code}",
                data={
                    "occupant_code": occupant_code,
                },
            )
        return cls.filter_one(
            cls.related_responsibility == occupant_type.occupant_code
        ).data

    __table_args__ = ({"comment": "Services Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class Service2Events(CrudCollection):
    """
    Service2Events association, based on declarative_base and BaseMixin via
    session: many-to-many link between ``services`` and ``events`` rows.
    """

    __tablename__ = "services2events"
    __exclude__fields__ = []

    # Each side is stored twice: integer FK + denormalized UUID string.
    service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
    service_uu_id = mapped_column(String, nullable=False, comment="Service UUID")
    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
    event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")

    __table_args__ = ({"comment": "Service2Events Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
class Event2OccupantExtra(CrudCollection):
    """
    Extra (directly granted) event assignments for an occupant living space,
    on top of the service-derived events resolved in
    ``Event2Occupant.get_event_id_by_build_living_space_id``.
    """

    __tablename__ = "event2occupant_extra"
    __exclude__fields__ = []

    build_living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"), nullable=False
    )
    build_living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Living Space UUID"
    )
    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
    event_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Event UUID"
    )

    __table_args__ = (
        # One row per (living space, event) pair.
        Index(
            "event2occupant_extra_bind_event_to_occupant",
            build_living_space_id,
            event_id,
            unique=True,
        ),
        {"comment": "Occupant2Event Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class Event2EmployeeExtra(CrudCollection):
    """
    Extra (directly granted) event assignments for an employee, on top of the
    service-derived events resolved in
    ``Event2Employee.get_event_id_by_employee_id``.
    """

    __tablename__ = "event2employee_extra"
    __exclude__fields__ = []

    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
    employee_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Employee UUID"
    )

    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
    event_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Event UUID"
    )

    __table_args__ = (
        # One row per (employee, event) pair.
        Index(
            "event2employee_extra_employee_to_event",
            employee_id,
            event_id,
            unique=True,
        ),
        {"comment": "Employee to Event Information"},
    )
|
||||||
|
|
||||||
|
|
||||||
|
class Event2Employee(CrudCollection):
    """
    Employee-to-service assignment, based on declarative_base and BaseMixin
    via session.  Despite the name, a row links an employee to a *service*
    (``event_service_id``); the actual events are resolved through
    ``Service2Events`` in ``get_event_id_by_employee_id``.
    """

    __tablename__ = "event2employee"
    __exclude__fields__ = []

    employee_id: Mapped[int] = mapped_column(ForeignKey("employees.id"), nullable=False)
    employee_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Employee UUID"
    )
    event_service_id: Mapped[int] = mapped_column(
        ForeignKey("services.id"), nullable=False
    )
    event_service_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Event Cluster UUID"
    )

    __table_args__ = (
        # One row per (employee, service) pair.
        Index(
            "event2employee_employee_to_event",
            employee_id,
            event_service_id,
            unique=True,
        ),
        {"comment": "Employee to Event Information"},
    )

    @classmethod
    def get_event_id_by_employee_id(cls, employee_id) -> list:
        """Return all event ids reachable by *employee_id*.

        Union of: events of every service assigned to the employee, plus any
        directly granted rows in ``Event2EmployeeExtra``.  May contain
        duplicates if both paths grant the same event.
        """
        occupant_events = cls.filter_all(
            cls.employee_id == employee_id,
        ).data
        # Map the employee's services to their events (system=True bypasses
        # the active/confirmed/expiry filters).
        active_events = Service2Events.filter_all(
            Service2Events.service_id.in_(
                [event.event_service_id for event in occupant_events]
            ),
            system=True,
        ).data
        active_events_id = [event.event_id for event in active_events]
        if extra_events := Event2EmployeeExtra.filter_all(
            Event2EmployeeExtra.employee_id == employee_id
        ).data:
            active_events_id.extend([event.event_id for event in extra_events])
        return active_events_id
|
||||||
|
|
||||||
|
|
||||||
|
class Event2Occupant(CrudCollection):
    """
    Occupant-to-service assignment, based on declarative_base and BaseMixin
    via session.  A row links a building living space to a *service*
    (``event_service_id``); the actual events are resolved through
    ``Service2Events`` in ``get_event_id_by_build_living_space_id``.
    """

    __tablename__ = "event2occupant"
    __exclude__fields__ = []

    # FIX: was annotated Mapped[str] although the column is an integer foreign
    # key to build_living_space.id -- the identical column in
    # Event2OccupantExtra is (correctly) Mapped[int].  Under SQLAlchemy 2.0
    # annotation-driven mapping the str annotation contradicts the FK type.
    build_living_space_id: Mapped[int] = mapped_column(
        ForeignKey("build_living_space.id"), nullable=False
    )
    build_living_space_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Build Living Space UUID"
    )
    event_service_id: Mapped[int] = mapped_column(
        ForeignKey("services.id"), nullable=False
    )
    event_service_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Event Cluster UUID"
    )
    # event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
    # event_uu_id = mapped_column(String, nullable=False, comment="Event UUID")

    __table_args__ = (
        # One row per (living space, service) pair.
        Index(
            "event2occupant_bind_event_to_occupant",
            build_living_space_id,
            event_service_id,
            unique=True,
        ),
        {"comment": "Occupant2Event Information"},
    )

    @classmethod
    def get_event_id_by_build_living_space_id(cls, build_living_space_id) -> list:
        """Return all event ids reachable by *build_living_space_id*.

        Union of: events of every service assigned to the living space, plus
        any directly granted rows in ``Event2OccupantExtra``.  May contain
        duplicates if both paths grant the same event.
        """
        occupant_events = cls.filter_all(
            cls.build_living_space_id == build_living_space_id,
        ).data
        # Map the assigned services to their events (system=True bypasses the
        # active/confirmed/expiry filters).
        active_events = Service2Events.filter_all(
            Service2Events.service_id.in_(
                [event.event_service_id for event in occupant_events]
            ),
            system=True,
        ).data
        active_events_id = [event.event_id for event in active_events]
        if extra_events := Event2OccupantExtra.filter_all(
            Event2OccupantExtra.build_living_space_id == build_living_space_id
        ).data:
            active_events_id.extend([event.event_id for event in extra_events])
        return active_events_id
|
||||||
|
|
||||||
|
|
||||||
|
class ModulePrice(CrudCollection):
    """
    ModulePrice class based on declarative_base and BaseMixin via session.

    One price entry per (campaign, module, service, event) combination,
    holding the discount percentage and the resulting prices.
    """

    __tablename__ = "module_price"
    __exclude__fields__ = []

    campaign_code: Mapped[str] = mapped_column(
        String, nullable=False, comment="Campaign Code"
    )
    # Targets of the price: integer FK plus denormalized UUID for each level.
    module_id: Mapped[int] = mapped_column(ForeignKey("modules.id"), nullable=False)
    module_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Module UUID"
    )
    service_id: Mapped[int] = mapped_column(ForeignKey("services.id"), nullable=False)
    service_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Service UUID"
    )
    event_id: Mapped[int] = mapped_column(ForeignKey("events.id"), nullable=False)
    event_uu_id: Mapped[str] = mapped_column(
        String, nullable=False, comment="Event UUID"
    )
    is_counted_percentage: Mapped[float] = mapped_column(
        Numeric(6, 2), server_default="0.00"
    )  # discount percentage, e.g. 22 for %22
    discounted_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )  # e.g. list price: 78.00 TL
    calculated_price: Mapped[float] = mapped_column(
        Numeric(20, 2), server_default="0.00"
    )  # e.g. the flat price actually charged: 75.00 TL
    # NOTE(review): per the commented-out legacy models below,
    # discounted_price - calculated_price is booked as a marketing expense.

    __table_args__ = ({"comment": "ModulePrice Information"},)
|
||||||
|
|
||||||
|
|
||||||
|
# class Modules2Occupant(CrudCollection):
|
||||||
|
# """
|
||||||
|
# ModulesOccupantPrices class based on declarative_base and BaseMixin via session
|
||||||
|
# discounted_price - calculated_price = Pazarlamaya gider yazılır 3 TL
|
||||||
|
# """
|
||||||
|
#
|
||||||
|
# __tablename__ = "modules2_occupant"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# discounted_percentage: Mapped[float] = mapped_column(Numeric(6, 2), server_default="0.00") # %22
|
||||||
|
# discounted_price = mapped_column(
|
||||||
|
# Numeric(20, 2), server_default="0.00"
|
||||||
|
# ) # Normal: 78.00 TL
|
||||||
|
# calculated_price = mapped_column(
|
||||||
|
# Numeric(20, 2), server_default="0.00"
|
||||||
|
# ) # sana düz 75.00 TL yapar
|
||||||
|
#
|
||||||
|
# service_id = mapped_column(ForeignKey("services.id"), nullable=False)
|
||||||
|
# build_living_space_id = mapped_column(
|
||||||
|
# ForeignKey("build_living_space.id"), nullable=False, index=True
|
||||||
|
# )
|
||||||
|
#
|
||||||
|
# __table_args__ = ({"comment": "ModulesOccupantPrices Information"},)
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# class Modules2Employee(CrudCollection):
|
||||||
|
# """
|
||||||
|
# Modules2EmployeeServices class based on declarative_base and BaseMixin via session
|
||||||
|
# """
|
||||||
|
#
|
||||||
|
# __tablename__ = "modules2_employee"
|
||||||
|
#
|
||||||
|
# discounted_percentage: Mapped[float] = mapped_column(Numeric(6, 2), server_default="0.00") # %22
|
||||||
|
# discounted_price = mapped_column(
|
||||||
|
# Numeric(20, 2), server_default="0.00"
|
||||||
|
# ) # Normal: 78.00 TL
|
||||||
|
# calculated_price = mapped_column(
|
||||||
|
# Numeric(20, 2), server_default="0.00"
|
||||||
|
# ) # sana düz 75.00 TL yapar
|
||||||
|
#
|
||||||
|
# service_id = mapped_column(ForeignKey("services.id"), nullable=False)
|
||||||
|
# employee_id = mapped_column(ForeignKey("employees.id"), nullable=False)
|
||||||
|
#
|
||||||
|
# __table_args__ = ({"comment": "Modules2EmployeeServices Information"},)
|
||||||
|
# class Actions(CrudCollection):
|
||||||
|
# """
|
||||||
|
# Actions class based on declarative_base and BaseMixin via session
|
||||||
|
# """
|
||||||
|
#
|
||||||
|
# __tablename__ = "actions"
|
||||||
|
# __exclude__fields__ = []
|
||||||
|
#
|
||||||
|
# action_table = mapped_column(String, nullable=False, comment="Action Table")
|
||||||
|
# action_type = mapped_column(String, nullable=False, comment="Action Type")
|
||||||
|
# action_description = mapped_column(String, server_default="")
|
||||||
|
# action_code = mapped_column(String, nullable=False, comment="Action Code")
|
||||||
|
# endpoint_id = mapped_column(ForeignKey("endpoint_restriction.id"), nullable=True)
|
||||||
|
# endpoint_uu_id = mapped_column(String, nullable=True, comment="Endpoint UUID")
|
||||||
|
#
|
||||||
|
# @property
|
||||||
|
# def action_name(self):
|
||||||
|
# return f"{self.action_table} {self.action_type}"
|
||||||
|
#
|
||||||
|
# @property
|
||||||
|
# def total_cost(self):
|
||||||
|
# return self.cost * self.unit_price
|
||||||
|
#
|
||||||
|
# __table_args__ = ({"comment": "Actions Information"},)
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,105 @@
|
||||||
|
from fastapi.exceptions import HTTPException
|
||||||
|
|
||||||
|
from sqlalchemy import (
|
||||||
|
UUID,
|
||||||
|
String,
|
||||||
|
text,
|
||||||
|
)
|
||||||
|
from sqlalchemy.orm import (
|
||||||
|
Mapped,
|
||||||
|
mapped_column,
|
||||||
|
)
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
|
||||||
|
|
||||||
|
class ApiEnumDropdown(CrudCollection):
    """Database-backed enum registry.

    Each row is one (enum_class, key, value) entry; rows grouped by
    ``enum_class`` form one logical enum that the API exposes as a dropdown.
    """

    __tablename__ = "api_enum_dropdown"
    __exclude__fields__ = ["enum_class"]

    id: Mapped[int] = mapped_column(primary_key=True)
    # UUID generated server-side by PostgreSQL's gen_random_uuid().
    uu_id: Mapped[str] = mapped_column(
        UUID, server_default=text("gen_random_uuid()"), index=True, unique=True
    )
    enum_class: Mapped[str] = mapped_column(
        String, nullable=False, comment="Enum Constant Name"
    )
    key: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Key")
    value: Mapped[str] = mapped_column(String, nullable=False, comment="Enum Value")
    description: Mapped[str] = mapped_column(String, nullable=True)

    __table_args__ = ({"comment": "Enum objets that are linked to tables"},)

    @classmethod
    def get_by_uuid(cls, uuid: str):
        """Return the enum row matching *uuid* (system lookup), or None."""
        return cls.filter_by_one(system=True, uu_id=str(uuid)).data

    @classmethod
    def get_debit_search(cls, search_debit: str = None, search_uu_id: str = None):
        """Look up "DebitTypes" entries.

        Priority: by uu_id, then by key; with no match (or no arguments)
        falls through to returning ALL DebitTypes rows.
        """
        if search_uu_id:
            if search := cls.filter_one(
                cls.enum_class.in_(["DebitTypes"]),
                cls.uu_id == search_uu_id,
                system=True,
            ).data:
                return search
        elif search_debit:
            if search := cls.filter_one(
                cls.enum_class.in_(["DebitTypes"]), cls.key == search_debit, system=True
            ).data:
                return search
        return cls.filter_all(cls.enum_class.in_(["DebitTypes"]), system=True).data

    @classmethod
    def get_due_types(cls):
        """Return the uu_ids of the "BDT-A" / "BDT-D" BuildDuesTypes entries.

        Raises HTTPException(404) when neither key exists.
        """
        if due_list := cls.filter_all(
            cls.enum_class == "BuildDuesTypes",
            cls.key.in_(["BDT-A", "BDT-D"]),
            system=True,
        ).data:
            return [due.uu_id.__str__() for due in due_list]
        raise HTTPException(
            status_code=404,
            detail="No dues types found",
        )

    @classmethod
    def due_type_search(cls, search_management: str = None, search_uu_id: str = None):
        """Look up "BuildDuesTypes" entries.

        Same search priority as get_debit_search: uu_id first, then key,
        otherwise all BuildDuesTypes rows.
        """
        if search_uu_id:
            if search := cls.filter_one(
                cls.enum_class.in_(["BuildDuesTypes"]),
                cls.uu_id == search_uu_id,
                system=True,
            ).data:
                return search
        elif search_management:
            if search := cls.filter_one(
                cls.enum_class.in_(["BuildDuesTypes"]),
                cls.key == search_management,
                system=True,
            ).data:
                return search
        return cls.filter_all(cls.enum_class.in_(["BuildDuesTypes"]), system=True).data

    def get_enum_dict(self):
        """Serialize this row to a plain JSON-friendly dict."""
        return {
            "uu_id": str(self.uu_id),
            "enum_class": self.enum_class,
            "key": self.key,
            "value": self.value,
            "description": self.description,
        }

    @classmethod
    def uuid_of_enum(cls, enum_class: str, key: str):
        """Return the uu_id (as str) of the (enum_class, key) entry.

        Returns the string "None" when no row matches, because the missing
        uu_id (None) is still passed through str().
        """
        return str(
            getattr(
                cls.filter_one(
                    cls.enum_class == enum_class, cls.key == key, system=True
                ).data,
                "uu_id",
                None,
            )
        )
|
||||||
|
|
||||||
|
|
||||||
|
# Bind the shared session to the model at import time so the classmethod
# query helpers work without an explicit session argument.
ApiEnumDropdown.set_session(ApiEnumDropdown.__session__)
|
||||||
|
|
@ -0,0 +1,21 @@
|
||||||
|
from api_configs import WagDatabase
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker

# NOTE: declarative_base is imported from sqlalchemy.orm -- the
# sqlalchemy.ext.declarative location is deprecated since SQLAlchemy 1.4.

# Engine configuration: pooled connections; pool_pre_ping transparently
# replaces connections that have gone stale.
engine_config = {
    "url": WagDatabase.DATABASE_URL,
    "pool_size": 20,
    "max_overflow": 10,
    "echo": False,
    "isolation_level": "READ COMMITTED",
    "pool_pre_ping": True,
}

engine = create_engine(**engine_config)
# FIX: `echo` is a create_engine() option, not a Session option -- passing
# echo=True through sessionmaker made every SessionLocal() call raise
# TypeError.  `autocommit=False` is dropped too: it is the default behavior
# and the parameter was removed from Session in SQLAlchemy 2.0.
SessionLocal = sessionmaker(bind=engine, autoflush=False)
session = scoped_session(sessionmaker(bind=engine))

Base = declarative_base()
# Expose the scoped session on the declarative base so model mixins can
# reach it as Base.session (see FilterAttributes.__session__).
Base.session = session
|
||||||
|
|
@ -0,0 +1,43 @@
|
||||||
|
class AlchemyResponse:
    """
    Lightweight lazy wrapper around a SQLAlchemy query.

    Usage:
        resp = Model.filter_all(...)   # -> AlchemyResponse
        resp.data     -> list of records (single record / None when first=True)
        resp.count    -> number of records the query matches
        resp.get(1)   -> first record (1-based index), None when out of range
        resp.query    -> the wrapped query object
    """

    def __init__(self, query, first: bool = False):
        # first=True: .data resolves via query.first() (one record or None);
        # otherwise via query.all() (a list).
        self.first = first
        self.__query = query

    def get(self, index: int):
        """Return the *index*-th record (1-based), or None when out of range.

        FIX: indexes below 1 are now rejected -- previously get(0) slipped
        past the upper-bound check and returned the LAST element (data[-1]).
        """
        if index < 1:
            return None
        count = self.count
        if count and index <= count:
            return self.data[index - 1]
        return None

    @property
    def data(self):
        """Execute the query; on failure clean up the session and return empty.

        Broad Exception catch is deliberate best-effort recovery: detach
        pending objects and roll back so the shared session stays usable.
        """
        if self.first:
            try:
                return self.__query.first()
            except Exception:
                self.__query.session.expunge_all()
                self.__query.session.rollback()
                return None
        try:
            return self.__query.all()
        except Exception:
            self.__query.session.expunge_all()
            self.__query.session.rollback()
            return []

    @property
    def count(self):
        """Number of rows matched (delegates to query.count(); issues a query)."""
        return self.__query.count()

    @property
    def query(self):
        """The wrapped query object (read-only access)."""
        return self.__query
|
||||||
|
|
@ -0,0 +1,28 @@
|
||||||
|
from sqlalchemy import String, Boolean
|
||||||
|
from databases.sql_models.core_mixin import CrudCollection
|
||||||
|
from sqlalchemy.orm import mapped_column, Mapped
|
||||||
|
|
||||||
|
|
||||||
|
class EndpointRestriction(CrudCollection):
    """
    Registry of API endpoints used for access restriction.

    All columns default to "" server-side; endpoint_code is the unique
    handle other tables reference (e.g. Events.endpoint_uu_id / endpoint_id).
    """

    __tablename__ = "endpoint_restriction"
    __exclude__fields__ = []

    endpoint_function: Mapped[str] = mapped_column(
        String, server_default="", comment="Function name of the API endpoint"
    )
    endpoint_name: Mapped[str] = mapped_column(
        String, server_default="", comment="Name of the API endpoint"
    )
    endpoint_method: Mapped[str] = mapped_column(
        String, server_default="", comment="HTTP method used by the endpoint"
    )
    endpoint_desc: Mapped[str] = mapped_column(
        String, server_default="", comment="Description of the endpoint"
    )
    endpoint_code: Mapped[str] = mapped_column(
        String, server_default="", unique=True, comment="Unique code for the endpoint"
    )
|
||||||
|
|
@ -0,0 +1,239 @@
|
||||||
|
from json import dumps
|
||||||
|
|
||||||
|
from sqlalchemy import BinaryExpression
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
|
||||||
|
from databases.sql_models.response_model import AlchemyResponse
|
||||||
|
from databases.sql_models.postgres_database import Base
|
||||||
|
|
||||||
|
|
||||||
|
class FilterAttributes:
    """
    Query/filter mixin used by every model (via CrudCollection).

    Usage:
        alchemy_objects = Model.filter_by_all(name="Something").data  # [<AlchemyObject>, ...]
        alchemy_object = Model.filter_by_one(name="Something").data   # <AlchemyObject>
        alchemy_objects = Model.filter_all(Model.name == "Something").data
        alchemy_object = Model.filter_one(Model.name == "Something").data

    Unless system=True is passed, the filter_* helpers implicitly restrict
    results to active, confirmed, non-deleted, non-expired rows.
    """

    __abstract__ = True
    __session__ = Base.session  # The session to use in the model.

    pre_query = None  # The query to use before the filtering such as: query = cls.query.filter_by(active=True)
    total_count = None  # Count of the last filtered query (set as a CLASS attribute -- shared, not thread-safe).
    filter_attr = None  # Pagination/ordering attributes; consumed (reset to None) by the filter helpers.
    http_exception = None  # Exception class used by raise_http_exception (injected elsewhere).
    status = None  # Status-code container used by raise_http_exception (injected elsewhere).

    def flush(self):
        """Flush the current session."""
        try:
            self.__session__.add(self)
            self.__session__.flush()
            return self
        except SQLAlchemyError as e:
            # Surface DB errors as an HTTP 400; first line of the driver
            # message is used as the client-facing message.
            self.raise_http_exception(
                status_code="HTTP_400_BAD_REQUEST",
                error_case=e.__class__.__name__,
                data={},
                message=str(e.__context__).split("\n")[0],
            )

    def destroy(self):
        """Delete the record from the database (hard delete + commit)."""
        self.__session__.delete(self)
        self.__session__.commit()

    @classmethod
    def save(cls):
        """Saves the updated model to the current entity db (commits the session)."""
        try:
            cls.__session__.commit()
        except SQLAlchemyError as e:
            cls.raise_http_exception(
                status_code="HTTP_400_BAD_REQUEST",
                error_case=e.__class__.__name__,
                data={},
                message=str(e.__context__).split("\n")[0],
            )

    def save_and_confirm(self):
        """Commit, mark the record confirmed, then commit again."""
        try:
            self.save()
            # `update` is provided elsewhere (mixin); sets is_confirmed=True.
            self.update(is_confirmed=True)
            self.save()
        except SQLAlchemyError as e:
            self.raise_http_exception(
                status_code="HTTP_400_BAD_REQUEST",
                error_case=e.__class__.__name__,
                data={},
                message=str(e.__context__).split("\n")[0],
            )

    @classmethod
    def _query(cls):
        """Returns the query to use in the model (pre_query wins when set)."""
        return cls.pre_query if cls.pre_query else cls.query

    @classmethod
    def add_query_to_filter(cls, filter_query, filter_list):
        """Apply ordering + pagination from *filter_list* to *filter_query*.

        order_type starting with "d" means descending; page is 1-based.
        """
        return (
            filter_query.order_by(
                getattr(cls, filter_list.get("order_field")).desc()
                if str(filter_list.get("order_type"))[0] == "d"
                else getattr(cls, filter_list.get("order_field")).asc()
            )
            .limit(filter_list.get("size"))
            .offset(int((filter_list.get("page")) - 1) * int(filter_list.get("size")))
            .populate_existing()
        )

    @classmethod
    def get_filter_attributes(cls):
        """
        Returns the filter to use pagination and ordering.
        page is the current page number.
        size is the number of records per page.
        order_field is the field to order by.
        order_type is the order type (asc or desc).
        include_joins returns the joined tables when related field names are given as a list.
        """
        return {
            "page": getattr(cls.filter_attr, "page", 1),
            "size": getattr(cls.filter_attr, "size", 10),
            "order_field": getattr(cls.filter_attr, "order_field", "id"),
            "order_type": getattr(cls.filter_attr, "order_type", "asc"),
            "include_joins": getattr(cls.filter_attr, "include_joins", []),
            "query": getattr(cls.filter_attr, "query", {}),
        }

    @classmethod
    def add_new_arg_to_args(cls, args_list, argument, value):
        """Append *value* to the filter args unless a BinaryExpression on
        column *argument* is already present (caller's filter wins)."""
        new_arg_list = list(
            set(
                args_
                for args_ in list(args_list)
                if isinstance(args_, BinaryExpression)
            )
        )
        # Left side of a BinaryExpression is the column; .key is its name.
        arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
        # arg_right = lambda arg_obj: getattr(getattr(arg_obj, "right", None), "value", None)
        if not any(True for arg in new_arg_list if arg_left(arg_obj=arg) == argument):
            new_arg_list.append(value)
        return tuple(new_arg_list)

    @classmethod
    def get_not_expired_query_arg(cls, arg):
        """Add expiry_starts and expiry_ends to the query."""
        from api_library.date_time_actions.date_functions import system_arrow

        arg = cls.add_new_arg_to_args(
            arg, "expiry_ends", cls.expiry_ends > str(system_arrow.now())
        )
        arg = cls.add_new_arg_to_args(
            arg, "expiry_starts", cls.expiry_starts <= str(system_arrow.now())
        )
        return arg

    @classmethod
    def get_active_and_confirmed_query_arg(cls, arg):
        """Add active and confirmed to the query."""
        arg = cls.add_new_arg_to_args(arg, "is_confirmed", cls.is_confirmed == True)
        arg = cls.add_new_arg_to_args(arg, "active", cls.active == True)
        arg = cls.add_new_arg_to_args(arg, "deleted", cls.deleted == False)
        return arg

    @classmethod
    def select_only(
        cls, *args, select_args: list, order_by=None, limit=None, system=False
    ):
        """Filtered query returning only the columns in *select_args*."""
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)
        query = cls._query().filter(*args).with_entities(*select_args)
        cls.total_count = query.count()
        if order_by is not None:
            query = query.order_by(order_by)
        if limit:
            query = query.limit(limit)
        return AlchemyResponse(query=query, first=False)

    @classmethod
    def filter_by_all(cls, system=False, **kwargs):
        """
        Filters all the records by keyword equality (filter_by semantics).
        Unless system=True, is_confirmed=True is forced into the criteria.
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        # Drop a stray "system" key so it never reaches filter_by().
        kwargs.pop("system", None)
        query = cls._query().filter_by(**kwargs)
        cls.total_count = query.count()
        if cls.filter_attr:
            filter_list = cls.get_filter_attributes()
            data_query = cls.add_query_to_filter(query, filter_list)
            cls.filter_attr = None
            return AlchemyResponse(query=data_query, first=False)
        return AlchemyResponse(query=query, first=False)

    @classmethod
    def filter_by_one(cls, system=False, **kwargs):
        """
        Filters one record by keyword equality (filter_by semantics).
        Unless system=True, is_confirmed=True is forced into the criteria.
        """
        if "is_confirmed" not in kwargs and not system:
            kwargs["is_confirmed"] = True
        kwargs.pop("system", None)
        query = cls._query().filter_by(**kwargs)
        cls.total_count = 1
        return AlchemyResponse(query=query, first=True)

    @classmethod
    def filter_all(cls, *args, system=False):
        """
        Filters all the records with expression criteria (filter semantics);
        merges any smart-filter expressions from filter_attr.query, then
        applies pagination/ordering when filter_attr is set.
        """
        arg_left = lambda arg_obj: getattr(getattr(arg_obj, "left", None), "key", None)
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)
        filter_list = cls.get_filter_attributes()
        if filter_list.get("query", None):
            # filter_expr comes from the sqlalchemy-mixins smart-query API.
            for smart_iter in cls.filter_expr(**filter_list.get("query", {})):
                if key := arg_left(smart_iter):
                    args = cls.add_new_arg_to_args(args, key, smart_iter)
        query = cls._query().filter(*args)
        cls.total_count = query.count()
        if cls.filter_attr:
            data_query = cls.add_query_to_filter(query, filter_list)
            cls.filter_attr = None
            return AlchemyResponse(query=data_query, first=False)
        cls.filter_attr = None
        return AlchemyResponse(query=query, first=False)

    @classmethod
    def filter_one(cls, *args, system=False, expired: bool = False):
        """
        Filters one record with expression criteria.
        NOTE(review): the `expired` parameter is currently unused.
        """
        if not system:
            args = cls.get_active_and_confirmed_query_arg(args)
            args = cls.get_not_expired_query_arg(args)
        query = cls._query().filter(*args)
        cls.total_count = 1
        return AlchemyResponse(query=query, first=True)

    @classmethod
    def raise_http_exception(cls, status_code, error_case, data, message):
        """Roll back the session and raise the configured HTTP exception with
        a JSON body of {data, error, message}.  *status_code* is a name looked
        up on cls.status (defaults to HTTP_404_NOT_FOUND when unknown)."""
        cls.__session__.rollback()
        raise cls.http_exception(
            status_code=getattr(cls.status, status_code, "HTTP_404_NOT_FOUND"),
            detail=dumps(
                {
                    "data": data,
                    "error": error_case,
                    "message": message,
                }
            ),
        )
|
||||||
|
|
@ -0,0 +1,25 @@
|
||||||
|
[project]
|
||||||
|
name = "wag-managment-api-service-version-3"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Wag Python API Service"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"alembic>=1.14.0",
|
||||||
|
"arrow>=1.3.0",
|
||||||
|
"cryptography>=43.0.3",
|
||||||
|
"faker>=30.8.2",
|
||||||
|
"fastapi>=0.115.4",
|
||||||
|
"pandas>=2.2.3",
|
||||||
|
"prometheus-fastapi-instrumentator>=7.0.0",
|
||||||
|
"psycopg2-binary>=2.9.10",
|
||||||
|
"pymongo>=4.10.1",
|
||||||
|
"redis>=5.2.0",
|
||||||
|
"redmail>=0.6.0",
|
||||||
|
"requests>=2.32.3",
|
||||||
|
"rsa>=4.9",
|
||||||
|
"sqlalchemy-mixins>=2.0.5",
|
||||||
|
"textdistance>=4.6.3",
|
||||||
|
"unidecode>=1.3.8",
|
||||||
|
"uvicorn>=0.32.0",
|
||||||
|
]
|
||||||
Loading…
Reference in New Issue